1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "arch/arm-get-next-pcs.h"
51 #include "gdb/sim-arm.h"
54 #include "coff/internal.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
241 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self,
245 /* get_next_pcs operations. */
246 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
247 arm_get_next_pcs_read_memory_unsigned_integer,
248 arm_get_next_pcs_syscall_next_pc,
249 arm_get_next_pcs_addr_bits_remove,
250 arm_get_next_pcs_is_thumb,
254 struct arm_prologue_cache
256 /* The stack pointer at the time this frame was created; i.e. the
257 caller's stack pointer when this function was called. It is used
258 to identify this frame. */
261 /* The frame base for this frame is just prev_sp - frame size.
262 FRAMESIZE is the distance from the frame pointer to the
263 initial stack pointer. */
267 /* The register used to hold the frame pointer for this frame. */
270 /* Saved register offsets. */
271 struct trad_frame_saved_reg *saved_regs;
274 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
275 CORE_ADDR prologue_start,
276 CORE_ADDR prologue_end,
277 struct arm_prologue_cache *cache);
279 /* Architecture version for displaced stepping. This effects the behaviour of
280 certain instructions, and really should not be hard-wired. */
282 #define DISPLACED_STEPPING_ARCH_VERSION 5
284 /* Set to true if the 32-bit mode is in use. */
288 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
291 arm_psr_thumb_bit (struct gdbarch *gdbarch)
293 if (gdbarch_tdep (gdbarch)->is_m)
299 /* Determine if the processor is currently executing in Thumb mode. */
302 arm_is_thumb (struct regcache *regcache)
305 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
307 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
309 return (cpsr & t_bit) != 0;
312 /* Determine if FRAME is executing in Thumb mode. */
315 arm_frame_is_thumb (struct frame_info *frame)
318 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
320 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
321 directly (from a signal frame or dummy frame) or by interpreting
322 the saved LR (from a prologue or DWARF frame). So consult it and
323 trust the unwinders. */
324 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
326 return (cpsr & t_bit) != 0;
329 /* Callback for VEC_lower_bound. */
332 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
333 const struct arm_mapping_symbol *rhs)
335 return lhs->value < rhs->value;
338 /* Search for the mapping symbol covering MEMADDR. If one is found,
339 return its type. Otherwise, return 0. If START is non-NULL,
340 set *START to the location of the mapping symbol. */
343 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
345 struct obj_section *sec;
347 /* If there are mapping symbols, consult them. */
348 sec = find_pc_section (memaddr);
351 struct arm_per_objfile *data;
352 VEC(arm_mapping_symbol_s) *map;
353 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
357 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
358 arm_objfile_data_key);
361 map = data->section_maps[sec->the_bfd_section->index];
362 if (!VEC_empty (arm_mapping_symbol_s, map))
364 struct arm_mapping_symbol *map_sym;
366 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
367 arm_compare_mapping_symbols);
369 /* VEC_lower_bound finds the earliest ordered insertion
370 point. If the following symbol starts at this exact
371 address, we use that; otherwise, the preceding
372 mapping symbol covers this address. */
373 if (idx < VEC_length (arm_mapping_symbol_s, map))
375 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
376 if (map_sym->value == map_key.value)
379 *start = map_sym->value + obj_section_addr (sec);
380 return map_sym->type;
386 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
388 *start = map_sym->value + obj_section_addr (sec);
389 return map_sym->type;
398 /* Determine if the program counter specified in MEMADDR is in a Thumb
399 function. This function should be called for addresses unrelated to
400 any executing frame; otherwise, prefer arm_frame_is_thumb. */
403 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
405 struct bound_minimal_symbol sym;
407 struct displaced_step_closure* dsc
408 = get_displaced_step_closure_by_addr(memaddr);
410 /* If checking the mode of displaced instruction in copy area, the mode
411 should be determined by instruction on the original address. */
415 fprintf_unfiltered (gdb_stdlog,
416 "displaced: check mode of %.8lx instead of %.8lx\n",
417 (unsigned long) dsc->insn_addr,
418 (unsigned long) memaddr);
419 memaddr = dsc->insn_addr;
422 /* If bit 0 of the address is set, assume this is a Thumb address. */
423 if (IS_THUMB_ADDR (memaddr))
426 /* Respect internal mode override if active. */
427 if (arm_override_mode != -1)
428 return arm_override_mode;
430 /* If the user wants to override the symbol table, let him. */
431 if (strcmp (arm_force_mode_string, "arm") == 0)
433 if (strcmp (arm_force_mode_string, "thumb") == 0)
436 /* ARM v6-M and v7-M are always in Thumb mode. */
437 if (gdbarch_tdep (gdbarch)->is_m)
440 /* If there are mapping symbols, consult them. */
441 type = arm_find_mapping_symbol (memaddr, NULL);
445 /* Thumb functions have a "special" bit set in minimal symbols. */
446 sym = lookup_minimal_symbol_by_pc (memaddr);
448 return (MSYMBOL_IS_SPECIAL (sym.minsym));
450 /* If the user wants to override the fallback mode, let them. */
451 if (strcmp (arm_fallback_mode_string, "arm") == 0)
453 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
456 /* If we couldn't find any symbol, but we're talking to a running
457 target, then trust the current value of $cpsr. This lets
458 "display/i $pc" always show the correct mode (though if there is
459 a symbol table we will not reach here, so it still may not be
460 displayed in the mode it will be executed). */
461 if (target_has_registers)
462 return arm_frame_is_thumb (get_current_frame ());
464 /* Otherwise we're out of luck; we assume ARM. */
468 /* Remove useless bits from addresses in a running program. */
470 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
472 /* On M-profile devices, do not strip the low bit from EXC_RETURN
473 (the magic exception return address). */
474 if (gdbarch_tdep (gdbarch)->is_m
475 && (val & 0xfffffff0) == 0xfffffff0)
479 return UNMAKE_THUMB_ADDR (val);
481 return (val & 0x03fffffc);
484 /* Return 1 if PC is the start of a compiler helper function which
485 can be safely ignored during prologue skipping. IS_THUMB is true
486 if the function is known to be a Thumb function due to the way it
489 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
491 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
492 struct bound_minimal_symbol msym;
494 msym = lookup_minimal_symbol_by_pc (pc);
495 if (msym.minsym != NULL
496 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
497 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
499 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
501 /* The GNU linker's Thumb call stub to foo is named
503 if (strstr (name, "_from_thumb") != NULL)
506 /* On soft-float targets, __truncdfsf2 is called to convert promoted
507 arguments to their argument types in non-prototyped
509 if (startswith (name, "__truncdfsf2"))
511 if (startswith (name, "__aeabi_d2f"))
514 /* Internal functions related to thread-local storage. */
515 if (startswith (name, "__tls_get_addr"))
517 if (startswith (name, "__aeabi_read_tp"))
522 /* If we run against a stripped glibc, we may be unable to identify
523 special functions by name. Check for one important case,
524 __aeabi_read_tp, by comparing the *code* against the default
525 implementation (this is hand-written ARM assembler in glibc). */
528 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
529 == 0xe3e00a0f /* mov r0, #0xffff0fff */
530 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
531 == 0xe240f01f) /* sub pc, r0, #31 */
538 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
539 the first 16-bit of instruction, and INSN2 is the second 16-bit of
541 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
542 ((bits ((insn1), 0, 3) << 12) \
543 | (bits ((insn1), 10, 10) << 11) \
544 | (bits ((insn2), 12, 14) << 8) \
545 | bits ((insn2), 0, 7))
547 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
548 the 32-bit instruction. */
549 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
550 ((bits ((insn), 16, 19) << 12) \
551 | bits ((insn), 0, 11))
553 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
556 thumb_expand_immediate (unsigned int imm)
558 unsigned int count = imm >> 7;
566 return (imm & 0xff) | ((imm & 0xff) << 16);
568 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
570 return (imm & 0xff) | ((imm & 0xff) << 8)
571 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
574 return (0x80 | (imm & 0x7f)) << (32 - count);
577 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
578 epilogue, 0 otherwise. */
581 thumb_instruction_restores_sp (unsigned short insn)
583 return (insn == 0x46bd /* mov sp, r7 */
584 || (insn & 0xff80) == 0xb000 /* add sp, imm */
585 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
588 /* Analyze a Thumb prologue, looking for a recognizable stack frame
589 and frame pointer. Scan until we encounter a store that could
590 clobber the stack frame unexpectedly, or an unknown instruction.
591 Return the last address which is definitely safe to skip for an
592 initial breakpoint. */
595 thumb_analyze_prologue (struct gdbarch *gdbarch,
596 CORE_ADDR start, CORE_ADDR limit,
597 struct arm_prologue_cache *cache)
599 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
600 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
603 struct pv_area *stack;
604 struct cleanup *back_to;
606 CORE_ADDR unrecognized_pc = 0;
608 for (i = 0; i < 16; i++)
609 regs[i] = pv_register (i, 0);
610 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
611 back_to = make_cleanup_free_pv_area (stack);
613 while (start < limit)
617 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
619 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
624 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
627 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
628 whether to save LR (R14). */
629 mask = (insn & 0xff) | ((insn & 0x100) << 6);
631 /* Calculate offsets of saved R0-R7 and LR. */
632 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
633 if (mask & (1 << regno))
635 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
637 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
640 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
642 offset = (insn & 0x7f) << 2; /* get scaled offset */
643 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
646 else if (thumb_instruction_restores_sp (insn))
648 /* Don't scan past the epilogue. */
651 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
652 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
654 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
655 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
656 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
658 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
659 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
660 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
662 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
663 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
664 && pv_is_constant (regs[bits (insn, 3, 5)]))
665 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
666 regs[bits (insn, 6, 8)]);
667 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
668 && pv_is_constant (regs[bits (insn, 3, 6)]))
670 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
671 int rm = bits (insn, 3, 6);
672 regs[rd] = pv_add (regs[rd], regs[rm]);
674 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
676 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
677 int src_reg = (insn & 0x78) >> 3;
678 regs[dst_reg] = regs[src_reg];
680 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
682 /* Handle stores to the stack. Normally pushes are used,
683 but with GCC -mtpcs-frame, there may be other stores
684 in the prologue to create the frame. */
685 int regno = (insn >> 8) & 0x7;
688 offset = (insn & 0xff) << 2;
689 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
691 if (pv_area_store_would_trash (stack, addr))
694 pv_area_store (stack, addr, 4, regs[regno]);
696 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
698 int rd = bits (insn, 0, 2);
699 int rn = bits (insn, 3, 5);
702 offset = bits (insn, 6, 10) << 2;
703 addr = pv_add_constant (regs[rn], offset);
705 if (pv_area_store_would_trash (stack, addr))
708 pv_area_store (stack, addr, 4, regs[rd]);
710 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
711 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
712 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
713 /* Ignore stores of argument registers to the stack. */
715 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
716 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
717 /* Ignore block loads from the stack, potentially copying
718 parameters from memory. */
720 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
721 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
722 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
723 /* Similarly ignore single loads from the stack. */
725 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
726 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
727 /* Skip register copies, i.e. saves to another register
728 instead of the stack. */
730 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
731 /* Recognize constant loads; even with small stacks these are necessary
733 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
734 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
736 /* Constant pool loads, for the same reason. */
737 unsigned int constant;
740 loc = start + 4 + bits (insn, 0, 7) * 4;
741 constant = read_memory_unsigned_integer (loc, 4, byte_order);
742 regs[bits (insn, 8, 10)] = pv_constant (constant);
744 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
746 unsigned short inst2;
748 inst2 = read_memory_unsigned_integer (start + 2, 2,
749 byte_order_for_code);
751 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
753 /* BL, BLX. Allow some special function calls when
754 skipping the prologue; GCC generates these before
755 storing arguments to the stack. */
757 int j1, j2, imm1, imm2;
759 imm1 = sbits (insn, 0, 10);
760 imm2 = bits (inst2, 0, 10);
761 j1 = bit (inst2, 13);
762 j2 = bit (inst2, 11);
764 offset = ((imm1 << 12) + (imm2 << 1));
765 offset ^= ((!j2) << 22) | ((!j1) << 23);
767 nextpc = start + 4 + offset;
768 /* For BLX make sure to clear the low bits. */
769 if (bit (inst2, 12) == 0)
770 nextpc = nextpc & 0xfffffffc;
772 if (!skip_prologue_function (gdbarch, nextpc,
773 bit (inst2, 12) != 0))
777 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
779 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
781 pv_t addr = regs[bits (insn, 0, 3)];
784 if (pv_area_store_would_trash (stack, addr))
787 /* Calculate offsets of saved registers. */
788 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
789 if (inst2 & (1 << regno))
791 addr = pv_add_constant (addr, -4);
792 pv_area_store (stack, addr, 4, regs[regno]);
796 regs[bits (insn, 0, 3)] = addr;
799 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
801 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
803 int regno1 = bits (inst2, 12, 15);
804 int regno2 = bits (inst2, 8, 11);
805 pv_t addr = regs[bits (insn, 0, 3)];
807 offset = inst2 & 0xff;
809 addr = pv_add_constant (addr, offset);
811 addr = pv_add_constant (addr, -offset);
813 if (pv_area_store_would_trash (stack, addr))
816 pv_area_store (stack, addr, 4, regs[regno1]);
817 pv_area_store (stack, pv_add_constant (addr, 4),
821 regs[bits (insn, 0, 3)] = addr;
824 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
825 && (inst2 & 0x0c00) == 0x0c00
826 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
828 int regno = bits (inst2, 12, 15);
829 pv_t addr = regs[bits (insn, 0, 3)];
831 offset = inst2 & 0xff;
833 addr = pv_add_constant (addr, offset);
835 addr = pv_add_constant (addr, -offset);
837 if (pv_area_store_would_trash (stack, addr))
840 pv_area_store (stack, addr, 4, regs[regno]);
843 regs[bits (insn, 0, 3)] = addr;
846 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
847 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
849 int regno = bits (inst2, 12, 15);
852 offset = inst2 & 0xfff;
853 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
855 if (pv_area_store_would_trash (stack, addr))
858 pv_area_store (stack, addr, 4, regs[regno]);
861 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
862 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
863 /* Ignore stores of argument registers to the stack. */
866 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
867 && (inst2 & 0x0d00) == 0x0c00
868 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
869 /* Ignore stores of argument registers to the stack. */
872 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
874 && (inst2 & 0x8000) == 0x0000
875 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
876 /* Ignore block loads from the stack, potentially copying
877 parameters from memory. */
880 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 /* Similarly ignore dual loads from the stack. */
886 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
887 && (inst2 & 0x0d00) == 0x0c00
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
889 /* Similarly ignore single loads from the stack. */
892 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
893 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 /* Similarly ignore single loads from the stack. */
897 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
898 && (inst2 & 0x8000) == 0x0000)
900 unsigned int imm = ((bits (insn, 10, 10) << 11)
901 | (bits (inst2, 12, 14) << 8)
902 | bits (inst2, 0, 7));
904 regs[bits (inst2, 8, 11)]
905 = pv_add_constant (regs[bits (insn, 0, 3)],
906 thumb_expand_immediate (imm));
909 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
910 && (inst2 & 0x8000) == 0x0000)
912 unsigned int imm = ((bits (insn, 10, 10) << 11)
913 | (bits (inst2, 12, 14) << 8)
914 | bits (inst2, 0, 7));
916 regs[bits (inst2, 8, 11)]
917 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
920 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
921 && (inst2 & 0x8000) == 0x0000)
923 unsigned int imm = ((bits (insn, 10, 10) << 11)
924 | (bits (inst2, 12, 14) << 8)
925 | bits (inst2, 0, 7));
927 regs[bits (inst2, 8, 11)]
928 = pv_add_constant (regs[bits (insn, 0, 3)],
929 - (CORE_ADDR) thumb_expand_immediate (imm));
932 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
933 && (inst2 & 0x8000) == 0x0000)
935 unsigned int imm = ((bits (insn, 10, 10) << 11)
936 | (bits (inst2, 12, 14) << 8)
937 | bits (inst2, 0, 7));
939 regs[bits (inst2, 8, 11)]
940 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
943 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
949 regs[bits (inst2, 8, 11)]
950 = pv_constant (thumb_expand_immediate (imm));
953 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
956 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
958 regs[bits (inst2, 8, 11)] = pv_constant (imm);
961 else if (insn == 0xea5f /* mov.w Rd,Rm */
962 && (inst2 & 0xf0f0) == 0)
964 int dst_reg = (inst2 & 0x0f00) >> 8;
965 int src_reg = inst2 & 0xf;
966 regs[dst_reg] = regs[src_reg];
969 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
971 /* Constant pool loads. */
972 unsigned int constant;
975 offset = bits (inst2, 0, 11);
977 loc = start + 4 + offset;
979 loc = start + 4 - offset;
981 constant = read_memory_unsigned_integer (loc, 4, byte_order);
982 regs[bits (inst2, 12, 15)] = pv_constant (constant);
985 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
987 /* Constant pool loads. */
988 unsigned int constant;
991 offset = bits (inst2, 0, 7) << 2;
993 loc = start + 4 + offset;
995 loc = start + 4 - offset;
997 constant = read_memory_unsigned_integer (loc, 4, byte_order);
998 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1000 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1001 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1004 else if (thumb2_instruction_changes_pc (insn, inst2))
1006 /* Don't scan past anything that might change control flow. */
1011 /* The optimizer might shove anything into the prologue,
1012 so we just skip what we don't recognize. */
1013 unrecognized_pc = start;
1018 else if (thumb_instruction_changes_pc (insn))
1020 /* Don't scan past anything that might change control flow. */
1025 /* The optimizer might shove anything into the prologue,
1026 so we just skip what we don't recognize. */
1027 unrecognized_pc = start;
1034 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1035 paddress (gdbarch, start));
1037 if (unrecognized_pc == 0)
1038 unrecognized_pc = start;
1042 do_cleanups (back_to);
1043 return unrecognized_pc;
1046 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1048 /* Frame pointer is fp. Frame size is constant. */
1049 cache->framereg = ARM_FP_REGNUM;
1050 cache->framesize = -regs[ARM_FP_REGNUM].k;
1052 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1054 /* Frame pointer is r7. Frame size is constant. */
1055 cache->framereg = THUMB_FP_REGNUM;
1056 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1060 /* Try the stack pointer... this is a bit desperate. */
1061 cache->framereg = ARM_SP_REGNUM;
1062 cache->framesize = -regs[ARM_SP_REGNUM].k;
1065 for (i = 0; i < 16; i++)
1066 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1067 cache->saved_regs[i].addr = offset;
1069 do_cleanups (back_to);
1070 return unrecognized_pc;
1074 /* Try to analyze the instructions starting from PC, which load symbol
1075 __stack_chk_guard. Return the address of instruction after loading this
1076 symbol, set the dest register number to *BASEREG, and set the size of
1077 instructions for loading symbol in OFFSET. Return 0 if instructions are
1081 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1082 unsigned int *destreg, int *offset)
1084 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1085 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1086 unsigned int low, high, address;
1091 unsigned short insn1
1092 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1094 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1096 *destreg = bits (insn1, 8, 10);
1098 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1099 address = read_memory_unsigned_integer (address, 4,
1100 byte_order_for_code);
1102 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1104 unsigned short insn2
1105 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1107 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1110 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1112 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1114 /* movt Rd, #const */
1115 if ((insn1 & 0xfbc0) == 0xf2c0)
1117 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1118 *destreg = bits (insn2, 8, 11);
1120 address = (high << 16 | low);
1127 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1129 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1131 address = bits (insn, 0, 11) + pc + 8;
1132 address = read_memory_unsigned_integer (address, 4,
1133 byte_order_for_code);
1135 *destreg = bits (insn, 12, 15);
1138 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1140 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1143 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1145 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1147 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1148 *destreg = bits (insn, 12, 15);
1150 address = (high << 16 | low);
1158 /* Try to skip a sequence of instructions used for stack protector. If PC
1159 points to the first instruction of this sequence, return the address of
1160 first instruction after this sequence, otherwise, return original PC.
1162 On arm, this sequence of instructions is composed of mainly three steps,
1163 Step 1: load symbol __stack_chk_guard,
1164 Step 2: load from address of __stack_chk_guard,
1165 Step 3: store it to somewhere else.
1167 Usually, instructions on step 2 and step 3 are the same on various ARM
1168 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1169 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1170 instructions in step 1 vary from different ARM architectures. On ARMv7,
1173 movw Rn, #:lower16:__stack_chk_guard
1174 movt Rn, #:upper16:__stack_chk_guard
1181 .word __stack_chk_guard
1183 Since ldr/str is a very popular instruction, we can't use them as
1184 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1185 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1186 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1189 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1191 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1192 unsigned int basereg;
1193 struct bound_minimal_symbol stack_chk_guard;
1195 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1198 /* Try to parse the instructions in Step 1. */
1199 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1204 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1205 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1206 Otherwise, this sequence cannot be for stack protector. */
1207 if (stack_chk_guard.minsym == NULL
1208 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1213 unsigned int destreg;
1215 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1217 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1218 if ((insn & 0xf800) != 0x6800)
1220 if (bits (insn, 3, 5) != basereg)
1222 destreg = bits (insn, 0, 2);
1224 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1225 byte_order_for_code);
1226 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1227 if ((insn & 0xf800) != 0x6000)
1229 if (destreg != bits (insn, 0, 2))
1234 unsigned int destreg;
1236 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1238 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1239 if ((insn & 0x0e500000) != 0x04100000)
1241 if (bits (insn, 16, 19) != basereg)
1243 destreg = bits (insn, 12, 15);
1244 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1245 insn = read_memory_unsigned_integer (pc + offset + 4,
1246 4, byte_order_for_code);
1247 if ((insn & 0x0e500000) != 0x04000000)
1249 if (bits (insn, 12, 15) != destreg)
1252 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1255 return pc + offset + 4;
1257 return pc + offset + 8;
1260 /* Advance the PC across any function entry prologue instructions to
1261 reach some "real" code.
1263 The APCS (ARM Procedure Call Standard) defines the following
1267 [stmfd sp!, {a1,a2,a3,a4}]
1268 stmfd sp!, {...,fp,ip,lr,pc}
1269 [stfe f7, [sp, #-12]!]
1270 [stfe f6, [sp, #-12]!]
1271 [stfe f5, [sp, #-12]!]
1272 [stfe f4, [sp, #-12]!]
1273 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1276 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1278 CORE_ADDR func_addr, limit_pc;
1280 /* See if we can determine the end of the prologue via the symbol table.
1281 If so, then return either PC, or the PC after the prologue, whichever
1283 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1285 CORE_ADDR post_prologue_pc
1286 = skip_prologue_using_sal (gdbarch, func_addr);
1287 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1289 if (post_prologue_pc)
1291 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1294 /* GCC always emits a line note before the prologue and another
1295 one after, even if the two are at the same address or on the
1296 same line. Take advantage of this so that we do not need to
1297 know every instruction that might appear in the prologue. We
1298 will have producer information for most binaries; if it is
1299 missing (e.g. for -gstabs), assuming the GNU tools. */
1300 if (post_prologue_pc
1302 || COMPUNIT_PRODUCER (cust) == NULL
1303 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1304 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1305 return post_prologue_pc;
1307 if (post_prologue_pc != 0)
1309 CORE_ADDR analyzed_limit;
1311 /* For non-GCC compilers, make sure the entire line is an
1312 acceptable prologue; GDB will round this function's
1313 return value up to the end of the following line so we
1314 can not skip just part of a line (and we do not want to).
1316 RealView does not treat the prologue specially, but does
1317 associate prologue code with the opening brace; so this
1318 lets us skip the first line if we think it is the opening
1320 if (arm_pc_is_thumb (gdbarch, func_addr))
1321 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1322 post_prologue_pc, NULL);
1324 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1325 post_prologue_pc, NULL);
1327 if (analyzed_limit != post_prologue_pc)
1330 return post_prologue_pc;
1334 /* Can't determine prologue from the symbol table, need to examine
1337 /* Find an upper limit on the function prologue using the debug
1338 information. If the debug information could not be used to provide
1339 that bound, then use an arbitrary large number as the upper bound. */
1340 /* Like arm_scan_prologue, stop no later than pc + 64. */
1341 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1343 limit_pc = pc + 64; /* Magic. */
1346 /* Check if this is Thumb code. */
1347 if (arm_pc_is_thumb (gdbarch, pc))
1348 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1350 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1354 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1355 This function decodes a Thumb function prologue to determine:
1356 1) the size of the stack frame
1357 2) which registers are saved on it
1358 3) the offsets of saved regs
1359 4) the offset from the stack pointer to the frame pointer
1361 A typical Thumb function prologue would create this stack frame
1362 (offsets relative to FP)
1363 old SP -> 24 stack parameters
1366 R7 -> 0 local variables (16 bytes)
1367 SP -> -12 additional stack space (12 bytes)
1368 The frame size would thus be 36 bytes, and the frame offset would be
1369 12 bytes. The frame register is R7.
1371 The comments for thumb_skip_prolog() describe the algorithm we use
1372 to detect the end of the prolog. */
1376 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1377 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1379 CORE_ADDR prologue_start;
1380 CORE_ADDR prologue_end;
1382 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1385 /* See comment in arm_scan_prologue for an explanation of
1387 if (prologue_end > prologue_start + 64)
1389 prologue_end = prologue_start + 64;
1393 /* We're in the boondocks: we have no idea where the start of the
1397 prologue_end = min (prologue_end, prev_pc);
1399 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1402 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1406 arm_instruction_restores_sp (unsigned int insn)
1408 if (bits (insn, 28, 31) != INST_NV)
1410 if ((insn & 0x0df0f000) == 0x0080d000
1411 /* ADD SP (register or immediate). */
1412 || (insn & 0x0df0f000) == 0x0040d000
1413 /* SUB SP (register or immediate). */
1414 || (insn & 0x0ffffff0) == 0x01a0d000
1416 || (insn & 0x0fff0000) == 0x08bd0000
1418 || (insn & 0x0fff0000) == 0x049d0000)
1419 /* POP of a single register. */
1426 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1427 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1428 fill it in. Return the first address not recognized as a prologue
1431 We recognize all the instructions typically found in ARM prologues,
1432 plus harmless instructions which can be skipped (either for analysis
1433 purposes, or a more restrictive set that can be skipped when finding
1434 the end of the prologue). */
1437 arm_analyze_prologue (struct gdbarch *gdbarch,
1438 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1439 struct arm_prologue_cache *cache)
1441 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1443 CORE_ADDR offset, current_pc;
1444 pv_t regs[ARM_FPS_REGNUM];
1445 struct pv_area *stack;
1446 struct cleanup *back_to;
1447 CORE_ADDR unrecognized_pc = 0;
1449 /* Search the prologue looking for instructions that set up the
1450 frame pointer, adjust the stack pointer, and save registers.
1452 Be careful, however, and if it doesn't look like a prologue,
1453 don't try to scan it. If, for instance, a frameless function
1454 begins with stmfd sp!, then we will tell ourselves there is
1455 a frame, which will confuse stack traceback, as well as "finish"
1456 and other operations that rely on a knowledge of the stack
1459 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1460 regs[regno] = pv_register (regno, 0);
1461 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1462 back_to = make_cleanup_free_pv_area (stack);
1464 for (current_pc = prologue_start;
1465 current_pc < prologue_end;
1469 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1471 if (insn == 0xe1a0c00d) /* mov ip, sp */
1473 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1476 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1477 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1479 unsigned imm = insn & 0xff; /* immediate value */
1480 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1481 int rd = bits (insn, 12, 15);
1482 imm = (imm >> rot) | (imm << (32 - rot));
1483 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1486 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1487 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1489 unsigned imm = insn & 0xff; /* immediate value */
1490 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1491 int rd = bits (insn, 12, 15);
1492 imm = (imm >> rot) | (imm << (32 - rot));
1493 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1496 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1499 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1501 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1502 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1503 regs[bits (insn, 12, 15)]);
1506 else if ((insn & 0xffff0000) == 0xe92d0000)
1507 /* stmfd sp!, {..., fp, ip, lr, pc}
1509 stmfd sp!, {a1, a2, a3, a4} */
1511 int mask = insn & 0xffff;
1513 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1516 /* Calculate offsets of saved registers. */
1517 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1518 if (mask & (1 << regno))
1521 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1522 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1525 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1526 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1527 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1529 /* No need to add this to saved_regs -- it's just an arg reg. */
1532 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1533 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1534 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1536 /* No need to add this to saved_regs -- it's just an arg reg. */
1539 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1541 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1543 /* No need to add this to saved_regs -- it's just arg regs. */
1546 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1548 unsigned imm = insn & 0xff; /* immediate value */
1549 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1550 imm = (imm >> rot) | (imm << (32 - rot));
1551 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1553 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1555 unsigned imm = insn & 0xff; /* immediate value */
1556 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1557 imm = (imm >> rot) | (imm << (32 - rot));
1558 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1560 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1562 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1564 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1567 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1568 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1569 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1571 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1573 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1575 int n_saved_fp_regs;
1576 unsigned int fp_start_reg, fp_bound_reg;
1578 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1581 if ((insn & 0x800) == 0x800) /* N0 is set */
1583 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1584 n_saved_fp_regs = 3;
1586 n_saved_fp_regs = 1;
1590 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1591 n_saved_fp_regs = 2;
1593 n_saved_fp_regs = 4;
1596 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1597 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1598 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1600 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1601 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1602 regs[fp_start_reg++]);
1605 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1607 /* Allow some special function calls when skipping the
1608 prologue; GCC generates these before storing arguments to
1610 CORE_ADDR dest = BranchDest (current_pc, insn);
1612 if (skip_prologue_function (gdbarch, dest, 0))
1617 else if ((insn & 0xf0000000) != 0xe0000000)
1618 break; /* Condition not true, exit early. */
1619 else if (arm_instruction_changes_pc (insn))
1620 /* Don't scan past anything that might change control flow. */
1622 else if (arm_instruction_restores_sp (insn))
1624 /* Don't scan past the epilogue. */
1627 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1628 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1629 /* Ignore block loads from the stack, potentially copying
1630 parameters from memory. */
1632 else if ((insn & 0xfc500000) == 0xe4100000
1633 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1634 /* Similarly ignore single loads from the stack. */
1636 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1637 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1638 register instead of the stack. */
1642 /* The optimizer might shove anything into the prologue, if
1643 we build up cache (cache != NULL) from scanning prologue,
1644 we just skip what we don't recognize and scan further to
1645 make cache as complete as possible. However, if we skip
1646 prologue, we'll stop immediately on unrecognized
1648 unrecognized_pc = current_pc;
1656 if (unrecognized_pc == 0)
1657 unrecognized_pc = current_pc;
1661 int framereg, framesize;
1663 /* The frame size is just the distance from the frame register
1664 to the original stack pointer. */
1665 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1667 /* Frame pointer is fp. */
1668 framereg = ARM_FP_REGNUM;
1669 framesize = -regs[ARM_FP_REGNUM].k;
1673 /* Try the stack pointer... this is a bit desperate. */
1674 framereg = ARM_SP_REGNUM;
1675 framesize = -regs[ARM_SP_REGNUM].k;
1678 cache->framereg = framereg;
1679 cache->framesize = framesize;
1681 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1682 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1683 cache->saved_regs[regno].addr = offset;
1687 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1688 paddress (gdbarch, unrecognized_pc));
1690 do_cleanups (back_to);
1691 return unrecognized_pc;
1695 arm_scan_prologue (struct frame_info *this_frame,
1696 struct arm_prologue_cache *cache)
1698 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1700 CORE_ADDR prologue_start, prologue_end;
1701 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1702 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1704 /* Assume there is no frame until proven otherwise. */
1705 cache->framereg = ARM_SP_REGNUM;
1706 cache->framesize = 0;
1708 /* Check for Thumb prologue. */
1709 if (arm_frame_is_thumb (this_frame))
1711 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1715 /* Find the function prologue. If we can't find the function in
1716 the symbol table, peek in the stack frame to find the PC. */
1717 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1720 /* One way to find the end of the prologue (which works well
1721 for unoptimized code) is to do the following:
1723 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1726 prologue_end = prev_pc;
1727 else if (sal.end < prologue_end)
1728 prologue_end = sal.end;
1730 This mechanism is very accurate so long as the optimizer
1731 doesn't move any instructions from the function body into the
1732 prologue. If this happens, sal.end will be the last
1733 instruction in the first hunk of prologue code just before
1734 the first instruction that the scheduler has moved from
1735 the body to the prologue.
1737 In order to make sure that we scan all of the prologue
1738 instructions, we use a slightly less accurate mechanism which
1739 may scan more than necessary. To help compensate for this
1740 lack of accuracy, the prologue scanning loop below contains
1741 several clauses which'll cause the loop to terminate early if
1742 an implausible prologue instruction is encountered.
1748 is a suitable endpoint since it accounts for the largest
1749 possible prologue plus up to five instructions inserted by
1752 if (prologue_end > prologue_start + 64)
1754 prologue_end = prologue_start + 64; /* See above. */
1759 /* We have no symbol information. Our only option is to assume this
1760 function has a standard stack frame and the normal frame register.
1761 Then, we can find the value of our frame pointer on entrance to
1762 the callee (or at the present moment if this is the innermost frame).
1763 The value stored there should be the address of the stmfd + 8. */
1764 CORE_ADDR frame_loc;
1765 LONGEST return_value;
1767 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1768 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1772 prologue_start = gdbarch_addr_bits_remove
1773 (gdbarch, return_value) - 8;
1774 prologue_end = prologue_start + 64; /* See above. */
1778 if (prev_pc < prologue_end)
1779 prologue_end = prev_pc;
1781 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1784 static struct arm_prologue_cache *
1785 arm_make_prologue_cache (struct frame_info *this_frame)
1788 struct arm_prologue_cache *cache;
1789 CORE_ADDR unwound_fp;
1791 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1792 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1794 arm_scan_prologue (this_frame, cache);
1796 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1797 if (unwound_fp == 0)
1800 cache->prev_sp = unwound_fp + cache->framesize;
1802 /* Calculate actual addresses of saved registers using offsets
1803 determined by arm_scan_prologue. */
1804 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1805 if (trad_frame_addr_p (cache->saved_regs, reg))
1806 cache->saved_regs[reg].addr += cache->prev_sp;
1811 /* Implementation of the stop_reason hook for arm_prologue frames. */
1813 static enum unwind_stop_reason
1814 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1817 struct arm_prologue_cache *cache;
1820 if (*this_cache == NULL)
1821 *this_cache = arm_make_prologue_cache (this_frame);
1822 cache = (struct arm_prologue_cache *) *this_cache;
1824 /* This is meant to halt the backtrace at "_start". */
1825 pc = get_frame_pc (this_frame);
1826 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1827 return UNWIND_OUTERMOST;
1829 /* If we've hit a wall, stop. */
1830 if (cache->prev_sp == 0)
1831 return UNWIND_OUTERMOST;
1833 return UNWIND_NO_REASON;
1836 /* Our frame ID for a normal frame is the current function's starting PC
1837 and the caller's SP when we were called. */
1840 arm_prologue_this_id (struct frame_info *this_frame,
1842 struct frame_id *this_id)
1844 struct arm_prologue_cache *cache;
1848 if (*this_cache == NULL)
1849 *this_cache = arm_make_prologue_cache (this_frame);
1850 cache = (struct arm_prologue_cache *) *this_cache;
1852 /* Use function start address as part of the frame ID. If we cannot
1853 identify the start address (due to missing symbol information),
1854 fall back to just using the current PC. */
1855 pc = get_frame_pc (this_frame);
1856 func = get_frame_func (this_frame);
1860 id = frame_id_build (cache->prev_sp, func);
1864 static struct value *
1865 arm_prologue_prev_register (struct frame_info *this_frame,
1869 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1870 struct arm_prologue_cache *cache;
1872 if (*this_cache == NULL)
1873 *this_cache = arm_make_prologue_cache (this_frame);
1874 cache = (struct arm_prologue_cache *) *this_cache;
1876 /* If we are asked to unwind the PC, then we need to return the LR
1877 instead. The prologue may save PC, but it will point into this
1878 frame's prologue, not the next frame's resume location. Also
1879 strip the saved T bit. A valid LR may have the low bit set, but
1880 a valid PC never does. */
1881 if (prev_regnum == ARM_PC_REGNUM)
1885 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1886 return frame_unwind_got_constant (this_frame, prev_regnum,
1887 arm_addr_bits_remove (gdbarch, lr));
1890 /* SP is generally not saved to the stack, but this frame is
1891 identified by the next frame's stack pointer at the time of the call.
1892 The value was already reconstructed into PREV_SP. */
1893 if (prev_regnum == ARM_SP_REGNUM)
1894 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1896 /* The CPSR may have been changed by the call instruction and by the
1897 called function. The only bit we can reconstruct is the T bit,
1898 by checking the low bit of LR as of the call. This is a reliable
1899 indicator of Thumb-ness except for some ARM v4T pre-interworking
1900 Thumb code, which could get away with a clear low bit as long as
1901 the called function did not use bx. Guess that all other
1902 bits are unchanged; the condition flags are presumably lost,
1903 but the processor status is likely valid. */
1904 if (prev_regnum == ARM_PS_REGNUM)
1907 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1909 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1910 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1911 if (IS_THUMB_ADDR (lr))
1915 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1918 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1922 struct frame_unwind arm_prologue_unwind = {
1924 arm_prologue_unwind_stop_reason,
1925 arm_prologue_this_id,
1926 arm_prologue_prev_register,
1928 default_frame_sniffer
1931 /* Maintain a list of ARM exception table entries per objfile, similar to the
1932 list of mapping symbols. We only cache entries for standard ARM-defined
1933 personality routines; the cache will contain only the frame unwinding
1934 instructions associated with the entry (not the descriptors). */
1936 static const struct objfile_data *arm_exidx_data_key;
1938 struct arm_exidx_entry
1943 typedef struct arm_exidx_entry arm_exidx_entry_s;
1944 DEF_VEC_O(arm_exidx_entry_s);
1946 struct arm_exidx_data
1948 VEC(arm_exidx_entry_s) **section_maps;
1952 arm_exidx_data_free (struct objfile *objfile, void *arg)
1954 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1957 for (i = 0; i < objfile->obfd->section_count; i++)
1958 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1962 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1963 const struct arm_exidx_entry *rhs)
1965 return lhs->addr < rhs->addr;
1968 static struct obj_section *
1969 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1971 struct obj_section *osect;
1973 ALL_OBJFILE_OSECTIONS (objfile, osect)
1974 if (bfd_get_section_flags (objfile->obfd,
1975 osect->the_bfd_section) & SEC_ALLOC)
1977 bfd_vma start, size;
1978 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1979 size = bfd_get_section_size (osect->the_bfd_section);
1981 if (start <= vma && vma < start + size)
1988 /* Parse contents of exception table and exception index sections
1989 of OBJFILE, and fill in the exception table entry cache.
1991 For each entry that refers to a standard ARM-defined personality
1992 routine, extract the frame unwinding instructions (from either
1993 the index or the table section). The unwinding instructions
1995 - extracting them from the rest of the table data
1996 - converting to host endianness
1997 - appending the implicit 0xb0 ("Finish") code
1999 The extracted and normalized instructions are stored for later
2000 retrieval by the arm_find_exidx_entry routine. */
2003 arm_exidx_new_objfile (struct objfile *objfile)
2005 struct cleanup *cleanups;
2006 struct arm_exidx_data *data;
2007 asection *exidx, *extab;
2008 bfd_vma exidx_vma = 0, extab_vma = 0;
2009 bfd_size_type exidx_size = 0, extab_size = 0;
2010 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2013 /* If we've already touched this file, do nothing. */
2014 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2016 cleanups = make_cleanup (null_cleanup, NULL);
2018 /* Read contents of exception table and index. */
2019 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2022 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2023 exidx_size = bfd_get_section_size (exidx);
2024 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2025 make_cleanup (xfree, exidx_data);
2027 if (!bfd_get_section_contents (objfile->obfd, exidx,
2028 exidx_data, 0, exidx_size))
2030 do_cleanups (cleanups);
2035 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2038 extab_vma = bfd_section_vma (objfile->obfd, extab);
2039 extab_size = bfd_get_section_size (extab);
2040 extab_data = (gdb_byte *) xmalloc (extab_size);
2041 make_cleanup (xfree, extab_data);
2043 if (!bfd_get_section_contents (objfile->obfd, extab,
2044 extab_data, 0, extab_size))
2046 do_cleanups (cleanups);
2051 /* Allocate exception table data structure. */
2052 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2053 set_objfile_data (objfile, arm_exidx_data_key, data);
2054 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2055 objfile->obfd->section_count,
2056 VEC(arm_exidx_entry_s) *);
2058 /* Fill in exception table. */
2059 for (i = 0; i < exidx_size / 8; i++)
2061 struct arm_exidx_entry new_exidx_entry;
2062 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2063 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2064 bfd_vma addr = 0, word = 0;
2065 int n_bytes = 0, n_words = 0;
2066 struct obj_section *sec;
2067 gdb_byte *entry = NULL;
2069 /* Extract address of start of function. */
2070 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2071 idx += exidx_vma + i * 8;
2073 /* Find section containing function and compute section offset. */
2074 sec = arm_obj_section_from_vma (objfile, idx);
2077 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2079 /* Determine address of exception table entry. */
2082 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2084 else if ((val & 0xff000000) == 0x80000000)
2086 /* Exception table entry embedded in .ARM.exidx
2087 -- must be short form. */
2091 else if (!(val & 0x80000000))
2093 /* Exception table entry in .ARM.extab. */
2094 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2095 addr += exidx_vma + i * 8 + 4;
2097 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2099 word = bfd_h_get_32 (objfile->obfd,
2100 extab_data + addr - extab_vma);
2103 if ((word & 0xff000000) == 0x80000000)
2108 else if ((word & 0xff000000) == 0x81000000
2109 || (word & 0xff000000) == 0x82000000)
2113 n_words = ((word >> 16) & 0xff);
2115 else if (!(word & 0x80000000))
2118 struct obj_section *pers_sec;
2119 int gnu_personality = 0;
2121 /* Custom personality routine. */
2122 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2123 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2125 /* Check whether we've got one of the variants of the
2126 GNU personality routines. */
2127 pers_sec = arm_obj_section_from_vma (objfile, pers);
2130 static const char *personality[] =
2132 "__gcc_personality_v0",
2133 "__gxx_personality_v0",
2134 "__gcj_personality_v0",
2135 "__gnu_objc_personality_v0",
2139 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2142 for (k = 0; personality[k]; k++)
2143 if (lookup_minimal_symbol_by_pc_name
2144 (pc, personality[k], objfile))
2146 gnu_personality = 1;
2151 /* If so, the next word contains a word count in the high
2152 byte, followed by the same unwind instructions as the
2153 pre-defined forms. */
2155 && addr + 4 <= extab_vma + extab_size)
2157 word = bfd_h_get_32 (objfile->obfd,
2158 extab_data + addr - extab_vma);
2161 n_words = ((word >> 24) & 0xff);
2167 /* Sanity check address. */
2169 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2170 n_words = n_bytes = 0;
2172 /* The unwind instructions reside in WORD (only the N_BYTES least
2173 significant bytes are valid), followed by N_WORDS words in the
2174 extab section starting at ADDR. */
2175 if (n_bytes || n_words)
2178 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2179 n_bytes + n_words * 4 + 1);
2182 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2186 word = bfd_h_get_32 (objfile->obfd,
2187 extab_data + addr - extab_vma);
2190 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2191 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2192 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2193 *p++ = (gdb_byte) (word & 0xff);
2196 /* Implied "Finish" to terminate the list. */
2200 /* Push entry onto vector. They are guaranteed to always
2201 appear in order of increasing addresses. */
2202 new_exidx_entry.addr = idx;
2203 new_exidx_entry.entry = entry;
2204 VEC_safe_push (arm_exidx_entry_s,
2205 data->section_maps[sec->the_bfd_section->index],
2209 do_cleanups (cleanups);
2212 /* Search for the exception table entry covering MEMADDR. If one is found,
2213 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2214 set *START to the start of the region covered by this entry. */
2217 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2219 struct obj_section *sec;
2221 sec = find_pc_section (memaddr);
2224 struct arm_exidx_data *data;
2225 VEC(arm_exidx_entry_s) *map;
2226 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2229 data = ((struct arm_exidx_data *)
2230 objfile_data (sec->objfile, arm_exidx_data_key));
2233 map = data->section_maps[sec->the_bfd_section->index];
2234 if (!VEC_empty (arm_exidx_entry_s, map))
2236 struct arm_exidx_entry *map_sym;
2238 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2239 arm_compare_exidx_entries);
2241 /* VEC_lower_bound finds the earliest ordered insertion
2242 point. If the following symbol starts at this exact
2243 address, we use that; otherwise, the preceding
2244 exception table entry covers this address. */
2245 if (idx < VEC_length (arm_exidx_entry_s, map))
2247 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2248 if (map_sym->addr == map_key.addr)
2251 *start = map_sym->addr + obj_section_addr (sec);
2252 return map_sym->entry;
2258 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2260 *start = map_sym->addr + obj_section_addr (sec);
2261 return map_sym->entry;
2270 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2271 instruction list from the ARM exception table entry ENTRY, allocate and
2272 return a prologue cache structure describing how to unwind this frame.
2274 Return NULL if the unwinding instruction list contains a "spare",
2275 "reserved" or "refuse to unwind" instruction as defined in section
2276 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2277 for the ARM Architecture" document. */
2279 static struct arm_prologue_cache *
2280 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2285 struct arm_prologue_cache *cache;
2286 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2287 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2293 /* Whenever we reload SP, we actually have to retrieve its
2294 actual value in the current frame. */
2297 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2299 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2300 vsp = get_frame_register_unsigned (this_frame, reg);
2304 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2305 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2311 /* Decode next unwind instruction. */
2314 if ((insn & 0xc0) == 0)
2316 int offset = insn & 0x3f;
2317 vsp += (offset << 2) + 4;
2319 else if ((insn & 0xc0) == 0x40)
2321 int offset = insn & 0x3f;
2322 vsp -= (offset << 2) + 4;
2324 else if ((insn & 0xf0) == 0x80)
2326 int mask = ((insn & 0xf) << 8) | *entry++;
2329 /* The special case of an all-zero mask identifies
2330 "Refuse to unwind". We return NULL to fall back
2331 to the prologue analyzer. */
2335 /* Pop registers r4..r15 under mask. */
2336 for (i = 0; i < 12; i++)
2337 if (mask & (1 << i))
2339 cache->saved_regs[4 + i].addr = vsp;
2343 /* Special-case popping SP -- we need to reload vsp. */
2344 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2347 else if ((insn & 0xf0) == 0x90)
2349 int reg = insn & 0xf;
2351 /* Reserved cases. */
2352 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2355 /* Set SP from another register and mark VSP for reload. */
2356 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2359 else if ((insn & 0xf0) == 0xa0)
2361 int count = insn & 0x7;
2362 int pop_lr = (insn & 0x8) != 0;
2365 /* Pop r4..r[4+count]. */
2366 for (i = 0; i <= count; i++)
2368 cache->saved_regs[4 + i].addr = vsp;
2372 /* If indicated by flag, pop LR as well. */
2375 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2379 else if (insn == 0xb0)
2381 /* We could only have updated PC by popping into it; if so, it
2382 will show up as address. Otherwise, copy LR into PC. */
2383 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2384 cache->saved_regs[ARM_PC_REGNUM]
2385 = cache->saved_regs[ARM_LR_REGNUM];
2390 else if (insn == 0xb1)
2392 int mask = *entry++;
2395 /* All-zero mask and mask >= 16 is "spare". */
2396 if (mask == 0 || mask >= 16)
2399 /* Pop r0..r3 under mask. */
2400 for (i = 0; i < 4; i++)
2401 if (mask & (1 << i))
2403 cache->saved_regs[i].addr = vsp;
2407 else if (insn == 0xb2)
2409 ULONGEST offset = 0;
2414 offset |= (*entry & 0x7f) << shift;
2417 while (*entry++ & 0x80);
2419 vsp += 0x204 + (offset << 2);
2421 else if (insn == 0xb3)
2423 int start = *entry >> 4;
2424 int count = (*entry++) & 0xf;
2427 /* Only registers D0..D15 are valid here. */
2428 if (start + count >= 16)
2431 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2432 for (i = 0; i <= count; i++)
2434 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2438 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2441 else if ((insn & 0xf8) == 0xb8)
2443 int count = insn & 0x7;
2446 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2447 for (i = 0; i <= count; i++)
2449 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2453 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2456 else if (insn == 0xc6)
2458 int start = *entry >> 4;
2459 int count = (*entry++) & 0xf;
2462 /* Only registers WR0..WR15 are valid. */
2463 if (start + count >= 16)
2466 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2467 for (i = 0; i <= count; i++)
2469 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2473 else if (insn == 0xc7)
2475 int mask = *entry++;
2478 /* All-zero mask and mask >= 16 is "spare". */
2479 if (mask == 0 || mask >= 16)
2482 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2483 for (i = 0; i < 4; i++)
2484 if (mask & (1 << i))
2486 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2490 else if ((insn & 0xf8) == 0xc0)
2492 int count = insn & 0x7;
2495 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2496 for (i = 0; i <= count; i++)
2498 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2502 else if (insn == 0xc8)
2504 int start = *entry >> 4;
2505 int count = (*entry++) & 0xf;
2508 /* Only registers D0..D31 are valid. */
2509 if (start + count >= 16)
2512 /* Pop VFP double-precision registers
2513 D[16+start]..D[16+start+count]. */
2514 for (i = 0; i <= count; i++)
2516 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2520 else if (insn == 0xc9)
2522 int start = *entry >> 4;
2523 int count = (*entry++) & 0xf;
2526 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2527 for (i = 0; i <= count; i++)
2529 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2533 else if ((insn & 0xf8) == 0xd0)
2535 int count = insn & 0x7;
2538 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2539 for (i = 0; i <= count; i++)
2541 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2547 /* Everything else is "spare". */
2552 /* If we restore SP from a register, assume this was the frame register.
2553 Otherwise just fall back to SP as frame register. */
2554 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2555 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2557 cache->framereg = ARM_SP_REGNUM;
2559 /* Determine offset to previous frame. */
2561 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2563 /* We already got the previous SP. */
2564 cache->prev_sp = vsp;
2569 /* Unwinding via ARM exception table entries. Note that the sniffer
2570 already computes a filled-in prologue cache, which is then used
2571 with the same arm_prologue_this_id and arm_prologue_prev_register
2572 routines also used for prologue-parsing based unwinding. */
2575 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2576 struct frame_info *this_frame,
2577 void **this_prologue_cache)
2579 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2580 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2581 CORE_ADDR addr_in_block, exidx_region, func_start;
2582 struct arm_prologue_cache *cache;
2585 /* See if we have an ARM exception table entry covering this address. */
2586 addr_in_block = get_frame_address_in_block (this_frame);
2587 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2591 /* The ARM exception table does not describe unwind information
2592 for arbitrary PC values, but is guaranteed to be correct only
2593 at call sites. We have to decide here whether we want to use
2594 ARM exception table information for this frame, or fall back
2595 to using prologue parsing. (Note that if we have DWARF CFI,
2596 this sniffer isn't even called -- CFI is always preferred.)
2598 Before we make this decision, however, we check whether we
2599 actually have *symbol* information for the current frame.
2600 If not, prologue parsing would not work anyway, so we might
2601 as well use the exception table and hope for the best. */
2602 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2606 /* If the next frame is "normal", we are at a call site in this
2607 frame, so exception information is guaranteed to be valid. */
2608 if (get_next_frame (this_frame)
2609 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2612 /* We also assume exception information is valid if we're currently
2613 blocked in a system call. The system library is supposed to
2614 ensure this, so that e.g. pthread cancellation works. */
2615 if (arm_frame_is_thumb (this_frame))
2619 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2620 byte_order_for_code, &insn)
2621 && (insn & 0xff00) == 0xdf00 /* svc */)
2628 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2629 byte_order_for_code, &insn)
2630 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2634 /* Bail out if we don't know that exception information is valid. */
2638 /* The ARM exception index does not mark the *end* of the region
2639 covered by the entry, and some functions will not have any entry.
2640 To correctly recognize the end of the covered region, the linker
2641 should have inserted dummy records with a CANTUNWIND marker.
2643 Unfortunately, current versions of GNU ld do not reliably do
2644 this, and thus we may have found an incorrect entry above.
2645 As a (temporary) sanity check, we only use the entry if it
2646 lies *within* the bounds of the function. Note that this check
2647 might reject perfectly valid entries that just happen to cover
2648 multiple functions; therefore this check ought to be removed
2649 once the linker is fixed. */
2650 if (func_start > exidx_region)
2654 /* Decode the list of unwinding instructions into a prologue cache.
2655 Note that this may fail due to e.g. a "refuse to unwind" code. */
2656 cache = arm_exidx_fill_cache (this_frame, entry);
2660 *this_prologue_cache = cache;
2664 struct frame_unwind arm_exidx_unwind = {
2666 default_frame_unwind_stop_reason,
2667 arm_prologue_this_id,
2668 arm_prologue_prev_register,
2670 arm_exidx_unwind_sniffer
2673 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2674 trampoline, return the target PC. Otherwise return 0.
2676 void call0a (char c, short s, int i, long l) {}
2680 (*pointer_to_call0a) (c, s, i, l);
2683 Instead of calling a stub library function _call_via_xx (xx is
2684 the register name), GCC may inline the trampoline in the object
2685 file as below (register r2 has the address of call0a).
2688 .type main, %function
2697 The trampoline 'bx r2' doesn't belong to main. */
2700 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2702 /* The heuristics of recognizing such trampoline is that FRAME is
2703 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2704 if (arm_frame_is_thumb (frame))
2708 if (target_read_memory (pc, buf, 2) == 0)
2710 struct gdbarch *gdbarch = get_frame_arch (frame);
2711 enum bfd_endian byte_order_for_code
2712 = gdbarch_byte_order_for_code (gdbarch);
2714 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2716 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2719 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2721 /* Clear the LSB so that gdb core sets step-resume
2722 breakpoint at the right address. */
2723 return UNMAKE_THUMB_ADDR (dest);
2731 static struct arm_prologue_cache *
2732 arm_make_stub_cache (struct frame_info *this_frame)
2734 struct arm_prologue_cache *cache;
2736 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2737 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2739 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2744 /* Our frame ID for a stub frame is the current SP and LR. */
2747 arm_stub_this_id (struct frame_info *this_frame,
2749 struct frame_id *this_id)
2751 struct arm_prologue_cache *cache;
2753 if (*this_cache == NULL)
2754 *this_cache = arm_make_stub_cache (this_frame);
2755 cache = (struct arm_prologue_cache *) *this_cache;
2757 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2761 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2762 struct frame_info *this_frame,
2763 void **this_prologue_cache)
2765 CORE_ADDR addr_in_block;
2767 CORE_ADDR pc, start_addr;
2770 addr_in_block = get_frame_address_in_block (this_frame);
2771 pc = get_frame_pc (this_frame);
2772 if (in_plt_section (addr_in_block)
2773 /* We also use the stub winder if the target memory is unreadable
2774 to avoid having the prologue unwinder trying to read it. */
2775 || target_read_memory (pc, dummy, 4) != 0)
2778 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2779 && arm_skip_bx_reg (this_frame, pc) != 0)
2785 struct frame_unwind arm_stub_unwind = {
2787 default_frame_unwind_stop_reason,
2789 arm_prologue_prev_register,
2791 arm_stub_unwind_sniffer
2794 /* Put here the code to store, into CACHE->saved_regs, the addresses
2795 of the saved registers of frame described by THIS_FRAME. CACHE is
2798 static struct arm_prologue_cache *
2799 arm_m_exception_cache (struct frame_info *this_frame)
2801 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2802 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2803 struct arm_prologue_cache *cache;
2804 CORE_ADDR unwound_sp;
2807 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2808 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2810 unwound_sp = get_frame_register_unsigned (this_frame,
2813 /* The hardware saves eight 32-bit words, comprising xPSR,
2814 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2815 "B1.5.6 Exception entry behavior" in
2816 "ARMv7-M Architecture Reference Manual". */
2817 cache->saved_regs[0].addr = unwound_sp;
2818 cache->saved_regs[1].addr = unwound_sp + 4;
2819 cache->saved_regs[2].addr = unwound_sp + 8;
2820 cache->saved_regs[3].addr = unwound_sp + 12;
2821 cache->saved_regs[12].addr = unwound_sp + 16;
2822 cache->saved_regs[14].addr = unwound_sp + 20;
2823 cache->saved_regs[15].addr = unwound_sp + 24;
2824 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2826 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2827 aligner between the top of the 32-byte stack frame and the
2828 previous context's stack pointer. */
2829 cache->prev_sp = unwound_sp + 32;
2830 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2831 && (xpsr & (1 << 9)) != 0)
2832 cache->prev_sp += 4;
2837 /* Implementation of function hook 'this_id' in
2838 'struct frame_uwnind'. */
2841 arm_m_exception_this_id (struct frame_info *this_frame,
2843 struct frame_id *this_id)
2845 struct arm_prologue_cache *cache;
2847 if (*this_cache == NULL)
2848 *this_cache = arm_m_exception_cache (this_frame);
2849 cache = (struct arm_prologue_cache *) *this_cache;
2851 /* Our frame ID for a stub frame is the current SP and LR. */
2852 *this_id = frame_id_build (cache->prev_sp,
2853 get_frame_pc (this_frame));
2856 /* Implementation of function hook 'prev_register' in
2857 'struct frame_uwnind'. */
2859 static struct value *
2860 arm_m_exception_prev_register (struct frame_info *this_frame,
2864 struct arm_prologue_cache *cache;
2866 if (*this_cache == NULL)
2867 *this_cache = arm_m_exception_cache (this_frame);
2868 cache = (struct arm_prologue_cache *) *this_cache;
2870 /* The value was already reconstructed into PREV_SP. */
2871 if (prev_regnum == ARM_SP_REGNUM)
2872 return frame_unwind_got_constant (this_frame, prev_regnum,
2875 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2879 /* Implementation of function hook 'sniffer' in
2880 'struct frame_uwnind'. */
2883 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2884 struct frame_info *this_frame,
2885 void **this_prologue_cache)
2887 CORE_ADDR this_pc = get_frame_pc (this_frame);
2889 /* No need to check is_m; this sniffer is only registered for
2890 M-profile architectures. */
2892 /* Exception frames return to one of these magic PCs. Other values
2893 are not defined as of v7-M. See details in "B1.5.8 Exception
2894 return behavior" in "ARMv7-M Architecture Reference Manual". */
2895 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2896 || this_pc == 0xfffffffd)
2902 /* Frame unwinder for M-profile exceptions. */
2904 struct frame_unwind arm_m_exception_unwind =
2907 default_frame_unwind_stop_reason,
2908 arm_m_exception_this_id,
2909 arm_m_exception_prev_register,
2911 arm_m_exception_unwind_sniffer
2915 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2917 struct arm_prologue_cache *cache;
2919 if (*this_cache == NULL)
2920 *this_cache = arm_make_prologue_cache (this_frame);
2921 cache = (struct arm_prologue_cache *) *this_cache;
2923 return cache->prev_sp - cache->framesize;
2926 struct frame_base arm_normal_base = {
2927 &arm_prologue_unwind,
2928 arm_normal_frame_base,
2929 arm_normal_frame_base,
2930 arm_normal_frame_base
2933 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2934 dummy frame. The frame ID's base needs to match the TOS value
2935 saved by save_dummy_frame_tos() and returned from
2936 arm_push_dummy_call, and the PC needs to match the dummy frame's
2939 static struct frame_id
2940 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2942 return frame_id_build (get_frame_register_unsigned (this_frame,
2944 get_frame_pc (this_frame));
2947 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2948 be used to construct the previous frame's ID, after looking up the
2949 containing function). */
2952 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2955 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2956 return arm_addr_bits_remove (gdbarch, pc);
2960 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2962 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2965 static struct value *
2966 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2969 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2971 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2976 /* The PC is normally copied from the return column, which
2977 describes saves of LR. However, that version may have an
2978 extra bit set to indicate Thumb state. The bit is not
2980 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2981 return frame_unwind_got_constant (this_frame, regnum,
2982 arm_addr_bits_remove (gdbarch, lr));
2985 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2986 cpsr = get_frame_register_unsigned (this_frame, regnum);
2987 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2988 if (IS_THUMB_ADDR (lr))
2992 return frame_unwind_got_constant (this_frame, regnum, cpsr);
2995 internal_error (__FILE__, __LINE__,
2996 _("Unexpected register %d"), regnum);
3001 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3002 struct dwarf2_frame_state_reg *reg,
3003 struct frame_info *this_frame)
3009 reg->how = DWARF2_FRAME_REG_FN;
3010 reg->loc.fn = arm_dwarf2_prev_register;
3013 reg->how = DWARF2_FRAME_REG_CFA;
3018 /* Implement the stack_frame_destroyed_p gdbarch method. */
3021 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3023 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3024 unsigned int insn, insn2;
3025 int found_return = 0, found_stack_adjust = 0;
3026 CORE_ADDR func_start, func_end;
3030 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3033 /* The epilogue is a sequence of instructions along the following lines:
3035 - add stack frame size to SP or FP
3036 - [if frame pointer used] restore SP from FP
3037 - restore registers from SP [may include PC]
3038 - a return-type instruction [if PC wasn't already restored]
3040 In a first pass, we scan forward from the current PC and verify the
3041 instructions we find as compatible with this sequence, ending in a
3044 However, this is not sufficient to distinguish indirect function calls
3045 within a function from indirect tail calls in the epilogue in some cases.
3046 Therefore, if we didn't already find any SP-changing instruction during
3047 forward scan, we add a backward scanning heuristic to ensure we actually
3048 are in the epilogue. */
3051 while (scan_pc < func_end && !found_return)
3053 if (target_read_memory (scan_pc, buf, 2))
3057 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3059 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3061 else if (insn == 0x46f7) /* mov pc, lr */
3063 else if (thumb_instruction_restores_sp (insn))
3065 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3068 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3070 if (target_read_memory (scan_pc, buf, 2))
3074 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3076 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3078 if (insn2 & 0x8000) /* <registers> include PC. */
3081 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3082 && (insn2 & 0x0fff) == 0x0b04)
3084 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3087 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3088 && (insn2 & 0x0e00) == 0x0a00)
3100 /* Since any instruction in the epilogue sequence, with the possible
3101 exception of return itself, updates the stack pointer, we need to
3102 scan backwards for at most one instruction. Try either a 16-bit or
3103 a 32-bit instruction. This is just a heuristic, so we do not worry
3104 too much about false positives. */
3106 if (pc - 4 < func_start)
3108 if (target_read_memory (pc - 4, buf, 4))
3111 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3112 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3114 if (thumb_instruction_restores_sp (insn2))
3115 found_stack_adjust = 1;
3116 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3117 found_stack_adjust = 1;
3118 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3119 && (insn2 & 0x0fff) == 0x0b04)
3120 found_stack_adjust = 1;
3121 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3122 && (insn2 & 0x0e00) == 0x0a00)
3123 found_stack_adjust = 1;
3125 return found_stack_adjust;
3128 /* Implement the stack_frame_destroyed_p gdbarch method. */
3131 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3133 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3136 CORE_ADDR func_start, func_end;
3138 if (arm_pc_is_thumb (gdbarch, pc))
3139 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3141 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3144 /* We are in the epilogue if the previous instruction was a stack
3145 adjustment and the next instruction is a possible return (bx, mov
3146 pc, or pop). We could have to scan backwards to find the stack
3147 adjustment, or forwards to find the return, but this is a decent
3148 approximation. First scan forwards. */
3151 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3152 if (bits (insn, 28, 31) != INST_NV)
3154 if ((insn & 0x0ffffff0) == 0x012fff10)
3157 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3160 else if ((insn & 0x0fff0000) == 0x08bd0000
3161 && (insn & 0x0000c000) != 0)
3162 /* POP (LDMIA), including PC or LR. */
3169 /* Scan backwards. This is just a heuristic, so do not worry about
3170 false positives from mode changes. */
3172 if (pc < func_start + 4)
3175 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3176 if (arm_instruction_restores_sp (insn))
3183 /* When arguments must be pushed onto the stack, they go on in reverse
3184 order. The code below implements a FILO (stack) to do this. */
3189 struct stack_item *prev;
3193 static struct stack_item *
3194 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3196 struct stack_item *si;
3197 si = XNEW (struct stack_item);
3198 si->data = (gdb_byte *) xmalloc (len);
3201 memcpy (si->data, contents, len);
3205 static struct stack_item *
3206 pop_stack_item (struct stack_item *si)
3208 struct stack_item *dead = si;
3216 /* Return the alignment (in bytes) of the given type. */
3219 arm_type_align (struct type *t)
3225 t = check_typedef (t);
3226 switch (TYPE_CODE (t))
3229 /* Should never happen. */
3230 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3234 case TYPE_CODE_ENUM:
3238 case TYPE_CODE_RANGE:
3240 case TYPE_CODE_CHAR:
3241 case TYPE_CODE_BOOL:
3242 return TYPE_LENGTH (t);
3244 case TYPE_CODE_ARRAY:
3245 if (TYPE_VECTOR (t))
3247 /* Use the natural alignment for vector types (the same for
3248 scalar type), but the maximum alignment is 64-bit. */
3249 if (TYPE_LENGTH (t) > 8)
3252 return TYPE_LENGTH (t);
3255 return arm_type_align (TYPE_TARGET_TYPE (t));
3256 case TYPE_CODE_COMPLEX:
3257 return arm_type_align (TYPE_TARGET_TYPE (t));
3259 case TYPE_CODE_STRUCT:
3260 case TYPE_CODE_UNION:
3262 for (n = 0; n < TYPE_NFIELDS (t); n++)
3264 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3272 /* Possible base types for a candidate for passing and returning in
3275 enum arm_vfp_cprc_base_type
3284 /* The length of one element of base type B. */
3287 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3291 case VFP_CPRC_SINGLE:
3293 case VFP_CPRC_DOUBLE:
3295 case VFP_CPRC_VEC64:
3297 case VFP_CPRC_VEC128:
3300 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3305 /* The character ('s', 'd' or 'q') for the type of VFP register used
3306 for passing base type B. */
3309 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3313 case VFP_CPRC_SINGLE:
3315 case VFP_CPRC_DOUBLE:
3317 case VFP_CPRC_VEC64:
3319 case VFP_CPRC_VEC128:
3322 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3327 /* Determine whether T may be part of a candidate for passing and
3328 returning in VFP registers, ignoring the limit on the total number
3329 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3330 classification of the first valid component found; if it is not
3331 VFP_CPRC_UNKNOWN, all components must have the same classification
3332 as *BASE_TYPE. If it is found that T contains a type not permitted
3333 for passing and returning in VFP registers, a type differently
3334 classified from *BASE_TYPE, or two types differently classified
3335 from each other, return -1, otherwise return the total number of
3336 base-type elements found (possibly 0 in an empty structure or
3337 array). Vector types are not currently supported, matching the
3338 generic AAPCS support. */
3341 arm_vfp_cprc_sub_candidate (struct type *t,
3342 enum arm_vfp_cprc_base_type *base_type)
3344 t = check_typedef (t);
3345 switch (TYPE_CODE (t))
3348 switch (TYPE_LENGTH (t))
3351 if (*base_type == VFP_CPRC_UNKNOWN)
3352 *base_type = VFP_CPRC_SINGLE;
3353 else if (*base_type != VFP_CPRC_SINGLE)
3358 if (*base_type == VFP_CPRC_UNKNOWN)
3359 *base_type = VFP_CPRC_DOUBLE;
3360 else if (*base_type != VFP_CPRC_DOUBLE)
3369 case TYPE_CODE_COMPLEX:
3370 /* Arguments of complex T where T is one of the types float or
3371 double get treated as if they are implemented as:
3380 switch (TYPE_LENGTH (t))
3383 if (*base_type == VFP_CPRC_UNKNOWN)
3384 *base_type = VFP_CPRC_SINGLE;
3385 else if (*base_type != VFP_CPRC_SINGLE)
3390 if (*base_type == VFP_CPRC_UNKNOWN)
3391 *base_type = VFP_CPRC_DOUBLE;
3392 else if (*base_type != VFP_CPRC_DOUBLE)
3401 case TYPE_CODE_ARRAY:
3403 if (TYPE_VECTOR (t))
3405 /* A 64-bit or 128-bit containerized vector type are VFP
3407 switch (TYPE_LENGTH (t))
3410 if (*base_type == VFP_CPRC_UNKNOWN)
3411 *base_type = VFP_CPRC_VEC64;
3414 if (*base_type == VFP_CPRC_UNKNOWN)
3415 *base_type = VFP_CPRC_VEC128;
3426 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3430 if (TYPE_LENGTH (t) == 0)
3432 gdb_assert (count == 0);
3435 else if (count == 0)
3437 unitlen = arm_vfp_cprc_unit_length (*base_type);
3438 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3439 return TYPE_LENGTH (t) / unitlen;
3444 case TYPE_CODE_STRUCT:
3449 for (i = 0; i < TYPE_NFIELDS (t); i++)
3451 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3453 if (sub_count == -1)
3457 if (TYPE_LENGTH (t) == 0)
3459 gdb_assert (count == 0);
3462 else if (count == 0)
3464 unitlen = arm_vfp_cprc_unit_length (*base_type);
3465 if (TYPE_LENGTH (t) != unitlen * count)
3470 case TYPE_CODE_UNION:
3475 for (i = 0; i < TYPE_NFIELDS (t); i++)
3477 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3479 if (sub_count == -1)
3481 count = (count > sub_count ? count : sub_count);
3483 if (TYPE_LENGTH (t) == 0)
3485 gdb_assert (count == 0);
3488 else if (count == 0)
3490 unitlen = arm_vfp_cprc_unit_length (*base_type);
3491 if (TYPE_LENGTH (t) != unitlen * count)
3503 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3504 if passed to or returned from a non-variadic function with the VFP
3505 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3506 *BASE_TYPE to the base type for T and *COUNT to the number of
3507 elements of that base type before returning. */
3510 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3513 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3514 int c = arm_vfp_cprc_sub_candidate (t, &b);
3515 if (c <= 0 || c > 4)
3522 /* Return 1 if the VFP ABI should be used for passing arguments to and
3523 returning values from a function of type FUNC_TYPE, 0
3527 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3529 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3530 /* Variadic functions always use the base ABI. Assume that functions
3531 without debug info are not variadic. */
3532 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3534 /* The VFP ABI is only supported as a variant of AAPCS. */
3535 if (tdep->arm_abi != ARM_ABI_AAPCS)
3537 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3540 /* We currently only support passing parameters in integer registers, which
3541 conforms with GCC's default model, and VFP argument passing following
3542 the VFP variant of AAPCS. Several other variants exist and
3543 we should probably support some of them based on the selected ABI. */
3546 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3547 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3548 struct value **args, CORE_ADDR sp, int struct_return,
3549 CORE_ADDR struct_addr)
3551 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3555 struct stack_item *si = NULL;
3558 unsigned vfp_regs_free = (1 << 16) - 1;
3560 /* Determine the type of this function and whether the VFP ABI
3562 ftype = check_typedef (value_type (function));
3563 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3564 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3565 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3567 /* Set the return address. For the ARM, the return breakpoint is
3568 always at BP_ADDR. */
3569 if (arm_pc_is_thumb (gdbarch, bp_addr))
3571 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3573 /* Walk through the list of args and determine how large a temporary
3574 stack is required. Need to take care here as structs may be
3575 passed on the stack, and we have to push them. */
3578 argreg = ARM_A1_REGNUM;
3581 /* The struct_return pointer occupies the first parameter
3582 passing register. */
3586 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3587 gdbarch_register_name (gdbarch, argreg),
3588 paddress (gdbarch, struct_addr));
3589 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3593 for (argnum = 0; argnum < nargs; argnum++)
3596 struct type *arg_type;
3597 struct type *target_type;
3598 enum type_code typecode;
3599 const bfd_byte *val;
3601 enum arm_vfp_cprc_base_type vfp_base_type;
3603 int may_use_core_reg = 1;
3605 arg_type = check_typedef (value_type (args[argnum]));
3606 len = TYPE_LENGTH (arg_type);
3607 target_type = TYPE_TARGET_TYPE (arg_type);
3608 typecode = TYPE_CODE (arg_type);
3609 val = value_contents (args[argnum]);
3611 align = arm_type_align (arg_type);
3612 /* Round alignment up to a whole number of words. */
3613 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3614 /* Different ABIs have different maximum alignments. */
3615 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3617 /* The APCS ABI only requires word alignment. */
3618 align = INT_REGISTER_SIZE;
3622 /* The AAPCS requires at most doubleword alignment. */
3623 if (align > INT_REGISTER_SIZE * 2)
3624 align = INT_REGISTER_SIZE * 2;
3628 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3636 /* Because this is a CPRC it cannot go in a core register or
3637 cause a core register to be skipped for alignment.
3638 Either it goes in VFP registers and the rest of this loop
3639 iteration is skipped for this argument, or it goes on the
3640 stack (and the stack alignment code is correct for this
3642 may_use_core_reg = 0;
3644 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3645 shift = unit_length / 4;
3646 mask = (1 << (shift * vfp_base_count)) - 1;
3647 for (regno = 0; regno < 16; regno += shift)
3648 if (((vfp_regs_free >> regno) & mask) == mask)
3657 vfp_regs_free &= ~(mask << regno);
3658 reg_scaled = regno / shift;
3659 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3660 for (i = 0; i < vfp_base_count; i++)
3664 if (reg_char == 'q')
3665 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3666 val + i * unit_length);
3669 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3670 reg_char, reg_scaled + i);
3671 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3673 regcache_cooked_write (regcache, regnum,
3674 val + i * unit_length);
3681 /* This CPRC could not go in VFP registers, so all VFP
3682 registers are now marked as used. */
3687 /* Push stack padding for dowubleword alignment. */
3688 if (nstack & (align - 1))
3690 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3691 nstack += INT_REGISTER_SIZE;
3694 /* Doubleword aligned quantities must go in even register pairs. */
3695 if (may_use_core_reg
3696 && argreg <= ARM_LAST_ARG_REGNUM
3697 && align > INT_REGISTER_SIZE
3701 /* If the argument is a pointer to a function, and it is a
3702 Thumb function, create a LOCAL copy of the value and set
3703 the THUMB bit in it. */
3704 if (TYPE_CODE_PTR == typecode
3705 && target_type != NULL
3706 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3708 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3709 if (arm_pc_is_thumb (gdbarch, regval))
3711 bfd_byte *copy = (bfd_byte *) alloca (len);
3712 store_unsigned_integer (copy, len, byte_order,
3713 MAKE_THUMB_ADDR (regval));
3718 /* Copy the argument to general registers or the stack in
3719 register-sized pieces. Large arguments are split between
3720 registers and stack. */
3723 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3725 = extract_unsigned_integer (val, partial_len, byte_order);
3727 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3729 /* The argument is being passed in a general purpose
3731 if (byte_order == BFD_ENDIAN_BIG)
3732 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3734 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3736 gdbarch_register_name
3738 phex (regval, INT_REGISTER_SIZE));
3739 regcache_cooked_write_unsigned (regcache, argreg, regval);
3744 gdb_byte buf[INT_REGISTER_SIZE];
3746 memset (buf, 0, sizeof (buf));
3747 store_unsigned_integer (buf, partial_len, byte_order, regval);
3749 /* Push the arguments onto the stack. */
3751 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3753 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3754 nstack += INT_REGISTER_SIZE;
3761 /* If we have an odd number of words to push, then decrement the stack
3762 by one word now, so first stack argument will be dword aligned. */
3769 write_memory (sp, si->data, si->len);
3770 si = pop_stack_item (si);
3773 /* Finally, update teh SP register. */
3774 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3780 /* Always align the frame to an 8-byte boundary. This is required on
3781 some platforms and harmless on the rest. */
3784 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3786 /* Align the stack to eight bytes. */
3787 return sp & ~ (CORE_ADDR) 7;
3791 print_fpu_flags (struct ui_file *file, int flags)
3793 if (flags & (1 << 0))
3794 fputs_filtered ("IVO ", file);
3795 if (flags & (1 << 1))
3796 fputs_filtered ("DVZ ", file);
3797 if (flags & (1 << 2))
3798 fputs_filtered ("OFL ", file);
3799 if (flags & (1 << 3))
3800 fputs_filtered ("UFL ", file);
3801 if (flags & (1 << 4))
3802 fputs_filtered ("INX ", file);
3803 fputc_filtered ('\n', file);
3806 /* Print interesting information about the floating point processor
3807 (if present) or emulator. */
3809 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3810 struct frame_info *frame, const char *args)
3812 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3815 type = (status >> 24) & 127;
3816 if (status & (1 << 31))
3817 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3819 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3820 /* i18n: [floating point unit] mask */
3821 fputs_filtered (_("mask: "), file);
3822 print_fpu_flags (file, status >> 16);
3823 /* i18n: [floating point unit] flags */
3824 fputs_filtered (_("flags: "), file);
3825 print_fpu_flags (file, status);
3828 /* Construct the ARM extended floating point type. */
3829 static struct type *
3830 arm_ext_type (struct gdbarch *gdbarch)
3832 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3834 if (!tdep->arm_ext_type)
3836 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3837 floatformats_arm_ext);
3839 return tdep->arm_ext_type;
3842 static struct type *
3843 arm_neon_double_type (struct gdbarch *gdbarch)
3845 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3847 if (tdep->neon_double_type == NULL)
3849 struct type *t, *elem;
3851 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3853 elem = builtin_type (gdbarch)->builtin_uint8;
3854 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3855 elem = builtin_type (gdbarch)->builtin_uint16;
3856 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3857 elem = builtin_type (gdbarch)->builtin_uint32;
3858 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3859 elem = builtin_type (gdbarch)->builtin_uint64;
3860 append_composite_type_field (t, "u64", elem);
3861 elem = builtin_type (gdbarch)->builtin_float;
3862 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3863 elem = builtin_type (gdbarch)->builtin_double;
3864 append_composite_type_field (t, "f64", elem);
3866 TYPE_VECTOR (t) = 1;
3867 TYPE_NAME (t) = "neon_d";
3868 tdep->neon_double_type = t;
3871 return tdep->neon_double_type;
3874 /* FIXME: The vector types are not correctly ordered on big-endian
3875 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3876 bits of d0 - regardless of what unit size is being held in d0. So
3877 the offset of the first uint8 in d0 is 7, but the offset of the
3878 first float is 4. This code works as-is for little-endian
3881 static struct type *
3882 arm_neon_quad_type (struct gdbarch *gdbarch)
3884 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3886 if (tdep->neon_quad_type == NULL)
3888 struct type *t, *elem;
3890 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3892 elem = builtin_type (gdbarch)->builtin_uint8;
3893 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3894 elem = builtin_type (gdbarch)->builtin_uint16;
3895 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3896 elem = builtin_type (gdbarch)->builtin_uint32;
3897 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3898 elem = builtin_type (gdbarch)->builtin_uint64;
3899 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3900 elem = builtin_type (gdbarch)->builtin_float;
3901 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3902 elem = builtin_type (gdbarch)->builtin_double;
3903 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3905 TYPE_VECTOR (t) = 1;
3906 TYPE_NAME (t) = "neon_q";
3907 tdep->neon_quad_type = t;
3910 return tdep->neon_quad_type;
3913 /* Return the GDB type object for the "standard" data type of data in
3916 static struct type *
3917 arm_register_type (struct gdbarch *gdbarch, int regnum)
3919 int num_regs = gdbarch_num_regs (gdbarch);
3921 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3922 && regnum >= num_regs && regnum < num_regs + 32)
3923 return builtin_type (gdbarch)->builtin_float;
3925 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3926 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3927 return arm_neon_quad_type (gdbarch);
3929 /* If the target description has register information, we are only
3930 in this function so that we can override the types of
3931 double-precision registers for NEON. */
3932 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3934 struct type *t = tdesc_register_type (gdbarch, regnum);
3936 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3937 && TYPE_CODE (t) == TYPE_CODE_FLT
3938 && gdbarch_tdep (gdbarch)->have_neon)
3939 return arm_neon_double_type (gdbarch);
3944 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3946 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3947 return builtin_type (gdbarch)->builtin_void;
3949 return arm_ext_type (gdbarch);
3951 else if (regnum == ARM_SP_REGNUM)
3952 return builtin_type (gdbarch)->builtin_data_ptr;
3953 else if (regnum == ARM_PC_REGNUM)
3954 return builtin_type (gdbarch)->builtin_func_ptr;
3955 else if (regnum >= ARRAY_SIZE (arm_register_names))
3956 /* These registers are only supported on targets which supply
3957 an XML description. */
3958 return builtin_type (gdbarch)->builtin_int0;
3960 return builtin_type (gdbarch)->builtin_uint32;
3963 /* Map a DWARF register REGNUM onto the appropriate GDB register
3967 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3969 /* Core integer regs. */
3970 if (reg >= 0 && reg <= 15)
3973 /* Legacy FPA encoding. These were once used in a way which
3974 overlapped with VFP register numbering, so their use is
3975 discouraged, but GDB doesn't support the ARM toolchain
3976 which used them for VFP. */
3977 if (reg >= 16 && reg <= 23)
3978 return ARM_F0_REGNUM + reg - 16;
3980 /* New assignments for the FPA registers. */
3981 if (reg >= 96 && reg <= 103)
3982 return ARM_F0_REGNUM + reg - 96;
3984 /* WMMX register assignments. */
3985 if (reg >= 104 && reg <= 111)
3986 return ARM_WCGR0_REGNUM + reg - 104;
3988 if (reg >= 112 && reg <= 127)
3989 return ARM_WR0_REGNUM + reg - 112;
3991 if (reg >= 192 && reg <= 199)
3992 return ARM_WC0_REGNUM + reg - 192;
3994 /* VFP v2 registers. A double precision value is actually
3995 in d1 rather than s2, but the ABI only defines numbering
3996 for the single precision registers. This will "just work"
3997 in GDB for little endian targets (we'll read eight bytes,
3998 starting in s0 and then progressing to s1), but will be
3999 reversed on big endian targets with VFP. This won't
4000 be a problem for the new Neon quad registers; you're supposed
4001 to use DW_OP_piece for those. */
4002 if (reg >= 64 && reg <= 95)
4006 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4007 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4011 /* VFP v3 / Neon registers. This range is also used for VFP v2
4012 registers, except that it now describes d0 instead of s0. */
4013 if (reg >= 256 && reg <= 287)
4017 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4018 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4025 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4027 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4030 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4032 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4033 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4035 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4036 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4038 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4039 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4041 if (reg < NUM_GREGS)
4042 return SIM_ARM_R0_REGNUM + reg;
4045 if (reg < NUM_FREGS)
4046 return SIM_ARM_FP0_REGNUM + reg;
4049 if (reg < NUM_SREGS)
4050 return SIM_ARM_FPS_REGNUM + reg;
4053 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4056 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4057 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4058 It is thought that this is is the floating-point register format on
4059 little-endian systems. */
4062 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4063 void *dbl, int endianess)
4067 if (endianess == BFD_ENDIAN_BIG)
4068 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4070 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4072 floatformat_from_doublest (fmt, &d, dbl);
4076 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4081 floatformat_to_doublest (fmt, ptr, &d);
4082 if (endianess == BFD_ENDIAN_BIG)
4083 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4085 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4089 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4090 of the appropriate mode (as encoded in the PC value), even if this
4091 differs from what would be expected according to the symbol tables. */
4094 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4095 struct address_space *aspace,
4098 struct cleanup *old_chain
4099 = make_cleanup_restore_integer (&arm_override_mode);
4101 arm_override_mode = IS_THUMB_ADDR (pc);
4102 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4104 insert_single_step_breakpoint (gdbarch, aspace, pc);
4106 do_cleanups (old_chain);
4109 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4110 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4111 NULL if an error occurs. BUF is freed. */
4114 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4115 int old_len, int new_len)
4118 int bytes_to_read = new_len - old_len;
4120 new_buf = (gdb_byte *) xmalloc (new_len);
4121 memcpy (new_buf + bytes_to_read, buf, old_len);
4123 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4131 /* An IT block is at most the 2-byte IT instruction followed by
4132 four 4-byte instructions. The furthest back we must search to
4133 find an IT block that affects the current instruction is thus
4134 2 + 3 * 4 == 14 bytes. */
4135 #define MAX_IT_BLOCK_PREFIX 14
4137 /* Use a quick scan if there are more than this many bytes of
4139 #define IT_SCAN_THRESHOLD 32
4141 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4142 A breakpoint in an IT block may not be hit, depending on the
4145 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4149 CORE_ADDR boundary, func_start;
4151 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4152 int i, any, last_it, last_it_count;
4154 /* If we are using BKPT breakpoints, none of this is necessary. */
4155 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4158 /* ARM mode does not have this problem. */
4159 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4162 /* We are setting a breakpoint in Thumb code that could potentially
4163 contain an IT block. The first step is to find how much Thumb
4164 code there is; we do not need to read outside of known Thumb
4166 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4168 /* Thumb-2 code must have mapping symbols to have a chance. */
4171 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4173 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4174 && func_start > boundary)
4175 boundary = func_start;
4177 /* Search for a candidate IT instruction. We have to do some fancy
4178 footwork to distinguish a real IT instruction from the second
4179 half of a 32-bit instruction, but there is no need for that if
4180 there's no candidate. */
4181 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4183 /* No room for an IT instruction. */
4186 buf = (gdb_byte *) xmalloc (buf_len);
4187 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4190 for (i = 0; i < buf_len; i += 2)
4192 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4193 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4206 /* OK, the code bytes before this instruction contain at least one
4207 halfword which resembles an IT instruction. We know that it's
4208 Thumb code, but there are still two possibilities. Either the
4209 halfword really is an IT instruction, or it is the second half of
4210 a 32-bit Thumb instruction. The only way we can tell is to
4211 scan forwards from a known instruction boundary. */
4212 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4216 /* There's a lot of code before this instruction. Start with an
4217 optimistic search; it's easy to recognize halfwords that can
4218 not be the start of a 32-bit instruction, and use that to
4219 lock on to the instruction boundaries. */
4220 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4223 buf_len = IT_SCAN_THRESHOLD;
4226 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4228 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4229 if (thumb_insn_size (inst1) == 2)
4236 /* At this point, if DEFINITE, BUF[I] is the first place we
4237 are sure that we know the instruction boundaries, and it is far
4238 enough from BPADDR that we could not miss an IT instruction
4239 affecting BPADDR. If ! DEFINITE, give up - start from a
4243 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4247 buf_len = bpaddr - boundary;
4253 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4256 buf_len = bpaddr - boundary;
4260 /* Scan forwards. Find the last IT instruction before BPADDR. */
4265 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4267 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4272 else if (inst1 & 0x0002)
4274 else if (inst1 & 0x0004)
4279 i += thumb_insn_size (inst1);
4285 /* There wasn't really an IT instruction after all. */
4288 if (last_it_count < 1)
4289 /* It was too far away. */
4292 /* This really is a trouble spot. Move the breakpoint to the IT
4294 return bpaddr - buf_len + last_it;
4297 /* ARM displaced stepping support.
4299 Generally ARM displaced stepping works as follows:
4301 1. When an instruction is to be single-stepped, it is first decoded by
4302 arm_process_displaced_insn. Depending on the type of instruction, it is
4303 then copied to a scratch location, possibly in a modified form. The
4304 copy_* set of functions performs such modification, as necessary. A
4305 breakpoint is placed after the modified instruction in the scratch space
4306 to return control to GDB. Note in particular that instructions which
4307 modify the PC will no longer do so after modification.
4309 2. The instruction is single-stepped, by setting the PC to the scratch
4310 location address, and resuming. Control returns to GDB when the
4313 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4314 function used for the current instruction. This function's job is to
4315 put the CPU/memory state back to what it would have been if the
4316 instruction had been executed unmodified in its original location. */
4318 /* NOP instruction (mov r0, r0). */
4319 #define ARM_NOP 0xe1a00000
4320 #define THUMB_NOP 0x4600
4322 /* Helper for register reads for displaced stepping. In particular, this
4323 returns the PC as it would be seen by the instruction at its original
4327 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4331 CORE_ADDR from = dsc->insn_addr;
4333 if (regno == ARM_PC_REGNUM)
4335 /* Compute pipeline offset:
4336 - When executing an ARM instruction, PC reads as the address of the
4337 current instruction plus 8.
4338 - When executing a Thumb instruction, PC reads as the address of the
4339 current instruction plus 4. */
4346 if (debug_displaced)
4347 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4348 (unsigned long) from);
4349 return (ULONGEST) from;
4353 regcache_cooked_read_unsigned (regs, regno, &ret);
4354 if (debug_displaced)
4355 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4356 regno, (unsigned long) ret);
4362 displaced_in_arm_mode (struct regcache *regs)
4365 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4367 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4369 return (ps & t_bit) == 0;
4372 /* Write to the PC as from a branch instruction. */
4375 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4379 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4380 architecture versions < 6. */
4381 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4382 val & ~(ULONGEST) 0x3);
4384 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4385 val & ~(ULONGEST) 0x1);
4388 /* Write to the PC as from a branch-exchange instruction. */
4391 bx_write_pc (struct regcache *regs, ULONGEST val)
4394 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4396 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4400 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4401 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4403 else if ((val & 2) == 0)
4405 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4406 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4410 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4411 mode, align dest to 4 bytes). */
4412 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4413 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4414 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4418 /* Write to the PC as if from a load instruction. */
4421 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4424 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4425 bx_write_pc (regs, val);
4427 branch_write_pc (regs, dsc, val);
4430 /* Write to the PC as if from an ALU instruction. */
4433 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4436 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4437 bx_write_pc (regs, val);
4439 branch_write_pc (regs, dsc, val);
4442 /* Helper for writing to registers for displaced stepping. Writing to the PC
4443 has a varying effects depending on the instruction which does the write:
4444 this is controlled by the WRITE_PC argument. */
4447 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4448 int regno, ULONGEST val, enum pc_write_style write_pc)
4450 if (regno == ARM_PC_REGNUM)
4452 if (debug_displaced)
4453 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4454 (unsigned long) val);
4457 case BRANCH_WRITE_PC:
4458 branch_write_pc (regs, dsc, val);
4462 bx_write_pc (regs, val);
4466 load_write_pc (regs, dsc, val);
4470 alu_write_pc (regs, dsc, val);
4473 case CANNOT_WRITE_PC:
4474 warning (_("Instruction wrote to PC in an unexpected way when "
4475 "single-stepping"));
4479 internal_error (__FILE__, __LINE__,
4480 _("Invalid argument to displaced_write_reg"));
4483 dsc->wrote_to_pc = 1;
4487 if (debug_displaced)
4488 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4489 regno, (unsigned long) val);
4490 regcache_cooked_write_unsigned (regs, regno, val);
4494 /* This function is used to concisely determine if an instruction INSN
4495 references PC. Register fields of interest in INSN should have the
4496 corresponding fields of BITMASK set to 0b1111. The function
4497 returns return 1 if any of these fields in INSN reference the PC
4498 (also 0b1111, r15), else it returns 0. */
4501 insn_references_pc (uint32_t insn, uint32_t bitmask)
4503 uint32_t lowbit = 1;
4505 while (bitmask != 0)
4509 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4515 mask = lowbit * 0xf;
4517 if ((insn & mask) == mask)
4526 /* The simplest copy function. Many instructions have the same effect no
4527 matter what address they are executed at: in those cases, use this. */
4530 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4531 const char *iname, struct displaced_step_closure *dsc)
4533 if (debug_displaced)
4534 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4535 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4538 dsc->modinsn[0] = insn;
4544 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4545 uint16_t insn2, const char *iname,
4546 struct displaced_step_closure *dsc)
4548 if (debug_displaced)
4549 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4550 "opcode/class '%s' unmodified\n", insn1, insn2,
4553 dsc->modinsn[0] = insn1;
4554 dsc->modinsn[1] = insn2;
4560 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4563 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4565 struct displaced_step_closure *dsc)
4567 if (debug_displaced)
4568 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4569 "opcode/class '%s' unmodified\n", insn,
4572 dsc->modinsn[0] = insn;
4577 /* Preload instructions with immediate offset. */
4580 cleanup_preload (struct gdbarch *gdbarch,
4581 struct regcache *regs, struct displaced_step_closure *dsc)
4583 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4584 if (!dsc->u.preload.immed)
4585 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4589 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4590 struct displaced_step_closure *dsc, unsigned int rn)
4593 /* Preload instructions:
4595 {pli/pld} [rn, #+/-imm]
4597 {pli/pld} [r0, #+/-imm]. */
4599 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4600 rn_val = displaced_read_reg (regs, dsc, rn);
4601 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4602 dsc->u.preload.immed = 1;
4604 dsc->cleanup = &cleanup_preload;
4608 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4609 struct displaced_step_closure *dsc)
4611 unsigned int rn = bits (insn, 16, 19);
4613 if (!insn_references_pc (insn, 0x000f0000ul))
4614 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4616 if (debug_displaced)
4617 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4618 (unsigned long) insn);
4620 dsc->modinsn[0] = insn & 0xfff0ffff;
4622 install_preload (gdbarch, regs, dsc, rn);
4628 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4629 struct regcache *regs, struct displaced_step_closure *dsc)
4631 unsigned int rn = bits (insn1, 0, 3);
4632 unsigned int u_bit = bit (insn1, 7);
4633 int imm12 = bits (insn2, 0, 11);
4636 if (rn != ARM_PC_REGNUM)
4637 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4639 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4640 PLD (literal) Encoding T1. */
4641 if (debug_displaced)
4642 fprintf_unfiltered (gdb_stdlog,
4643 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4644 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4650 /* Rewrite instruction {pli/pld} PC imm12 into:
4651 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4655 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4657 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4658 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4660 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4662 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4663 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4664 dsc->u.preload.immed = 0;
4666 /* {pli/pld} [r0, r1] */
4667 dsc->modinsn[0] = insn1 & 0xfff0;
4668 dsc->modinsn[1] = 0xf001;
4671 dsc->cleanup = &cleanup_preload;
4675 /* Preload instructions with register offset. */
4678 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4679 struct displaced_step_closure *dsc, unsigned int rn,
4682 ULONGEST rn_val, rm_val;
4684 /* Preload register-offset instructions:
4686 {pli/pld} [rn, rm {, shift}]
4688 {pli/pld} [r0, r1 {, shift}]. */
4690 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4691 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4692 rn_val = displaced_read_reg (regs, dsc, rn);
4693 rm_val = displaced_read_reg (regs, dsc, rm);
4694 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4695 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4696 dsc->u.preload.immed = 0;
4698 dsc->cleanup = &cleanup_preload;
4702 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4703 struct regcache *regs,
4704 struct displaced_step_closure *dsc)
4706 unsigned int rn = bits (insn, 16, 19);
4707 unsigned int rm = bits (insn, 0, 3);
4710 if (!insn_references_pc (insn, 0x000f000ful))
4711 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4713 if (debug_displaced)
4714 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4715 (unsigned long) insn);
4717 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4719 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4723 /* Copy/cleanup coprocessor load and store instructions. */
4726 cleanup_copro_load_store (struct gdbarch *gdbarch,
4727 struct regcache *regs,
4728 struct displaced_step_closure *dsc)
4730 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4732 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4734 if (dsc->u.ldst.writeback)
4735 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4739 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4740 struct displaced_step_closure *dsc,
4741 int writeback, unsigned int rn)
4745 /* Coprocessor load/store instructions:
4747 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4749 {stc/stc2} [r0, #+/-imm].
4751 ldc/ldc2 are handled identically. */
4753 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4754 rn_val = displaced_read_reg (regs, dsc, rn);
4755 /* PC should be 4-byte aligned. */
4756 rn_val = rn_val & 0xfffffffc;
4757 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4759 dsc->u.ldst.writeback = writeback;
4760 dsc->u.ldst.rn = rn;
4762 dsc->cleanup = &cleanup_copro_load_store;
4766 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4767 struct regcache *regs,
4768 struct displaced_step_closure *dsc)
4770 unsigned int rn = bits (insn, 16, 19);
4772 if (!insn_references_pc (insn, 0x000f0000ul))
4773 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4775 if (debug_displaced)
4776 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4777 "load/store insn %.8lx\n", (unsigned long) insn);
4779 dsc->modinsn[0] = insn & 0xfff0ffff;
4781 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4787 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4788 uint16_t insn2, struct regcache *regs,
4789 struct displaced_step_closure *dsc)
4791 unsigned int rn = bits (insn1, 0, 3);
4793 if (rn != ARM_PC_REGNUM)
4794 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4795 "copro load/store", dsc);
4797 if (debug_displaced)
4798 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4799 "load/store insn %.4x%.4x\n", insn1, insn2);
4801 dsc->modinsn[0] = insn1 & 0xfff0;
4802 dsc->modinsn[1] = insn2;
4805 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4806 doesn't support writeback, so pass 0. */
4807 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4812 /* Clean up branch instructions (actually perform the branch, by setting
4816 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4817 struct displaced_step_closure *dsc)
4819 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4820 int branch_taken = condition_true (dsc->u.branch.cond, status);
4821 enum pc_write_style write_pc = dsc->u.branch.exchange
4822 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4827 if (dsc->u.branch.link)
4829 /* The value of LR should be the next insn of current one. In order
4830 not to confuse logic hanlding later insn `bx lr', if current insn mode
4831 is Thumb, the bit 0 of LR value should be set to 1. */
4832 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4835 next_insn_addr |= 0x1;
4837 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4841 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4844 /* Copy B/BL/BLX instructions with immediate destinations. */
4847 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4848 struct displaced_step_closure *dsc,
4849 unsigned int cond, int exchange, int link, long offset)
4851 /* Implement "BL<cond> <label>" as:
4853 Preparation: cond <- instruction condition
4854 Insn: mov r0, r0 (nop)
4855 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4857 B<cond> similar, but don't set r14 in cleanup. */
4859 dsc->u.branch.cond = cond;
4860 dsc->u.branch.link = link;
4861 dsc->u.branch.exchange = exchange;
4863 dsc->u.branch.dest = dsc->insn_addr;
4864 if (link && exchange)
4865 /* For BLX, offset is computed from the Align (PC, 4). */
4866 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4869 dsc->u.branch.dest += 4 + offset;
4871 dsc->u.branch.dest += 8 + offset;
4873 dsc->cleanup = &cleanup_branch;
4876 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4877 struct regcache *regs, struct displaced_step_closure *dsc)
4879 unsigned int cond = bits (insn, 28, 31);
4880 int exchange = (cond == 0xf);
4881 int link = exchange || bit (insn, 24);
4884 if (debug_displaced)
4885 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4886 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4887 (unsigned long) insn);
4889 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4890 then arrange the switch into Thumb mode. */
4891 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4893 offset = bits (insn, 0, 23) << 2;
4895 if (bit (offset, 25))
4896 offset = offset | ~0x3ffffff;
4898 dsc->modinsn[0] = ARM_NOP;
4900 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4905 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4906 uint16_t insn2, struct regcache *regs,
4907 struct displaced_step_closure *dsc)
4909 int link = bit (insn2, 14);
4910 int exchange = link && !bit (insn2, 12);
4913 int j1 = bit (insn2, 13);
4914 int j2 = bit (insn2, 11);
4915 int s = sbits (insn1, 10, 10);
4916 int i1 = !(j1 ^ bit (insn1, 10));
4917 int i2 = !(j2 ^ bit (insn1, 10));
4919 if (!link && !exchange) /* B */
4921 offset = (bits (insn2, 0, 10) << 1);
4922 if (bit (insn2, 12)) /* Encoding T4 */
4924 offset |= (bits (insn1, 0, 9) << 12)
4930 else /* Encoding T3 */
4932 offset |= (bits (insn1, 0, 5) << 12)
4936 cond = bits (insn1, 6, 9);
4941 offset = (bits (insn1, 0, 9) << 12);
4942 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4943 offset |= exchange ?
4944 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4947 if (debug_displaced)
4948 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4949 "%.4x %.4x with offset %.8lx\n",
4950 link ? (exchange) ? "blx" : "bl" : "b",
4951 insn1, insn2, offset);
4953 dsc->modinsn[0] = THUMB_NOP;
4955 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4959 /* Copy B Thumb instructions. */
4961 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4962 struct displaced_step_closure *dsc)
4964 unsigned int cond = 0;
4966 unsigned short bit_12_15 = bits (insn, 12, 15);
4967 CORE_ADDR from = dsc->insn_addr;
4969 if (bit_12_15 == 0xd)
4971 /* offset = SignExtend (imm8:0, 32) */
4972 offset = sbits ((insn << 1), 0, 8);
4973 cond = bits (insn, 8, 11);
4975 else if (bit_12_15 == 0xe) /* Encoding T2 */
4977 offset = sbits ((insn << 1), 0, 11);
4981 if (debug_displaced)
4982 fprintf_unfiltered (gdb_stdlog,
4983 "displaced: copying b immediate insn %.4x "
4984 "with offset %d\n", insn, offset);
4986 dsc->u.branch.cond = cond;
4987 dsc->u.branch.link = 0;
4988 dsc->u.branch.exchange = 0;
4989 dsc->u.branch.dest = from + 4 + offset;
4991 dsc->modinsn[0] = THUMB_NOP;
4993 dsc->cleanup = &cleanup_branch;
4998 /* Copy BX/BLX with register-specified destinations. */
5001 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5002 struct displaced_step_closure *dsc, int link,
5003 unsigned int cond, unsigned int rm)
5005 /* Implement {BX,BLX}<cond> <reg>" as:
5007 Preparation: cond <- instruction condition
5008 Insn: mov r0, r0 (nop)
5009 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5011 Don't set r14 in cleanup for BX. */
5013 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5015 dsc->u.branch.cond = cond;
5016 dsc->u.branch.link = link;
5018 dsc->u.branch.exchange = 1;
5020 dsc->cleanup = &cleanup_branch;
5024 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5025 struct regcache *regs, struct displaced_step_closure *dsc)
5027 unsigned int cond = bits (insn, 28, 31);
5030 int link = bit (insn, 5);
5031 unsigned int rm = bits (insn, 0, 3);
5033 if (debug_displaced)
5034 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5035 (unsigned long) insn);
5037 dsc->modinsn[0] = ARM_NOP;
5039 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5044 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5045 struct regcache *regs,
5046 struct displaced_step_closure *dsc)
5048 int link = bit (insn, 7);
5049 unsigned int rm = bits (insn, 3, 6);
5051 if (debug_displaced)
5052 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5053 (unsigned short) insn);
5055 dsc->modinsn[0] = THUMB_NOP;
5057 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5063 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5066 cleanup_alu_imm (struct gdbarch *gdbarch,
5067 struct regcache *regs, struct displaced_step_closure *dsc)
5069 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5070 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5071 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5072 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5076 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5077 struct displaced_step_closure *dsc)
5079 unsigned int rn = bits (insn, 16, 19);
5080 unsigned int rd = bits (insn, 12, 15);
5081 unsigned int op = bits (insn, 21, 24);
5082 int is_mov = (op == 0xd);
5083 ULONGEST rd_val, rn_val;
5085 if (!insn_references_pc (insn, 0x000ff000ul))
5086 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5088 if (debug_displaced)
5089 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5090 "%.8lx\n", is_mov ? "move" : "ALU",
5091 (unsigned long) insn);
5093 /* Instruction is of form:
5095 <op><cond> rd, [rn,] #imm
5099 Preparation: tmp1, tmp2 <- r0, r1;
5101 Insn: <op><cond> r0, r1, #imm
5102 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5105 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5106 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5107 rn_val = displaced_read_reg (regs, dsc, rn);
5108 rd_val = displaced_read_reg (regs, dsc, rd);
5109 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5110 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5114 dsc->modinsn[0] = insn & 0xfff00fff;
5116 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5118 dsc->cleanup = &cleanup_alu_imm;
5124 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5125 uint16_t insn2, struct regcache *regs,
5126 struct displaced_step_closure *dsc)
5128 unsigned int op = bits (insn1, 5, 8);
5129 unsigned int rn, rm, rd;
5130 ULONGEST rd_val, rn_val;
5132 rn = bits (insn1, 0, 3); /* Rn */
5133 rm = bits (insn2, 0, 3); /* Rm */
5134 rd = bits (insn2, 8, 11); /* Rd */
5136 /* This routine is only called for instruction MOV. */
5137 gdb_assert (op == 0x2 && rn == 0xf);
5139 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5140 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5142 if (debug_displaced)
5143 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5144 "ALU", insn1, insn2);
5146 /* Instruction is of form:
5148 <op><cond> rd, [rn,] #imm
5152 Preparation: tmp1, tmp2 <- r0, r1;
5154 Insn: <op><cond> r0, r1, #imm
5155 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5158 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5159 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5160 rn_val = displaced_read_reg (regs, dsc, rn);
5161 rd_val = displaced_read_reg (regs, dsc, rd);
5162 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5163 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5166 dsc->modinsn[0] = insn1;
5167 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5170 dsc->cleanup = &cleanup_alu_imm;
5175 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5178 cleanup_alu_reg (struct gdbarch *gdbarch,
5179 struct regcache *regs, struct displaced_step_closure *dsc)
5184 rd_val = displaced_read_reg (regs, dsc, 0);
5186 for (i = 0; i < 3; i++)
5187 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5189 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5193 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5194 struct displaced_step_closure *dsc,
5195 unsigned int rd, unsigned int rn, unsigned int rm)
5197 ULONGEST rd_val, rn_val, rm_val;
5199 /* Instruction is of form:
5201 <op><cond> rd, [rn,] rm [, <shift>]
5205 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5206 r0, r1, r2 <- rd, rn, rm
5207 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5208 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5211 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5212 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5213 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5214 rd_val = displaced_read_reg (regs, dsc, rd);
5215 rn_val = displaced_read_reg (regs, dsc, rn);
5216 rm_val = displaced_read_reg (regs, dsc, rm);
5217 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5218 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5219 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5222 dsc->cleanup = &cleanup_alu_reg;
5226 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5227 struct displaced_step_closure *dsc)
5229 unsigned int op = bits (insn, 21, 24);
5230 int is_mov = (op == 0xd);
5232 if (!insn_references_pc (insn, 0x000ff00ful))
5233 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5235 if (debug_displaced)
5236 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5237 is_mov ? "move" : "ALU", (unsigned long) insn);
5240 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5242 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5244 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5250 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5251 struct regcache *regs,
5252 struct displaced_step_closure *dsc)
5256 rm = bits (insn, 3, 6);
5257 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5259 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5260 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5262 if (debug_displaced)
5263 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5264 (unsigned short) insn);
5266 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5268 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5273 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5276 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5277 struct regcache *regs,
5278 struct displaced_step_closure *dsc)
5280 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5283 for (i = 0; i < 4; i++)
5284 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5286 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5290 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5291 struct displaced_step_closure *dsc,
5292 unsigned int rd, unsigned int rn, unsigned int rm,
5296 ULONGEST rd_val, rn_val, rm_val, rs_val;
5298 /* Instruction is of form:
5300 <op><cond> rd, [rn,] rm, <shift> rs
5304 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5305 r0, r1, r2, r3 <- rd, rn, rm, rs
5306 Insn: <op><cond> r0, r1, r2, <shift> r3
5308 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5312 for (i = 0; i < 4; i++)
5313 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5315 rd_val = displaced_read_reg (regs, dsc, rd);
5316 rn_val = displaced_read_reg (regs, dsc, rn);
5317 rm_val = displaced_read_reg (regs, dsc, rm);
5318 rs_val = displaced_read_reg (regs, dsc, rs);
5319 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5320 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5321 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5322 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5324 dsc->cleanup = &cleanup_alu_shifted_reg;
5328 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5329 struct regcache *regs,
5330 struct displaced_step_closure *dsc)
5332 unsigned int op = bits (insn, 21, 24);
5333 int is_mov = (op == 0xd);
5334 unsigned int rd, rn, rm, rs;
5336 if (!insn_references_pc (insn, 0x000fff0ful))
5337 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5339 if (debug_displaced)
5340 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5341 "%.8lx\n", is_mov ? "move" : "ALU",
5342 (unsigned long) insn);
5344 rn = bits (insn, 16, 19);
5345 rm = bits (insn, 0, 3);
5346 rs = bits (insn, 8, 11);
5347 rd = bits (insn, 12, 15);
5350 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5352 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5354 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5359 /* Clean up load instructions. */
5362 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5363 struct displaced_step_closure *dsc)
5365 ULONGEST rt_val, rt_val2 = 0, rn_val;
5367 rt_val = displaced_read_reg (regs, dsc, 0);
5368 if (dsc->u.ldst.xfersize == 8)
5369 rt_val2 = displaced_read_reg (regs, dsc, 1);
5370 rn_val = displaced_read_reg (regs, dsc, 2);
5372 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5373 if (dsc->u.ldst.xfersize > 4)
5374 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5375 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5376 if (!dsc->u.ldst.immed)
5377 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5379 /* Handle register writeback. */
5380 if (dsc->u.ldst.writeback)
5381 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5382 /* Put result in right place. */
5383 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5384 if (dsc->u.ldst.xfersize == 8)
5385 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5388 /* Clean up store instructions. */
5391 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5392 struct displaced_step_closure *dsc)
5394 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5396 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5397 if (dsc->u.ldst.xfersize > 4)
5398 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5399 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5400 if (!dsc->u.ldst.immed)
5401 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5402 if (!dsc->u.ldst.restore_r4)
5403 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5406 if (dsc->u.ldst.writeback)
5407 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5410 /* Copy "extra" load/store instructions. These are halfword/doubleword
5411 transfers, which have a different encoding to byte/word transfers. */
5414 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5415 struct regcache *regs, struct displaced_step_closure *dsc)
5417 unsigned int op1 = bits (insn, 20, 24);
5418 unsigned int op2 = bits (insn, 5, 6);
5419 unsigned int rt = bits (insn, 12, 15);
5420 unsigned int rn = bits (insn, 16, 19);
5421 unsigned int rm = bits (insn, 0, 3);
5422 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5423 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5424 int immed = (op1 & 0x4) != 0;
5426 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5428 if (!insn_references_pc (insn, 0x000ff00ful))
5429 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5431 if (debug_displaced)
5432 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5433 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5434 (unsigned long) insn);
5436 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5439 internal_error (__FILE__, __LINE__,
5440 _("copy_extra_ld_st: instruction decode error"));
5442 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5443 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5444 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5446 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5448 rt_val = displaced_read_reg (regs, dsc, rt);
5449 if (bytesize[opcode] == 8)
5450 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5451 rn_val = displaced_read_reg (regs, dsc, rn);
5453 rm_val = displaced_read_reg (regs, dsc, rm);
5455 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5456 if (bytesize[opcode] == 8)
5457 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5458 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5460 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5463 dsc->u.ldst.xfersize = bytesize[opcode];
5464 dsc->u.ldst.rn = rn;
5465 dsc->u.ldst.immed = immed;
5466 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5467 dsc->u.ldst.restore_r4 = 0;
5470 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5472 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5473 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5475 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5477 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5478 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5480 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5485 /* Copy byte/half word/word loads and stores. */
5488 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5489 struct displaced_step_closure *dsc, int load,
5490 int immed, int writeback, int size, int usermode,
5491 int rt, int rm, int rn)
5493 ULONGEST rt_val, rn_val, rm_val = 0;
5495 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5496 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5498 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5500 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5502 rt_val = displaced_read_reg (regs, dsc, rt);
5503 rn_val = displaced_read_reg (regs, dsc, rn);
5505 rm_val = displaced_read_reg (regs, dsc, rm);
5507 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5508 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5510 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5512 dsc->u.ldst.xfersize = size;
5513 dsc->u.ldst.rn = rn;
5514 dsc->u.ldst.immed = immed;
5515 dsc->u.ldst.writeback = writeback;
5517 /* To write PC we can do:
5519 Before this sequence of instructions:
5520 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5521 r2 is the Rn value got from dispalced_read_reg.
5523 Insn1: push {pc} Write address of STR instruction + offset on stack
5524 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5525 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5526 = addr(Insn1) + offset - addr(Insn3) - 8
5528 Insn4: add r4, r4, #8 r4 = offset - 8
5529 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5531 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5533 Otherwise we don't know what value to write for PC, since the offset is
5534 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5535 of this can be found in Section "Saving from r15" in
5536 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5538 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5543 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5544 uint16_t insn2, struct regcache *regs,
5545 struct displaced_step_closure *dsc, int size)
5547 unsigned int u_bit = bit (insn1, 7);
5548 unsigned int rt = bits (insn2, 12, 15);
5549 int imm12 = bits (insn2, 0, 11);
5552 if (debug_displaced)
5553 fprintf_unfiltered (gdb_stdlog,
5554 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5555 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5561 /* Rewrite instruction LDR Rt imm12 into:
5563 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5567 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5570 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5571 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5572 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5574 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5576 pc_val = pc_val & 0xfffffffc;
5578 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5579 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5583 dsc->u.ldst.xfersize = size;
5584 dsc->u.ldst.immed = 0;
5585 dsc->u.ldst.writeback = 0;
5586 dsc->u.ldst.restore_r4 = 0;
5588 /* LDR R0, R2, R3 */
5589 dsc->modinsn[0] = 0xf852;
5590 dsc->modinsn[1] = 0x3;
5593 dsc->cleanup = &cleanup_load;
5599 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5600 uint16_t insn2, struct regcache *regs,
5601 struct displaced_step_closure *dsc,
5602 int writeback, int immed)
5604 unsigned int rt = bits (insn2, 12, 15);
5605 unsigned int rn = bits (insn1, 0, 3);
5606 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5607 /* In LDR (register), there is also a register Rm, which is not allowed to
5608 be PC, so we don't have to check it. */
5610 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5611 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5614 if (debug_displaced)
5615 fprintf_unfiltered (gdb_stdlog,
5616 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5617 rt, rn, insn1, insn2);
5619 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5622 dsc->u.ldst.restore_r4 = 0;
5625 /* ldr[b]<cond> rt, [rn, #imm], etc.
5627 ldr[b]<cond> r0, [r2, #imm]. */
5629 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5630 dsc->modinsn[1] = insn2 & 0x0fff;
5633 /* ldr[b]<cond> rt, [rn, rm], etc.
5635 ldr[b]<cond> r0, [r2, r3]. */
5637 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5638 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5648 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5649 struct regcache *regs,
5650 struct displaced_step_closure *dsc,
5651 int load, int size, int usermode)
5653 int immed = !bit (insn, 25);
5654 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5655 unsigned int rt = bits (insn, 12, 15);
5656 unsigned int rn = bits (insn, 16, 19);
5657 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5659 if (!insn_references_pc (insn, 0x000ff00ful))
5660 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5662 if (debug_displaced)
5663 fprintf_unfiltered (gdb_stdlog,
5664 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5665 load ? (size == 1 ? "ldrb" : "ldr")
5666 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5668 (unsigned long) insn);
5670 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5671 usermode, rt, rm, rn);
5673 if (load || rt != ARM_PC_REGNUM)
5675 dsc->u.ldst.restore_r4 = 0;
5678 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5680 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5681 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5683 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5685 {ldr,str}[b]<cond> r0, [r2, r3]. */
5686 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5690 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5691 dsc->u.ldst.restore_r4 = 1;
5692 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5693 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5694 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5695 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5696 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5700 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5702 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5707 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5712 /* Cleanup LDM instructions with fully-populated register list. This is an
5713 unfortunate corner case: it's impossible to implement correctly by modifying
5714 the instruction. The issue is as follows: we have an instruction,
5718 which we must rewrite to avoid loading PC. A possible solution would be to
5719 do the load in two halves, something like (with suitable cleanup
5723 ldm[id][ab] r8!, {r0-r7}
5725 ldm[id][ab] r8, {r7-r14}
5728 but at present there's no suitable place for <temp>, since the scratch space
5729 is overwritten before the cleanup routine is called. For now, we simply
5730 emulate the instruction. */
5733 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5734 struct displaced_step_closure *dsc)
5736 int inc = dsc->u.block.increment;
5737 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5738 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5739 uint32_t regmask = dsc->u.block.regmask;
5740 int regno = inc ? 0 : 15;
5741 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5742 int exception_return = dsc->u.block.load && dsc->u.block.user
5743 && (regmask & 0x8000) != 0;
5744 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5745 int do_transfer = condition_true (dsc->u.block.cond, status);
5746 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5751 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5752 sensible we can do here. Complain loudly. */
5753 if (exception_return)
5754 error (_("Cannot single-step exception return"));
5756 /* We don't handle any stores here for now. */
5757 gdb_assert (dsc->u.block.load != 0);
5759 if (debug_displaced)
5760 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5761 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5762 dsc->u.block.increment ? "inc" : "dec",
5763 dsc->u.block.before ? "before" : "after");
5770 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5773 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5776 xfer_addr += bump_before;
5778 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5779 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5781 xfer_addr += bump_after;
5783 regmask &= ~(1 << regno);
5786 if (dsc->u.block.writeback)
5787 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5791 /* Clean up an STM which included the PC in the register list. */
5794 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5795 struct displaced_step_closure *dsc)
5797 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5798 int store_executed = condition_true (dsc->u.block.cond, status);
5799 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5800 CORE_ADDR stm_insn_addr;
5803 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5805 /* If condition code fails, there's nothing else to do. */
5806 if (!store_executed)
5809 if (dsc->u.block.increment)
5811 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5813 if (dsc->u.block.before)
5818 pc_stored_at = dsc->u.block.xfer_addr;
5820 if (dsc->u.block.before)
5824 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5825 stm_insn_addr = dsc->scratch_base;
5826 offset = pc_val - stm_insn_addr;
5828 if (debug_displaced)
5829 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5830 "STM instruction\n", offset);
5832 /* Rewrite the stored PC to the proper value for the non-displaced original
5834 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5835 dsc->insn_addr + offset);
5838 /* Clean up an LDM which includes the PC in the register list. We clumped all
5839 the registers in the transferred list into a contiguous range r0...rX (to
5840 avoid loading PC directly and losing control of the debugged program), so we
5841 must undo that here. */
5844 cleanup_block_load_pc (struct gdbarch *gdbarch,
5845 struct regcache *regs,
5846 struct displaced_step_closure *dsc)
5848 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5849 int load_executed = condition_true (dsc->u.block.cond, status);
5850 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5851 unsigned int regs_loaded = bitcount (mask);
5852 unsigned int num_to_shuffle = regs_loaded, clobbered;
5854 /* The method employed here will fail if the register list is fully populated
5855 (we need to avoid loading PC directly). */
5856 gdb_assert (num_to_shuffle < 16);
5861 clobbered = (1 << num_to_shuffle) - 1;
5863 while (num_to_shuffle > 0)
5865 if ((mask & (1 << write_reg)) != 0)
5867 unsigned int read_reg = num_to_shuffle - 1;
5869 if (read_reg != write_reg)
5871 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5872 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5873 if (debug_displaced)
5874 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5875 "loaded register r%d to r%d\n"), read_reg,
5878 else if (debug_displaced)
5879 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5880 "r%d already in the right place\n"),
5883 clobbered &= ~(1 << write_reg);
5891 /* Restore any registers we scribbled over. */
5892 for (write_reg = 0; clobbered != 0; write_reg++)
5894 if ((clobbered & (1 << write_reg)) != 0)
5896 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5898 if (debug_displaced)
5899 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5900 "clobbered register r%d\n"), write_reg);
5901 clobbered &= ~(1 << write_reg);
5905 /* Perform register writeback manually. */
5906 if (dsc->u.block.writeback)
5908 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5910 if (dsc->u.block.increment)
5911 new_rn_val += regs_loaded * 4;
5913 new_rn_val -= regs_loaded * 4;
5915 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5920 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5921 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5924 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5925 struct regcache *regs,
5926 struct displaced_step_closure *dsc)
5928 int load = bit (insn, 20);
5929 int user = bit (insn, 22);
5930 int increment = bit (insn, 23);
5931 int before = bit (insn, 24);
5932 int writeback = bit (insn, 21);
5933 int rn = bits (insn, 16, 19);
5935 /* Block transfers which don't mention PC can be run directly
5937 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5938 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5940 if (rn == ARM_PC_REGNUM)
5942 warning (_("displaced: Unpredictable LDM or STM with "
5943 "base register r15"));
5944 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5947 if (debug_displaced)
5948 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5949 "%.8lx\n", (unsigned long) insn);
5951 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5952 dsc->u.block.rn = rn;
5954 dsc->u.block.load = load;
5955 dsc->u.block.user = user;
5956 dsc->u.block.increment = increment;
5957 dsc->u.block.before = before;
5958 dsc->u.block.writeback = writeback;
5959 dsc->u.block.cond = bits (insn, 28, 31);
5961 dsc->u.block.regmask = insn & 0xffff;
5965 if ((insn & 0xffff) == 0xffff)
5967 /* LDM with a fully-populated register list. This case is
5968 particularly tricky. Implement for now by fully emulating the
5969 instruction (which might not behave perfectly in all cases, but
5970 these instructions should be rare enough for that not to matter
5972 dsc->modinsn[0] = ARM_NOP;
5974 dsc->cleanup = &cleanup_block_load_all;
5978 /* LDM of a list of registers which includes PC. Implement by
5979 rewriting the list of registers to be transferred into a
5980 contiguous chunk r0...rX before doing the transfer, then shuffling
5981 registers into the correct places in the cleanup routine. */
5982 unsigned int regmask = insn & 0xffff;
5983 unsigned int num_in_list = bitcount (regmask), new_regmask;
5986 for (i = 0; i < num_in_list; i++)
5987 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5989 /* Writeback makes things complicated. We need to avoid clobbering
5990 the base register with one of the registers in our modified
5991 register list, but just using a different register can't work in
5994 ldm r14!, {r0-r13,pc}
5996 which would need to be rewritten as:
6000 but that can't work, because there's no free register for N.
6002 Solve this by turning off the writeback bit, and emulating
6003 writeback manually in the cleanup routine. */
6008 new_regmask = (1 << num_in_list) - 1;
6010 if (debug_displaced)
6011 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6012 "{..., pc}: original reg list %.4x, modified "
6013 "list %.4x\n"), rn, writeback ? "!" : "",
6014 (int) insn & 0xffff, new_regmask);
6016 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6018 dsc->cleanup = &cleanup_block_load_pc;
6023 /* STM of a list of registers which includes PC. Run the instruction
6024 as-is, but out of line: this will store the wrong value for the PC,
6025 so we must manually fix up the memory in the cleanup routine.
6026 Doing things this way has the advantage that we can auto-detect
6027 the offset of the PC write (which is architecture-dependent) in
6028 the cleanup routine. */
6029 dsc->modinsn[0] = insn;
6031 dsc->cleanup = &cleanup_block_store_pc;
6038 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6039 struct regcache *regs,
6040 struct displaced_step_closure *dsc)
6042 int rn = bits (insn1, 0, 3);
6043 int load = bit (insn1, 4);
6044 int writeback = bit (insn1, 5);
6046 /* Block transfers which don't mention PC can be run directly
6048 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6049 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6051 if (rn == ARM_PC_REGNUM)
6053 warning (_("displaced: Unpredictable LDM or STM with "
6054 "base register r15"));
6055 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6056 "unpredictable ldm/stm", dsc);
6059 if (debug_displaced)
6060 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6061 "%.4x%.4x\n", insn1, insn2);
6063 /* Clear bit 13, since it should be always zero. */
6064 dsc->u.block.regmask = (insn2 & 0xdfff);
6065 dsc->u.block.rn = rn;
6067 dsc->u.block.load = load;
6068 dsc->u.block.user = 0;
6069 dsc->u.block.increment = bit (insn1, 7);
6070 dsc->u.block.before = bit (insn1, 8);
6071 dsc->u.block.writeback = writeback;
6072 dsc->u.block.cond = INST_AL;
6073 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6077 if (dsc->u.block.regmask == 0xffff)
6079 /* This branch is impossible to happen. */
6084 unsigned int regmask = dsc->u.block.regmask;
6085 unsigned int num_in_list = bitcount (regmask), new_regmask;
6088 for (i = 0; i < num_in_list; i++)
6089 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6094 new_regmask = (1 << num_in_list) - 1;
6096 if (debug_displaced)
6097 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6098 "{..., pc}: original reg list %.4x, modified "
6099 "list %.4x\n"), rn, writeback ? "!" : "",
6100 (int) dsc->u.block.regmask, new_regmask);
6102 dsc->modinsn[0] = insn1;
6103 dsc->modinsn[1] = (new_regmask & 0xffff);
6106 dsc->cleanup = &cleanup_block_load_pc;
6111 dsc->modinsn[0] = insn1;
6112 dsc->modinsn[1] = insn2;
6114 dsc->cleanup = &cleanup_block_store_pc;
6119 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6120 This is used to avoid a dependency on BFD's bfd_endian enum. */
6123 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6126 return read_memory_unsigned_integer (memaddr, len,
6127 (enum bfd_endian) byte_order);
6130 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6133 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6136 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6139 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6142 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self,
6148 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6151 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6153 return arm_is_thumb (self->regcache);
6156 /* single_step() is called just before we want to resume the inferior,
6157 if we want to single-step it but there is no hardware or kernel
6158 single-step support. We find the target of the coming instructions
6159 and breakpoint them. */
6162 arm_software_single_step (struct frame_info *frame)
6164 struct regcache *regcache = get_current_regcache ();
6165 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6166 struct address_space *aspace = get_regcache_aspace (regcache);
6167 struct arm_get_next_pcs next_pcs_ctx;
6170 VEC (CORE_ADDR) *next_pcs = NULL;
6171 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6173 arm_get_next_pcs_ctor (&next_pcs_ctx,
6174 &arm_get_next_pcs_ops,
6175 gdbarch_byte_order (gdbarch),
6176 gdbarch_byte_order_for_code (gdbarch),
6180 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6182 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6183 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6185 do_cleanups (old_chain);
6190 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6191 for Linux, where some SVC instructions must be treated specially. */
6194 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6195 struct displaced_step_closure *dsc)
6197 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6199 if (debug_displaced)
6200 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6201 "%.8lx\n", (unsigned long) resume_addr);
6203 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6207 /* Common copy routine for svc instruciton. */
6210 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6211 struct displaced_step_closure *dsc)
6213 /* Preparation: none.
6214 Insn: unmodified svc.
6215 Cleanup: pc <- insn_addr + insn_size. */
6217 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6219 dsc->wrote_to_pc = 1;
6221 /* Allow OS-specific code to override SVC handling. */
6222 if (dsc->u.svc.copy_svc_os)
6223 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6226 dsc->cleanup = &cleanup_svc;
6232 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6233 struct regcache *regs, struct displaced_step_closure *dsc)
6236 if (debug_displaced)
6237 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6238 (unsigned long) insn);
6240 dsc->modinsn[0] = insn;
6242 return install_svc (gdbarch, regs, dsc);
6246 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6247 struct regcache *regs, struct displaced_step_closure *dsc)
6250 if (debug_displaced)
6251 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6254 dsc->modinsn[0] = insn;
6256 return install_svc (gdbarch, regs, dsc);
6259 /* Copy undefined instructions. */
6262 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6263 struct displaced_step_closure *dsc)
6265 if (debug_displaced)
6266 fprintf_unfiltered (gdb_stdlog,
6267 "displaced: copying undefined insn %.8lx\n",
6268 (unsigned long) insn);
6270 dsc->modinsn[0] = insn;
6276 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6277 struct displaced_step_closure *dsc)
6280 if (debug_displaced)
6281 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6282 "%.4x %.4x\n", (unsigned short) insn1,
6283 (unsigned short) insn2);
6285 dsc->modinsn[0] = insn1;
6286 dsc->modinsn[1] = insn2;
6292 /* Copy unpredictable instructions. */
6295 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6296 struct displaced_step_closure *dsc)
6298 if (debug_displaced)
6299 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6300 "%.8lx\n", (unsigned long) insn);
6302 dsc->modinsn[0] = insn;
6307 /* The decode_* functions are instruction decoding helpers. They mostly follow
6308 the presentation in the ARM ARM. */
6311 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6312 struct regcache *regs,
6313 struct displaced_step_closure *dsc)
6315 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6316 unsigned int rn = bits (insn, 16, 19);
6318 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6319 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6320 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6321 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6322 else if ((op1 & 0x60) == 0x20)
6323 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6324 else if ((op1 & 0x71) == 0x40)
6325 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6327 else if ((op1 & 0x77) == 0x41)
6328 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6329 else if ((op1 & 0x77) == 0x45)
6330 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6331 else if ((op1 & 0x77) == 0x51)
6334 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6336 return arm_copy_unpred (gdbarch, insn, dsc);
6338 else if ((op1 & 0x77) == 0x55)
6339 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6340 else if (op1 == 0x57)
6343 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6344 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6345 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6346 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6347 default: return arm_copy_unpred (gdbarch, insn, dsc);
6349 else if ((op1 & 0x63) == 0x43)
6350 return arm_copy_unpred (gdbarch, insn, dsc);
6351 else if ((op2 & 0x1) == 0x0)
6352 switch (op1 & ~0x80)
6355 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6357 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6358 case 0x71: case 0x75:
6360 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6361 case 0x63: case 0x67: case 0x73: case 0x77:
6362 return arm_copy_unpred (gdbarch, insn, dsc);
6364 return arm_copy_undef (gdbarch, insn, dsc);
6367 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6371 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6372 struct regcache *regs,
6373 struct displaced_step_closure *dsc)
6375 if (bit (insn, 27) == 0)
6376 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6377 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6378 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6381 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6384 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6386 case 0x4: case 0x5: case 0x6: case 0x7:
6387 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6390 switch ((insn & 0xe00000) >> 21)
6392 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6394 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6397 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6400 return arm_copy_undef (gdbarch, insn, dsc);
6405 int rn_f = (bits (insn, 16, 19) == 0xf);
6406 switch ((insn & 0xe00000) >> 21)
6409 /* ldc/ldc2 imm (undefined for rn == pc). */
6410 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6411 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6414 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6416 case 0x4: case 0x5: case 0x6: case 0x7:
6417 /* ldc/ldc2 lit (undefined for rn != pc). */
6418 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6419 : arm_copy_undef (gdbarch, insn, dsc);
6422 return arm_copy_undef (gdbarch, insn, dsc);
6427 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6430 if (bits (insn, 16, 19) == 0xf)
6432 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6434 return arm_copy_undef (gdbarch, insn, dsc);
6438 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6440 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6444 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6446 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6449 return arm_copy_undef (gdbarch, insn, dsc);
6453 /* Decode miscellaneous instructions in dp/misc encoding space. */
6456 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6457 struct regcache *regs,
6458 struct displaced_step_closure *dsc)
6460 unsigned int op2 = bits (insn, 4, 6);
6461 unsigned int op = bits (insn, 21, 22);
6466 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6469 if (op == 0x1) /* bx. */
6470 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6472 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6474 return arm_copy_undef (gdbarch, insn, dsc);
6478 /* Not really supported. */
6479 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6481 return arm_copy_undef (gdbarch, insn, dsc);
6485 return arm_copy_bx_blx_reg (gdbarch, insn,
6486 regs, dsc); /* blx register. */
6488 return arm_copy_undef (gdbarch, insn, dsc);
6491 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6495 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6497 /* Not really supported. */
6498 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6501 return arm_copy_undef (gdbarch, insn, dsc);
6506 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6507 struct regcache *regs,
6508 struct displaced_step_closure *dsc)
6511 switch (bits (insn, 20, 24))
6514 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6517 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6519 case 0x12: case 0x16:
6520 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6523 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6527 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6529 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6530 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6531 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6532 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6533 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6534 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6535 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6536 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6537 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6538 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6539 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6540 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6541 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6542 /* 2nd arg means "unprivileged". */
6543 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6547 /* Should be unreachable. */
6552 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6553 struct regcache *regs,
6554 struct displaced_step_closure *dsc)
6556 int a = bit (insn, 25), b = bit (insn, 4);
6557 uint32_t op1 = bits (insn, 20, 24);
6559 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6560 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6561 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6562 else if ((!a && (op1 & 0x17) == 0x02)
6563 || (a && (op1 & 0x17) == 0x02 && !b))
6564 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6565 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6566 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6567 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6568 else if ((!a && (op1 & 0x17) == 0x03)
6569 || (a && (op1 & 0x17) == 0x03 && !b))
6570 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6571 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6572 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6573 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6574 else if ((!a && (op1 & 0x17) == 0x06)
6575 || (a && (op1 & 0x17) == 0x06 && !b))
6576 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6577 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6578 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6579 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6580 else if ((!a && (op1 & 0x17) == 0x07)
6581 || (a && (op1 & 0x17) == 0x07 && !b))
6582 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6584 /* Should be unreachable. */
6589 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6590 struct displaced_step_closure *dsc)
6592 switch (bits (insn, 20, 24))
6594 case 0x00: case 0x01: case 0x02: case 0x03:
6595 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6597 case 0x04: case 0x05: case 0x06: case 0x07:
6598 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6600 case 0x08: case 0x09: case 0x0a: case 0x0b:
6601 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6602 return arm_copy_unmodified (gdbarch, insn,
6603 "decode/pack/unpack/saturate/reverse", dsc);
6606 if (bits (insn, 5, 7) == 0) /* op2. */
6608 if (bits (insn, 12, 15) == 0xf)
6609 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6611 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6614 return arm_copy_undef (gdbarch, insn, dsc);
6616 case 0x1a: case 0x1b:
6617 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6618 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6620 return arm_copy_undef (gdbarch, insn, dsc);
6622 case 0x1c: case 0x1d:
6623 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6625 if (bits (insn, 0, 3) == 0xf)
6626 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6628 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6631 return arm_copy_undef (gdbarch, insn, dsc);
6633 case 0x1e: case 0x1f:
6634 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6635 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6637 return arm_copy_undef (gdbarch, insn, dsc);
6640 /* Should be unreachable. */
6645 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6646 struct regcache *regs,
6647 struct displaced_step_closure *dsc)
6650 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6652 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6656 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6657 struct regcache *regs,
6658 struct displaced_step_closure *dsc)
6660 unsigned int opcode = bits (insn, 20, 24);
6664 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6665 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6667 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6668 case 0x12: case 0x16:
6669 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6671 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6672 case 0x13: case 0x17:
6673 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6675 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6676 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6677 /* Note: no writeback for these instructions. Bit 25 will always be
6678 zero though (via caller), so the following works OK. */
6679 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6682 /* Should be unreachable. */
6686 /* Decode shifted register instructions. */
6689 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6690 uint16_t insn2, struct regcache *regs,
6691 struct displaced_step_closure *dsc)
6693 /* PC is only allowed to be used in instruction MOV. */
6695 unsigned int op = bits (insn1, 5, 8);
6696 unsigned int rn = bits (insn1, 0, 3);
6698 if (op == 0x2 && rn == 0xf) /* MOV */
6699 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6701 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6702 "dp (shift reg)", dsc);
6706 /* Decode extension register load/store. Exactly the same as
6707 arm_decode_ext_reg_ld_st. */
6710 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6711 uint16_t insn2, struct regcache *regs,
6712 struct displaced_step_closure *dsc)
6714 unsigned int opcode = bits (insn1, 4, 8);
6718 case 0x04: case 0x05:
6719 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6720 "vfp/neon vmov", dsc);
6722 case 0x08: case 0x0c: /* 01x00 */
6723 case 0x0a: case 0x0e: /* 01x10 */
6724 case 0x12: case 0x16: /* 10x10 */
6725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6726 "vfp/neon vstm/vpush", dsc);
6728 case 0x09: case 0x0d: /* 01x01 */
6729 case 0x0b: case 0x0f: /* 01x11 */
6730 case 0x13: case 0x17: /* 10x11 */
6731 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6732 "vfp/neon vldm/vpop", dsc);
6734 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6735 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6737 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6738 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6741 /* Should be unreachable. */
6746 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6747 struct regcache *regs, struct displaced_step_closure *dsc)
6749 unsigned int op1 = bits (insn, 20, 25);
6750 int op = bit (insn, 4);
6751 unsigned int coproc = bits (insn, 8, 11);
6753 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6754 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6755 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6756 && (coproc & 0xe) != 0xa)
6758 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6759 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6760 && (coproc & 0xe) != 0xa)
6761 /* ldc/ldc2 imm/lit. */
6762 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6763 else if ((op1 & 0x3e) == 0x00)
6764 return arm_copy_undef (gdbarch, insn, dsc);
6765 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6766 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6767 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6768 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6769 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6770 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6771 else if ((op1 & 0x30) == 0x20 && !op)
6773 if ((coproc & 0xe) == 0xa)
6774 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6776 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6778 else if ((op1 & 0x30) == 0x20 && op)
6779 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6780 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6781 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6782 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6783 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6784 else if ((op1 & 0x30) == 0x30)
6785 return arm_copy_svc (gdbarch, insn, regs, dsc);
6787 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6791 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6792 uint16_t insn2, struct regcache *regs,
6793 struct displaced_step_closure *dsc)
6795 unsigned int coproc = bits (insn2, 8, 11);
6796 unsigned int bit_5_8 = bits (insn1, 5, 8);
6797 unsigned int bit_9 = bit (insn1, 9);
6798 unsigned int bit_4 = bit (insn1, 4);
6803 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6804 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6806 else if (bit_5_8 == 0) /* UNDEFINED. */
6807 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6810 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6811 if ((coproc & 0xe) == 0xa)
6812 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6814 else /* coproc is not 101x. */
6816 if (bit_4 == 0) /* STC/STC2. */
6817 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6819 else /* LDC/LDC2 {literal, immeidate}. */
6820 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6832 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6833 struct displaced_step_closure *dsc, int rd)
6839 Preparation: Rd <- PC
6845 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6846 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6850 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6851 struct displaced_step_closure *dsc,
6852 int rd, unsigned int imm)
6855 /* Encoding T2: ADDS Rd, #imm */
6856 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6858 install_pc_relative (gdbarch, regs, dsc, rd);
6864 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6865 struct regcache *regs,
6866 struct displaced_step_closure *dsc)
6868 unsigned int rd = bits (insn, 8, 10);
6869 unsigned int imm8 = bits (insn, 0, 7);
6871 if (debug_displaced)
6872 fprintf_unfiltered (gdb_stdlog,
6873 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6876 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6880 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6881 uint16_t insn2, struct regcache *regs,
6882 struct displaced_step_closure *dsc)
6884 unsigned int rd = bits (insn2, 8, 11);
6885 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6886 extract raw immediate encoding rather than computing immediate. When
6887 generating ADD or SUB instruction, we can simply perform OR operation to
6888 set immediate into ADD. */
6889 unsigned int imm_3_8 = insn2 & 0x70ff;
6890 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6892 if (debug_displaced)
6893 fprintf_unfiltered (gdb_stdlog,
6894 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6895 rd, imm_i, imm_3_8, insn1, insn2);
6897 if (bit (insn1, 7)) /* Encoding T2 */
6899 /* Encoding T3: SUB Rd, Rd, #imm */
6900 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6901 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6903 else /* Encoding T3 */
6905 /* Encoding T3: ADD Rd, Rd, #imm */
6906 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6907 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6911 install_pc_relative (gdbarch, regs, dsc, rd);
6917 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6918 struct regcache *regs,
6919 struct displaced_step_closure *dsc)
6921 unsigned int rt = bits (insn1, 8, 10);
6923 int imm8 = (bits (insn1, 0, 7) << 2);
6929 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6931 Insn: LDR R0, [R2, R3];
6932 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6934 if (debug_displaced)
6935 fprintf_unfiltered (gdb_stdlog,
6936 "displaced: copying thumb ldr r%d [pc #%d]\n"
6939 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6940 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6941 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6942 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6943 /* The assembler calculates the required value of the offset from the
6944 Align(PC,4) value of this instruction to the label. */
6945 pc = pc & 0xfffffffc;
6947 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6948 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6951 dsc->u.ldst.xfersize = 4;
6953 dsc->u.ldst.immed = 0;
6954 dsc->u.ldst.writeback = 0;
6955 dsc->u.ldst.restore_r4 = 0;
6957 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6959 dsc->cleanup = &cleanup_load;
6964 /* Copy Thumb cbnz/cbz insruction. */
6967 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6968 struct regcache *regs,
6969 struct displaced_step_closure *dsc)
6971 int non_zero = bit (insn1, 11);
6972 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6973 CORE_ADDR from = dsc->insn_addr;
6974 int rn = bits (insn1, 0, 2);
6975 int rn_val = displaced_read_reg (regs, dsc, rn);
6977 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6978 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
6979 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
6980 condition is false, let it be, cleanup_branch will do nothing. */
6981 if (dsc->u.branch.cond)
6983 dsc->u.branch.cond = INST_AL;
6984 dsc->u.branch.dest = from + 4 + imm5;
6987 dsc->u.branch.dest = from + 2;
6989 dsc->u.branch.link = 0;
6990 dsc->u.branch.exchange = 0;
6992 if (debug_displaced)
6993 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6994 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6995 rn, rn_val, insn1, dsc->u.branch.dest);
6997 dsc->modinsn[0] = THUMB_NOP;
6999 dsc->cleanup = &cleanup_branch;
7003 /* Copy Table Branch Byte/Halfword */
7005 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7006 uint16_t insn2, struct regcache *regs,
7007 struct displaced_step_closure *dsc)
7009 ULONGEST rn_val, rm_val;
7010 int is_tbh = bit (insn2, 4);
7011 CORE_ADDR halfwords = 0;
7012 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7014 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7015 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7021 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7022 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7028 target_read_memory (rn_val + rm_val, buf, 1);
7029 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7032 if (debug_displaced)
7033 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7034 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7035 (unsigned int) rn_val, (unsigned int) rm_val,
7036 (unsigned int) halfwords);
7038 dsc->u.branch.cond = INST_AL;
7039 dsc->u.branch.link = 0;
7040 dsc->u.branch.exchange = 0;
7041 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7043 dsc->cleanup = &cleanup_branch;
7049 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7050 struct displaced_step_closure *dsc)
7053 int val = displaced_read_reg (regs, dsc, 7);
7054 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7057 val = displaced_read_reg (regs, dsc, 8);
7058 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7061 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7066 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7067 struct regcache *regs,
7068 struct displaced_step_closure *dsc)
7070 dsc->u.block.regmask = insn1 & 0x00ff;
7072 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7075 (1) register list is full, that is, r0-r7 are used.
7076 Prepare: tmp[0] <- r8
7078 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7079 MOV r8, r7; Move value of r7 to r8;
7080 POP {r7}; Store PC value into r7.
7082 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7084 (2) register list is not full, supposing there are N registers in
7085 register list (except PC, 0 <= N <= 7).
7086 Prepare: for each i, 0 - N, tmp[i] <- ri.
7088 POP {r0, r1, ...., rN};
7090 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7091 from tmp[] properly.
7093 if (debug_displaced)
7094 fprintf_unfiltered (gdb_stdlog,
7095 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7096 dsc->u.block.regmask, insn1);
7098 if (dsc->u.block.regmask == 0xff)
7100 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7102 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7103 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7104 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7107 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7111 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7113 unsigned int new_regmask;
7115 for (i = 0; i < num_in_list + 1; i++)
7116 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7118 new_regmask = (1 << (num_in_list + 1)) - 1;
7120 if (debug_displaced)
7121 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7122 "{..., pc}: original reg list %.4x,"
7123 " modified list %.4x\n"),
7124 (int) dsc->u.block.regmask, new_regmask);
7126 dsc->u.block.regmask |= 0x8000;
7127 dsc->u.block.writeback = 0;
7128 dsc->u.block.cond = INST_AL;
7130 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7132 dsc->cleanup = &cleanup_block_load_pc;
7139 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7140 struct regcache *regs,
7141 struct displaced_step_closure *dsc)
7143 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7144 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7147 /* 16-bit thumb instructions. */
7148 switch (op_bit_12_15)
7150 /* Shift (imme), add, subtract, move and compare. */
7151 case 0: case 1: case 2: case 3:
7152 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7153 "shift/add/sub/mov/cmp",
7157 switch (op_bit_10_11)
7159 case 0: /* Data-processing */
7160 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7164 case 1: /* Special data instructions and branch and exchange. */
7166 unsigned short op = bits (insn1, 7, 9);
7167 if (op == 6 || op == 7) /* BX or BLX */
7168 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7169 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7170 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7172 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7176 default: /* LDR (literal) */
7177 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7180 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7181 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7184 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7185 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7186 else /* Generate SP-relative address */
7187 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7189 case 11: /* Misc 16-bit instructions */
7191 switch (bits (insn1, 8, 11))
7193 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7194 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7196 case 12: case 13: /* POP */
7197 if (bit (insn1, 8)) /* PC is in register list. */
7198 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7200 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7202 case 15: /* If-Then, and hints */
7203 if (bits (insn1, 0, 3))
7204 /* If-Then makes up to four following instructions conditional.
7205 IT instruction itself is not conditional, so handle it as a
7206 common unmodified instruction. */
7207 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7210 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7213 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7218 if (op_bit_10_11 < 2) /* Store multiple registers */
7219 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7220 else /* Load multiple registers */
7221 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7223 case 13: /* Conditional branch and supervisor call */
7224 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7225 err = thumb_copy_b (gdbarch, insn1, dsc);
7227 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7229 case 14: /* Unconditional branch */
7230 err = thumb_copy_b (gdbarch, insn1, dsc);
7237 internal_error (__FILE__, __LINE__,
7238 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7242 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7243 uint16_t insn1, uint16_t insn2,
7244 struct regcache *regs,
7245 struct displaced_step_closure *dsc)
7247 int rt = bits (insn2, 12, 15);
7248 int rn = bits (insn1, 0, 3);
7249 int op1 = bits (insn1, 7, 8);
7251 switch (bits (insn1, 5, 6))
7253 case 0: /* Load byte and memory hints */
7254 if (rt == 0xf) /* PLD/PLI */
7257 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7258 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7260 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7265 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7266 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7269 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7270 "ldrb{reg, immediate}/ldrbt",
7275 case 1: /* Load halfword and memory hints. */
7276 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7277 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7278 "pld/unalloc memhint", dsc);
7282 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7285 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7289 case 2: /* Load word */
7291 int insn2_bit_8_11 = bits (insn2, 8, 11);
7294 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7295 else if (op1 == 0x1) /* Encoding T3 */
7296 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7298 else /* op1 == 0x0 */
7300 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7301 /* LDR (immediate) */
7302 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7303 dsc, bit (insn2, 8), 1);
7304 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7305 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7308 /* LDR (register) */
7309 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7315 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7322 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7323 uint16_t insn2, struct regcache *regs,
7324 struct displaced_step_closure *dsc)
7327 unsigned short op = bit (insn2, 15);
7328 unsigned int op1 = bits (insn1, 11, 12);
7334 switch (bits (insn1, 9, 10))
7339 /* Load/store {dual, execlusive}, table branch. */
7340 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7341 && bits (insn2, 5, 7) == 0)
7342 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7345 /* PC is not allowed to use in load/store {dual, exclusive}
7347 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7348 "load/store dual/ex", dsc);
7350 else /* load/store multiple */
7352 switch (bits (insn1, 7, 8))
7354 case 0: case 3: /* SRS, RFE */
7355 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7358 case 1: case 2: /* LDM/STM/PUSH/POP */
7359 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7366 /* Data-processing (shift register). */
7367 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7370 default: /* Coprocessor instructions. */
7371 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7376 case 2: /* op1 = 2 */
7377 if (op) /* Branch and misc control. */
7379 if (bit (insn2, 14) /* BLX/BL */
7380 || bit (insn2, 12) /* Unconditional branch */
7381 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7382 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7384 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7389 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7391 int op = bits (insn1, 4, 8);
7392 int rn = bits (insn1, 0, 3);
7393 if ((op == 0 || op == 0xa) && rn == 0xf)
7394 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7397 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7400 else /* Data processing (modified immeidate) */
7401 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7405 case 3: /* op1 = 3 */
7406 switch (bits (insn1, 9, 10))
7410 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7412 else /* NEON Load/Store and Store single data item */
7413 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7414 "neon elt/struct load/store",
7417 case 1: /* op1 = 3, bits (9, 10) == 1 */
7418 switch (bits (insn1, 7, 8))
7420 case 0: case 1: /* Data processing (register) */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7424 case 2: /* Multiply and absolute difference */
7425 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "mul/mua/diff", dsc);
7428 case 3: /* Long multiply and divide */
7429 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 default: /* Coprocessor instructions */
7435 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7444 internal_error (__FILE__, __LINE__,
7445 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7450 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7451 CORE_ADDR to, struct regcache *regs,
7452 struct displaced_step_closure *dsc)
7454 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7456 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7458 if (debug_displaced)
7459 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7460 "at %.8lx\n", insn1, (unsigned long) from);
7463 dsc->insn_size = thumb_insn_size (insn1);
7464 if (thumb_insn_size (insn1) == 4)
7467 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7468 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7471 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7475 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7476 CORE_ADDR to, struct regcache *regs,
7477 struct displaced_step_closure *dsc)
7480 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7483 /* Most displaced instructions use a 1-instruction scratch space, so set this
7484 here and override below if/when necessary. */
7486 dsc->insn_addr = from;
7487 dsc->scratch_base = to;
7488 dsc->cleanup = NULL;
7489 dsc->wrote_to_pc = 0;
7491 if (!displaced_in_arm_mode (regs))
7492 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
7496 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7497 if (debug_displaced)
7498 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7499 "at %.8lx\n", (unsigned long) insn,
7500 (unsigned long) from);
7502 if ((insn & 0xf0000000) == 0xf0000000)
7503 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7504 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7506 case 0x0: case 0x1: case 0x2: case 0x3:
7507 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7510 case 0x4: case 0x5: case 0x6:
7511 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7515 err = arm_decode_media (gdbarch, insn, dsc);
7518 case 0x8: case 0x9: case 0xa: case 0xb:
7519 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7522 case 0xc: case 0xd: case 0xe: case 0xf:
7523 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
7528 internal_error (__FILE__, __LINE__,
7529 _("arm_process_displaced_insn: Instruction decode error"));
7532 /* Actually set up the scratch space for a displaced instruction. */
7535 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7536 CORE_ADDR to, struct displaced_step_closure *dsc)
7538 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7539 unsigned int i, len, offset;
7540 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7541 int size = dsc->is_thumb? 2 : 4;
7542 const gdb_byte *bkp_insn;
7545 /* Poke modified instruction(s). */
7546 for (i = 0; i < dsc->numinsns; i++)
7548 if (debug_displaced)
7550 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7552 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7555 fprintf_unfiltered (gdb_stdlog, "%.4x",
7556 (unsigned short)dsc->modinsn[i]);
7558 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7559 (unsigned long) to + offset);
7562 write_memory_unsigned_integer (to + offset, size,
7563 byte_order_for_code,
7568 /* Choose the correct breakpoint instruction. */
7571 bkp_insn = tdep->thumb_breakpoint;
7572 len = tdep->thumb_breakpoint_size;
7576 bkp_insn = tdep->arm_breakpoint;
7577 len = tdep->arm_breakpoint_size;
7580 /* Put breakpoint afterwards. */
7581 write_memory (to + offset, bkp_insn, len);
7583 if (debug_displaced)
7584 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7585 paddress (gdbarch, from), paddress (gdbarch, to));
7588 /* Entry point for cleaning things up after a displaced instruction has been
7592 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7593 struct displaced_step_closure *dsc,
7594 CORE_ADDR from, CORE_ADDR to,
7595 struct regcache *regs)
7598 dsc->cleanup (gdbarch, regs, dsc);
7600 if (!dsc->wrote_to_pc)
7601 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7602 dsc->insn_addr + dsc->insn_size);
7606 #include "bfd-in2.h"
7607 #include "libcoff.h"
7610 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7612 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7614 if (arm_pc_is_thumb (gdbarch, memaddr))
7616 static asymbol *asym;
7617 static combined_entry_type ce;
7618 static struct coff_symbol_struct csym;
7619 static struct bfd fake_bfd;
7620 static bfd_target fake_target;
7622 if (csym.native == NULL)
7624 /* Create a fake symbol vector containing a Thumb symbol.
7625 This is solely so that the code in print_insn_little_arm()
7626 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7627 the presence of a Thumb symbol and switch to decoding
7628 Thumb instructions. */
7630 fake_target.flavour = bfd_target_coff_flavour;
7631 fake_bfd.xvec = &fake_target;
7632 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7634 csym.symbol.the_bfd = &fake_bfd;
7635 csym.symbol.name = "fake";
7636 asym = (asymbol *) & csym;
7639 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7640 info->symbols = &asym;
7643 info->symbols = NULL;
7645 if (info->endian == BFD_ENDIAN_BIG)
7646 return print_insn_big_arm (memaddr, info);
7648 return print_insn_little_arm (memaddr, info);
7651 /* The following define instruction sequences that will cause ARM
7652 cpu's to take an undefined instruction trap. These are used to
7653 signal a breakpoint to GDB.
7655 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7656 modes. A different instruction is required for each mode. The ARM
7657 cpu's can also be big or little endian. Thus four different
7658 instructions are needed to support all cases.
7660 Note: ARMv4 defines several new instructions that will take the
7661 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7662 not in fact add the new instructions. The new undefined
7663 instructions in ARMv4 are all instructions that had no defined
7664 behaviour in earlier chips. There is no guarantee that they will
7665 raise an exception, but may be treated as NOP's. In practice, it
7666 may only safe to rely on instructions matching:
7668 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7669 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7670 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7672 Even this may only true if the condition predicate is true. The
7673 following use a condition predicate of ALWAYS so it is always TRUE.
7675 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7676 and NetBSD all use a software interrupt rather than an undefined
7677 instruction to force a trap. This can be handled by by the
7678 abi-specific code during establishment of the gdbarch vector. */
7680 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7681 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7682 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7683 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7685 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7686 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7687 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7688 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7690 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7691 the program counter value to determine whether a 16-bit or 32-bit
7692 breakpoint should be used. It returns a pointer to a string of
7693 bytes that encode a breakpoint instruction, stores the length of
7694 the string to *lenptr, and adjusts the program counter (if
7695 necessary) to point to the actual memory location where the
7696 breakpoint should be inserted. */
7698 static const unsigned char *
7699 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7701 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7702 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7704 if (arm_pc_is_thumb (gdbarch, *pcptr))
7706 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7708 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7709 check whether we are replacing a 32-bit instruction. */
7710 if (tdep->thumb2_breakpoint != NULL)
7713 if (target_read_memory (*pcptr, buf, 2) == 0)
7715 unsigned short inst1;
7716 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7717 if (thumb_insn_size (inst1) == 4)
7719 *lenptr = tdep->thumb2_breakpoint_size;
7720 return tdep->thumb2_breakpoint;
7725 *lenptr = tdep->thumb_breakpoint_size;
7726 return tdep->thumb_breakpoint;
7730 *lenptr = tdep->arm_breakpoint_size;
7731 return tdep->arm_breakpoint;
7736 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7739 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7741 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7742 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7743 that this is not confused with a 32-bit ARM breakpoint. */
7747 /* Extract from an array REGBUF containing the (raw) register state a
7748 function return value of type TYPE, and copy that, in virtual
7749 format, into VALBUF. */
7752 arm_extract_return_value (struct type *type, struct regcache *regs,
7755 struct gdbarch *gdbarch = get_regcache_arch (regs);
7756 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7758 if (TYPE_CODE_FLT == TYPE_CODE (type))
7760 switch (gdbarch_tdep (gdbarch)->fp_model)
7764 /* The value is in register F0 in internal format. We need to
7765 extract the raw value and then convert it to the desired
7767 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7769 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7770 convert_from_extended (floatformat_from_type (type), tmpbuf,
7771 valbuf, gdbarch_byte_order (gdbarch));
7775 case ARM_FLOAT_SOFT_FPA:
7776 case ARM_FLOAT_SOFT_VFP:
7777 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7778 not using the VFP ABI code. */
7780 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7781 if (TYPE_LENGTH (type) > 4)
7782 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7783 valbuf + INT_REGISTER_SIZE);
7787 internal_error (__FILE__, __LINE__,
7788 _("arm_extract_return_value: "
7789 "Floating point model not supported"));
7793 else if (TYPE_CODE (type) == TYPE_CODE_INT
7794 || TYPE_CODE (type) == TYPE_CODE_CHAR
7795 || TYPE_CODE (type) == TYPE_CODE_BOOL
7796 || TYPE_CODE (type) == TYPE_CODE_PTR
7797 || TYPE_CODE (type) == TYPE_CODE_REF
7798 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7800 /* If the type is a plain integer, then the access is
7801 straight-forward. Otherwise we have to play around a bit
7803 int len = TYPE_LENGTH (type);
7804 int regno = ARM_A1_REGNUM;
7809 /* By using store_unsigned_integer we avoid having to do
7810 anything special for small big-endian values. */
7811 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7812 store_unsigned_integer (valbuf,
7813 (len > INT_REGISTER_SIZE
7814 ? INT_REGISTER_SIZE : len),
7816 len -= INT_REGISTER_SIZE;
7817 valbuf += INT_REGISTER_SIZE;
7822 /* For a structure or union the behaviour is as if the value had
7823 been stored to word-aligned memory and then loaded into
7824 registers with 32-bit load instruction(s). */
7825 int len = TYPE_LENGTH (type);
7826 int regno = ARM_A1_REGNUM;
7827 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7831 regcache_cooked_read (regs, regno++, tmpbuf);
7832 memcpy (valbuf, tmpbuf,
7833 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7834 len -= INT_REGISTER_SIZE;
7835 valbuf += INT_REGISTER_SIZE;
7841 /* Will a function return an aggregate type in memory or in a
7842 register? Return 0 if an aggregate type can be returned in a
7843 register, 1 if it must be returned in memory. */
7846 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7848 enum type_code code;
7850 type = check_typedef (type);
7852 /* Simple, non-aggregate types (ie not including vectors and
7853 complex) are always returned in a register (or registers). */
7854 code = TYPE_CODE (type);
7855 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7856 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7859 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7861 /* Vector values should be returned using ARM registers if they
7862 are not over 16 bytes. */
7863 return (TYPE_LENGTH (type) > 16);
7866 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7868 /* The AAPCS says all aggregates not larger than a word are returned
7870 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7879 /* All aggregate types that won't fit in a register must be returned
7881 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7884 /* In the ARM ABI, "integer" like aggregate types are returned in
7885 registers. For an aggregate type to be integer like, its size
7886 must be less than or equal to INT_REGISTER_SIZE and the
7887 offset of each addressable subfield must be zero. Note that bit
7888 fields are not addressable, and all addressable subfields of
7889 unions always start at offset zero.
7891 This function is based on the behaviour of GCC 2.95.1.
7892 See: gcc/arm.c: arm_return_in_memory() for details.
7894 Note: All versions of GCC before GCC 2.95.2 do not set up the
7895 parameters correctly for a function returning the following
7896 structure: struct { float f;}; This should be returned in memory,
7897 not a register. Richard Earnshaw sent me a patch, but I do not
7898 know of any way to detect if a function like the above has been
7899 compiled with the correct calling convention. */
7901 /* Assume all other aggregate types can be returned in a register.
7902 Run a check for structures, unions and arrays. */
7905 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7908 /* Need to check if this struct/union is "integer" like. For
7909 this to be true, its size must be less than or equal to
7910 INT_REGISTER_SIZE and the offset of each addressable
7911 subfield must be zero. Note that bit fields are not
7912 addressable, and unions always start at offset zero. If any
7913 of the subfields is a floating point type, the struct/union
7914 cannot be an integer type. */
7916 /* For each field in the object, check:
7917 1) Is it FP? --> yes, nRc = 1;
7918 2) Is it addressable (bitpos != 0) and
7919 not packed (bitsize == 0)?
7923 for (i = 0; i < TYPE_NFIELDS (type); i++)
7925 enum type_code field_type_code;
7928 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7931 /* Is it a floating point type field? */
7932 if (field_type_code == TYPE_CODE_FLT)
7938 /* If bitpos != 0, then we have to care about it. */
7939 if (TYPE_FIELD_BITPOS (type, i) != 0)
7941 /* Bitfields are not addressable. If the field bitsize is
7942 zero, then the field is not packed. Hence it cannot be
7943 a bitfield or any other packed type. */
7944 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7957 /* Write into appropriate registers a function return value of type
7958 TYPE, given in virtual format. */
7961 arm_store_return_value (struct type *type, struct regcache *regs,
7962 const gdb_byte *valbuf)
7964 struct gdbarch *gdbarch = get_regcache_arch (regs);
7965 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7967 if (TYPE_CODE (type) == TYPE_CODE_FLT)
7969 gdb_byte buf[MAX_REGISTER_SIZE];
7971 switch (gdbarch_tdep (gdbarch)->fp_model)
7975 convert_to_extended (floatformat_from_type (type), buf, valbuf,
7976 gdbarch_byte_order (gdbarch));
7977 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
7980 case ARM_FLOAT_SOFT_FPA:
7981 case ARM_FLOAT_SOFT_VFP:
7982 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7983 not using the VFP ABI code. */
7985 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
7986 if (TYPE_LENGTH (type) > 4)
7987 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7988 valbuf + INT_REGISTER_SIZE);
7992 internal_error (__FILE__, __LINE__,
7993 _("arm_store_return_value: Floating "
7994 "point model not supported"));
7998 else if (TYPE_CODE (type) == TYPE_CODE_INT
7999 || TYPE_CODE (type) == TYPE_CODE_CHAR
8000 || TYPE_CODE (type) == TYPE_CODE_BOOL
8001 || TYPE_CODE (type) == TYPE_CODE_PTR
8002 || TYPE_CODE (type) == TYPE_CODE_REF
8003 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8005 if (TYPE_LENGTH (type) <= 4)
8007 /* Values of one word or less are zero/sign-extended and
8009 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8010 LONGEST val = unpack_long (type, valbuf);
8012 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8013 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8017 /* Integral values greater than one word are stored in consecutive
8018 registers starting with r0. This will always be a multiple of
8019 the regiser size. */
8020 int len = TYPE_LENGTH (type);
8021 int regno = ARM_A1_REGNUM;
8025 regcache_cooked_write (regs, regno++, valbuf);
8026 len -= INT_REGISTER_SIZE;
8027 valbuf += INT_REGISTER_SIZE;
8033 /* For a structure or union the behaviour is as if the value had
8034 been stored to word-aligned memory and then loaded into
8035 registers with 32-bit load instruction(s). */
8036 int len = TYPE_LENGTH (type);
8037 int regno = ARM_A1_REGNUM;
8038 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8042 memcpy (tmpbuf, valbuf,
8043 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8044 regcache_cooked_write (regs, regno++, tmpbuf);
8045 len -= INT_REGISTER_SIZE;
8046 valbuf += INT_REGISTER_SIZE;
8052 /* Handle function return values. */
8054 static enum return_value_convention
8055 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8056 struct type *valtype, struct regcache *regcache,
8057 gdb_byte *readbuf, const gdb_byte *writebuf)
8059 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8060 struct type *func_type = function ? value_type (function) : NULL;
8061 enum arm_vfp_cprc_base_type vfp_base_type;
8064 if (arm_vfp_abi_for_function (gdbarch, func_type)
8065 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8067 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8068 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8070 for (i = 0; i < vfp_base_count; i++)
8072 if (reg_char == 'q')
8075 arm_neon_quad_write (gdbarch, regcache, i,
8076 writebuf + i * unit_length);
8079 arm_neon_quad_read (gdbarch, regcache, i,
8080 readbuf + i * unit_length);
8087 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8088 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8091 regcache_cooked_write (regcache, regnum,
8092 writebuf + i * unit_length);
8094 regcache_cooked_read (regcache, regnum,
8095 readbuf + i * unit_length);
8098 return RETURN_VALUE_REGISTER_CONVENTION;
8101 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8102 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8103 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8105 if (tdep->struct_return == pcc_struct_return
8106 || arm_return_in_memory (gdbarch, valtype))
8107 return RETURN_VALUE_STRUCT_CONVENTION;
8109 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8111 if (arm_return_in_memory (gdbarch, valtype))
8112 return RETURN_VALUE_STRUCT_CONVENTION;
8116 arm_store_return_value (valtype, regcache, writebuf);
8119 arm_extract_return_value (valtype, regcache, readbuf);
8121 return RETURN_VALUE_REGISTER_CONVENTION;
8126 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8128 struct gdbarch *gdbarch = get_frame_arch (frame);
8129 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8130 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8132 gdb_byte buf[INT_REGISTER_SIZE];
8134 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8136 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8140 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8144 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8145 return the target PC. Otherwise return 0. */
8148 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8152 CORE_ADDR start_addr;
8154 /* Find the starting address and name of the function containing the PC. */
8155 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8157 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8159 start_addr = arm_skip_bx_reg (frame, pc);
8160 if (start_addr != 0)
8166 /* If PC is in a Thumb call or return stub, return the address of the
8167 target PC, which is in a register. The thunk functions are called
8168 _call_via_xx, where x is the register name. The possible names
8169 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8170 functions, named __ARM_call_via_r[0-7]. */
8171 if (startswith (name, "_call_via_")
8172 || startswith (name, "__ARM_call_via_"))
8174 /* Use the name suffix to determine which register contains the
8176 static char *table[15] =
8177 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8178 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8181 int offset = strlen (name) - 2;
8183 for (regno = 0; regno <= 14; regno++)
8184 if (strcmp (&name[offset], table[regno]) == 0)
8185 return get_frame_register_unsigned (frame, regno);
8188 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8189 non-interworking calls to foo. We could decode the stubs
8190 to find the target but it's easier to use the symbol table. */
8191 namelen = strlen (name);
8192 if (name[0] == '_' && name[1] == '_'
8193 && ((namelen > 2 + strlen ("_from_thumb")
8194 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8195 || (namelen > 2 + strlen ("_from_arm")
8196 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8199 int target_len = namelen - 2;
8200 struct bound_minimal_symbol minsym;
8201 struct objfile *objfile;
8202 struct obj_section *sec;
8204 if (name[namelen - 1] == 'b')
8205 target_len -= strlen ("_from_thumb");
8207 target_len -= strlen ("_from_arm");
8209 target_name = (char *) alloca (target_len + 1);
8210 memcpy (target_name, name + 2, target_len);
8211 target_name[target_len] = '\0';
8213 sec = find_pc_section (pc);
8214 objfile = (sec == NULL) ? NULL : sec->objfile;
8215 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8216 if (minsym.minsym != NULL)
8217 return BMSYMBOL_VALUE_ADDRESS (minsym);
8222 return 0; /* not a stub */
8226 set_arm_command (char *args, int from_tty)
8228 printf_unfiltered (_("\
8229 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8230 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8234 show_arm_command (char *args, int from_tty)
8236 cmd_show_list (showarmcmdlist, from_tty, "");
8240 arm_update_current_architecture (void)
8242 struct gdbarch_info info;
8244 /* If the current architecture is not ARM, we have nothing to do. */
8245 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8248 /* Update the architecture. */
8249 gdbarch_info_init (&info);
8251 if (!gdbarch_update_p (info))
8252 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8256 set_fp_model_sfunc (char *args, int from_tty,
8257 struct cmd_list_element *c)
8261 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8262 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8264 arm_fp_model = (enum arm_float_model) fp_model;
8268 if (fp_model == ARM_FLOAT_LAST)
8269 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8272 arm_update_current_architecture ();
8276 show_fp_model (struct ui_file *file, int from_tty,
8277 struct cmd_list_element *c, const char *value)
8279 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8281 if (arm_fp_model == ARM_FLOAT_AUTO
8282 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8283 fprintf_filtered (file, _("\
8284 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8285 fp_model_strings[tdep->fp_model]);
8287 fprintf_filtered (file, _("\
8288 The current ARM floating point model is \"%s\".\n"),
8289 fp_model_strings[arm_fp_model]);
8293 arm_set_abi (char *args, int from_tty,
8294 struct cmd_list_element *c)
8298 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8299 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8301 arm_abi_global = (enum arm_abi_kind) arm_abi;
8305 if (arm_abi == ARM_ABI_LAST)
8306 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8309 arm_update_current_architecture ();
8313 arm_show_abi (struct ui_file *file, int from_tty,
8314 struct cmd_list_element *c, const char *value)
8316 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8318 if (arm_abi_global == ARM_ABI_AUTO
8319 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8320 fprintf_filtered (file, _("\
8321 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8322 arm_abi_strings[tdep->arm_abi]);
8324 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8329 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8330 struct cmd_list_element *c, const char *value)
8332 fprintf_filtered (file,
8333 _("The current execution mode assumed "
8334 "(when symbols are unavailable) is \"%s\".\n"),
8335 arm_fallback_mode_string);
8339 arm_show_force_mode (struct ui_file *file, int from_tty,
8340 struct cmd_list_element *c, const char *value)
8342 fprintf_filtered (file,
8343 _("The current execution mode assumed "
8344 "(even when symbols are available) is \"%s\".\n"),
8345 arm_force_mode_string);
8348 /* If the user changes the register disassembly style used for info
8349 register and other commands, we have to also switch the style used
8350 in opcodes for disassembly output. This function is run in the "set
8351 arm disassembly" command, and does that. */
8354 set_disassembly_style_sfunc (char *args, int from_tty,
8355 struct cmd_list_element *c)
8357 set_disassembly_style ();
8360 /* Return the ARM register name corresponding to register I. */
8362 arm_register_name (struct gdbarch *gdbarch, int i)
8364 const int num_regs = gdbarch_num_regs (gdbarch);
8366 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8367 && i >= num_regs && i < num_regs + 32)
8369 static const char *const vfp_pseudo_names[] = {
8370 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8371 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8372 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8373 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8376 return vfp_pseudo_names[i - num_regs];
8379 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8380 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8382 static const char *const neon_pseudo_names[] = {
8383 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8384 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8387 return neon_pseudo_names[i - num_regs - 32];
8390 if (i >= ARRAY_SIZE (arm_register_names))
8391 /* These registers are only supported on targets which supply
8392 an XML description. */
8395 return arm_register_names[i];
8399 set_disassembly_style (void)
8403 /* Find the style that the user wants. */
8404 for (current = 0; current < num_disassembly_options; current++)
8405 if (disassembly_style == valid_disassembly_styles[current])
8407 gdb_assert (current < num_disassembly_options);
8409 /* Synchronize the disassembler. */
8410 set_arm_regname_option (current);
8413 /* Test whether the coff symbol specific value corresponds to a Thumb
8417 coff_sym_is_thumb (int val)
8419 return (val == C_THUMBEXT
8420 || val == C_THUMBSTAT
8421 || val == C_THUMBEXTFUNC
8422 || val == C_THUMBSTATFUNC
8423 || val == C_THUMBLABEL);
8426 /* arm_coff_make_msymbol_special()
8427 arm_elf_make_msymbol_special()
8429 These functions test whether the COFF or ELF symbol corresponds to
8430 an address in thumb code, and set a "special" bit in a minimal
8431 symbol to indicate that it does. */
8434 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8436 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
8437 == ST_BRANCH_TO_THUMB)
8438 MSYMBOL_SET_SPECIAL (msym);
8442 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8444 if (coff_sym_is_thumb (val))
8445 MSYMBOL_SET_SPECIAL (msym);
8449 arm_objfile_data_free (struct objfile *objfile, void *arg)
8451 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8454 for (i = 0; i < objfile->obfd->section_count; i++)
8455 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8459 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8462 const char *name = bfd_asymbol_name (sym);
8463 struct arm_per_objfile *data;
8464 VEC(arm_mapping_symbol_s) **map_p;
8465 struct arm_mapping_symbol new_map_sym;
8467 gdb_assert (name[0] == '$');
8468 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8471 data = (struct arm_per_objfile *) objfile_data (objfile,
8472 arm_objfile_data_key);
8475 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8476 struct arm_per_objfile);
8477 set_objfile_data (objfile, arm_objfile_data_key, data);
8478 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8479 objfile->obfd->section_count,
8480 VEC(arm_mapping_symbol_s) *);
8482 map_p = &data->section_maps[bfd_get_section (sym)->index];
8484 new_map_sym.value = sym->value;
8485 new_map_sym.type = name[1];
8487 /* Assume that most mapping symbols appear in order of increasing
8488 value. If they were randomly distributed, it would be faster to
8489 always push here and then sort at first use. */
8490 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8492 struct arm_mapping_symbol *prev_map_sym;
8494 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8495 if (prev_map_sym->value >= sym->value)
8498 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8499 arm_compare_mapping_symbols);
8500 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8505 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8509 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8511 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8512 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8514 /* If necessary, set the T bit. */
8517 ULONGEST val, t_bit;
8518 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8519 t_bit = arm_psr_thumb_bit (gdbarch);
8520 if (arm_pc_is_thumb (gdbarch, pc))
8521 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8524 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8529 /* Read the contents of a NEON quad register, by reading from two
8530 double registers. This is used to implement the quad pseudo
8531 registers, and for argument passing in case the quad registers are
8532 missing; vectors are passed in quad registers when using the VFP
8533 ABI, even if a NEON unit is not present. REGNUM is the index of
8534 the quad register, in [0, 15]. */
8536 static enum register_status
8537 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8538 int regnum, gdb_byte *buf)
8541 gdb_byte reg_buf[8];
8542 int offset, double_regnum;
8543 enum register_status status;
8545 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8546 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8549 /* d0 is always the least significant half of q0. */
8550 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8555 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8556 if (status != REG_VALID)
8558 memcpy (buf + offset, reg_buf, 8);
8560 offset = 8 - offset;
8561 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8562 if (status != REG_VALID)
8564 memcpy (buf + offset, reg_buf, 8);
8569 static enum register_status
8570 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8571 int regnum, gdb_byte *buf)
8573 const int num_regs = gdbarch_num_regs (gdbarch);
8575 gdb_byte reg_buf[8];
8576 int offset, double_regnum;
8578 gdb_assert (regnum >= num_regs);
8581 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8582 /* Quad-precision register. */
8583 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8586 enum register_status status;
8588 /* Single-precision register. */
8589 gdb_assert (regnum < 32);
8591 /* s0 is always the least significant half of d0. */
8592 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8593 offset = (regnum & 1) ? 0 : 4;
8595 offset = (regnum & 1) ? 4 : 0;
8597 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8598 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8601 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8602 if (status == REG_VALID)
8603 memcpy (buf, reg_buf + offset, 4);
8608 /* Store the contents of BUF to a NEON quad register, by writing to
8609 two double registers. This is used to implement the quad pseudo
8610 registers, and for argument passing in case the quad registers are
8611 missing; vectors are passed in quad registers when using the VFP
8612 ABI, even if a NEON unit is not present. REGNUM is the index
8613 of the quad register, in [0, 15]. */
8616 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8617 int regnum, const gdb_byte *buf)
8620 int offset, double_regnum;
8622 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8623 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8626 /* d0 is always the least significant half of q0. */
8627 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8632 regcache_raw_write (regcache, double_regnum, buf + offset);
8633 offset = 8 - offset;
8634 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8638 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8639 int regnum, const gdb_byte *buf)
8641 const int num_regs = gdbarch_num_regs (gdbarch);
8643 gdb_byte reg_buf[8];
8644 int offset, double_regnum;
8646 gdb_assert (regnum >= num_regs);
8649 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8650 /* Quad-precision register. */
8651 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8654 /* Single-precision register. */
8655 gdb_assert (regnum < 32);
8657 /* s0 is always the least significant half of d0. */
8658 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8659 offset = (regnum & 1) ? 0 : 4;
8661 offset = (regnum & 1) ? 4 : 0;
8663 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8664 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8667 regcache_raw_read (regcache, double_regnum, reg_buf);
8668 memcpy (reg_buf + offset, buf, 4);
8669 regcache_raw_write (regcache, double_regnum, reg_buf);
8673 static struct value *
8674 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8676 const int *reg_p = (const int *) baton;
8677 return value_of_register (*reg_p, frame);
8680 static enum gdb_osabi
8681 arm_elf_osabi_sniffer (bfd *abfd)
8683 unsigned int elfosabi;
8684 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8686 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8688 if (elfosabi == ELFOSABI_ARM)
8689 /* GNU tools use this value. Check note sections in this case,
8691 bfd_map_over_sections (abfd,
8692 generic_elf_osabi_sniff_abi_tag_sections,
8695 /* Anything else will be handled by the generic ELF sniffer. */
8700 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8701 struct reggroup *group)
8703 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8704 this, FPS register belongs to save_regroup, restore_reggroup, and
8705 all_reggroup, of course. */
8706 if (regnum == ARM_FPS_REGNUM)
8707 return (group == float_reggroup
8708 || group == save_reggroup
8709 || group == restore_reggroup
8710 || group == all_reggroup);
8712 return default_register_reggroup_p (gdbarch, regnum, group);
8716 /* For backward-compatibility we allow two 'g' packet lengths with
8717 the remote protocol depending on whether FPA registers are
8718 supplied. M-profile targets do not have FPA registers, but some
8719 stubs already exist in the wild which use a 'g' packet which
8720 supplies them albeit with dummy values. The packet format which
8721 includes FPA registers should be considered deprecated for
8722 M-profile targets. */
8725 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8727 if (gdbarch_tdep (gdbarch)->is_m)
8729 /* If we know from the executable this is an M-profile target,
8730 cater for remote targets whose register set layout is the
8731 same as the FPA layout. */
8732 register_remote_g_packet_guess (gdbarch,
8733 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8734 (16 * INT_REGISTER_SIZE)
8735 + (8 * FP_REGISTER_SIZE)
8736 + (2 * INT_REGISTER_SIZE),
8737 tdesc_arm_with_m_fpa_layout);
8739 /* The regular M-profile layout. */
8740 register_remote_g_packet_guess (gdbarch,
8741 /* r0-r12,sp,lr,pc; xpsr */
8742 (16 * INT_REGISTER_SIZE)
8743 + INT_REGISTER_SIZE,
8746 /* M-profile plus M4F VFP. */
8747 register_remote_g_packet_guess (gdbarch,
8748 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8749 (16 * INT_REGISTER_SIZE)
8750 + (16 * VFP_REGISTER_SIZE)
8751 + (2 * INT_REGISTER_SIZE),
8752 tdesc_arm_with_m_vfp_d16);
8755 /* Otherwise we don't have a useful guess. */
8759 /* Initialize the current architecture based on INFO. If possible,
8760 re-use an architecture from ARCHES, which is a list of
8761 architectures already created during this debugging session.
8763 Called e.g. at program startup, when reading a core file, and when
8764 reading a binary file. */
8766 static struct gdbarch *
8767 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8769 struct gdbarch_tdep *tdep;
8770 struct gdbarch *gdbarch;
8771 struct gdbarch_list *best_arch;
8772 enum arm_abi_kind arm_abi = arm_abi_global;
8773 enum arm_float_model fp_model = arm_fp_model;
8774 struct tdesc_arch_data *tdesc_data = NULL;
8776 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8777 int have_wmmx_registers = 0;
8779 int have_fpa_registers = 1;
8780 const struct target_desc *tdesc = info.target_desc;
8782 /* If we have an object to base this architecture on, try to determine
8785 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8787 int ei_osabi, e_flags;
8789 switch (bfd_get_flavour (info.abfd))
8791 case bfd_target_aout_flavour:
8792 /* Assume it's an old APCS-style ABI. */
8793 arm_abi = ARM_ABI_APCS;
8796 case bfd_target_coff_flavour:
8797 /* Assume it's an old APCS-style ABI. */
8799 arm_abi = ARM_ABI_APCS;
8802 case bfd_target_elf_flavour:
8803 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8804 e_flags = elf_elfheader (info.abfd)->e_flags;
8806 if (ei_osabi == ELFOSABI_ARM)
8808 /* GNU tools used to use this value, but do not for EABI
8809 objects. There's nowhere to tag an EABI version
8810 anyway, so assume APCS. */
8811 arm_abi = ARM_ABI_APCS;
8813 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8815 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8816 int attr_arch, attr_profile;
8820 case EF_ARM_EABI_UNKNOWN:
8821 /* Assume GNU tools. */
8822 arm_abi = ARM_ABI_APCS;
8825 case EF_ARM_EABI_VER4:
8826 case EF_ARM_EABI_VER5:
8827 arm_abi = ARM_ABI_AAPCS;
8828 /* EABI binaries default to VFP float ordering.
8829 They may also contain build attributes that can
8830 be used to identify if the VFP argument-passing
8832 if (fp_model == ARM_FLOAT_AUTO)
8835 switch (bfd_elf_get_obj_attr_int (info.abfd,
8839 case AEABI_VFP_args_base:
8840 /* "The user intended FP parameter/result
8841 passing to conform to AAPCS, base
8843 fp_model = ARM_FLOAT_SOFT_VFP;
8845 case AEABI_VFP_args_vfp:
8846 /* "The user intended FP parameter/result
8847 passing to conform to AAPCS, VFP
8849 fp_model = ARM_FLOAT_VFP;
8851 case AEABI_VFP_args_toolchain:
8852 /* "The user intended FP parameter/result
8853 passing to conform to tool chain-specific
8854 conventions" - we don't know any such
8855 conventions, so leave it as "auto". */
8857 case AEABI_VFP_args_compatible:
8858 /* "Code is compatible with both the base
8859 and VFP variants; the user did not permit
8860 non-variadic functions to pass FP
8861 parameters/results" - leave it as
8865 /* Attribute value not mentioned in the
8866 November 2012 ABI, so leave it as
8871 fp_model = ARM_FLOAT_SOFT_VFP;
8877 /* Leave it as "auto". */
8878 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8883 /* Detect M-profile programs. This only works if the
8884 executable file includes build attributes; GCC does
8885 copy them to the executable, but e.g. RealView does
8887 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8889 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8891 Tag_CPU_arch_profile);
8892 /* GCC specifies the profile for v6-M; RealView only
8893 specifies the profile for architectures starting with
8894 V7 (as opposed to architectures with a tag
8895 numerically greater than TAG_CPU_ARCH_V7). */
8896 if (!tdesc_has_registers (tdesc)
8897 && (attr_arch == TAG_CPU_ARCH_V6_M
8898 || attr_arch == TAG_CPU_ARCH_V6S_M
8899 || attr_profile == 'M'))
8904 if (fp_model == ARM_FLOAT_AUTO)
8906 int e_flags = elf_elfheader (info.abfd)->e_flags;
8908 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8911 /* Leave it as "auto". Strictly speaking this case
8912 means FPA, but almost nobody uses that now, and
8913 many toolchains fail to set the appropriate bits
8914 for the floating-point model they use. */
8916 case EF_ARM_SOFT_FLOAT:
8917 fp_model = ARM_FLOAT_SOFT_FPA;
8919 case EF_ARM_VFP_FLOAT:
8920 fp_model = ARM_FLOAT_VFP;
8922 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8923 fp_model = ARM_FLOAT_SOFT_VFP;
8928 if (e_flags & EF_ARM_BE8)
8929 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8934 /* Leave it as "auto". */
8939 /* Check any target description for validity. */
8940 if (tdesc_has_registers (tdesc))
8942 /* For most registers we require GDB's default names; but also allow
8943 the numeric names for sp / lr / pc, as a convenience. */
8944 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8945 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8946 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8948 const struct tdesc_feature *feature;
8951 feature = tdesc_find_feature (tdesc,
8952 "org.gnu.gdb.arm.core");
8953 if (feature == NULL)
8955 feature = tdesc_find_feature (tdesc,
8956 "org.gnu.gdb.arm.m-profile");
8957 if (feature == NULL)
8963 tdesc_data = tdesc_data_alloc ();
8966 for (i = 0; i < ARM_SP_REGNUM; i++)
8967 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8968 arm_register_names[i]);
8969 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8972 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8975 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
8979 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8980 ARM_PS_REGNUM, "xpsr");
8982 valid_p &= tdesc_numbered_register (feature, tdesc_data,
8983 ARM_PS_REGNUM, "cpsr");
8987 tdesc_data_cleanup (tdesc_data);
8991 feature = tdesc_find_feature (tdesc,
8992 "org.gnu.gdb.arm.fpa");
8993 if (feature != NULL)
8996 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
8997 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
8998 arm_register_names[i]);
9001 tdesc_data_cleanup (tdesc_data);
9006 have_fpa_registers = 0;
9008 feature = tdesc_find_feature (tdesc,
9009 "org.gnu.gdb.xscale.iwmmxt");
9010 if (feature != NULL)
9012 static const char *const iwmmxt_names[] = {
9013 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9014 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9015 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9016 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9020 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9022 &= tdesc_numbered_register (feature, tdesc_data, i,
9023 iwmmxt_names[i - ARM_WR0_REGNUM]);
9025 /* Check for the control registers, but do not fail if they
9027 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9028 tdesc_numbered_register (feature, tdesc_data, i,
9029 iwmmxt_names[i - ARM_WR0_REGNUM]);
9031 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9033 &= tdesc_numbered_register (feature, tdesc_data, i,
9034 iwmmxt_names[i - ARM_WR0_REGNUM]);
9038 tdesc_data_cleanup (tdesc_data);
9042 have_wmmx_registers = 1;
9045 /* If we have a VFP unit, check whether the single precision registers
9046 are present. If not, then we will synthesize them as pseudo
9048 feature = tdesc_find_feature (tdesc,
9049 "org.gnu.gdb.arm.vfp");
9050 if (feature != NULL)
9052 static const char *const vfp_double_names[] = {
9053 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9054 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9055 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9056 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9059 /* Require the double precision registers. There must be either
9062 for (i = 0; i < 32; i++)
9064 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9066 vfp_double_names[i]);
9070 if (!valid_p && i == 16)
9073 /* Also require FPSCR. */
9074 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9075 ARM_FPSCR_REGNUM, "fpscr");
9078 tdesc_data_cleanup (tdesc_data);
9082 if (tdesc_unnumbered_register (feature, "s0") == 0)
9083 have_vfp_pseudos = 1;
9085 vfp_register_count = i;
9087 /* If we have VFP, also check for NEON. The architecture allows
9088 NEON without VFP (integer vector operations only), but GDB
9089 does not support that. */
9090 feature = tdesc_find_feature (tdesc,
9091 "org.gnu.gdb.arm.neon");
9092 if (feature != NULL)
9094 /* NEON requires 32 double-precision registers. */
9097 tdesc_data_cleanup (tdesc_data);
9101 /* If there are quad registers defined by the stub, use
9102 their type; otherwise (normally) provide them with
9103 the default type. */
9104 if (tdesc_unnumbered_register (feature, "q0") == 0)
9105 have_neon_pseudos = 1;
9112 /* If there is already a candidate, use it. */
9113 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9115 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9117 if (arm_abi != ARM_ABI_AUTO
9118 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9121 if (fp_model != ARM_FLOAT_AUTO
9122 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9125 /* There are various other properties in tdep that we do not
9126 need to check here: those derived from a target description,
9127 since gdbarches with a different target description are
9128 automatically disqualified. */
9130 /* Do check is_m, though, since it might come from the binary. */
9131 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9134 /* Found a match. */
9138 if (best_arch != NULL)
9140 if (tdesc_data != NULL)
9141 tdesc_data_cleanup (tdesc_data);
9142 return best_arch->gdbarch;
9145 tdep = XCNEW (struct gdbarch_tdep);
9146 gdbarch = gdbarch_alloc (&info, tdep);
9148 /* Record additional information about the architecture we are defining.
9149 These are gdbarch discriminators, like the OSABI. */
9150 tdep->arm_abi = arm_abi;
9151 tdep->fp_model = fp_model;
9153 tdep->have_fpa_registers = have_fpa_registers;
9154 tdep->have_wmmx_registers = have_wmmx_registers;
9155 gdb_assert (vfp_register_count == 0
9156 || vfp_register_count == 16
9157 || vfp_register_count == 32);
9158 tdep->vfp_register_count = vfp_register_count;
9159 tdep->have_vfp_pseudos = have_vfp_pseudos;
9160 tdep->have_neon_pseudos = have_neon_pseudos;
9161 tdep->have_neon = have_neon;
9163 arm_register_g_packet_guesses (gdbarch);
9166 switch (info.byte_order_for_code)
9168 case BFD_ENDIAN_BIG:
9169 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9170 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9171 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9172 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9176 case BFD_ENDIAN_LITTLE:
9177 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9178 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9179 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9180 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9185 internal_error (__FILE__, __LINE__,
9186 _("arm_gdbarch_init: bad byte order for float format"));
9189 /* On ARM targets char defaults to unsigned. */
9190 set_gdbarch_char_signed (gdbarch, 0);
9192 /* Note: for displaced stepping, this includes the breakpoint, and one word
9193 of additional scratch space. This setting isn't used for anything beside
9194 displaced stepping at present. */
9195 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9197 /* This should be low enough for everything. */
9198 tdep->lowest_pc = 0x20;
9199 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9201 /* The default, for both APCS and AAPCS, is to return small
9202 structures in registers. */
9203 tdep->struct_return = reg_struct_return;
9205 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9206 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9208 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9210 /* Frame handling. */
9211 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9212 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9213 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9215 frame_base_set_default (gdbarch, &arm_normal_base);
9217 /* Address manipulation. */
9218 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9220 /* Advance PC across function entry code. */
9221 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9223 /* Detect whether PC is at a point where the stack has been destroyed. */
9224 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9226 /* Skip trampolines. */
9227 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9229 /* The stack grows downward. */
9230 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9232 /* Breakpoint manipulation. */
9233 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9234 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9235 arm_remote_breakpoint_from_pc);
9237 /* Information about registers, etc. */
9238 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9239 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9240 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9241 set_gdbarch_register_type (gdbarch, arm_register_type);
9242 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9244 /* This "info float" is FPA-specific. Use the generic version if we
9246 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9247 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9249 /* Internal <-> external register number maps. */
9250 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9251 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9253 set_gdbarch_register_name (gdbarch, arm_register_name);
9255 /* Returning results. */
9256 set_gdbarch_return_value (gdbarch, arm_return_value);
9259 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9261 /* Minsymbol frobbing. */
9262 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9263 set_gdbarch_coff_make_msymbol_special (gdbarch,
9264 arm_coff_make_msymbol_special);
9265 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9267 /* Thumb-2 IT block support. */
9268 set_gdbarch_adjust_breakpoint_address (gdbarch,
9269 arm_adjust_breakpoint_address);
9271 /* Virtual tables. */
9272 set_gdbarch_vbit_in_delta (gdbarch, 1);
9274 /* Hook in the ABI-specific overrides, if they have been registered. */
9275 gdbarch_init_osabi (info, gdbarch);
9277 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9279 /* Add some default predicates. */
9281 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9282 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9283 dwarf2_append_unwinders (gdbarch);
9284 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9285 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9287 /* Now we have tuned the configuration, set a few final things,
9288 based on what the OS ABI has told us. */
9290 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9291 binaries are always marked. */
9292 if (tdep->arm_abi == ARM_ABI_AUTO)
9293 tdep->arm_abi = ARM_ABI_APCS;
9295 /* Watchpoints are not steppable. */
9296 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9298 /* We used to default to FPA for generic ARM, but almost nobody
9299 uses that now, and we now provide a way for the user to force
9300 the model. So default to the most useful variant. */
9301 if (tdep->fp_model == ARM_FLOAT_AUTO)
9302 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9304 if (tdep->jb_pc >= 0)
9305 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9307 /* Floating point sizes and format. */
9308 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9309 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9311 set_gdbarch_double_format
9312 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9313 set_gdbarch_long_double_format
9314 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9318 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9319 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9322 if (have_vfp_pseudos)
9324 /* NOTE: These are the only pseudo registers used by
9325 the ARM target at the moment. If more are added, a
9326 little more care in numbering will be needed. */
9328 int num_pseudos = 32;
9329 if (have_neon_pseudos)
9331 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9332 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9333 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9338 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9340 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9342 /* Override tdesc_register_type to adjust the types of VFP
9343 registers for NEON. */
9344 set_gdbarch_register_type (gdbarch, arm_register_type);
9347 /* Add standard register aliases. We add aliases even for those
9348 nanes which are used by the current architecture - it's simpler,
9349 and does no harm, since nothing ever lists user registers. */
9350 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9351 user_reg_add (gdbarch, arm_register_aliases[i].name,
9352 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9358 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9360 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9365 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9366 (unsigned long) tdep->lowest_pc);
9369 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9372 _initialize_arm_tdep (void)
9374 struct ui_file *stb;
9376 const char *setname;
9377 const char *setdesc;
9378 const char *const *regnames;
9380 static char *helptext;
9381 char regdesc[1024], *rdptr = regdesc;
9382 size_t rest = sizeof (regdesc);
9384 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9386 arm_objfile_data_key
9387 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9389 /* Add ourselves to objfile event chain. */
9390 observer_attach_new_objfile (arm_exidx_new_objfile);
9392 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9394 /* Register an ELF OS ABI sniffer for ARM binaries. */
9395 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9396 bfd_target_elf_flavour,
9397 arm_elf_osabi_sniffer);
9399 /* Initialize the standard target descriptions. */
9400 initialize_tdesc_arm_with_m ();
9401 initialize_tdesc_arm_with_m_fpa_layout ();
9402 initialize_tdesc_arm_with_m_vfp_d16 ();
9403 initialize_tdesc_arm_with_iwmmxt ();
9404 initialize_tdesc_arm_with_vfpv2 ();
9405 initialize_tdesc_arm_with_vfpv3 ();
9406 initialize_tdesc_arm_with_neon ();
9408 /* Get the number of possible sets of register names defined in opcodes. */
9409 num_disassembly_options = get_arm_regname_num_options ();
9411 /* Add root prefix command for all "set arm"/"show arm" commands. */
9412 add_prefix_cmd ("arm", no_class, set_arm_command,
9413 _("Various ARM-specific commands."),
9414 &setarmcmdlist, "set arm ", 0, &setlist);
9416 add_prefix_cmd ("arm", no_class, show_arm_command,
9417 _("Various ARM-specific commands."),
9418 &showarmcmdlist, "show arm ", 0, &showlist);
9420 /* Sync the opcode insn printer with our register viewer. */
9421 parse_arm_disassembler_option ("reg-names-std");
9423 /* Initialize the array that will be passed to
9424 add_setshow_enum_cmd(). */
9425 valid_disassembly_styles = XNEWVEC (const char *,
9426 num_disassembly_options + 1);
9427 for (i = 0; i < num_disassembly_options; i++)
9429 get_arm_regnames (i, &setname, &setdesc, ®names);
9430 valid_disassembly_styles[i] = setname;
9431 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9434 /* When we find the default names, tell the disassembler to use
9436 if (!strcmp (setname, "std"))
9438 disassembly_style = setname;
9439 set_arm_regname_option (i);
9442 /* Mark the end of valid options. */
9443 valid_disassembly_styles[num_disassembly_options] = NULL;
9445 /* Create the help text. */
9446 stb = mem_fileopen ();
9447 fprintf_unfiltered (stb, "%s%s%s",
9448 _("The valid values are:\n"),
9450 _("The default is \"std\"."));
9451 helptext = ui_file_xstrdup (stb, NULL);
9452 ui_file_delete (stb);
9454 add_setshow_enum_cmd("disassembler", no_class,
9455 valid_disassembly_styles, &disassembly_style,
9456 _("Set the disassembly style."),
9457 _("Show the disassembly style."),
9459 set_disassembly_style_sfunc,
9460 NULL, /* FIXME: i18n: The disassembly style is
9462 &setarmcmdlist, &showarmcmdlist);
9464 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9465 _("Set usage of ARM 32-bit mode."),
9466 _("Show usage of ARM 32-bit mode."),
9467 _("When off, a 26-bit PC will be used."),
9469 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9471 &setarmcmdlist, &showarmcmdlist);
9473 /* Add a command to allow the user to force the FPU model. */
9474 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9475 _("Set the floating point type."),
9476 _("Show the floating point type."),
9477 _("auto - Determine the FP typefrom the OS-ABI.\n\
9478 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9479 fpa - FPA co-processor (GCC compiled).\n\
9480 softvfp - Software FP with pure-endian doubles.\n\
9481 vfp - VFP co-processor."),
9482 set_fp_model_sfunc, show_fp_model,
9483 &setarmcmdlist, &showarmcmdlist);
9485 /* Add a command to allow the user to force the ABI. */
9486 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9489 NULL, arm_set_abi, arm_show_abi,
9490 &setarmcmdlist, &showarmcmdlist);
9492 /* Add two commands to allow the user to force the assumed
9494 add_setshow_enum_cmd ("fallback-mode", class_support,
9495 arm_mode_strings, &arm_fallback_mode_string,
9496 _("Set the mode assumed when symbols are unavailable."),
9497 _("Show the mode assumed when symbols are unavailable."),
9498 NULL, NULL, arm_show_fallback_mode,
9499 &setarmcmdlist, &showarmcmdlist);
9500 add_setshow_enum_cmd ("force-mode", class_support,
9501 arm_mode_strings, &arm_force_mode_string,
9502 _("Set the mode assumed even when symbols are available."),
9503 _("Show the mode assumed even when symbols are available."),
9504 NULL, NULL, arm_show_force_mode,
9505 &setarmcmdlist, &showarmcmdlist);
9507 /* Debugging flag. */
9508 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9509 _("Set ARM debugging."),
9510 _("Show ARM debugging."),
9511 _("When on, arm-specific debugging is enabled."),
9513 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9514 &setdebuglist, &showdebuglist);
9517 /* ARM-reversible process record data structures. */
9519 #define ARM_INSN_SIZE_BYTES 4
9520 #define THUMB_INSN_SIZE_BYTES 2
9521 #define THUMB2_INSN_SIZE_BYTES 4
9524 /* Position of the bit within a 32-bit ARM instruction
9525 that defines whether the instruction is a load or store. */
9526 #define INSN_S_L_BIT_NUM 20
9528 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9531 unsigned int reg_len = LENGTH; \
9534 REGS = XNEWVEC (uint32_t, reg_len); \
9535 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9540 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9543 unsigned int mem_len = LENGTH; \
9546 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9547 memcpy(&MEMS->len, &RECORD_BUF[0], \
9548 sizeof(struct arm_mem_r) * LENGTH); \
9553 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9554 #define INSN_RECORDED(ARM_RECORD) \
9555 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9557 /* ARM memory record structure. */
9560 uint32_t len; /* Record length. */
9561 uint32_t addr; /* Memory address. */
9564 /* ARM instruction record contains opcode of current insn
9565 and execution state (before entry to decode_insn()),
9566 contains list of to-be-modified registers and
9567 memory blocks (on return from decode_insn()). */
9569 typedef struct insn_decode_record_t
9571 struct gdbarch *gdbarch;
9572 struct regcache *regcache;
9573 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9574 uint32_t arm_insn; /* Should accommodate thumb. */
9575 uint32_t cond; /* Condition code. */
9576 uint32_t opcode; /* Insn opcode. */
9577 uint32_t decode; /* Insn decode bits. */
9578 uint32_t mem_rec_count; /* No of mem records. */
9579 uint32_t reg_rec_count; /* No of reg records. */
9580 uint32_t *arm_regs; /* Registers to be saved for this record. */
9581 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9582 } insn_decode_record;
9585 /* Checks ARM SBZ and SBO mandatory fields. */
9588 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9590 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9609 enum arm_record_result
9611 ARM_RECORD_SUCCESS = 0,
9612 ARM_RECORD_FAILURE = 1
9619 } arm_record_strx_t;
9630 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9631 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9634 struct regcache *reg_cache = arm_insn_r->regcache;
9635 ULONGEST u_regval[2]= {0};
9637 uint32_t reg_src1 = 0, reg_src2 = 0;
9638 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9640 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9641 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9643 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9645 /* 1) Handle misc store, immediate offset. */
9646 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9647 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9648 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9649 regcache_raw_read_unsigned (reg_cache, reg_src1,
9651 if (ARM_PC_REGNUM == reg_src1)
9653 /* If R15 was used as Rn, hence current PC+8. */
9654 u_regval[0] = u_regval[0] + 8;
9656 offset_8 = (immed_high << 4) | immed_low;
9657 /* Calculate target store address. */
9658 if (14 == arm_insn_r->opcode)
9660 tgt_mem_addr = u_regval[0] + offset_8;
9664 tgt_mem_addr = u_regval[0] - offset_8;
9666 if (ARM_RECORD_STRH == str_type)
9668 record_buf_mem[0] = 2;
9669 record_buf_mem[1] = tgt_mem_addr;
9670 arm_insn_r->mem_rec_count = 1;
9672 else if (ARM_RECORD_STRD == str_type)
9674 record_buf_mem[0] = 4;
9675 record_buf_mem[1] = tgt_mem_addr;
9676 record_buf_mem[2] = 4;
9677 record_buf_mem[3] = tgt_mem_addr + 4;
9678 arm_insn_r->mem_rec_count = 2;
9681 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9683 /* 2) Store, register offset. */
9685 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9687 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9688 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9689 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9692 /* If R15 was used as Rn, hence current PC+8. */
9693 u_regval[0] = u_regval[0] + 8;
9695 /* Calculate target store address, Rn +/- Rm, register offset. */
9696 if (12 == arm_insn_r->opcode)
9698 tgt_mem_addr = u_regval[0] + u_regval[1];
9702 tgt_mem_addr = u_regval[1] - u_regval[0];
9704 if (ARM_RECORD_STRH == str_type)
9706 record_buf_mem[0] = 2;
9707 record_buf_mem[1] = tgt_mem_addr;
9708 arm_insn_r->mem_rec_count = 1;
9710 else if (ARM_RECORD_STRD == str_type)
9712 record_buf_mem[0] = 4;
9713 record_buf_mem[1] = tgt_mem_addr;
9714 record_buf_mem[2] = 4;
9715 record_buf_mem[3] = tgt_mem_addr + 4;
9716 arm_insn_r->mem_rec_count = 2;
9719 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9720 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9722 /* 3) Store, immediate pre-indexed. */
9723 /* 5) Store, immediate post-indexed. */
9724 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9725 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9726 offset_8 = (immed_high << 4) | immed_low;
9727 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9728 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9729 /* Calculate target store address, Rn +/- Rm, register offset. */
9730 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9732 tgt_mem_addr = u_regval[0] + offset_8;
9736 tgt_mem_addr = u_regval[0] - offset_8;
9738 if (ARM_RECORD_STRH == str_type)
9740 record_buf_mem[0] = 2;
9741 record_buf_mem[1] = tgt_mem_addr;
9742 arm_insn_r->mem_rec_count = 1;
9744 else if (ARM_RECORD_STRD == str_type)
9746 record_buf_mem[0] = 4;
9747 record_buf_mem[1] = tgt_mem_addr;
9748 record_buf_mem[2] = 4;
9749 record_buf_mem[3] = tgt_mem_addr + 4;
9750 arm_insn_r->mem_rec_count = 2;
9752 /* Record Rn also as it changes. */
9753 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9754 arm_insn_r->reg_rec_count = 1;
9756 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9757 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9759 /* 4) Store, register pre-indexed. */
9760 /* 6) Store, register post -indexed. */
9761 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9762 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9763 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9764 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9765 /* Calculate target store address, Rn +/- Rm, register offset. */
9766 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9768 tgt_mem_addr = u_regval[0] + u_regval[1];
9772 tgt_mem_addr = u_regval[1] - u_regval[0];
9774 if (ARM_RECORD_STRH == str_type)
9776 record_buf_mem[0] = 2;
9777 record_buf_mem[1] = tgt_mem_addr;
9778 arm_insn_r->mem_rec_count = 1;
9780 else if (ARM_RECORD_STRD == str_type)
9782 record_buf_mem[0] = 4;
9783 record_buf_mem[1] = tgt_mem_addr;
9784 record_buf_mem[2] = 4;
9785 record_buf_mem[3] = tgt_mem_addr + 4;
9786 arm_insn_r->mem_rec_count = 2;
9788 /* Record Rn also as it changes. */
9789 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9790 arm_insn_r->reg_rec_count = 1;
9795 /* Handling ARM extension space insns. */
9798 arm_record_extension_space (insn_decode_record *arm_insn_r)
9800 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9801 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9802 uint32_t record_buf[8], record_buf_mem[8];
9803 uint32_t reg_src1 = 0;
9804 struct regcache *reg_cache = arm_insn_r->regcache;
9805 ULONGEST u_regval = 0;
9807 gdb_assert (!INSN_RECORDED(arm_insn_r));
9808 /* Handle unconditional insn extension space. */
9810 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9811 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9812 if (arm_insn_r->cond)
9814 /* PLD has no affect on architectural state, it just affects
9816 if (5 == ((opcode1 & 0xE0) >> 5))
9819 record_buf[0] = ARM_PS_REGNUM;
9820 record_buf[1] = ARM_LR_REGNUM;
9821 arm_insn_r->reg_rec_count = 2;
9823 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9827 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9828 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9831 /* Undefined instruction on ARM V5; need to handle if later
9832 versions define it. */
9835 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9836 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9837 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9839 /* Handle arithmetic insn extension space. */
9840 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9841 && !INSN_RECORDED(arm_insn_r))
9843 /* Handle MLA(S) and MUL(S). */
9844 if (0 <= insn_op1 && 3 >= insn_op1)
9846 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9847 record_buf[1] = ARM_PS_REGNUM;
9848 arm_insn_r->reg_rec_count = 2;
9850 else if (4 <= insn_op1 && 15 >= insn_op1)
9852 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9853 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9854 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9855 record_buf[2] = ARM_PS_REGNUM;
9856 arm_insn_r->reg_rec_count = 3;
9860 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9861 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9862 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9864 /* Handle control insn extension space. */
9866 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9867 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9869 if (!bit (arm_insn_r->arm_insn,25))
9871 if (!bits (arm_insn_r->arm_insn, 4, 7))
9873 if ((0 == insn_op1) || (2 == insn_op1))
9876 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9877 arm_insn_r->reg_rec_count = 1;
9879 else if (1 == insn_op1)
9881 /* CSPR is going to be changed. */
9882 record_buf[0] = ARM_PS_REGNUM;
9883 arm_insn_r->reg_rec_count = 1;
9885 else if (3 == insn_op1)
9887 /* SPSR is going to be changed. */
9888 /* We need to get SPSR value, which is yet to be done. */
9889 printf_unfiltered (_("Process record does not support "
9890 "instruction 0x%0x at address %s.\n"),
9891 arm_insn_r->arm_insn,
9892 paddress (arm_insn_r->gdbarch,
9893 arm_insn_r->this_addr));
9897 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9902 record_buf[0] = ARM_PS_REGNUM;
9903 arm_insn_r->reg_rec_count = 1;
9905 else if (3 == insn_op1)
9908 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9909 arm_insn_r->reg_rec_count = 1;
9912 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9915 record_buf[0] = ARM_PS_REGNUM;
9916 record_buf[1] = ARM_LR_REGNUM;
9917 arm_insn_r->reg_rec_count = 2;
9919 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9921 /* QADD, QSUB, QDADD, QDSUB */
9922 record_buf[0] = ARM_PS_REGNUM;
9923 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9924 arm_insn_r->reg_rec_count = 2;
9926 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
9929 record_buf[0] = ARM_PS_REGNUM;
9930 record_buf[1] = ARM_LR_REGNUM;
9931 arm_insn_r->reg_rec_count = 2;
9933 /* Save SPSR also;how? */
9934 printf_unfiltered (_("Process record does not support "
9935 "instruction 0x%0x at address %s.\n"),
9936 arm_insn_r->arm_insn,
9937 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
9940 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
9941 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
9942 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
9943 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
9946 if (0 == insn_op1 || 1 == insn_op1)
9948 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
9949 /* We dont do optimization for SMULW<y> where we
9951 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9952 record_buf[1] = ARM_PS_REGNUM;
9953 arm_insn_r->reg_rec_count = 2;
9955 else if (2 == insn_op1)
9958 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9959 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
9960 arm_insn_r->reg_rec_count = 2;
9962 else if (3 == insn_op1)
9965 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9966 arm_insn_r->reg_rec_count = 1;
9972 /* MSR : immediate form. */
9975 /* CSPR is going to be changed. */
9976 record_buf[0] = ARM_PS_REGNUM;
9977 arm_insn_r->reg_rec_count = 1;
9979 else if (3 == insn_op1)
9981 /* SPSR is going to be changed. */
9982 /* we need to get SPSR value, which is yet to be done */
9983 printf_unfiltered (_("Process record does not support "
9984 "instruction 0x%0x at address %s.\n"),
9985 arm_insn_r->arm_insn,
9986 paddress (arm_insn_r->gdbarch,
9987 arm_insn_r->this_addr));
9993 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9994 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
9995 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
9997 /* Handle load/store insn extension space. */
9999 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10000 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10001 && !INSN_RECORDED(arm_insn_r))
10006 /* These insn, changes register and memory as well. */
10007 /* SWP or SWPB insn. */
10008 /* Get memory address given by Rn. */
10009 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10010 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10011 /* SWP insn ?, swaps word. */
10012 if (8 == arm_insn_r->opcode)
10014 record_buf_mem[0] = 4;
10018 /* SWPB insn, swaps only byte. */
10019 record_buf_mem[0] = 1;
10021 record_buf_mem[1] = u_regval;
10022 arm_insn_r->mem_rec_count = 1;
10023 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10024 arm_insn_r->reg_rec_count = 1;
10026 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10029 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10032 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10035 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10036 record_buf[1] = record_buf[0] + 1;
10037 arm_insn_r->reg_rec_count = 2;
10039 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10042 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10045 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10047 /* LDRH, LDRSB, LDRSH. */
10048 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10049 arm_insn_r->reg_rec_count = 1;
10054 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10055 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10056 && !INSN_RECORDED(arm_insn_r))
10059 /* Handle coprocessor insn extension space. */
10062 /* To be done for ARMv5 and later; as of now we return -1. */
10064 printf_unfiltered (_("Process record does not support instruction x%0x "
10065 "at address %s.\n"),arm_insn_r->arm_insn,
10066 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10069 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10070 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10075 /* Handling opcode 000 insns. */
10078 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10080 struct regcache *reg_cache = arm_insn_r->regcache;
10081 uint32_t record_buf[8], record_buf_mem[8];
10082 ULONGEST u_regval[2] = {0};
10084 uint32_t reg_src1 = 0, reg_dest = 0;
10085 uint32_t opcode1 = 0;
10087 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10088 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10089 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10091 /* Data processing insn /multiply insn. */
10092 if (9 == arm_insn_r->decode
10093 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10094 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10096 /* Handle multiply instructions. */
10097 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10098 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10100 /* Handle MLA and MUL. */
10101 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10102 record_buf[1] = ARM_PS_REGNUM;
10103 arm_insn_r->reg_rec_count = 2;
10105 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10107 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10108 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10109 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10110 record_buf[2] = ARM_PS_REGNUM;
10111 arm_insn_r->reg_rec_count = 3;
10114 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10115 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10117 /* Handle misc load insns, as 20th bit (L = 1). */
10118 /* LDR insn has a capability to do branching, if
10119 MOV LR, PC is precceded by LDR insn having Rn as R15
10120 in that case, it emulates branch and link insn, and hence we
10121 need to save CSPR and PC as well. I am not sure this is right
10122 place; as opcode = 010 LDR insn make this happen, if R15 was
10124 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10125 if (15 != reg_dest)
10127 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10128 arm_insn_r->reg_rec_count = 1;
10132 record_buf[0] = reg_dest;
10133 record_buf[1] = ARM_PS_REGNUM;
10134 arm_insn_r->reg_rec_count = 2;
10137 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10138 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10139 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10140 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10142 /* Handle MSR insn. */
10143 if (9 == arm_insn_r->opcode)
10145 /* CSPR is going to be changed. */
10146 record_buf[0] = ARM_PS_REGNUM;
10147 arm_insn_r->reg_rec_count = 1;
10151 /* SPSR is going to be changed. */
10152 /* How to read SPSR value? */
10153 printf_unfiltered (_("Process record does not support instruction "
10154 "0x%0x at address %s.\n"),
10155 arm_insn_r->arm_insn,
10156 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10160 else if (9 == arm_insn_r->decode
10161 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10162 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10164 /* Handling SWP, SWPB. */
10165 /* These insn, changes register and memory as well. */
10166 /* SWP or SWPB insn. */
10168 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10169 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10170 /* SWP insn ?, swaps word. */
10171 if (8 == arm_insn_r->opcode)
10173 record_buf_mem[0] = 4;
10177 /* SWPB insn, swaps only byte. */
10178 record_buf_mem[0] = 1;
10180 record_buf_mem[1] = u_regval[0];
10181 arm_insn_r->mem_rec_count = 1;
10182 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10183 arm_insn_r->reg_rec_count = 1;
10185 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10186 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10188 /* Handle BLX, branch and link/exchange. */
10189 if (9 == arm_insn_r->opcode)
10191 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10192 and R14 stores the return address. */
10193 record_buf[0] = ARM_PS_REGNUM;
10194 record_buf[1] = ARM_LR_REGNUM;
10195 arm_insn_r->reg_rec_count = 2;
10198 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10200 /* Handle enhanced software breakpoint insn, BKPT. */
10201 /* CPSR is changed to be executed in ARM state, disabling normal
10202 interrupts, entering abort mode. */
10203 /* According to high vector configuration PC is set. */
10204 /* user hit breakpoint and type reverse, in
10205 that case, we need to go back with previous CPSR and
10206 Program Counter. */
10207 record_buf[0] = ARM_PS_REGNUM;
10208 record_buf[1] = ARM_LR_REGNUM;
10209 arm_insn_r->reg_rec_count = 2;
10211 /* Save SPSR also; how? */
10212 printf_unfiltered (_("Process record does not support instruction "
10213 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10214 paddress (arm_insn_r->gdbarch,
10215 arm_insn_r->this_addr));
10218 else if (11 == arm_insn_r->decode
10219 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10221 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10223 /* Handle str(x) insn */
10224 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10227 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10228 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10230 /* Handle BX, branch and link/exchange. */
10231 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10232 record_buf[0] = ARM_PS_REGNUM;
10233 arm_insn_r->reg_rec_count = 1;
10235 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10236 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10237 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10239 /* Count leading zeros: CLZ. */
10240 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10241 arm_insn_r->reg_rec_count = 1;
10243 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10244 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10245 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10246 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10249 /* Handle MRS insn. */
10250 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10251 arm_insn_r->reg_rec_count = 1;
10253 else if (arm_insn_r->opcode <= 15)
10255 /* Normal data processing insns. */
10256 /* Out of 11 shifter operands mode, all the insn modifies destination
10257 register, which is specified by 13-16 decode. */
10258 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10259 record_buf[1] = ARM_PS_REGNUM;
10260 arm_insn_r->reg_rec_count = 2;
10267 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10268 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10272 /* Handling opcode 001 insns. */
10275 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10277 uint32_t record_buf[8], record_buf_mem[8];
10279 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10280 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10282 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10283 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10284 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10287 /* Handle MSR insn. */
10288 if (9 == arm_insn_r->opcode)
10290 /* CSPR is going to be changed. */
10291 record_buf[0] = ARM_PS_REGNUM;
10292 arm_insn_r->reg_rec_count = 1;
10296 /* SPSR is going to be changed. */
10299 else if (arm_insn_r->opcode <= 15)
10301 /* Normal data processing insns. */
10302 /* Out of 11 shifter operands mode, all the insn modifies destination
10303 register, which is specified by 13-16 decode. */
10304 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10305 record_buf[1] = ARM_PS_REGNUM;
10306 arm_insn_r->reg_rec_count = 2;
10313 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10314 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10318 /* Handle ARM mode instructions with opcode 010. */
10321 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10323 struct regcache *reg_cache = arm_insn_r->regcache;
10325 uint32_t reg_base , reg_dest;
10326 uint32_t offset_12, tgt_mem_addr;
10327 uint32_t record_buf[8], record_buf_mem[8];
10328 unsigned char wback;
10331 /* Calculate wback. */
10332 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10333 || (bit (arm_insn_r->arm_insn, 21) == 1);
10335 arm_insn_r->reg_rec_count = 0;
10336 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10338 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10340 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10343 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10344 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10346 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10347 preceeds a LDR instruction having R15 as reg_base, it
10348 emulates a branch and link instruction, and hence we need to save
10349 CPSR and PC as well. */
10350 if (ARM_PC_REGNUM == reg_dest)
10351 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10353 /* If wback is true, also save the base register, which is going to be
10356 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10360 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10362 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10363 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10365 /* Handle bit U. */
10366 if (bit (arm_insn_r->arm_insn, 23))
10368 /* U == 1: Add the offset. */
10369 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10373 /* U == 0: subtract the offset. */
10374 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10377 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10379 if (bit (arm_insn_r->arm_insn, 22))
10381 /* STRB and STRBT: 1 byte. */
10382 record_buf_mem[0] = 1;
10386 /* STR and STRT: 4 bytes. */
10387 record_buf_mem[0] = 4;
10390 /* Handle bit P. */
10391 if (bit (arm_insn_r->arm_insn, 24))
10392 record_buf_mem[1] = tgt_mem_addr;
10394 record_buf_mem[1] = (uint32_t) u_regval;
10396 arm_insn_r->mem_rec_count = 1;
10398 /* If wback is true, also save the base register, which is going to be
10401 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10404 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10405 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10409 /* Handling opcode 011 insns. */
10412 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10414 struct regcache *reg_cache = arm_insn_r->regcache;
10416 uint32_t shift_imm = 0;
10417 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10418 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10419 uint32_t record_buf[8], record_buf_mem[8];
10422 ULONGEST u_regval[2];
10424 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10425 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10427 /* Handle enhanced store insns and LDRD DSP insn,
10428 order begins according to addressing modes for store insns
10432 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10434 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10435 /* LDR insn has a capability to do branching, if
10436 MOV LR, PC is precedded by LDR insn having Rn as R15
10437 in that case, it emulates branch and link insn, and hence we
10438 need to save CSPR and PC as well. */
10439 if (15 != reg_dest)
10441 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10442 arm_insn_r->reg_rec_count = 1;
10446 record_buf[0] = reg_dest;
10447 record_buf[1] = ARM_PS_REGNUM;
10448 arm_insn_r->reg_rec_count = 2;
10453 if (! bits (arm_insn_r->arm_insn, 4, 11))
10455 /* Store insn, register offset and register pre-indexed,
10456 register post-indexed. */
10458 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10460 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10461 regcache_raw_read_unsigned (reg_cache, reg_src1
10463 regcache_raw_read_unsigned (reg_cache, reg_src2
10465 if (15 == reg_src2)
10467 /* If R15 was used as Rn, hence current PC+8. */
10468 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10469 u_regval[0] = u_regval[0] + 8;
10471 /* Calculate target store address, Rn +/- Rm, register offset. */
10473 if (bit (arm_insn_r->arm_insn, 23))
10475 tgt_mem_addr = u_regval[0] + u_regval[1];
10479 tgt_mem_addr = u_regval[1] - u_regval[0];
10482 switch (arm_insn_r->opcode)
10496 record_buf_mem[0] = 4;
10511 record_buf_mem[0] = 1;
10515 gdb_assert_not_reached ("no decoding pattern found");
10518 record_buf_mem[1] = tgt_mem_addr;
10519 arm_insn_r->mem_rec_count = 1;
10521 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10522 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10523 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10524 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10525 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10526 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10529 /* Rn is going to be changed in pre-indexed mode and
10530 post-indexed mode as well. */
10531 record_buf[0] = reg_src2;
10532 arm_insn_r->reg_rec_count = 1;
10537 /* Store insn, scaled register offset; scaled pre-indexed. */
10538 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10540 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10542 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10543 /* Get shift_imm. */
10544 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10545 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10546 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10547 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10548 /* Offset_12 used as shift. */
10552 /* Offset_12 used as index. */
10553 offset_12 = u_regval[0] << shift_imm;
10557 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10563 if (bit (u_regval[0], 31))
10565 offset_12 = 0xFFFFFFFF;
10574 /* This is arithmetic shift. */
10575 offset_12 = s_word >> shift_imm;
10582 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10584 /* Get C flag value and shift it by 31. */
10585 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10586 | (u_regval[0]) >> 1);
10590 offset_12 = (u_regval[0] >> shift_imm) \
10592 (sizeof(uint32_t) - shift_imm));
10597 gdb_assert_not_reached ("no decoding pattern found");
10601 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10603 if (bit (arm_insn_r->arm_insn, 23))
10605 tgt_mem_addr = u_regval[1] + offset_12;
10609 tgt_mem_addr = u_regval[1] - offset_12;
10612 switch (arm_insn_r->opcode)
10626 record_buf_mem[0] = 4;
10641 record_buf_mem[0] = 1;
10645 gdb_assert_not_reached ("no decoding pattern found");
10648 record_buf_mem[1] = tgt_mem_addr;
10649 arm_insn_r->mem_rec_count = 1;
10651 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10652 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10653 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10654 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10655 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10656 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10659 /* Rn is going to be changed in register scaled pre-indexed
10660 mode,and scaled post indexed mode. */
10661 record_buf[0] = reg_src2;
10662 arm_insn_r->reg_rec_count = 1;
10667 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10668 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10672 /* Handle ARM mode instructions with opcode 100. */
10675 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10677 struct regcache *reg_cache = arm_insn_r->regcache;
10678 uint32_t register_count = 0, register_bits;
10679 uint32_t reg_base, addr_mode;
10680 uint32_t record_buf[24], record_buf_mem[48];
10684 /* Fetch the list of registers. */
10685 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10686 arm_insn_r->reg_rec_count = 0;
10688 /* Fetch the base register that contains the address we are loading data
10690 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10692 /* Calculate wback. */
10693 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10695 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10697 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10699 /* Find out which registers are going to be loaded from memory. */
10700 while (register_bits)
10702 if (register_bits & 0x00000001)
10703 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10704 register_bits = register_bits >> 1;
10709 /* If wback is true, also save the base register, which is going to be
10712 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10714 /* Save the CPSR register. */
10715 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10719 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10721 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10723 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10725 /* Find out how many registers are going to be stored to memory. */
10726 while (register_bits)
10728 if (register_bits & 0x00000001)
10730 register_bits = register_bits >> 1;
10735 /* STMDA (STMED): Decrement after. */
10737 record_buf_mem[1] = (uint32_t) u_regval
10738 - register_count * INT_REGISTER_SIZE + 4;
10740 /* STM (STMIA, STMEA): Increment after. */
10742 record_buf_mem[1] = (uint32_t) u_regval;
10744 /* STMDB (STMFD): Decrement before. */
10746 record_buf_mem[1] = (uint32_t) u_regval
10747 - register_count * INT_REGISTER_SIZE;
10749 /* STMIB (STMFA): Increment before. */
10751 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10754 gdb_assert_not_reached ("no decoding pattern found");
10758 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10759 arm_insn_r->mem_rec_count = 1;
10761 /* If wback is true, also save the base register, which is going to be
10764 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10767 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10768 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10772 /* Handling opcode 101 insns. */
10775 arm_record_b_bl (insn_decode_record *arm_insn_r)
10777 uint32_t record_buf[8];
10779 /* Handle B, BL, BLX(1) insns. */
10780 /* B simply branches so we do nothing here. */
10781 /* Note: BLX(1) doesnt fall here but instead it falls into
10782 extension space. */
10783 if (bit (arm_insn_r->arm_insn, 24))
10785 record_buf[0] = ARM_LR_REGNUM;
10786 arm_insn_r->reg_rec_count = 1;
10789 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10794 /* Handling opcode 110 insns. */
10797 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10799 printf_unfiltered (_("Process record does not support instruction "
10800 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10801 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10806 /* Record handler for vector data transfer instructions. */
10809 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10811 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10812 uint32_t record_buf[4];
10814 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10815 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10816 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10817 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10818 bit_l = bit (arm_insn_r->arm_insn, 20);
10819 bit_c = bit (arm_insn_r->arm_insn, 8);
10821 /* Handle VMOV instruction. */
10822 if (bit_l && bit_c)
10824 record_buf[0] = reg_t;
10825 arm_insn_r->reg_rec_count = 1;
10827 else if (bit_l && !bit_c)
10829 /* Handle VMOV instruction. */
10830 if (bits_a == 0x00)
10832 if (bit (arm_insn_r->arm_insn, 20))
10833 record_buf[0] = reg_t;
10835 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10838 arm_insn_r->reg_rec_count = 1;
10840 /* Handle VMRS instruction. */
10841 else if (bits_a == 0x07)
10844 reg_t = ARM_PS_REGNUM;
10846 record_buf[0] = reg_t;
10847 arm_insn_r->reg_rec_count = 1;
10850 else if (!bit_l && !bit_c)
10852 /* Handle VMOV instruction. */
10853 if (bits_a == 0x00)
10855 if (bit (arm_insn_r->arm_insn, 20))
10856 record_buf[0] = reg_t;
10858 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10861 arm_insn_r->reg_rec_count = 1;
10863 /* Handle VMSR instruction. */
10864 else if (bits_a == 0x07)
10866 record_buf[0] = ARM_FPSCR_REGNUM;
10867 arm_insn_r->reg_rec_count = 1;
10870 else if (!bit_l && bit_c)
10872 /* Handle VMOV instruction. */
10873 if (!(bits_a & 0x04))
10875 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
10877 arm_insn_r->reg_rec_count = 1;
10879 /* Handle VDUP instruction. */
10882 if (bit (arm_insn_r->arm_insn, 21))
10884 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10885 record_buf[0] = reg_v + ARM_D0_REGNUM;
10886 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
10887 arm_insn_r->reg_rec_count = 2;
10891 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10892 record_buf[0] = reg_v + ARM_D0_REGNUM;
10893 arm_insn_r->reg_rec_count = 1;
10898 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10902 /* Record handler for extension register load/store instructions. */
10905 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
10907 uint32_t opcode, single_reg;
10908 uint8_t op_vldm_vstm;
10909 uint32_t record_buf[8], record_buf_mem[128];
10910 ULONGEST u_regval = 0;
10912 struct regcache *reg_cache = arm_insn_r->regcache;
10913 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10915 opcode = bits (arm_insn_r->arm_insn, 20, 24);
10916 single_reg = bit (arm_insn_r->arm_insn, 8);
10917 op_vldm_vstm = opcode & 0x1b;
10919 /* Handle VMOV instructions. */
10920 if ((opcode & 0x1e) == 0x04)
10922 if (bit (arm_insn_r->arm_insn, 4))
10924 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10925 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10926 arm_insn_r->reg_rec_count = 2;
10930 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
10931 | bit (arm_insn_r->arm_insn, 5);
10935 record_buf[0] = num_regs + reg_m;
10936 record_buf[1] = num_regs + reg_m + 1;
10937 arm_insn_r->reg_rec_count = 2;
10941 record_buf[0] = reg_m + ARM_D0_REGNUM;
10942 arm_insn_r->reg_rec_count = 1;
10946 /* Handle VSTM and VPUSH instructions. */
10947 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
10948 || op_vldm_vstm == 0x12)
10950 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
10951 uint32_t memory_index = 0;
10953 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
10954 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
10955 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
10956 imm_off32 = imm_off8 << 24;
10957 memory_count = imm_off8;
10959 if (bit (arm_insn_r->arm_insn, 23))
10960 start_address = u_regval;
10962 start_address = u_regval - imm_off32;
10964 if (bit (arm_insn_r->arm_insn, 21))
10966 record_buf[0] = reg_rn;
10967 arm_insn_r->reg_rec_count = 1;
10970 while (memory_count > 0)
10974 record_buf_mem[memory_index] = start_address;
10975 record_buf_mem[memory_index + 1] = 4;
10976 start_address = start_address + 4;
10977 memory_index = memory_index + 2;
10981 record_buf_mem[memory_index] = start_address;
10982 record_buf_mem[memory_index + 1] = 4;
10983 record_buf_mem[memory_index + 2] = start_address + 4;
10984 record_buf_mem[memory_index + 3] = 4;
10985 start_address = start_address + 8;
10986 memory_index = memory_index + 4;
10990 arm_insn_r->mem_rec_count = (memory_index >> 1);
10992 /* Handle VLDM instructions. */
10993 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
10994 || op_vldm_vstm == 0x13)
10996 uint32_t reg_count, reg_vd;
10997 uint32_t reg_index = 0;
10999 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11000 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11003 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11005 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11007 if (bit (arm_insn_r->arm_insn, 21))
11008 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11010 while (reg_count > 0)
11013 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
11015 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11019 arm_insn_r->reg_rec_count = reg_index;
11021 /* VSTR Vector store register. */
11022 else if ((opcode & 0x13) == 0x10)
11024 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11025 uint32_t memory_index = 0;
11027 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11028 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11029 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11030 imm_off32 = imm_off8 << 24;
11032 if (bit (arm_insn_r->arm_insn, 23))
11033 start_address = u_regval + imm_off32;
11035 start_address = u_regval - imm_off32;
11039 record_buf_mem[memory_index] = start_address;
11040 record_buf_mem[memory_index + 1] = 4;
11041 arm_insn_r->mem_rec_count = 1;
11045 record_buf_mem[memory_index] = start_address;
11046 record_buf_mem[memory_index + 1] = 4;
11047 record_buf_mem[memory_index + 2] = start_address + 4;
11048 record_buf_mem[memory_index + 3] = 4;
11049 arm_insn_r->mem_rec_count = 2;
11052 /* VLDR Vector load register. */
11053 else if ((opcode & 0x13) == 0x11)
11055 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11059 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11060 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11064 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11065 record_buf[0] = num_regs + reg_vd;
11067 arm_insn_r->reg_rec_count = 1;
11070 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11071 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11075 /* Record handler for arm/thumb mode VFP data processing instructions. */
11078 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11080 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11081 uint32_t record_buf[4];
11082 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11083 enum insn_types curr_insn_type = INSN_INV;
11085 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11086 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11087 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11088 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11089 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11090 bit_d = bit (arm_insn_r->arm_insn, 22);
11091 opc1 = opc1 & 0x04;
11093 /* Handle VMLA, VMLS. */
11096 if (bit (arm_insn_r->arm_insn, 10))
11098 if (bit (arm_insn_r->arm_insn, 6))
11099 curr_insn_type = INSN_T0;
11101 curr_insn_type = INSN_T1;
11106 curr_insn_type = INSN_T1;
11108 curr_insn_type = INSN_T2;
11111 /* Handle VNMLA, VNMLS, VNMUL. */
11112 else if (opc1 == 0x01)
11115 curr_insn_type = INSN_T1;
11117 curr_insn_type = INSN_T2;
11120 else if (opc1 == 0x02 && !(opc3 & 0x01))
11122 if (bit (arm_insn_r->arm_insn, 10))
11124 if (bit (arm_insn_r->arm_insn, 6))
11125 curr_insn_type = INSN_T0;
11127 curr_insn_type = INSN_T1;
11132 curr_insn_type = INSN_T1;
11134 curr_insn_type = INSN_T2;
11137 /* Handle VADD, VSUB. */
11138 else if (opc1 == 0x03)
11140 if (!bit (arm_insn_r->arm_insn, 9))
11142 if (bit (arm_insn_r->arm_insn, 6))
11143 curr_insn_type = INSN_T0;
11145 curr_insn_type = INSN_T1;
11150 curr_insn_type = INSN_T1;
11152 curr_insn_type = INSN_T2;
11156 else if (opc1 == 0x0b)
11159 curr_insn_type = INSN_T1;
11161 curr_insn_type = INSN_T2;
11163 /* Handle all other vfp data processing instructions. */
11164 else if (opc1 == 0x0b)
11167 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11169 if (bit (arm_insn_r->arm_insn, 4))
11171 if (bit (arm_insn_r->arm_insn, 6))
11172 curr_insn_type = INSN_T0;
11174 curr_insn_type = INSN_T1;
11179 curr_insn_type = INSN_T1;
11181 curr_insn_type = INSN_T2;
11184 /* Handle VNEG and VABS. */
11185 else if ((opc2 == 0x01 && opc3 == 0x01)
11186 || (opc2 == 0x00 && opc3 == 0x03))
11188 if (!bit (arm_insn_r->arm_insn, 11))
11190 if (bit (arm_insn_r->arm_insn, 6))
11191 curr_insn_type = INSN_T0;
11193 curr_insn_type = INSN_T1;
11198 curr_insn_type = INSN_T1;
11200 curr_insn_type = INSN_T2;
11203 /* Handle VSQRT. */
11204 else if (opc2 == 0x01 && opc3 == 0x03)
11207 curr_insn_type = INSN_T1;
11209 curr_insn_type = INSN_T2;
11212 else if (opc2 == 0x07 && opc3 == 0x03)
11215 curr_insn_type = INSN_T1;
11217 curr_insn_type = INSN_T2;
11219 else if (opc3 & 0x01)
11222 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11224 if (!bit (arm_insn_r->arm_insn, 18))
11225 curr_insn_type = INSN_T2;
11229 curr_insn_type = INSN_T1;
11231 curr_insn_type = INSN_T2;
11235 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11238 curr_insn_type = INSN_T1;
11240 curr_insn_type = INSN_T2;
11242 /* Handle VCVTB, VCVTT. */
11243 else if ((opc2 & 0x0e) == 0x02)
11244 curr_insn_type = INSN_T2;
11245 /* Handle VCMP, VCMPE. */
11246 else if ((opc2 & 0x0e) == 0x04)
11247 curr_insn_type = INSN_T3;
11251 switch (curr_insn_type)
11254 reg_vd = reg_vd | (bit_d << 4);
11255 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11256 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11257 arm_insn_r->reg_rec_count = 2;
11261 reg_vd = reg_vd | (bit_d << 4);
11262 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11263 arm_insn_r->reg_rec_count = 1;
11267 reg_vd = (reg_vd << 1) | bit_d;
11268 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11269 arm_insn_r->reg_rec_count = 1;
11273 record_buf[0] = ARM_FPSCR_REGNUM;
11274 arm_insn_r->reg_rec_count = 1;
11278 gdb_assert_not_reached ("no decoding pattern found");
11282 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11286 /* Handling opcode 110 insns. */
11289 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11291 uint32_t op1, op1_ebit, coproc;
11293 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11294 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11295 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11297 if ((coproc & 0x0e) == 0x0a)
11299 /* Handle extension register ld/st instructions. */
11301 return arm_record_exreg_ld_st_insn (arm_insn_r);
11303 /* 64-bit transfers between arm core and extension registers. */
11304 if ((op1 & 0x3e) == 0x04)
11305 return arm_record_exreg_ld_st_insn (arm_insn_r);
11309 /* Handle coprocessor ld/st instructions. */
11314 return arm_record_unsupported_insn (arm_insn_r);
11317 return arm_record_unsupported_insn (arm_insn_r);
11320 /* Move to coprocessor from two arm core registers. */
11322 return arm_record_unsupported_insn (arm_insn_r);
11324 /* Move to two arm core registers from coprocessor. */
11329 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11330 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11331 arm_insn_r->reg_rec_count = 2;
11333 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11337 return arm_record_unsupported_insn (arm_insn_r);
11340 /* Handling opcode 111 insns. */
11343 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11345 uint32_t op, op1_sbit, op1_ebit, coproc;
11346 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11347 struct regcache *reg_cache = arm_insn_r->regcache;
11349 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11350 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11351 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11352 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11353 op = bit (arm_insn_r->arm_insn, 4);
11355 /* Handle arm SWI/SVC system call instructions. */
11358 if (tdep->arm_syscall_record != NULL)
11360 ULONGEST svc_operand, svc_number;
11362 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11364 if (svc_operand) /* OABI. */
11365 svc_number = svc_operand - 0x900000;
11367 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11369 return tdep->arm_syscall_record (reg_cache, svc_number);
11373 printf_unfiltered (_("no syscall record support\n"));
11378 if ((coproc & 0x0e) == 0x0a)
11380 /* VFP data-processing instructions. */
11381 if (!op1_sbit && !op)
11382 return arm_record_vfp_data_proc_insn (arm_insn_r);
11384 /* Advanced SIMD, VFP instructions. */
11385 if (!op1_sbit && op)
11386 return arm_record_vdata_transfer_insn (arm_insn_r);
11390 /* Coprocessor data operations. */
11391 if (!op1_sbit && !op)
11392 return arm_record_unsupported_insn (arm_insn_r);
11394 /* Move to Coprocessor from ARM core register. */
11395 if (!op1_sbit && !op1_ebit && op)
11396 return arm_record_unsupported_insn (arm_insn_r);
11398 /* Move to arm core register from coprocessor. */
11399 if (!op1_sbit && op1_ebit && op)
11401 uint32_t record_buf[1];
11403 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11404 if (record_buf[0] == 15)
11405 record_buf[0] = ARM_PS_REGNUM;
11407 arm_insn_r->reg_rec_count = 1;
11408 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11414 return arm_record_unsupported_insn (arm_insn_r);
11417 /* Handling opcode 000 insns. */
11420 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11422 uint32_t record_buf[8];
11423 uint32_t reg_src1 = 0;
11425 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11427 record_buf[0] = ARM_PS_REGNUM;
11428 record_buf[1] = reg_src1;
11429 thumb_insn_r->reg_rec_count = 2;
11431 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11437 /* Handling opcode 001 insns. */
11440 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11442 uint32_t record_buf[8];
11443 uint32_t reg_src1 = 0;
11445 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11447 record_buf[0] = ARM_PS_REGNUM;
11448 record_buf[1] = reg_src1;
11449 thumb_insn_r->reg_rec_count = 2;
11451 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11456 /* Handling opcode 010 insns. */
11459 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11461 struct regcache *reg_cache = thumb_insn_r->regcache;
11462 uint32_t record_buf[8], record_buf_mem[8];
11464 uint32_t reg_src1 = 0, reg_src2 = 0;
11465 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11467 ULONGEST u_regval[2] = {0};
11469 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11471 if (bit (thumb_insn_r->arm_insn, 12))
11473 /* Handle load/store register offset. */
11474 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11475 if (opcode2 >= 12 && opcode2 <= 15)
11477 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11478 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11479 record_buf[0] = reg_src1;
11480 thumb_insn_r->reg_rec_count = 1;
11482 else if (opcode2 >= 8 && opcode2 <= 10)
11484 /* STR(2), STRB(2), STRH(2) . */
11485 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11486 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11487 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11488 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11490 record_buf_mem[0] = 4; /* STR (2). */
11491 else if (10 == opcode2)
11492 record_buf_mem[0] = 1; /* STRB (2). */
11493 else if (9 == opcode2)
11494 record_buf_mem[0] = 2; /* STRH (2). */
11495 record_buf_mem[1] = u_regval[0] + u_regval[1];
11496 thumb_insn_r->mem_rec_count = 1;
11499 else if (bit (thumb_insn_r->arm_insn, 11))
11501 /* Handle load from literal pool. */
11503 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11504 record_buf[0] = reg_src1;
11505 thumb_insn_r->reg_rec_count = 1;
11509 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11510 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11511 if ((3 == opcode2) && (!opcode3))
11513 /* Branch with exchange. */
11514 record_buf[0] = ARM_PS_REGNUM;
11515 thumb_insn_r->reg_rec_count = 1;
11519 /* Format 8; special data processing insns. */
11520 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11521 record_buf[0] = ARM_PS_REGNUM;
11522 record_buf[1] = reg_src1;
11523 thumb_insn_r->reg_rec_count = 2;
11528 /* Format 5; data processing insns. */
11529 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11530 if (bit (thumb_insn_r->arm_insn, 7))
11532 reg_src1 = reg_src1 + 8;
11534 record_buf[0] = ARM_PS_REGNUM;
11535 record_buf[1] = reg_src1;
11536 thumb_insn_r->reg_rec_count = 2;
11539 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11540 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11546 /* Handling opcode 001 insns. */
11549 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11551 struct regcache *reg_cache = thumb_insn_r->regcache;
11552 uint32_t record_buf[8], record_buf_mem[8];
11554 uint32_t reg_src1 = 0;
11555 uint32_t opcode = 0, immed_5 = 0;
11557 ULONGEST u_regval = 0;
11559 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11564 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11565 record_buf[0] = reg_src1;
11566 thumb_insn_r->reg_rec_count = 1;
11571 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11572 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11573 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11574 record_buf_mem[0] = 4;
11575 record_buf_mem[1] = u_regval + (immed_5 * 4);
11576 thumb_insn_r->mem_rec_count = 1;
11579 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11580 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11586 /* Handling opcode 100 insns. */
11589 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11591 struct regcache *reg_cache = thumb_insn_r->regcache;
11592 uint32_t record_buf[8], record_buf_mem[8];
11594 uint32_t reg_src1 = 0;
11595 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11597 ULONGEST u_regval = 0;
11599 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11604 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11605 record_buf[0] = reg_src1;
11606 thumb_insn_r->reg_rec_count = 1;
11608 else if (1 == opcode)
11611 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11612 record_buf[0] = reg_src1;
11613 thumb_insn_r->reg_rec_count = 1;
11615 else if (2 == opcode)
11618 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11619 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11620 record_buf_mem[0] = 4;
11621 record_buf_mem[1] = u_regval + (immed_8 * 4);
11622 thumb_insn_r->mem_rec_count = 1;
11624 else if (0 == opcode)
11627 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11628 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11629 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11630 record_buf_mem[0] = 2;
11631 record_buf_mem[1] = u_regval + (immed_5 * 2);
11632 thumb_insn_r->mem_rec_count = 1;
11635 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11636 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11642 /* Handling opcode 101 insns. */
11645 thumb_record_misc (insn_decode_record *thumb_insn_r)
11647 struct regcache *reg_cache = thumb_insn_r->regcache;
11649 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11650 uint32_t register_bits = 0, register_count = 0;
11651 uint32_t index = 0, start_address = 0;
11652 uint32_t record_buf[24], record_buf_mem[48];
11655 ULONGEST u_regval = 0;
11657 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11658 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11659 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11664 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11665 while (register_bits)
11667 if (register_bits & 0x00000001)
11668 record_buf[index++] = register_count;
11669 register_bits = register_bits >> 1;
11672 record_buf[index++] = ARM_PS_REGNUM;
11673 record_buf[index++] = ARM_SP_REGNUM;
11674 thumb_insn_r->reg_rec_count = index;
11676 else if (10 == opcode2)
11679 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11680 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11681 while (register_bits)
11683 if (register_bits & 0x00000001)
11685 register_bits = register_bits >> 1;
11687 start_address = u_regval - \
11688 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11689 thumb_insn_r->mem_rec_count = register_count;
11690 while (register_count)
11692 record_buf_mem[(register_count * 2) - 1] = start_address;
11693 record_buf_mem[(register_count * 2) - 2] = 4;
11694 start_address = start_address + 4;
11697 record_buf[0] = ARM_SP_REGNUM;
11698 thumb_insn_r->reg_rec_count = 1;
11700 else if (0x1E == opcode1)
11703 /* Handle enhanced software breakpoint insn, BKPT. */
11704 /* CPSR is changed to be executed in ARM state, disabling normal
11705 interrupts, entering abort mode. */
11706 /* According to high vector configuration PC is set. */
11707 /* User hits breakpoint and type reverse, in that case, we need to go back with
11708 previous CPSR and Program Counter. */
11709 record_buf[0] = ARM_PS_REGNUM;
11710 record_buf[1] = ARM_LR_REGNUM;
11711 thumb_insn_r->reg_rec_count = 2;
11712 /* We need to save SPSR value, which is not yet done. */
11713 printf_unfiltered (_("Process record does not support instruction "
11714 "0x%0x at address %s.\n"),
11715 thumb_insn_r->arm_insn,
11716 paddress (thumb_insn_r->gdbarch,
11717 thumb_insn_r->this_addr));
11720 else if ((0 == opcode) || (1 == opcode))
11722 /* ADD(5), ADD(6). */
11723 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11724 record_buf[0] = reg_src1;
11725 thumb_insn_r->reg_rec_count = 1;
11727 else if (2 == opcode)
11729 /* ADD(7), SUB(4). */
11730 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11731 record_buf[0] = ARM_SP_REGNUM;
11732 thumb_insn_r->reg_rec_count = 1;
11735 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11736 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11742 /* Handling opcode 110 insns. */
11745 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11747 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11748 struct regcache *reg_cache = thumb_insn_r->regcache;
11750 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
11751 uint32_t reg_src1 = 0;
11752 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11753 uint32_t index = 0, start_address = 0;
11754 uint32_t record_buf[24], record_buf_mem[48];
11756 ULONGEST u_regval = 0;
11758 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11759 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11765 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11767 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11768 while (register_bits)
11770 if (register_bits & 0x00000001)
11771 record_buf[index++] = register_count;
11772 register_bits = register_bits >> 1;
11775 record_buf[index++] = reg_src1;
11776 thumb_insn_r->reg_rec_count = index;
11778 else if (0 == opcode2)
11780 /* It handles both STMIA. */
11781 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11783 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11784 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11785 while (register_bits)
11787 if (register_bits & 0x00000001)
11789 register_bits = register_bits >> 1;
11791 start_address = u_regval;
11792 thumb_insn_r->mem_rec_count = register_count;
11793 while (register_count)
11795 record_buf_mem[(register_count * 2) - 1] = start_address;
11796 record_buf_mem[(register_count * 2) - 2] = 4;
11797 start_address = start_address + 4;
11801 else if (0x1F == opcode1)
11803 /* Handle arm syscall insn. */
11804 if (tdep->arm_syscall_record != NULL)
11806 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11807 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11811 printf_unfiltered (_("no syscall record support\n"));
11816 /* B (1), conditional branch is automatically taken care in process_record,
11817 as PC is saved there. */
11819 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11820 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11826 /* Handling opcode 111 insns. */
11829 thumb_record_branch (insn_decode_record *thumb_insn_r)
11831 uint32_t record_buf[8];
11832 uint32_t bits_h = 0;
11834 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
11836 if (2 == bits_h || 3 == bits_h)
11839 record_buf[0] = ARM_LR_REGNUM;
11840 thumb_insn_r->reg_rec_count = 1;
11842 else if (1 == bits_h)
11845 record_buf[0] = ARM_PS_REGNUM;
11846 record_buf[1] = ARM_LR_REGNUM;
11847 thumb_insn_r->reg_rec_count = 2;
11850 /* B(2) is automatically taken care in process_record, as PC is
11853 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11858 /* Handler for thumb2 load/store multiple instructions. */
11861 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
11863 struct regcache *reg_cache = thumb2_insn_r->regcache;
11865 uint32_t reg_rn, op;
11866 uint32_t register_bits = 0, register_count = 0;
11867 uint32_t index = 0, start_address = 0;
11868 uint32_t record_buf[24], record_buf_mem[48];
11870 ULONGEST u_regval = 0;
11872 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11873 op = bits (thumb2_insn_r->arm_insn, 23, 24);
11875 if (0 == op || 3 == op)
11877 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11879 /* Handle RFE instruction. */
11880 record_buf[0] = ARM_PS_REGNUM;
11881 thumb2_insn_r->reg_rec_count = 1;
11885 /* Handle SRS instruction after reading banked SP. */
11886 return arm_record_unsupported_insn (thumb2_insn_r);
11889 else if (1 == op || 2 == op)
11891 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11893 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
11894 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11895 while (register_bits)
11897 if (register_bits & 0x00000001)
11898 record_buf[index++] = register_count;
11901 register_bits = register_bits >> 1;
11903 record_buf[index++] = reg_rn;
11904 record_buf[index++] = ARM_PS_REGNUM;
11905 thumb2_insn_r->reg_rec_count = index;
11909 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
11910 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11911 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11912 while (register_bits)
11914 if (register_bits & 0x00000001)
11917 register_bits = register_bits >> 1;
11922 /* Start address calculation for LDMDB/LDMEA. */
11923 start_address = u_regval;
11927 /* Start address calculation for LDMDB/LDMEA. */
11928 start_address = u_regval - register_count * 4;
11931 thumb2_insn_r->mem_rec_count = register_count;
11932 while (register_count)
11934 record_buf_mem[register_count * 2 - 1] = start_address;
11935 record_buf_mem[register_count * 2 - 2] = 4;
11936 start_address = start_address + 4;
11939 record_buf[0] = reg_rn;
11940 record_buf[1] = ARM_PS_REGNUM;
11941 thumb2_insn_r->reg_rec_count = 2;
11945 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
11947 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
11949 return ARM_RECORD_SUCCESS;
11952 /* Handler for thumb2 load/store (dual/exclusive) and table branch
11956 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
11958 struct regcache *reg_cache = thumb2_insn_r->regcache;
11960 uint32_t reg_rd, reg_rn, offset_imm;
11961 uint32_t reg_dest1, reg_dest2;
11962 uint32_t address, offset_addr;
11963 uint32_t record_buf[8], record_buf_mem[8];
11964 uint32_t op1, op2, op3;
11966 ULONGEST u_regval[2];
11968 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
11969 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
11970 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
11972 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11974 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
11976 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
11977 record_buf[0] = reg_dest1;
11978 record_buf[1] = ARM_PS_REGNUM;
11979 thumb2_insn_r->reg_rec_count = 2;
11982 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
11984 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
11985 record_buf[2] = reg_dest2;
11986 thumb2_insn_r->reg_rec_count = 3;
11991 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11992 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
11994 if (0 == op1 && 0 == op2)
11996 /* Handle STREX. */
11997 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
11998 address = u_regval[0] + (offset_imm * 4);
11999 record_buf_mem[0] = 4;
12000 record_buf_mem[1] = address;
12001 thumb2_insn_r->mem_rec_count = 1;
12002 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12003 record_buf[0] = reg_rd;
12004 thumb2_insn_r->reg_rec_count = 1;
12006 else if (1 == op1 && 0 == op2)
12008 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12009 record_buf[0] = reg_rd;
12010 thumb2_insn_r->reg_rec_count = 1;
12011 address = u_regval[0];
12012 record_buf_mem[1] = address;
12016 /* Handle STREXB. */
12017 record_buf_mem[0] = 1;
12018 thumb2_insn_r->mem_rec_count = 1;
12022 /* Handle STREXH. */
12023 record_buf_mem[0] = 2 ;
12024 thumb2_insn_r->mem_rec_count = 1;
12028 /* Handle STREXD. */
12029 address = u_regval[0];
12030 record_buf_mem[0] = 4;
12031 record_buf_mem[2] = 4;
12032 record_buf_mem[3] = address + 4;
12033 thumb2_insn_r->mem_rec_count = 2;
12038 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12040 if (bit (thumb2_insn_r->arm_insn, 24))
12042 if (bit (thumb2_insn_r->arm_insn, 23))
12043 offset_addr = u_regval[0] + (offset_imm * 4);
12045 offset_addr = u_regval[0] - (offset_imm * 4);
12047 address = offset_addr;
12050 address = u_regval[0];
12052 record_buf_mem[0] = 4;
12053 record_buf_mem[1] = address;
12054 record_buf_mem[2] = 4;
12055 record_buf_mem[3] = address + 4;
12056 thumb2_insn_r->mem_rec_count = 2;
12057 record_buf[0] = reg_rn;
12058 thumb2_insn_r->reg_rec_count = 1;
12062 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12064 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12066 return ARM_RECORD_SUCCESS;
12069 /* Handler for thumb2 data processing (shift register and modified immediate)
12073 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12075 uint32_t reg_rd, op;
12076 uint32_t record_buf[8];
12078 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12079 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12081 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12083 record_buf[0] = ARM_PS_REGNUM;
12084 thumb2_insn_r->reg_rec_count = 1;
12088 record_buf[0] = reg_rd;
12089 record_buf[1] = ARM_PS_REGNUM;
12090 thumb2_insn_r->reg_rec_count = 2;
12093 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12095 return ARM_RECORD_SUCCESS;
12098 /* Generic handler for thumb2 instructions which effect destination and PS
12102 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12105 uint32_t record_buf[8];
12107 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12109 record_buf[0] = reg_rd;
12110 record_buf[1] = ARM_PS_REGNUM;
12111 thumb2_insn_r->reg_rec_count = 2;
12113 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12115 return ARM_RECORD_SUCCESS;
12118 /* Handler for thumb2 branch and miscellaneous control instructions. */
12121 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12123 uint32_t op, op1, op2;
12124 uint32_t record_buf[8];
12126 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12127 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12128 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12130 /* Handle MSR insn. */
12131 if (!(op1 & 0x2) && 0x38 == op)
12135 /* CPSR is going to be changed. */
12136 record_buf[0] = ARM_PS_REGNUM;
12137 thumb2_insn_r->reg_rec_count = 1;
12141 arm_record_unsupported_insn(thumb2_insn_r);
12145 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12148 record_buf[0] = ARM_PS_REGNUM;
12149 record_buf[1] = ARM_LR_REGNUM;
12150 thumb2_insn_r->reg_rec_count = 2;
12153 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12155 return ARM_RECORD_SUCCESS;
12158 /* Handler for thumb2 store single data item instructions. */
12161 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12163 struct regcache *reg_cache = thumb2_insn_r->regcache;
12165 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12166 uint32_t address, offset_addr;
12167 uint32_t record_buf[8], record_buf_mem[8];
12170 ULONGEST u_regval[2];
12172 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12173 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12174 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12175 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12177 if (bit (thumb2_insn_r->arm_insn, 23))
12180 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12181 offset_addr = u_regval[0] + offset_imm;
12182 address = offset_addr;
12187 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12189 /* Handle STRB (register). */
12190 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12191 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12192 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12193 offset_addr = u_regval[1] << shift_imm;
12194 address = u_regval[0] + offset_addr;
12198 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12199 if (bit (thumb2_insn_r->arm_insn, 10))
12201 if (bit (thumb2_insn_r->arm_insn, 9))
12202 offset_addr = u_regval[0] + offset_imm;
12204 offset_addr = u_regval[0] - offset_imm;
12206 address = offset_addr;
12209 address = u_regval[0];
12215 /* Store byte instructions. */
12218 record_buf_mem[0] = 1;
12220 /* Store half word instructions. */
12223 record_buf_mem[0] = 2;
12225 /* Store word instructions. */
12228 record_buf_mem[0] = 4;
12232 gdb_assert_not_reached ("no decoding pattern found");
12236 record_buf_mem[1] = address;
12237 thumb2_insn_r->mem_rec_count = 1;
12238 record_buf[0] = reg_rn;
12239 thumb2_insn_r->reg_rec_count = 1;
12241 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12243 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12245 return ARM_RECORD_SUCCESS;
12248 /* Handler for thumb2 load memory hints instructions. */
12251 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12253 uint32_t record_buf[8];
12254 uint32_t reg_rt, reg_rn;
12256 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12257 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12259 if (ARM_PC_REGNUM != reg_rt)
12261 record_buf[0] = reg_rt;
12262 record_buf[1] = reg_rn;
12263 record_buf[2] = ARM_PS_REGNUM;
12264 thumb2_insn_r->reg_rec_count = 3;
12266 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12268 return ARM_RECORD_SUCCESS;
12271 return ARM_RECORD_FAILURE;
12274 /* Handler for thumb2 load word instructions. */
12277 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12279 uint32_t record_buf[8];
12281 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12282 record_buf[1] = ARM_PS_REGNUM;
12283 thumb2_insn_r->reg_rec_count = 2;
12285 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12287 return ARM_RECORD_SUCCESS;
12290 /* Handler for thumb2 long multiply, long multiply accumulate, and
12291 divide instructions. */
12294 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12296 uint32_t opcode1 = 0, opcode2 = 0;
12297 uint32_t record_buf[8];
12299 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12300 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12302 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12304 /* Handle SMULL, UMULL, SMULAL. */
12305 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12306 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12307 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12308 record_buf[2] = ARM_PS_REGNUM;
12309 thumb2_insn_r->reg_rec_count = 3;
12311 else if (1 == opcode1 || 3 == opcode2)
12313 /* Handle SDIV and UDIV. */
12314 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12315 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12316 record_buf[2] = ARM_PS_REGNUM;
12317 thumb2_insn_r->reg_rec_count = 3;
12320 return ARM_RECORD_FAILURE;
12322 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12324 return ARM_RECORD_SUCCESS;
12327 /* Record handler for thumb32 coprocessor instructions. */
12330 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12332 if (bit (thumb2_insn_r->arm_insn, 25))
12333 return arm_record_coproc_data_proc (thumb2_insn_r);
12335 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12338 /* Record handler for advance SIMD structure load/store instructions. */
12341 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12343 struct regcache *reg_cache = thumb2_insn_r->regcache;
12344 uint32_t l_bit, a_bit, b_bits;
12345 uint32_t record_buf[128], record_buf_mem[128];
12346 uint32_t reg_rn, reg_vd, address, f_elem;
12347 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12350 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12351 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12352 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12353 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12354 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12355 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12356 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12357 f_elem = 8 / f_ebytes;
12361 ULONGEST u_regval = 0;
12362 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12363 address = u_regval;
12368 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12370 if (b_bits == 0x07)
12372 else if (b_bits == 0x0a)
12374 else if (b_bits == 0x06)
12376 else if (b_bits == 0x02)
12381 for (index_r = 0; index_r < bf_regs; index_r++)
12383 for (index_e = 0; index_e < f_elem; index_e++)
12385 record_buf_mem[index_m++] = f_ebytes;
12386 record_buf_mem[index_m++] = address;
12387 address = address + f_ebytes;
12388 thumb2_insn_r->mem_rec_count += 1;
12393 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12395 if (b_bits == 0x09 || b_bits == 0x08)
12397 else if (b_bits == 0x03)
12402 for (index_r = 0; index_r < bf_regs; index_r++)
12403 for (index_e = 0; index_e < f_elem; index_e++)
12405 for (loop_t = 0; loop_t < 2; loop_t++)
12407 record_buf_mem[index_m++] = f_ebytes;
12408 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12409 thumb2_insn_r->mem_rec_count += 1;
12411 address = address + (2 * f_ebytes);
12415 else if ((b_bits & 0x0e) == 0x04)
12417 for (index_e = 0; index_e < f_elem; index_e++)
12419 for (loop_t = 0; loop_t < 3; loop_t++)
12421 record_buf_mem[index_m++] = f_ebytes;
12422 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12423 thumb2_insn_r->mem_rec_count += 1;
12425 address = address + (3 * f_ebytes);
12429 else if (!(b_bits & 0x0e))
12431 for (index_e = 0; index_e < f_elem; index_e++)
12433 for (loop_t = 0; loop_t < 4; loop_t++)
12435 record_buf_mem[index_m++] = f_ebytes;
12436 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12437 thumb2_insn_r->mem_rec_count += 1;
12439 address = address + (4 * f_ebytes);
12445 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12447 if (bft_size == 0x00)
12449 else if (bft_size == 0x01)
12451 else if (bft_size == 0x02)
12457 if (!(b_bits & 0x0b) || b_bits == 0x08)
12458 thumb2_insn_r->mem_rec_count = 1;
12460 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12461 thumb2_insn_r->mem_rec_count = 2;
12463 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12464 thumb2_insn_r->mem_rec_count = 3;
12466 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12467 thumb2_insn_r->mem_rec_count = 4;
12469 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12471 record_buf_mem[index_m] = f_ebytes;
12472 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12481 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12482 thumb2_insn_r->reg_rec_count = 1;
12484 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12485 thumb2_insn_r->reg_rec_count = 2;
12487 else if ((b_bits & 0x0e) == 0x04)
12488 thumb2_insn_r->reg_rec_count = 3;
12490 else if (!(b_bits & 0x0e))
12491 thumb2_insn_r->reg_rec_count = 4;
12496 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12497 thumb2_insn_r->reg_rec_count = 1;
12499 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12500 thumb2_insn_r->reg_rec_count = 2;
12502 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12503 thumb2_insn_r->reg_rec_count = 3;
12505 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12506 thumb2_insn_r->reg_rec_count = 4;
12508 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12509 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12513 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12515 record_buf[index_r] = reg_rn;
12516 thumb2_insn_r->reg_rec_count += 1;
12519 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12521 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12526 /* Decodes thumb2 instruction type and invokes its record handler. */
12528 static unsigned int
12529 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12531 uint32_t op, op1, op2;
12533 op = bit (thumb2_insn_r->arm_insn, 15);
12534 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12535 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12539 if (!(op2 & 0x64 ))
12541 /* Load/store multiple instruction. */
12542 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12544 else if (!((op2 & 0x64) ^ 0x04))
12546 /* Load/store (dual/exclusive) and table branch instruction. */
12547 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12549 else if (!((op2 & 0x20) ^ 0x20))
12551 /* Data-processing (shifted register). */
12552 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12554 else if (op2 & 0x40)
12556 /* Co-processor instructions. */
12557 return thumb2_record_coproc_insn (thumb2_insn_r);
12560 else if (op1 == 0x02)
12564 /* Branches and miscellaneous control instructions. */
12565 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12567 else if (op2 & 0x20)
12569 /* Data-processing (plain binary immediate) instruction. */
12570 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12574 /* Data-processing (modified immediate). */
12575 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12578 else if (op1 == 0x03)
12580 if (!(op2 & 0x71 ))
12582 /* Store single data item. */
12583 return thumb2_record_str_single_data (thumb2_insn_r);
12585 else if (!((op2 & 0x71) ^ 0x10))
12587 /* Advanced SIMD or structure load/store instructions. */
12588 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12590 else if (!((op2 & 0x67) ^ 0x01))
12592 /* Load byte, memory hints instruction. */
12593 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12595 else if (!((op2 & 0x67) ^ 0x03))
12597 /* Load halfword, memory hints instruction. */
12598 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12600 else if (!((op2 & 0x67) ^ 0x05))
12602 /* Load word instruction. */
12603 return thumb2_record_ld_word (thumb2_insn_r);
12605 else if (!((op2 & 0x70) ^ 0x20))
12607 /* Data-processing (register) instruction. */
12608 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12610 else if (!((op2 & 0x78) ^ 0x30))
12612 /* Multiply, multiply accumulate, abs diff instruction. */
12613 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12615 else if (!((op2 & 0x78) ^ 0x38))
12617 /* Long multiply, long multiply accumulate, and divide. */
12618 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12620 else if (op2 & 0x40)
12622 /* Co-processor instructions. */
12623 return thumb2_record_coproc_insn (thumb2_insn_r);
12630 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12631 and positive val on fauilure. */
12634 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12636 gdb_byte buf[insn_size];
12638 memset (&buf[0], 0, insn_size);
12640 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12642 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12644 gdbarch_byte_order_for_code (insn_record->gdbarch));
12648 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12650 /* Decode arm/thumb insn depending on condition cods and opcodes; and
12654 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12655 uint32_t insn_size)
12658 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
12659 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12661 arm_record_data_proc_misc_ld_str, /* 000. */
12662 arm_record_data_proc_imm, /* 001. */
12663 arm_record_ld_st_imm_offset, /* 010. */
12664 arm_record_ld_st_reg_offset, /* 011. */
12665 arm_record_ld_st_multiple, /* 100. */
12666 arm_record_b_bl, /* 101. */
12667 arm_record_asimd_vfp_coproc, /* 110. */
12668 arm_record_coproc_data_proc /* 111. */
12671 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
12672 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12674 thumb_record_shift_add_sub, /* 000. */
12675 thumb_record_add_sub_cmp_mov, /* 001. */
12676 thumb_record_ld_st_reg_offset, /* 010. */
12677 thumb_record_ld_st_imm_offset, /* 011. */
12678 thumb_record_ld_st_stack, /* 100. */
12679 thumb_record_misc, /* 101. */
12680 thumb_record_ldm_stm_swi, /* 110. */
12681 thumb_record_branch /* 111. */
12684 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12685 uint32_t insn_id = 0;
12687 if (extract_arm_insn (arm_record, insn_size))
12691 printf_unfiltered (_("Process record: error reading memory at "
12692 "addr %s len = %d.\n"),
12693 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
12697 else if (ARM_RECORD == record_type)
12699 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12700 insn_id = bits (arm_record->arm_insn, 25, 27);
12701 ret = arm_record_extension_space (arm_record);
12702 /* If this insn has fallen into extension space
12703 then we need not decode it anymore. */
12704 if (ret != -1 && !INSN_RECORDED(arm_record))
12706 ret = arm_handle_insn[insn_id] (arm_record);
12709 else if (THUMB_RECORD == record_type)
12711 /* As thumb does not have condition codes, we set negative. */
12712 arm_record->cond = -1;
12713 insn_id = bits (arm_record->arm_insn, 13, 15);
12714 ret = thumb_handle_insn[insn_id] (arm_record);
12716 else if (THUMB2_RECORD == record_type)
12718 /* As thumb does not have condition codes, we set negative. */
12719 arm_record->cond = -1;
12721 /* Swap first half of 32bit thumb instruction with second half. */
12722 arm_record->arm_insn
12723 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
12725 insn_id = thumb2_record_decode_insn_handler (arm_record);
12727 if (insn_id != ARM_RECORD_SUCCESS)
12729 arm_record_unsupported_insn (arm_record);
12735 /* Throw assertion. */
12736 gdb_assert_not_reached ("not a valid instruction, could not decode");
12743 /* Cleans up local record registers and memory allocations. */
12746 deallocate_reg_mem (insn_decode_record *record)
12748 xfree (record->arm_regs);
12749 xfree (record->arm_mems);
12753 /* Parse the current instruction and record the values of the registers and
12754 memory that will be changed in current instruction to record_arch_list".
12755 Return -1 if something is wrong. */
12758 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12759 CORE_ADDR insn_addr)
12762 uint32_t no_of_rec = 0;
12763 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
12764 ULONGEST t_bit = 0, insn_id = 0;
12766 ULONGEST u_regval = 0;
12768 insn_decode_record arm_record;
12770 memset (&arm_record, 0, sizeof (insn_decode_record));
12771 arm_record.regcache = regcache;
12772 arm_record.this_addr = insn_addr;
12773 arm_record.gdbarch = gdbarch;
12776 if (record_debug > 1)
12778 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12780 paddress (gdbarch, arm_record.this_addr));
12783 if (extract_arm_insn (&arm_record, 2))
12787 printf_unfiltered (_("Process record: error reading memory at "
12788 "addr %s len = %d.\n"),
12789 paddress (arm_record.gdbarch,
12790 arm_record.this_addr), 2);
12795 /* Check the insn, whether it is thumb or arm one. */
12797 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12798 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12801 if (!(u_regval & t_bit))
12803 /* We are decoding arm insn. */
12804 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12808 insn_id = bits (arm_record.arm_insn, 11, 15);
12809 /* is it thumb2 insn? */
12810 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12812 ret = decode_insn (&arm_record, THUMB2_RECORD,
12813 THUMB2_INSN_SIZE_BYTES);
12817 /* We are decoding thumb insn. */
12818 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12824 /* Record registers. */
12825 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12826 if (arm_record.arm_regs)
12828 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12830 if (record_full_arch_list_add_reg
12831 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
12835 /* Record memories. */
12836 if (arm_record.arm_mems)
12838 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12840 if (record_full_arch_list_add_mem
12841 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12842 arm_record.arm_mems[no_of_rec].len))
12847 if (record_full_arch_list_add_end ())
12852 deallocate_reg_mem (&arm_record);