1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "arch/arm-get-next-pcs.h"
51 #include "gdb/sim-arm.h"
54 #include "coff/internal.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as a Thumb function.  The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
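/* Mapping symbols are the special ELF symbols "$a", "$t" and "$d" that ARM
   toolchains emit to mark where ARM code, Thumb code and data (for example
   literal pools) start within a section.  As an illustrative sketch (GNU as
   syntax, not taken from any particular binary):

	.thumb_func
   foo:				@ assembler emits "$t": Thumb code follows
	ldr	r0, .Lpool
	bx	lr
   .Lpool:			@ assembler emits "$d": data follows
	.word	0x12345678

   Each marker becomes one arm_mapping_symbol, keyed by its offset within
   the containing section.  */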
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly style.  */
224 static void set_disassembly_style_sfunc (char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
241 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
244 /* get_next_pcs operations. */
245 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
246 arm_get_next_pcs_read_memory_unsigned_integer,
247 arm_get_next_pcs_syscall_next_pc,
248 arm_get_next_pcs_addr_bits_remove,
249 arm_get_next_pcs_is_thumb,
253 struct arm_prologue_cache
255 /* The stack pointer at the time this frame was created; i.e. the
256 caller's stack pointer when this function was called. It is used
257 to identify this frame. */
260 /* The frame base for this frame is just prev_sp - frame size.
261 FRAMESIZE is the distance from the frame pointer to the
262 initial stack pointer. */
266 /* The register used to hold the frame pointer for this frame. */
269 /* Saved register offsets. */
270 struct trad_frame_saved_reg *saved_regs;
273 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
274 CORE_ADDR prologue_start,
275 CORE_ADDR prologue_end,
276 struct arm_prologue_cache *cache);
278 /* Architecture version for displaced stepping.  This affects the behaviour of
279 certain instructions, and really should not be hard-wired. */
281 #define DISPLACED_STEPPING_ARCH_VERSION 5
283 /* Set to true if the 32-bit mode is in use. */
287 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
290 arm_psr_thumb_bit (struct gdbarch *gdbarch)
292 if (gdbarch_tdep (gdbarch)->is_m)
298 /* Determine if the processor is currently executing in Thumb mode. */
301 arm_is_thumb (struct regcache *regcache)
304 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
306 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
308 return (cpsr & t_bit) != 0;
311 /* Determine if FRAME is executing in Thumb mode. */
314 arm_frame_is_thumb (struct frame_info *frame)
317 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
319 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
320 directly (from a signal frame or dummy frame) or by interpreting
321 the saved LR (from a prologue or DWARF frame). So consult it and
322 trust the unwinders. */
323 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
325 return (cpsr & t_bit) != 0;
328 /* Callback for VEC_lower_bound. */
331 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
332 const struct arm_mapping_symbol *rhs)
334 return lhs->value < rhs->value;
337 /* Search for the mapping symbol covering MEMADDR. If one is found,
338 return its type. Otherwise, return 0. If START is non-NULL,
339 set *START to the location of the mapping symbol. */
342 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
344 struct obj_section *sec;
346 /* If there are mapping symbols, consult them. */
347 sec = find_pc_section (memaddr);
350 struct arm_per_objfile *data;
351 VEC(arm_mapping_symbol_s) *map;
352 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
356 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
357 arm_objfile_data_key);
360 map = data->section_maps[sec->the_bfd_section->index];
361 if (!VEC_empty (arm_mapping_symbol_s, map))
363 struct arm_mapping_symbol *map_sym;
365 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
366 arm_compare_mapping_symbols);
368 /* VEC_lower_bound finds the earliest ordered insertion
369 point. If the following symbol starts at this exact
370 address, we use that; otherwise, the preceding
371 mapping symbol covers this address. */
372 if (idx < VEC_length (arm_mapping_symbol_s, map))
374 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
375 if (map_sym->value == map_key.value)
378 *start = map_sym->value + obj_section_addr (sec);
379 return map_sym->type;
385 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
387 *start = map_sym->value + obj_section_addr (sec);
388 return map_sym->type;
397 /* Determine if the program counter specified in MEMADDR is in a Thumb
398 function. This function should be called for addresses unrelated to
399 any executing frame; otherwise, prefer arm_frame_is_thumb. */
402 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
404 struct bound_minimal_symbol sym;
406 struct displaced_step_closure* dsc
407 = get_displaced_step_closure_by_addr (memaddr);
409 /* If we are checking the mode of a displaced instruction in the copy area,
410    the mode should be determined by the instruction at the original address.  */
414 fprintf_unfiltered (gdb_stdlog,
415 "displaced: check mode of %.8lx instead of %.8lx\n",
416 (unsigned long) dsc->insn_addr,
417 (unsigned long) memaddr);
418 memaddr = dsc->insn_addr;
421 /* If bit 0 of the address is set, assume this is a Thumb address. */
422 if (IS_THUMB_ADDR (memaddr))
425 /* Respect internal mode override if active. */
426 if (arm_override_mode != -1)
427 return arm_override_mode;
429 /* If the user wants to override the symbol table, let them.  */
430 if (strcmp (arm_force_mode_string, "arm") == 0)
432 if (strcmp (arm_force_mode_string, "thumb") == 0)
435 /* ARM v6-M and v7-M are always in Thumb mode. */
436 if (gdbarch_tdep (gdbarch)->is_m)
439 /* If there are mapping symbols, consult them. */
440 type = arm_find_mapping_symbol (memaddr, NULL);
444 /* Thumb functions have a "special" bit set in minimal symbols. */
445 sym = lookup_minimal_symbol_by_pc (memaddr);
447 return (MSYMBOL_IS_SPECIAL (sym.minsym));
449 /* If the user wants to override the fallback mode, let them. */
450 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
455 /* If we couldn't find any symbol, but we're talking to a running
456 target, then trust the current value of $cpsr. This lets
457 "display/i $pc" always show the correct mode (though if there is
458 a symbol table we will not reach here, so it still may not be
459 displayed in the mode it will be executed). */
460 if (target_has_registers)
461 return arm_frame_is_thumb (get_current_frame ());
463 /* Otherwise we're out of luck; we assume ARM. */
467 /* Remove useless bits from addresses in a running program. */
469 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
471 /* On M-profile devices, do not strip the low bit from EXC_RETURN
472 (the magic exception return address). */
473 if (gdbarch_tdep (gdbarch)->is_m
474 && (val & 0xfffffff0) == 0xfffffff0)
478 return UNMAKE_THUMB_ADDR (val);
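/* This appears to be the legacy 26-bit (APCS-26) case: there the PC shares
   its register with the processor status flags, so only bits 2..25 carry
   address bits and everything else is masked off below.  */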
480 return (val & 0x03fffffc);
483 /* Return 1 if PC is the start of a compiler helper function which
484 can be safely ignored during prologue skipping. IS_THUMB is true
485 if the function is known to be a Thumb function due to the way it is being called.  */
488 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
490 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
491 struct bound_minimal_symbol msym;
493 msym = lookup_minimal_symbol_by_pc (pc);
494 if (msym.minsym != NULL
495 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
496 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
498 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
500 /* The GNU linker's Thumb call stub to foo is named __foo_from_thumb.  */
502 if (strstr (name, "_from_thumb") != NULL)
505 /* On soft-float targets, __truncdfsf2 is called to convert promoted
506 arguments to their argument types in non-prototyped functions.  */
508 if (startswith (name, "__truncdfsf2"))
510 if (startswith (name, "__aeabi_d2f"))
513 /* Internal functions related to thread-local storage. */
514 if (startswith (name, "__tls_get_addr"))
516 if (startswith (name, "__aeabi_read_tp"))
521 /* If we run against a stripped glibc, we may be unable to identify
522 special functions by name. Check for one important case,
523 __aeabi_read_tp, by comparing the *code* against the default
524 implementation (this is hand-written ARM assembler in glibc). */
527 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
528 == 0xe3e00a0f /* mov r0, #0xffff0fff */
529 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
530 == 0xe240f01f) /* sub pc, r0, #31 */
537 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is the
538    first 16 bits of the instruction, and INSN2 is the second 16 bits.  */
540 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
541 ((bits ((insn1), 0, 3) << 12) \
542 | (bits ((insn1), 10, 10) << 11) \
543 | (bits ((insn2), 12, 14) << 8) \
544 | bits ((insn2), 0, 7))
546 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
547 the 32-bit instruction. */
548 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
549 ((bits ((insn), 16, 19) << 12) \
550 | bits ((insn), 0, 11))
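/* Illustration only, not compiled: two hand-worked encodings showing how the
   macros above reassemble a 16-bit immediate.  The opcode values were encoded
   by hand and should be double-checked against the ARM ARM.  */
#if 0
/* Thumb-2 "movw r0, #0x1234" is insn1 = 0xf241, insn2 = 0x2034:
   bits (insn1, 0, 3) = 0x1 -> imm bits 15:12, bits (insn1, 10, 10) = 0
   -> bit 11, bits (insn2, 12, 14) = 0x2 -> bits 10:8, and
   bits (insn2, 0, 7) = 0x34 -> bits 7:0.  */
gdb_assert (EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) == 0x1234);
/* ARM "movw r0, #0x1234" is 0xe3010234: imm4 = 0x1, imm12 = 0x234.  */
gdb_assert (EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) == 0x1234);
#endif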
552 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
555 thumb_expand_immediate (unsigned int imm)
557 unsigned int count = imm >> 7;
565 return (imm & 0xff) | ((imm & 0xff) << 16);
567 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
569 return (imm & 0xff) | ((imm & 0xff) << 8)
570 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
573 return (0x80 | (imm & 0x7f)) << (32 - count);
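/* A few hand-worked examples of the expansion above (a sketch; verify
   against the ThumbExpandImm pseudocode in the ARM ARM):

     imm = 0x0ab  ->  0x000000ab   (byte in the low byte only)
     imm = 0x1ab  ->  0x00ab00ab   (byte replicated in bytes 0 and 2)
     imm = 0x2ab  ->  0xab00ab00   (byte replicated in bytes 1 and 3)
     imm = 0x3ab  ->  0xabababab   (byte replicated in all four bytes)
     imm = 0x4ab  ->  0x55800000   (0x80 | 0x2b, rotated right by 9).  */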
576 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
577 epilogue, 0 otherwise. */
580 thumb_instruction_restores_sp (unsigned short insn)
582 return (insn == 0x46bd /* mov sp, r7 */
583 || (insn & 0xff80) == 0xb000 /* add sp, imm */
584 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
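/* For example (encodings worked out by hand, so treat as a sketch):

     0x46bd	mov sp, r7
     0xb008	add sp, #32		(0xb000 | imm7, offset = imm7 * 4)
     0xbdf0	pop {r4-r7, pc}		(matches the 0xbc00 pattern)

   all satisfy the tests above and mark the start of an epilogue.  */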
587 /* Analyze a Thumb prologue, looking for a recognizable stack frame
588 and frame pointer. Scan until we encounter a store that could
589 clobber the stack frame unexpectedly, or an unknown instruction.
590 Return the last address which is definitely safe to skip for an
591 initial breakpoint. */
594 thumb_analyze_prologue (struct gdbarch *gdbarch,
595 CORE_ADDR start, CORE_ADDR limit,
596 struct arm_prologue_cache *cache)
598 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
599 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
602 struct pv_area *stack;
603 struct cleanup *back_to;
605 CORE_ADDR unrecognized_pc = 0;
607 for (i = 0; i < 16; i++)
608 regs[i] = pv_register (i, 0);
609 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
610 back_to = make_cleanup_free_pv_area (stack);
612 while (start < limit)
616 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
618 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
623 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
626 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
627 whether to save LR (R14). */
628 mask = (insn & 0xff) | ((insn & 0x100) << 6);
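/* E.g. "push {r4-r7, lr}" is 0xb5f0: bits 0-7 give 0xf0 (r4-r7), and
   bit 8, shifted up to bit 14, adds LR, so mask becomes 0x40f0.  */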
630 /* Calculate offsets of saved R0-R7 and LR. */
631 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
632 if (mask & (1 << regno))
634 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
636 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
639 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
641 offset = (insn & 0x7f) << 2; /* get scaled offset */
642 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
645 else if (thumb_instruction_restores_sp (insn))
647 /* Don't scan past the epilogue. */
650 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
651 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
653 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
654 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
655 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
657 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
658 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
659 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
661 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
662 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
663 && pv_is_constant (regs[bits (insn, 3, 5)]))
664 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
665 regs[bits (insn, 6, 8)]);
666 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
667 && pv_is_constant (regs[bits (insn, 3, 6)]))
669 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
670 int rm = bits (insn, 3, 6);
671 regs[rd] = pv_add (regs[rd], regs[rm]);
673 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
675 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
676 int src_reg = (insn & 0x78) >> 3;
677 regs[dst_reg] = regs[src_reg];
679 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
681 /* Handle stores to the stack. Normally pushes are used,
682 but with GCC -mtpcs-frame, there may be other stores
683 in the prologue to create the frame. */
684 int regno = (insn >> 8) & 0x7;
687 offset = (insn & 0xff) << 2;
688 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
690 if (pv_area_store_would_trash (stack, addr))
693 pv_area_store (stack, addr, 4, regs[regno]);
695 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
697 int rd = bits (insn, 0, 2);
698 int rn = bits (insn, 3, 5);
701 offset = bits (insn, 6, 10) << 2;
702 addr = pv_add_constant (regs[rn], offset);
704 if (pv_area_store_would_trash (stack, addr))
707 pv_area_store (stack, addr, 4, regs[rd]);
709 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
710 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 /* Ignore stores of argument registers to the stack. */
714 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 /* Ignore block loads from the stack, potentially copying
717 parameters from memory. */
719 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
720 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
721 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
722 /* Similarly ignore single loads from the stack. */
724 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
725 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
726 /* Skip register copies, i.e. saves to another register
727 instead of the stack. */
729 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
730 /* Recognize constant loads; even with small stacks these are necessary on Thumb.  */
732 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
733 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
735 /* Constant pool loads, for the same reason. */
736 unsigned int constant;
739 loc = start + 4 + bits (insn, 0, 7) * 4;
740 constant = read_memory_unsigned_integer (loc, 4, byte_order);
741 regs[bits (insn, 8, 10)] = pv_constant (constant);
743 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
745 unsigned short inst2;
747 inst2 = read_memory_unsigned_integer (start + 2, 2,
748 byte_order_for_code);
750 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
752 /* BL, BLX. Allow some special function calls when
753 skipping the prologue; GCC generates these before
754 storing arguments to the stack. */
756 int j1, j2, imm1, imm2;
758 imm1 = sbits (insn, 0, 10);
759 imm2 = bits (inst2, 0, 10);
760 j1 = bit (inst2, 13);
761 j2 = bit (inst2, 11);
763 offset = ((imm1 << 12) + (imm2 << 1));
764 offset ^= ((!j2) << 22) | ((!j1) << 23);
766 nextpc = start + 4 + offset;
767 /* For BLX make sure to clear the low bits. */
768 if (bit (inst2, 12) == 0)
769 nextpc = nextpc & 0xfffffffc;
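/* A note on the arithmetic above (per the BL/BLX immediate encodings;
   worth cross-checking with the ARM ARM): the branch offset is
   S:I1:I2:imm10:imm11:'0' with I1 = NOT(J1 EOR S) and I2 = NOT(J2 EOR S).
   sbits supplies S through sign extension, and the XOR with !J1/!J2
   folds in I1 and I2.  Bit 12 of the second halfword distinguishes
   BL (1) from BLX (0), and BLX targets are always word-aligned.  */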
771 if (!skip_prologue_function (gdbarch, nextpc,
772 bit (inst2, 12) != 0))
776 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
778 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
780 pv_t addr = regs[bits (insn, 0, 3)];
783 if (pv_area_store_would_trash (stack, addr))
786 /* Calculate offsets of saved registers. */
787 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
788 if (inst2 & (1 << regno))
790 addr = pv_add_constant (addr, -4);
791 pv_area_store (stack, addr, 4, regs[regno]);
795 regs[bits (insn, 0, 3)] = addr;
798 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
800 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
802 int regno1 = bits (inst2, 12, 15);
803 int regno2 = bits (inst2, 8, 11);
804 pv_t addr = regs[bits (insn, 0, 3)];
806 offset = inst2 & 0xff;
808 addr = pv_add_constant (addr, offset);
810 addr = pv_add_constant (addr, -offset);
812 if (pv_area_store_would_trash (stack, addr))
815 pv_area_store (stack, addr, 4, regs[regno1]);
816 pv_area_store (stack, pv_add_constant (addr, 4),
820 regs[bits (insn, 0, 3)] = addr;
823 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
824 && (inst2 & 0x0c00) == 0x0c00
825 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
827 int regno = bits (inst2, 12, 15);
828 pv_t addr = regs[bits (insn, 0, 3)];
830 offset = inst2 & 0xff;
832 addr = pv_add_constant (addr, offset);
834 addr = pv_add_constant (addr, -offset);
836 if (pv_area_store_would_trash (stack, addr))
839 pv_area_store (stack, addr, 4, regs[regno]);
842 regs[bits (insn, 0, 3)] = addr;
845 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno = bits (inst2, 12, 15);
851 offset = inst2 & 0xfff;
852 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
854 if (pv_area_store_would_trash (stack, addr))
857 pv_area_store (stack, addr, 4, regs[regno]);
860 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
861 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
862 /* Ignore stores of argument registers to the stack. */
865 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
866 && (inst2 & 0x0d00) == 0x0c00
867 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
868 /* Ignore stores of argument registers to the stack. */
871 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
873 && (inst2 & 0x8000) == 0x0000
874 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
875 /* Ignore block loads from the stack, potentially copying
876 parameters from memory. */
879 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
882 /* Similarly ignore dual loads from the stack. */
885 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
886 && (inst2 & 0x0d00) == 0x0c00
887 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
888 /* Similarly ignore single loads from the stack. */
891 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 /* Similarly ignore single loads from the stack. */
896 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
897 && (inst2 & 0x8000) == 0x0000)
899 unsigned int imm = ((bits (insn, 10, 10) << 11)
900 | (bits (inst2, 12, 14) << 8)
901 | bits (inst2, 0, 7));
903 regs[bits (inst2, 8, 11)]
904 = pv_add_constant (regs[bits (insn, 0, 3)],
905 thumb_expand_immediate (imm));
908 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
909 && (inst2 & 0x8000) == 0x0000)
911 unsigned int imm = ((bits (insn, 10, 10) << 11)
912 | (bits (inst2, 12, 14) << 8)
913 | bits (inst2, 0, 7));
915 regs[bits (inst2, 8, 11)]
916 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
919 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
920 && (inst2 & 0x8000) == 0x0000)
922 unsigned int imm = ((bits (insn, 10, 10) << 11)
923 | (bits (inst2, 12, 14) << 8)
924 | bits (inst2, 0, 7));
926 regs[bits (inst2, 8, 11)]
927 = pv_add_constant (regs[bits (insn, 0, 3)],
928 - (CORE_ADDR) thumb_expand_immediate (imm));
931 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
932 && (inst2 & 0x8000) == 0x0000)
934 unsigned int imm = ((bits (insn, 10, 10) << 11)
935 | (bits (inst2, 12, 14) << 8)
936 | bits (inst2, 0, 7));
938 regs[bits (inst2, 8, 11)]
939 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
942 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
944 unsigned int imm = ((bits (insn, 10, 10) << 11)
945 | (bits (inst2, 12, 14) << 8)
946 | bits (inst2, 0, 7));
948 regs[bits (inst2, 8, 11)]
949 = pv_constant (thumb_expand_immediate (imm));
952 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
955 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
957 regs[bits (inst2, 8, 11)] = pv_constant (imm);
960 else if (insn == 0xea5f /* mov.w Rd,Rm */
961 && (inst2 & 0xf0f0) == 0)
963 int dst_reg = (inst2 & 0x0f00) >> 8;
964 int src_reg = inst2 & 0xf;
965 regs[dst_reg] = regs[src_reg];
968 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
970 /* Constant pool loads. */
971 unsigned int constant;
974 offset = bits (inst2, 0, 11);
976 loc = start + 4 + offset;
978 loc = start + 4 - offset;
980 constant = read_memory_unsigned_integer (loc, 4, byte_order);
981 regs[bits (inst2, 12, 15)] = pv_constant (constant);
984 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
986 /* Constant pool loads. */
987 unsigned int constant;
990 offset = bits (inst2, 0, 7) << 2;
992 loc = start + 4 + offset;
994 loc = start + 4 - offset;
996 constant = read_memory_unsigned_integer (loc, 4, byte_order);
997 regs[bits (inst2, 12, 15)] = pv_constant (constant);
999 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1000 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1003 else if (thumb2_instruction_changes_pc (insn, inst2))
1005 /* Don't scan past anything that might change control flow. */
1010 /* The optimizer might shove anything into the prologue,
1011 so we just skip what we don't recognize. */
1012 unrecognized_pc = start;
1017 else if (thumb_instruction_changes_pc (insn))
1019 /* Don't scan past anything that might change control flow. */
1024 /* The optimizer might shove anything into the prologue,
1025 so we just skip what we don't recognize. */
1026 unrecognized_pc = start;
1033 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1034 paddress (gdbarch, start));
1036 if (unrecognized_pc == 0)
1037 unrecognized_pc = start;
1041 do_cleanups (back_to);
1042 return unrecognized_pc;
1045 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1047 /* Frame pointer is fp. Frame size is constant. */
1048 cache->framereg = ARM_FP_REGNUM;
1049 cache->framesize = -regs[ARM_FP_REGNUM].k;
1051 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1053 /* Frame pointer is r7. Frame size is constant. */
1054 cache->framereg = THUMB_FP_REGNUM;
1055 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1059 /* Try the stack pointer... this is a bit desperate. */
1060 cache->framereg = ARM_SP_REGNUM;
1061 cache->framesize = -regs[ARM_SP_REGNUM].k;
1064 for (i = 0; i < 16; i++)
1065 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1066 cache->saved_regs[i].addr = offset;
1068 do_cleanups (back_to);
1069 return unrecognized_pc;
1073 /* Try to analyze the instructions starting from PC, which load the symbol
1074    __stack_chk_guard.  Return the address of the instruction after the load,
1075    set the destination register number to *DESTREG, and set the size of the
1076    loading instructions in *OFFSET.  Return 0 if the instructions are not recognized.  */
1080 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1081 unsigned int *destreg, int *offset)
1083 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1084 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1085 unsigned int low, high, address;
1090 unsigned short insn1
1091 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1093 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1095 *destreg = bits (insn1, 8, 10);
1097 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1098 address = read_memory_unsigned_integer (address, 4,
1099 byte_order_for_code);
1101 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1103 unsigned short insn2
1104 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1106 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1109 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1111 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1113 /* movt Rd, #const */
1114 if ((insn1 & 0xfbc0) == 0xf2c0)
1116 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1117 *destreg = bits (insn2, 8, 11);
1119 address = (high << 16 | low);
1126 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1128 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1130 address = bits (insn, 0, 11) + pc + 8;
1131 address = read_memory_unsigned_integer (address, 4,
1132 byte_order_for_code);
1134 *destreg = bits (insn, 12, 15);
1137 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1139 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1142 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1144 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1146 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1147 *destreg = bits (insn, 12, 15);
1149 address = (high << 16 | low);
1157 /* Try to skip a sequence of instructions used for the stack protector.  If PC
1158    points to the first instruction of this sequence, return the address of the
1159    first instruction after this sequence, otherwise, return the original PC.
1161    On ARM, this sequence of instructions is mainly composed of three steps,
1162      Step 1: load the symbol __stack_chk_guard,
1163      Step 2: load from the address of __stack_chk_guard,
1164      Step 3: store it to somewhere else.
1166    Usually, the instructions in step 2 and step 3 are the same across ARM
1167    architectures.  In step 2, it is one instruction, 'ldr Rx, [Rn, #0]', and
1168    in step 3, it is also one instruction, 'str Rx, [r7, #immd]'.  However, the
1169    instructions in step 1 vary between ARM architectures.  On ARMv7, they are,
1172      movw Rn, #:lower16:__stack_chk_guard
1173      movt Rn, #:upper16:__stack_chk_guard
1180    while on older architectures they are a PC-relative 'ldr' from a literal pool containing '.word __stack_chk_guard'.
1182    Since ldr/str are very common instructions, we can't use them alone as the
1183    'fingerprint' or 'signature' of the stack protector sequence.  Here we choose
1184    the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1185    not stripped, as the 'fingerprint' of a stack protector code sequence.  */
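/* For illustration, a typical GCC-generated ARMv7 Thumb-2 sequence might
   look like this (a sketch only; the register numbers and frame offset
   are arbitrary):

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3]		@ step 2: load the guard value
	str	r3, [r7, #12]		@ step 3: store the canary in the frame  */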
1188 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1190 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1191 unsigned int basereg;
1192 struct bound_minimal_symbol stack_chk_guard;
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1197 /* Try to parse the instructions in Step 1. */
1198 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1203 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1204 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1205 Otherwise, this sequence cannot be for the stack protector.  */
1206 if (stack_chk_guard.minsym == NULL
1207 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1212 unsigned int destreg;
1214 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1216 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1217 if ((insn & 0xf800) != 0x6800)
1219 if (bits (insn, 3, 5) != basereg)
1221 destreg = bits (insn, 0, 2);
1223 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1224 byte_order_for_code);
1225 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1226 if ((insn & 0xf800) != 0x6000)
1228 if (destreg != bits (insn, 0, 2))
1233 unsigned int destreg;
1235 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1237 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1238 if ((insn & 0x0e500000) != 0x04100000)
1240 if (bits (insn, 16, 19) != basereg)
1242 destreg = bits (insn, 12, 15);
1243 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1244 insn = read_memory_unsigned_integer (pc + offset + 4,
1245 4, byte_order_for_code);
1246 if ((insn & 0x0e500000) != 0x04000000)
1248 if (bits (insn, 12, 15) != destreg)
1251 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2, and 8 bytes on ARM.  */
1254 return pc + offset + 4;
1256 return pc + offset + 8;
1259 /* Advance the PC across any function entry prologue instructions to
1260 reach some "real" code.
1262 The APCS (ARM Procedure Call Standard) defines the following prologue:
1265 mov          ip, sp
1266 [stmfd sp!, {a1,a2,a3,a4}]
1267 stmfd sp!, {...,fp,ip,lr,pc}
1268 [stfe f7, [sp, #-12]!]
1269 [stfe f6, [sp, #-12]!]
1270 [stfe f5, [sp, #-12]!]
1271 [stfe f4, [sp, #-12]!]
1272 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1275 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1277 CORE_ADDR func_addr, limit_pc;
1279 /* See if we can determine the end of the prologue via the symbol table.
1281 If so, then return either PC, or the PC after the prologue, whichever is greater.  */
1282 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1284 CORE_ADDR post_prologue_pc
1285 = skip_prologue_using_sal (gdbarch, func_addr);
1286 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1288 if (post_prologue_pc)
1290 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1293 /* GCC always emits a line note before the prologue and another
1294 one after, even if the two are at the same address or on the
1295 same line. Take advantage of this so that we do not need to
1296 know every instruction that might appear in the prologue. We
1297 will have producer information for most binaries; if it is
1298 missing (e.g. for -gstabs), assume the GNU tools.  */
1299 if (post_prologue_pc
1301 || COMPUNIT_PRODUCER (cust) == NULL
1302 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1303 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1304 return post_prologue_pc;
1306 if (post_prologue_pc != 0)
1308 CORE_ADDR analyzed_limit;
1310 /* For non-GCC compilers, make sure the entire line is an
1311 acceptable prologue; GDB will round this function's
1312 return value up to the end of the following line so we
1313 can not skip just part of a line (and we do not want to).
1315 RealView does not treat the prologue specially, but does
1316 associate prologue code with the opening brace; so this
1317 lets us skip the first line if we think it is the opening brace.  */
1319 if (arm_pc_is_thumb (gdbarch, func_addr))
1320 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1321 post_prologue_pc, NULL);
1323 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1324 post_prologue_pc, NULL);
1326 if (analyzed_limit != post_prologue_pc)
1329 return post_prologue_pc;
1333 /* Can't determine prologue from the symbol table, need to examine the instructions.  */
1336 /* Find an upper limit on the function prologue using the debug
1337 information. If the debug information could not be used to provide
1338 that bound, then use an arbitrary large number as the upper bound. */
1339 /* Like arm_scan_prologue, stop no later than pc + 64. */
1340 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1342 limit_pc = pc + 64; /* Magic. */
1345 /* Check if this is Thumb code. */
1346 if (arm_pc_is_thumb (gdbarch, pc))
1347 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1349 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1353 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1354 This function decodes a Thumb function prologue to determine:
1355 1) the size of the stack frame
1356 2) which registers are saved on it
1357 3) the offsets of saved regs
1358 4) the offset from the stack pointer to the frame pointer
1360 A typical Thumb function prologue would create this stack frame
1361 (offsets relative to FP)
1362      old SP ->   24  stack parameters
1363                  20  LR
1364                  16  R7
1365      R7 ->        0  local variables (16 bytes)
1366      SP ->      -12  additional stack space (12 bytes)
1367    The frame size would thus be 36 bytes, and the frame offset would be
1368    12 bytes.  The frame register is R7.
1370    The comments for thumb_analyze_prologue() describe the algorithm we use
1371    to detect the end of the prologue.  */
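/* For illustration, a prologue of the rough shape below (a sketch only;
   the exact instruction selection depends on the compiler and options)
   would be recognized by thumb_analyze_prologue and produce the frame
   described above:

	push	{r7, lr}	@ save frame pointer and return address
	sub	sp, #16		@ local variables
	add	r7, sp, #0	@ set up r7 as the Thumb frame pointer
	sub	sp, #12		@ additional stack space  */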
1375 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1376 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1378 CORE_ADDR prologue_start;
1379 CORE_ADDR prologue_end;
1381 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1384 /* See comment in arm_scan_prologue for an explanation of this instruction limit.  */
1386 if (prologue_end > prologue_start + 64)
1388 prologue_end = prologue_start + 64;
1392 /* We're in the boondocks: we have no idea where the start of the function is.  */
1396 prologue_end = min (prologue_end, prev_pc);
1398 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1401 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0 otherwise.  */
1405 arm_instruction_restores_sp (unsigned int insn)
1407 if (bits (insn, 28, 31) != INST_NV)
1409 if ((insn & 0x0df0f000) == 0x0080d000
1410 /* ADD SP (register or immediate). */
1411 || (insn & 0x0df0f000) == 0x0040d000
1412 /* SUB SP (register or immediate). */
1413 || (insn & 0x0ffffff0) == 0x01a0d000
1414 /* MOV SP (register).  */
1415 || (insn & 0x0fff0000) == 0x08bd0000
1416 /* POP (LDMIA).  */
1417 || (insn & 0x0fff0000) == 0x049d0000)
1418 /* POP of a single register. */
1425 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1426 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1427 fill it in.  Return the first address not recognized as a prologue instruction.
1430 We recognize all the instructions typically found in ARM prologues,
1431 plus harmless instructions which can be skipped (either for analysis
1432 purposes, or a more restrictive set that can be skipped when finding
1433 the end of the prologue). */
1436 arm_analyze_prologue (struct gdbarch *gdbarch,
1437 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1438 struct arm_prologue_cache *cache)
1440 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1442 CORE_ADDR offset, current_pc;
1443 pv_t regs[ARM_FPS_REGNUM];
1444 struct pv_area *stack;
1445 struct cleanup *back_to;
1446 CORE_ADDR unrecognized_pc = 0;
1448 /* Search the prologue looking for instructions that set up the
1449 frame pointer, adjust the stack pointer, and save registers.
1451 Be careful, however, and if it doesn't look like a prologue,
1452 don't try to scan it. If, for instance, a frameless function
1453 begins with stmfd sp!, then we will tell ourselves there is
1454 a frame, which will confuse stack traceback, as well as "finish"
1455 and other operations that rely on a knowledge of the stack traceback.  */
1458 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1459 regs[regno] = pv_register (regno, 0);
1460 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1461 back_to = make_cleanup_free_pv_area (stack);
1463 for (current_pc = prologue_start;
1464 current_pc < prologue_end;
1468 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1470 if (insn == 0xe1a0c00d) /* mov ip, sp */
1472 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1475 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1476 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1478 unsigned imm = insn & 0xff; /* immediate value */
1479 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1480 int rd = bits (insn, 12, 15);
1481 imm = (imm >> rot) | (imm << (32 - rot));
1482 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1485 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1486 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1488 unsigned imm = insn & 0xff; /* immediate value */
1489 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1490 int rd = bits (insn, 12, 15);
1491 imm = (imm >> rot) | (imm << (32 - rot));
1492 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1495 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1498 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1500 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1501 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1502 regs[bits (insn, 12, 15)]);
1505 else if ((insn & 0xffff0000) == 0xe92d0000)
1506 /* stmfd sp!, {..., fp, ip, lr, pc}
1508 stmfd sp!, {a1, a2, a3, a4} */
1510 int mask = insn & 0xffff;
1512 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1515 /* Calculate offsets of saved registers. */
1516 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1517 if (mask & (1 << regno))
1520 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1521 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1524 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1525 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1526 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1528 /* No need to add this to saved_regs -- it's just an arg reg. */
1531 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1532 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1533 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1535 /* No need to add this to saved_regs -- it's just an arg reg. */
1538 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1540 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1542 /* No need to add this to saved_regs -- it's just arg regs. */
1545 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1547 unsigned imm = insn & 0xff; /* immediate value */
1548 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1549 imm = (imm >> rot) | (imm << (32 - rot));
1550 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1552 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1554 unsigned imm = insn & 0xff; /* immediate value */
1555 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1556 imm = (imm >> rot) | (imm << (32 - rot));
1557 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1559 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1561 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1563 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1566 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1567 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1568 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1570 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1572 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1574 int n_saved_fp_regs;
1575 unsigned int fp_start_reg, fp_bound_reg;
1577 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1580 if ((insn & 0x800) == 0x800) /* N0 is set */
1582 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1583 n_saved_fp_regs = 3;
1585 n_saved_fp_regs = 1;
1589 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1590 n_saved_fp_regs = 2;
1592 n_saved_fp_regs = 4;
1595 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1596 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1597 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1599 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1600 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1601 regs[fp_start_reg++]);
1604 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1606 /* Allow some special function calls when skipping the
1607 prologue; GCC generates these before storing arguments to the stack.  */
1609 CORE_ADDR dest = BranchDest (current_pc, insn);
1611 if (skip_prologue_function (gdbarch, dest, 0))
1616 else if ((insn & 0xf0000000) != 0xe0000000)
1617 break; /* Condition not true, exit early. */
1618 else if (arm_instruction_changes_pc (insn))
1619 /* Don't scan past anything that might change control flow. */
1621 else if (arm_instruction_restores_sp (insn))
1623 /* Don't scan past the epilogue. */
1626 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1627 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1628 /* Ignore block loads from the stack, potentially copying
1629 parameters from memory. */
1631 else if ((insn & 0xfc500000) == 0xe4100000
1632 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1633 /* Similarly ignore single loads from the stack. */
1635 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1636 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1637 register instead of the stack. */
1641 /* The optimizer might shove anything into the prologue.  If we are
1642    building up the cache (cache != NULL) from scanning the prologue, we
1643    just skip what we don't recognize and scan further to make the cache
1644    as complete as possible.  However, if we are skipping the prologue,
1645    we stop immediately at the first unrecognized instruction.  */
1647 unrecognized_pc = current_pc;
1655 if (unrecognized_pc == 0)
1656 unrecognized_pc = current_pc;
1660 int framereg, framesize;
1662 /* The frame size is just the distance from the frame register
1663 to the original stack pointer. */
1664 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1666 /* Frame pointer is fp. */
1667 framereg = ARM_FP_REGNUM;
1668 framesize = -regs[ARM_FP_REGNUM].k;
1672 /* Try the stack pointer... this is a bit desperate. */
1673 framereg = ARM_SP_REGNUM;
1674 framesize = -regs[ARM_SP_REGNUM].k;
1677 cache->framereg = framereg;
1678 cache->framesize = framesize;
1680 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1681 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1682 cache->saved_regs[regno].addr = offset;
1686 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1687 paddress (gdbarch, unrecognized_pc));
1689 do_cleanups (back_to);
1690 return unrecognized_pc;
1694 arm_scan_prologue (struct frame_info *this_frame,
1695 struct arm_prologue_cache *cache)
1697 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1698 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1699 CORE_ADDR prologue_start, prologue_end;
1700 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1701 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1703 /* Assume there is no frame until proven otherwise. */
1704 cache->framereg = ARM_SP_REGNUM;
1705 cache->framesize = 0;
1707 /* Check for Thumb prologue. */
1708 if (arm_frame_is_thumb (this_frame))
1710 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1714 /* Find the function prologue. If we can't find the function in
1715 the symbol table, peek in the stack frame to find the PC. */
1716 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1719 /* One way to find the end of the prologue (which works well
1720 for unoptimized code) is to do the following:
1722 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1724 if (sal.line == 0)
1725   prologue_end = prev_pc;
1726 else if (sal.end < prologue_end)
1727 prologue_end = sal.end;
1729 This mechanism is very accurate so long as the optimizer
1730 doesn't move any instructions from the function body into the
1731 prologue. If this happens, sal.end will be the last
1732 instruction in the first hunk of prologue code just before
1733 the first instruction that the scheduler has moved from
1734 the body to the prologue.
1736 In order to make sure that we scan all of the prologue
1737 instructions, we use a slightly less accurate mechanism which
1738 may scan more than necessary. To help compensate for this
1739 lack of accuracy, the prologue scanning loop below contains
1740 several clauses which will cause the loop to terminate early if
1741 an implausible prologue instruction is encountered.
1747 The expression prologue_start + 64 is a suitable endpoint since it accounts for the
1748 largest possible prologue plus up to five instructions inserted by the scheduler.  */
1751 if (prologue_end > prologue_start + 64)
1753 prologue_end = prologue_start + 64; /* See above. */
1758 /* We have no symbol information. Our only option is to assume this
1759 function has a standard stack frame and the normal frame register.
1760 Then, we can find the value of our frame pointer on entrance to
1761 the callee (or at the present moment if this is the innermost frame).
1762 The value stored there should be the address of the stmfd + 8. */
1763 CORE_ADDR frame_loc;
1764 LONGEST return_value;
1766 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1767 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1771 prologue_start = gdbarch_addr_bits_remove
1772 (gdbarch, return_value) - 8;
1773 prologue_end = prologue_start + 64; /* See above. */
1777 if (prev_pc < prologue_end)
1778 prologue_end = prev_pc;
1780 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1783 static struct arm_prologue_cache *
1784 arm_make_prologue_cache (struct frame_info *this_frame)
1787 struct arm_prologue_cache *cache;
1788 CORE_ADDR unwound_fp;
1790 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1791 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1793 arm_scan_prologue (this_frame, cache);
1795 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1796 if (unwound_fp == 0)
1799 cache->prev_sp = unwound_fp + cache->framesize;
1801 /* Calculate actual addresses of saved registers using offsets
1802 determined by arm_scan_prologue. */
1803 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1804 if (trad_frame_addr_p (cache->saved_regs, reg))
1805 cache->saved_regs[reg].addr += cache->prev_sp;
1810 /* Implementation of the stop_reason hook for arm_prologue frames. */
1812 static enum unwind_stop_reason
1813 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1816 struct arm_prologue_cache *cache;
1819 if (*this_cache == NULL)
1820 *this_cache = arm_make_prologue_cache (this_frame);
1821 cache = (struct arm_prologue_cache *) *this_cache;
1823 /* This is meant to halt the backtrace at "_start". */
1824 pc = get_frame_pc (this_frame);
1825 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1826 return UNWIND_OUTERMOST;
1828 /* If we've hit a wall, stop. */
1829 if (cache->prev_sp == 0)
1830 return UNWIND_OUTERMOST;
1832 return UNWIND_NO_REASON;
1835 /* Our frame ID for a normal frame is the current function's starting PC
1836 and the caller's SP when we were called. */
1839 arm_prologue_this_id (struct frame_info *this_frame,
1841 struct frame_id *this_id)
1843 struct arm_prologue_cache *cache;
1847 if (*this_cache == NULL)
1848 *this_cache = arm_make_prologue_cache (this_frame);
1849 cache = (struct arm_prologue_cache *) *this_cache;
1851 /* Use function start address as part of the frame ID. If we cannot
1852 identify the start address (due to missing symbol information),
1853 fall back to just using the current PC. */
1854 pc = get_frame_pc (this_frame);
1855 func = get_frame_func (this_frame);
1859 id = frame_id_build (cache->prev_sp, func);
1863 static struct value *
1864 arm_prologue_prev_register (struct frame_info *this_frame,
1868 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1869 struct arm_prologue_cache *cache;
1871 if (*this_cache == NULL)
1872 *this_cache = arm_make_prologue_cache (this_frame);
1873 cache = (struct arm_prologue_cache *) *this_cache;
1875 /* If we are asked to unwind the PC, then we need to return the LR
1876 instead. The prologue may save PC, but it will point into this
1877 frame's prologue, not the next frame's resume location. Also
1878 strip the saved T bit. A valid LR may have the low bit set, but
1879 a valid PC never does. */
1880 if (prev_regnum == ARM_PC_REGNUM)
1884 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1885 return frame_unwind_got_constant (this_frame, prev_regnum,
1886 arm_addr_bits_remove (gdbarch, lr));
1889 /* SP is generally not saved to the stack, but this frame is
1890 identified by the next frame's stack pointer at the time of the call.
1891 The value was already reconstructed into PREV_SP. */
1892 if (prev_regnum == ARM_SP_REGNUM)
1893 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1895 /* The CPSR may have been changed by the call instruction and by the
1896 called function. The only bit we can reconstruct is the T bit,
1897 by checking the low bit of LR as of the call. This is a reliable
1898 indicator of Thumb-ness except for some ARM v4T pre-interworking
1899 Thumb code, which could get away with a clear low bit as long as
1900 the called function did not use bx. Guess that all other
1901 bits are unchanged; the condition flags are presumably lost,
1902 but the processor status is likely valid. */
1903 if (prev_regnum == ARM_PS_REGNUM)
1906 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1908 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1909 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1910 if (IS_THUMB_ADDR (lr))
1914 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1917 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1921 struct frame_unwind arm_prologue_unwind = {
1923 arm_prologue_unwind_stop_reason,
1924 arm_prologue_this_id,
1925 arm_prologue_prev_register,
1927 default_frame_sniffer
1930 /* Maintain a list of ARM exception table entries per objfile, similar to the
1931 list of mapping symbols. We only cache entries for standard ARM-defined
1932 personality routines; the cache will contain only the frame unwinding
1933 instructions associated with the entry (not the descriptors). */
1935 static const struct objfile_data *arm_exidx_data_key;
1937 struct arm_exidx_entry
1942 typedef struct arm_exidx_entry arm_exidx_entry_s;
1943 DEF_VEC_O(arm_exidx_entry_s);
1945 struct arm_exidx_data
1947 VEC(arm_exidx_entry_s) **section_maps;
1951 arm_exidx_data_free (struct objfile *objfile, void *arg)
1953 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1956 for (i = 0; i < objfile->obfd->section_count; i++)
1957 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1961 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1962 const struct arm_exidx_entry *rhs)
1964 return lhs->addr < rhs->addr;
1967 static struct obj_section *
1968 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1970 struct obj_section *osect;
1972 ALL_OBJFILE_OSECTIONS (objfile, osect)
1973 if (bfd_get_section_flags (objfile->obfd,
1974 osect->the_bfd_section) & SEC_ALLOC)
1976 bfd_vma start, size;
1977 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1978 size = bfd_get_section_size (osect->the_bfd_section);
1980 if (start <= vma && vma < start + size)
1987 /* Parse contents of exception table and exception index sections
1988 of OBJFILE, and fill in the exception table entry cache.
1990 For each entry that refers to a standard ARM-defined personality
1991 routine, extract the frame unwinding instructions (from either
1992 the index or the table section).  The unwinding instructions are normalized by:
1994 - extracting them from the rest of the table data
1995 - converting to host endianness
1996 - appending the implicit 0xb0 ("Finish") code
1998 The extracted and normalized instructions are stored for later
1999 retrieval by the arm_find_exidx_entry routine. */
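/* For illustration (a sketch; opcode meanings should be checked against the
   ARM EHABI document): a short-form index entry whose second word is
   0x80a8b0b0 selects compact personality routine 0 with the unwind opcodes
   0xa8 ("pop {r4, r14}"), 0xb0 ("finish"), 0xb0.  After normalization the
   cached entry holds the bytes a8 b0 b0 followed by the implicit trailing
   0xb0 mentioned above.  */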
2002 arm_exidx_new_objfile (struct objfile *objfile)
2004 struct cleanup *cleanups;
2005 struct arm_exidx_data *data;
2006 asection *exidx, *extab;
2007 bfd_vma exidx_vma = 0, extab_vma = 0;
2008 bfd_size_type exidx_size = 0, extab_size = 0;
2009 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2012 /* If we've already touched this file, do nothing. */
2013 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2015 cleanups = make_cleanup (null_cleanup, NULL);
2017 /* Read contents of exception table and index. */
2018 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2021 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2022 exidx_size = bfd_get_section_size (exidx);
2023 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2024 make_cleanup (xfree, exidx_data);
2026 if (!bfd_get_section_contents (objfile->obfd, exidx,
2027 exidx_data, 0, exidx_size))
2029 do_cleanups (cleanups);
2034 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2037 extab_vma = bfd_section_vma (objfile->obfd, extab);
2038 extab_size = bfd_get_section_size (extab);
2039 extab_data = (gdb_byte *) xmalloc (extab_size);
2040 make_cleanup (xfree, extab_data);
2042 if (!bfd_get_section_contents (objfile->obfd, extab,
2043 extab_data, 0, extab_size))
2045 do_cleanups (cleanups);
2050 /* Allocate exception table data structure. */
2051 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2052 set_objfile_data (objfile, arm_exidx_data_key, data);
2053 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2054 objfile->obfd->section_count,
2055 VEC(arm_exidx_entry_s) *);
2057 /* Fill in exception table. */
2058 for (i = 0; i < exidx_size / 8; i++)
2060 struct arm_exidx_entry new_exidx_entry;
2061 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2062 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2063 bfd_vma addr = 0, word = 0;
2064 int n_bytes = 0, n_words = 0;
2065 struct obj_section *sec;
2066 gdb_byte *entry = NULL;
2068 /* Extract address of start of function. */
2069 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2070 idx += exidx_vma + i * 8;
2072 /* Find section containing function and compute section offset. */
2073 sec = arm_obj_section_from_vma (objfile, idx);
2076 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2078 /* Determine address of exception table entry. */
2081 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2083 else if ((val & 0xff000000) == 0x80000000)
2085 /* Exception table entry embedded in .ARM.exidx
2086 -- must be short form. */
2090 else if (!(val & 0x80000000))
2092 /* Exception table entry in .ARM.extab. */
2093 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2094 addr += exidx_vma + i * 8 + 4;
2096 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2098 word = bfd_h_get_32 (objfile->obfd,
2099 extab_data + addr - extab_vma);
2102 if ((word & 0xff000000) == 0x80000000)
2107 else if ((word & 0xff000000) == 0x81000000
2108 || (word & 0xff000000) == 0x82000000)
2112 n_words = ((word >> 16) & 0xff);
2114 else if (!(word & 0x80000000))
2117 struct obj_section *pers_sec;
2118 int gnu_personality = 0;
2120 /* Custom personality routine. */
2121 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2122 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2124 /* Check whether we've got one of the variants of the
2125 GNU personality routines. */
2126 pers_sec = arm_obj_section_from_vma (objfile, pers);
2129 static const char *personality[] =
2131 "__gcc_personality_v0",
2132 "__gxx_personality_v0",
2133 "__gcj_personality_v0",
2134 "__gnu_objc_personality_v0",
2138 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2141 for (k = 0; personality[k]; k++)
2142 if (lookup_minimal_symbol_by_pc_name
2143 (pc, personality[k], objfile))
2145 gnu_personality = 1;
2150 /* If so, the next word contains a word count in the high
2151 byte, followed by the same unwind instructions as the
2152 pre-defined forms. */
2154 && addr + 4 <= extab_vma + extab_size)
2156 word = bfd_h_get_32 (objfile->obfd,
2157 extab_data + addr - extab_vma);
2160 n_words = ((word >> 24) & 0xff);
2166 /* Sanity check address. */
2168 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2169 n_words = n_bytes = 0;
2171 /* The unwind instructions reside in WORD (only the N_BYTES least
2172 significant bytes are valid), followed by N_WORDS words in the
2173 extab section starting at ADDR. */
2174 if (n_bytes || n_words)
2177 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2178 n_bytes + n_words * 4 + 1);
2181 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2185 word = bfd_h_get_32 (objfile->obfd,
2186 extab_data + addr - extab_vma);
2189 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2190 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2191 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2192 *p++ = (gdb_byte) (word & 0xff);
2195 /* Implied "Finish" to terminate the list. */
2199 /* Push the entry onto the vector. Entries are guaranteed to always
2200 appear in order of increasing addresses. */
2201 new_exidx_entry.addr = idx;
2202 new_exidx_entry.entry = entry;
2203 VEC_safe_push (arm_exidx_entry_s,
2204 data->section_maps[sec->the_bfd_section->index],
2208 do_cleanups (cleanups);
2211 /* Search for the exception table entry covering MEMADDR. If one is found,
2212 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2213 set *START to the start of the region covered by this entry. */
2216 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2218 struct obj_section *sec;
2220 sec = find_pc_section (memaddr);
2223 struct arm_exidx_data *data;
2224 VEC(arm_exidx_entry_s) *map;
2225 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2228 data = ((struct arm_exidx_data *)
2229 objfile_data (sec->objfile, arm_exidx_data_key));
2232 map = data->section_maps[sec->the_bfd_section->index];
2233 if (!VEC_empty (arm_exidx_entry_s, map))
2235 struct arm_exidx_entry *map_sym;
2237 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2238 arm_compare_exidx_entries);
2240 /* VEC_lower_bound finds the earliest ordered insertion
2241 point. If the following symbol starts at this exact
2242 address, we use that; otherwise, the preceding
2243 exception table entry covers this address. */
2244 if (idx < VEC_length (arm_exidx_entry_s, map))
2246 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2247 if (map_sym->addr == map_key.addr)
2250 *start = map_sym->addr + obj_section_addr (sec);
2251 return map_sym->entry;
2257 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2259 *start = map_sym->addr + obj_section_addr (sec);
2260 return map_sym->entry;
2269 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2270 instruction list from the ARM exception table entry ENTRY, allocate and
2271 return a prologue cache structure describing how to unwind this frame.
2273 Return NULL if the unwinding instruction list contains a "spare",
2274 "reserved" or "refuse to unwind" instruction as defined in section
2275 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2276 for the ARM Architecture" document. */
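/* Illustrative sketch (hypothetical, not used by GDB): the two simplest
   EHABI opcode ranges just adjust the virtual stack pointer by a small
   immediate.  The helper below mirrors the first two cases decoded in
   arm_exidx_fill_cache.  */

static CORE_ADDR
example_ehabi_adjust_vsp (CORE_ADDR vsp, unsigned char insn)
{
  if ((insn & 0xc0) == 0x00)		/* 00xxxxxx: vsp += (x << 2) + 4.  */
    vsp += ((insn & 0x3f) << 2) + 4;
  else if ((insn & 0xc0) == 0x40)	/* 01xxxxxx: vsp -= (x << 2) + 4.  */
    vsp -= ((insn & 0x3f) << 2) + 4;

  return vsp;
}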
2278 static struct arm_prologue_cache *
2279 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2284 struct arm_prologue_cache *cache;
2285 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2286 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2292 /* Whenever we reload SP, we have to retrieve its actual
2293 value in the current frame. */
2296 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2298 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2299 vsp = get_frame_register_unsigned (this_frame, reg);
2303 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2304 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2310 /* Decode next unwind instruction. */
2313 if ((insn & 0xc0) == 0)
2315 int offset = insn & 0x3f;
2316 vsp += (offset << 2) + 4;
2318 else if ((insn & 0xc0) == 0x40)
2320 int offset = insn & 0x3f;
2321 vsp -= (offset << 2) + 4;
2323 else if ((insn & 0xf0) == 0x80)
2325 int mask = ((insn & 0xf) << 8) | *entry++;
2328 /* The special case of an all-zero mask identifies
2329 "Refuse to unwind". We return NULL to fall back
2330 to the prologue analyzer. */
2334 /* Pop registers r4..r15 under mask. */
2335 for (i = 0; i < 12; i++)
2336 if (mask & (1 << i))
2338 cache->saved_regs[4 + i].addr = vsp;
2342 /* Special-case popping SP -- we need to reload vsp. */
2343 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2346 else if ((insn & 0xf0) == 0x90)
2348 int reg = insn & 0xf;
2350 /* Reserved cases. */
2351 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2354 /* Set SP from another register and mark VSP for reload. */
2355 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2358 else if ((insn & 0xf0) == 0xa0)
2360 int count = insn & 0x7;
2361 int pop_lr = (insn & 0x8) != 0;
2364 /* Pop r4..r[4+count]. */
2365 for (i = 0; i <= count; i++)
2367 cache->saved_regs[4 + i].addr = vsp;
2371 /* If indicated by flag, pop LR as well. */
2374 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2378 else if (insn == 0xb0)
2380 /* We could only have updated PC by popping into it; if so, it
2381 will show up as an address. Otherwise, copy LR into PC. */
2382 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2383 cache->saved_regs[ARM_PC_REGNUM]
2384 = cache->saved_regs[ARM_LR_REGNUM];
2389 else if (insn == 0xb1)
2391 int mask = *entry++;
2394 /* An all-zero mask or a mask >= 16 is "spare". */
2395 if (mask == 0 || mask >= 16)
2398 /* Pop r0..r3 under mask. */
2399 for (i = 0; i < 4; i++)
2400 if (mask & (1 << i))
2402 cache->saved_regs[i].addr = vsp;
2406 else if (insn == 0xb2)
2408 ULONGEST offset = 0;
2413 offset |= (*entry & 0x7f) << shift;
2416 while (*entry++ & 0x80);
2418 vsp += 0x204 + (offset << 2);
2420 else if (insn == 0xb3)
2422 int start = *entry >> 4;
2423 int count = (*entry++) & 0xf;
2426 /* Only registers D0..D15 are valid here. */
2427 if (start + count >= 16)
2430 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2431 for (i = 0; i <= count; i++)
2433 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2437 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2440 else if ((insn & 0xf8) == 0xb8)
2442 int count = insn & 0x7;
2445 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2446 for (i = 0; i <= count; i++)
2448 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2452 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2455 else if (insn == 0xc6)
2457 int start = *entry >> 4;
2458 int count = (*entry++) & 0xf;
2461 /* Only registers WR0..WR15 are valid. */
2462 if (start + count >= 16)
2465 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2466 for (i = 0; i <= count; i++)
2468 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2472 else if (insn == 0xc7)
2474 int mask = *entry++;
2477 /* An all-zero mask or a mask >= 16 is "spare". */
2478 if (mask == 0 || mask >= 16)
2481 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2482 for (i = 0; i < 4; i++)
2483 if (mask & (1 << i))
2485 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2489 else if ((insn & 0xf8) == 0xc0)
2491 int count = insn & 0x7;
2494 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2495 for (i = 0; i <= count; i++)
2497 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2501 else if (insn == 0xc8)
2503 int start = *entry >> 4;
2504 int count = (*entry++) & 0xf;
2507 /* Only registers D0..D31 are valid. */
2508 if (start + count >= 16)
2511 /* Pop VFP double-precision registers
2512 D[16+start]..D[16+start+count]. */
2513 for (i = 0; i <= count; i++)
2515 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2519 else if (insn == 0xc9)
2521 int start = *entry >> 4;
2522 int count = (*entry++) & 0xf;
2525 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2526 for (i = 0; i <= count; i++)
2528 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2532 else if ((insn & 0xf8) == 0xd0)
2534 int count = insn & 0x7;
2537 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2538 for (i = 0; i <= count; i++)
2540 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2546 /* Everything else is "spare". */
2551 /* If we restore SP from a register, assume this was the frame register.
2552 Otherwise just fall back to SP as frame register. */
2553 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2554 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2556 cache->framereg = ARM_SP_REGNUM;
2558 /* Determine offset to previous frame. */
2560 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2562 /* We already got the previous SP. */
2563 cache->prev_sp = vsp;
2568 /* Unwinding via ARM exception table entries. Note that the sniffer
2569 already computes a filled-in prologue cache, which is then used
2570 with the same arm_prologue_this_id and arm_prologue_prev_register
2571 routines also used for prologue-parsing based unwinding. */
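/* Illustrative helpers (hypothetical, not used by GDB): the sniffer below
   also trusts the exception table when the frame is blocked in a system
   call, which it detects by inspecting the preceding instruction.  These
   are the encoding tests it applies.  */

static int
example_thumb_insn_is_svc (unsigned short insn)
{
  return (insn & 0xff00) == 0xdf00;	/* Thumb "svc #imm8".  */
}

static int
example_arm_insn_is_svc (unsigned int insn)
{
  return (insn & 0x0f000000) == 0x0f000000;	/* ARM "svc #imm24".  */
}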
2574 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2575 struct frame_info *this_frame,
2576 void **this_prologue_cache)
2578 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2579 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2580 CORE_ADDR addr_in_block, exidx_region, func_start;
2581 struct arm_prologue_cache *cache;
2584 /* See if we have an ARM exception table entry covering this address. */
2585 addr_in_block = get_frame_address_in_block (this_frame);
2586 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2590 /* The ARM exception table does not describe unwind information
2591 for arbitrary PC values, but is guaranteed to be correct only
2592 at call sites. We have to decide here whether we want to use
2593 ARM exception table information for this frame, or fall back
2594 to using prologue parsing. (Note that if we have DWARF CFI,
2595 this sniffer isn't even called -- CFI is always preferred.)
2597 Before we make this decision, however, we check whether we
2598 actually have *symbol* information for the current frame.
2599 If not, prologue parsing would not work anyway, so we might
2600 as well use the exception table and hope for the best. */
2601 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2605 /* If the next frame is "normal", we are at a call site in this
2606 frame, so exception information is guaranteed to be valid. */
2607 if (get_next_frame (this_frame)
2608 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2611 /* We also assume exception information is valid if we're currently
2612 blocked in a system call. The system library is supposed to
2613 ensure this, so that e.g. pthread cancellation works. */
2614 if (arm_frame_is_thumb (this_frame))
2618 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2619 byte_order_for_code, &insn)
2620 && (insn & 0xff00) == 0xdf00 /* svc */)
2627 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2628 byte_order_for_code, &insn)
2629 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2633 /* Bail out if we don't know that exception information is valid. */
2637 /* The ARM exception index does not mark the *end* of the region
2638 covered by the entry, and some functions will not have any entry.
2639 To correctly recognize the end of the covered region, the linker
2640 should have inserted dummy records with a CANTUNWIND marker.
2642 Unfortunately, current versions of GNU ld do not reliably do
2643 this, and thus we may have found an incorrect entry above.
2644 As a (temporary) sanity check, we only use the entry if it
2645 lies *within* the bounds of the function. Note that this check
2646 might reject perfectly valid entries that just happen to cover
2647 multiple functions; therefore this check ought to be removed
2648 once the linker is fixed. */
2649 if (func_start > exidx_region)
2653 /* Decode the list of unwinding instructions into a prologue cache.
2654 Note that this may fail due to e.g. a "refuse to unwind" code. */
2655 cache = arm_exidx_fill_cache (this_frame, entry);
2659 *this_prologue_cache = cache;
2663 struct frame_unwind arm_exidx_unwind = {
2665 default_frame_unwind_stop_reason,
2666 arm_prologue_this_id,
2667 arm_prologue_prev_register,
2669 arm_exidx_unwind_sniffer
2672 static struct arm_prologue_cache *
2673 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2675 struct arm_prologue_cache *cache;
2678 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2679 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2681 /* Still rely on the offset calculated from the prologue. */
2682 arm_scan_prologue (this_frame, cache);
2684 /* Since we are in the epilogue, the SP has been restored. */
2685 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2687 /* Calculate actual addresses of saved registers using offsets
2688 determined by arm_scan_prologue. */
2689 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2690 if (trad_frame_addr_p (cache->saved_regs, reg))
2691 cache->saved_regs[reg].addr += cache->prev_sp;
2696 /* Implementation of function hook 'this_id' in
2697 'struct frame_unwind' for epilogue unwinder. */
2700 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2702 struct frame_id *this_id)
2704 struct arm_prologue_cache *cache;
2707 if (*this_cache == NULL)
2708 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2709 cache = (struct arm_prologue_cache *) *this_cache;
2711 /* Use function start address as part of the frame ID. If we cannot
2712 identify the start address (due to missing symbol information),
2713 fall back to just using the current PC. */
2714 pc = get_frame_pc (this_frame);
2715 func = get_frame_func (this_frame);
2719 (*this_id) = frame_id_build (cache->prev_sp, pc);
2722 /* Implementation of function hook 'prev_register' in
2723 'struct frame_unwind' for epilogue unwinder. */
2725 static struct value *
2726 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2727 void **this_cache, int regnum)
2729 if (*this_cache == NULL)
2730 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2732 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2735 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2737 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2740 /* Implementation of function hook 'sniffer' in
2741 'struct frame_unwind' for epilogue unwinder. */
2744 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2745 struct frame_info *this_frame,
2746 void **this_prologue_cache)
2748 if (frame_relative_level (this_frame) == 0)
2750 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2751 CORE_ADDR pc = get_frame_pc (this_frame);
2753 if (arm_frame_is_thumb (this_frame))
2754 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2756 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2762 /* Frame unwinder from epilogue. */
2764 static const struct frame_unwind arm_epilogue_frame_unwind =
2767 default_frame_unwind_stop_reason,
2768 arm_epilogue_frame_this_id,
2769 arm_epilogue_frame_prev_register,
2771 arm_epilogue_frame_sniffer,
2774 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2775 trampoline, return the target PC. Otherwise return 0.
2777 void call0a (char c, short s, int i, long l) {}
2781 (*pointer_to_call0a) (c, s, i, l);
2784 Instead of calling a stub library function _call_via_xx (xx is
2785 the register name), GCC may inline the trampoline in the object
2786 file as below (register r2 has the address of call0a).
2789 .type main, %function
2798 The trampoline 'bx r2' doesn't belong to main. */
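/* Illustrative helper (hypothetical, not used by GDB): recognize the
   16-bit Thumb "bx Rm" encoding that the heuristic below looks for and
   extract the register number from bits 3..6, or return -1 if the
   instruction is not "bx Rm".  */

static int
example_thumb_decode_bx_reg (unsigned short insn)
{
  if ((insn & 0xff80) == 0x4700)	/* bx <Rm> */
    return (insn >> 3) & 0xf;

  return -1;
}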
2801 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2803 /* The heuristic for recognizing such a trampoline is that FRAME is
2804 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2805 if (arm_frame_is_thumb (frame))
2809 if (target_read_memory (pc, buf, 2) == 0)
2811 struct gdbarch *gdbarch = get_frame_arch (frame);
2812 enum bfd_endian byte_order_for_code
2813 = gdbarch_byte_order_for_code (gdbarch);
2815 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2817 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2820 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2822 /* Clear the LSB so that gdb core sets step-resume
2823 breakpoint at the right address. */
2824 return UNMAKE_THUMB_ADDR (dest);
2832 static struct arm_prologue_cache *
2833 arm_make_stub_cache (struct frame_info *this_frame)
2835 struct arm_prologue_cache *cache;
2837 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2838 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2840 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2845 /* Our frame ID for a stub frame is the current SP and LR. */
2848 arm_stub_this_id (struct frame_info *this_frame,
2850 struct frame_id *this_id)
2852 struct arm_prologue_cache *cache;
2854 if (*this_cache == NULL)
2855 *this_cache = arm_make_stub_cache (this_frame);
2856 cache = (struct arm_prologue_cache *) *this_cache;
2858 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2862 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2863 struct frame_info *this_frame,
2864 void **this_prologue_cache)
2866 CORE_ADDR addr_in_block;
2868 CORE_ADDR pc, start_addr;
2871 addr_in_block = get_frame_address_in_block (this_frame);
2872 pc = get_frame_pc (this_frame);
2873 if (in_plt_section (addr_in_block)
2874 /* We also use the stub unwinder if the target memory is unreadable,
2875 to avoid having the prologue unwinder try to read it. */
2876 || target_read_memory (pc, dummy, 4) != 0)
2879 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2880 && arm_skip_bx_reg (this_frame, pc) != 0)
2886 struct frame_unwind arm_stub_unwind = {
2888 default_frame_unwind_stop_reason,
2890 arm_prologue_prev_register,
2892 arm_stub_unwind_sniffer
2895 /* Put here the code to store, into CACHE->saved_regs, the addresses
2896 of the saved registers of the frame described by THIS_FRAME. CACHE is
2899 static struct arm_prologue_cache *
2900 arm_m_exception_cache (struct frame_info *this_frame)
2902 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2903 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2904 struct arm_prologue_cache *cache;
2905 CORE_ADDR unwound_sp;
2908 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2909 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2911 unwound_sp = get_frame_register_unsigned (this_frame,
2914 /* The hardware saves eight 32-bit words, comprising xPSR,
2915 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2916 "B1.5.6 Exception entry behavior" in
2917 "ARMv7-M Architecture Reference Manual". */
2918 cache->saved_regs[0].addr = unwound_sp;
2919 cache->saved_regs[1].addr = unwound_sp + 4;
2920 cache->saved_regs[2].addr = unwound_sp + 8;
2921 cache->saved_regs[3].addr = unwound_sp + 12;
2922 cache->saved_regs[12].addr = unwound_sp + 16;
2923 cache->saved_regs[14].addr = unwound_sp + 20;
2924 cache->saved_regs[15].addr = unwound_sp + 24;
2925 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2927 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2928 aligner between the top of the 32-byte stack frame and the
2929 previous context's stack pointer. */
2930 cache->prev_sp = unwound_sp + 32;
2931 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2932 && (xpsr & (1 << 9)) != 0)
2933 cache->prev_sp += 4;
2938 /* Implementation of function hook 'this_id' in
2939 'struct frame_unwind'. */
2942 arm_m_exception_this_id (struct frame_info *this_frame,
2944 struct frame_id *this_id)
2946 struct arm_prologue_cache *cache;
2948 if (*this_cache == NULL)
2949 *this_cache = arm_m_exception_cache (this_frame);
2950 cache = (struct arm_prologue_cache *) *this_cache;
2952 /* Our frame ID for an exception frame is the unwound SP and the current PC. */
2953 *this_id = frame_id_build (cache->prev_sp,
2954 get_frame_pc (this_frame));
2957 /* Implementation of function hook 'prev_register' in
2958 'struct frame_unwind'. */
2960 static struct value *
2961 arm_m_exception_prev_register (struct frame_info *this_frame,
2965 struct arm_prologue_cache *cache;
2967 if (*this_cache == NULL)
2968 *this_cache = arm_m_exception_cache (this_frame);
2969 cache = (struct arm_prologue_cache *) *this_cache;
2971 /* The value was already reconstructed into PREV_SP. */
2972 if (prev_regnum == ARM_SP_REGNUM)
2973 return frame_unwind_got_constant (this_frame, prev_regnum,
2976 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2980 /* Implementation of function hook 'sniffer' in
2981 'struct frame_unwind'. */
2984 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2985 struct frame_info *this_frame,
2986 void **this_prologue_cache)
2988 CORE_ADDR this_pc = get_frame_pc (this_frame);
2990 /* No need to check is_m; this sniffer is only registered for
2991 M-profile architectures. */
2993 /* Exception frames return to one of these magic PCs. Other values
2994 are not defined as of v7-M. See details in "B1.5.8 Exception
2995 return behavior" in "ARMv7-M Architecture Reference Manual". */
2996 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2997 || this_pc == 0xfffffffd)
3003 /* Frame unwinder for M-profile exceptions. */
3005 struct frame_unwind arm_m_exception_unwind =
3008 default_frame_unwind_stop_reason,
3009 arm_m_exception_this_id,
3010 arm_m_exception_prev_register,
3012 arm_m_exception_unwind_sniffer
3016 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3018 struct arm_prologue_cache *cache;
3020 if (*this_cache == NULL)
3021 *this_cache = arm_make_prologue_cache (this_frame);
3022 cache = (struct arm_prologue_cache *) *this_cache;
3024 return cache->prev_sp - cache->framesize;
3027 struct frame_base arm_normal_base = {
3028 &arm_prologue_unwind,
3029 arm_normal_frame_base,
3030 arm_normal_frame_base,
3031 arm_normal_frame_base
3034 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3035 dummy frame. The frame ID's base needs to match the TOS value
3036 saved by save_dummy_frame_tos() and returned from
3037 arm_push_dummy_call, and the PC needs to match the dummy frame's
3040 static struct frame_id
3041 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3043 return frame_id_build (get_frame_register_unsigned (this_frame,
3045 get_frame_pc (this_frame));
3048 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3049 be used to construct the previous frame's ID, after looking up the
3050 containing function). */
3053 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3056 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3057 return arm_addr_bits_remove (gdbarch, pc);
3061 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3063 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3066 static struct value *
3067 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3070 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3072 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3077 /* The PC is normally copied from the return column, which
3078 describes saves of LR. However, that version may have an
3079 extra bit set to indicate Thumb state. The bit is not
3081 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3082 return frame_unwind_got_constant (this_frame, regnum,
3083 arm_addr_bits_remove (gdbarch, lr));
3086 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3087 cpsr = get_frame_register_unsigned (this_frame, regnum);
3088 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3089 if (IS_THUMB_ADDR (lr))
3093 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3096 internal_error (__FILE__, __LINE__,
3097 _("Unexpected register %d"), regnum);
3102 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3103 struct dwarf2_frame_state_reg *reg,
3104 struct frame_info *this_frame)
3110 reg->how = DWARF2_FRAME_REG_FN;
3111 reg->loc.fn = arm_dwarf2_prev_register;
3114 reg->how = DWARF2_FRAME_REG_CFA;
3119 /* Implement the stack_frame_destroyed_p gdbarch method. */
3122 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3124 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3125 unsigned int insn, insn2;
3126 int found_return = 0, found_stack_adjust = 0;
3127 CORE_ADDR func_start, func_end;
3131 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3134 /* The epilogue is a sequence of instructions along the following lines:
3136 - add stack frame size to SP or FP
3137 - [if frame pointer used] restore SP from FP
3138 - restore registers from SP [may include PC]
3139 - a return-type instruction [if PC wasn't already restored]
3141 In a first pass, we scan forward from the current PC and verify the
3142 instructions we find as compatible with this sequence, ending in a
3145 However, this is not sufficient to distinguish indirect function calls
3146 within a function from indirect tail calls in the epilogue in some cases.
3147 Therefore, if we didn't already find any SP-changing instruction during
3148 forward scan, we add a backward scanning heuristic to ensure we actually
3149 are in the epilogue. */
3152 while (scan_pc < func_end && !found_return)
3154 if (target_read_memory (scan_pc, buf, 2))
3158 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3160 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3162 else if (insn == 0x46f7) /* mov pc, lr */
3164 else if (thumb_instruction_restores_sp (insn))
3166 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3169 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3171 if (target_read_memory (scan_pc, buf, 2))
3175 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3177 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3179 if (insn2 & 0x8000) /* <registers> include PC. */
3182 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3183 && (insn2 & 0x0fff) == 0x0b04)
3185 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3188 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3189 && (insn2 & 0x0e00) == 0x0a00)
3201 /* Since any instruction in the epilogue sequence, with the possible
3202 exception of return itself, updates the stack pointer, we need to
3203 scan backwards for at most one instruction. Try either a 16-bit or
3204 a 32-bit instruction. This is just a heuristic, so we do not worry
3205 too much about false positives. */
3207 if (pc - 4 < func_start)
3209 if (target_read_memory (pc - 4, buf, 4))
3212 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3213 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3215 if (thumb_instruction_restores_sp (insn2))
3216 found_stack_adjust = 1;
3217 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3218 found_stack_adjust = 1;
3219 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3220 && (insn2 & 0x0fff) == 0x0b04)
3221 found_stack_adjust = 1;
3222 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3223 && (insn2 & 0x0e00) == 0x0a00)
3224 found_stack_adjust = 1;
3226 return found_stack_adjust;
3230 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3232 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3235 CORE_ADDR func_start, func_end;
3237 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3240 /* We are in the epilogue if the previous instruction was a stack
3241 adjustment and the next instruction is a possible return (bx, mov
3242 pc, or pop). We could have to scan backwards to find the stack
3243 adjustment, or forwards to find the return, but this is a decent
3244 approximation. First scan forwards. */
3247 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3248 if (bits (insn, 28, 31) != INST_NV)
3250 if ((insn & 0x0ffffff0) == 0x012fff10)
3253 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3256 else if ((insn & 0x0fff0000) == 0x08bd0000
3257 && (insn & 0x0000c000) != 0)
3258 /* POP (LDMIA), including PC or LR. */
3265 /* Scan backwards. This is just a heuristic, so do not worry about
3266 false positives from mode changes. */
3268 if (pc < func_start + 4)
3271 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3272 if (arm_instruction_restores_sp (insn))
3278 /* Implement the stack_frame_destroyed_p gdbarch method. */
3281 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3283 if (arm_pc_is_thumb (gdbarch, pc))
3284 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3286 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3289 /* When arguments must be pushed onto the stack, they go on in reverse
3290 order. The code below implements a FILO (stack) to do this. */
3295 struct stack_item *prev;
3299 static struct stack_item *
3300 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3302 struct stack_item *si;
3303 si = XNEW (struct stack_item);
3304 si->data = (gdb_byte *) xmalloc (len);
3307 memcpy (si->data, contents, len);
3311 static struct stack_item *
3312 pop_stack_item (struct stack_item *si)
3314 struct stack_item *dead = si;
3322 /* Return the alignment (in bytes) of the given type. */
3325 arm_type_align (struct type *t)
3331 t = check_typedef (t);
3332 switch (TYPE_CODE (t))
3335 /* Should never happen. */
3336 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3340 case TYPE_CODE_ENUM:
3344 case TYPE_CODE_RANGE:
3346 case TYPE_CODE_CHAR:
3347 case TYPE_CODE_BOOL:
3348 return TYPE_LENGTH (t);
3350 case TYPE_CODE_ARRAY:
3351 if (TYPE_VECTOR (t))
3353 /* Use the natural alignment for vector types (the same as for
3354 scalar types), but the maximum alignment is 64 bits. */
3355 if (TYPE_LENGTH (t) > 8)
3358 return TYPE_LENGTH (t);
3361 return arm_type_align (TYPE_TARGET_TYPE (t));
3362 case TYPE_CODE_COMPLEX:
3363 return arm_type_align (TYPE_TARGET_TYPE (t));
3365 case TYPE_CODE_STRUCT:
3366 case TYPE_CODE_UNION:
3368 for (n = 0; n < TYPE_NFIELDS (t); n++)
3370 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3378 /* Possible base types for a candidate for passing and returning in
3381 enum arm_vfp_cprc_base_type
3390 /* The length of one element of base type B. */
3393 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3397 case VFP_CPRC_SINGLE:
3399 case VFP_CPRC_DOUBLE:
3401 case VFP_CPRC_VEC64:
3403 case VFP_CPRC_VEC128:
3406 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3411 /* The character ('s', 'd' or 'q') for the type of VFP register used
3412 for passing base type B. */
3415 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3419 case VFP_CPRC_SINGLE:
3421 case VFP_CPRC_DOUBLE:
3423 case VFP_CPRC_VEC64:
3425 case VFP_CPRC_VEC128:
3428 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3433 /* Determine whether T may be part of a candidate for passing and
3434 returning in VFP registers, ignoring the limit on the total number
3435 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3436 classification of the first valid component found; if it is not
3437 VFP_CPRC_UNKNOWN, all components must have the same classification
3438 as *BASE_TYPE. If it is found that T contains a type not permitted
3439 for passing and returning in VFP registers, a type differently
3440 classified from *BASE_TYPE, or two types differently classified
3441 from each other, return -1, otherwise return the total number of
3442 base-type elements found (possibly 0 in an empty structure or
3443 array). Vector types are not currently supported, matching the
3444 generic AAPCS support. */
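/* Illustrative example (hypothetical, not used by GDB): under the rules
   above, a homogeneous aggregate such as the structure below classifies
   as two elements of base type VFP_CPRC_DOUBLE, so with the VFP ABI in
   effect it is passed and returned in d0/d1 rather than in core
   registers.  */

struct example_vfp_cprc_candidate
{
  double re;
  double im;
};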
3447 arm_vfp_cprc_sub_candidate (struct type *t,
3448 enum arm_vfp_cprc_base_type *base_type)
3450 t = check_typedef (t);
3451 switch (TYPE_CODE (t))
3454 switch (TYPE_LENGTH (t))
3457 if (*base_type == VFP_CPRC_UNKNOWN)
3458 *base_type = VFP_CPRC_SINGLE;
3459 else if (*base_type != VFP_CPRC_SINGLE)
3464 if (*base_type == VFP_CPRC_UNKNOWN)
3465 *base_type = VFP_CPRC_DOUBLE;
3466 else if (*base_type != VFP_CPRC_DOUBLE)
3475 case TYPE_CODE_COMPLEX:
3476 /* Arguments of complex T where T is one of the types float or
3477 double get treated as if they are implemented as:
3486 switch (TYPE_LENGTH (t))
3489 if (*base_type == VFP_CPRC_UNKNOWN)
3490 *base_type = VFP_CPRC_SINGLE;
3491 else if (*base_type != VFP_CPRC_SINGLE)
3496 if (*base_type == VFP_CPRC_UNKNOWN)
3497 *base_type = VFP_CPRC_DOUBLE;
3498 else if (*base_type != VFP_CPRC_DOUBLE)
3507 case TYPE_CODE_ARRAY:
3509 if (TYPE_VECTOR (t))
3511 /* 64-bit and 128-bit containerized vector types are VFP
3513 switch (TYPE_LENGTH (t))
3516 if (*base_type == VFP_CPRC_UNKNOWN)
3517 *base_type = VFP_CPRC_VEC64;
3520 if (*base_type == VFP_CPRC_UNKNOWN)
3521 *base_type = VFP_CPRC_VEC128;
3532 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3536 if (TYPE_LENGTH (t) == 0)
3538 gdb_assert (count == 0);
3541 else if (count == 0)
3543 unitlen = arm_vfp_cprc_unit_length (*base_type);
3544 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3545 return TYPE_LENGTH (t) / unitlen;
3550 case TYPE_CODE_STRUCT:
3555 for (i = 0; i < TYPE_NFIELDS (t); i++)
3557 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3559 if (sub_count == -1)
3563 if (TYPE_LENGTH (t) == 0)
3565 gdb_assert (count == 0);
3568 else if (count == 0)
3570 unitlen = arm_vfp_cprc_unit_length (*base_type);
3571 if (TYPE_LENGTH (t) != unitlen * count)
3576 case TYPE_CODE_UNION:
3581 for (i = 0; i < TYPE_NFIELDS (t); i++)
3583 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3585 if (sub_count == -1)
3587 count = (count > sub_count ? count : sub_count);
3589 if (TYPE_LENGTH (t) == 0)
3591 gdb_assert (count == 0);
3594 else if (count == 0)
3596 unitlen = arm_vfp_cprc_unit_length (*base_type);
3597 if (TYPE_LENGTH (t) != unitlen * count)
3609 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3610 if passed to or returned from a non-variadic function with the VFP
3611 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3612 *BASE_TYPE to the base type for T and *COUNT to the number of
3613 elements of that base type before returning. */
3616 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3619 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3620 int c = arm_vfp_cprc_sub_candidate (t, &b);
3621 if (c <= 0 || c > 4)
3628 /* Return 1 if the VFP ABI should be used for passing arguments to and
3629 returning values from a function of type FUNC_TYPE, 0
3633 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3635 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3636 /* Variadic functions always use the base ABI. Assume that functions
3637 without debug info are not variadic. */
3638 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3640 /* The VFP ABI is only supported as a variant of AAPCS. */
3641 if (tdep->arm_abi != ARM_ABI_AAPCS)
3643 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3646 /* We currently only support passing parameters in integer registers, which
3647 conforms with GCC's default model, and VFP argument passing following
3648 the VFP variant of AAPCS. Several other variants exist and
3649 we should probably support some of them based on the selected ABI. */
3652 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3653 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3654 struct value **args, CORE_ADDR sp, int struct_return,
3655 CORE_ADDR struct_addr)
3657 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3661 struct stack_item *si = NULL;
3664 unsigned vfp_regs_free = (1 << 16) - 1;
3666 /* Determine the type of this function and whether the VFP ABI
3668 ftype = check_typedef (value_type (function));
3669 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3670 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3671 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3673 /* Set the return address. For the ARM, the return breakpoint is
3674 always at BP_ADDR. */
3675 if (arm_pc_is_thumb (gdbarch, bp_addr))
3677 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3679 /* Walk through the list of args and determine how large a temporary
3680 stack is required. Need to take care here as structs may be
3681 passed on the stack, and we have to push them. */
3684 argreg = ARM_A1_REGNUM;
3687 /* The struct_return pointer occupies the first parameter
3688 passing register. */
3692 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3693 gdbarch_register_name (gdbarch, argreg),
3694 paddress (gdbarch, struct_addr));
3695 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3699 for (argnum = 0; argnum < nargs; argnum++)
3702 struct type *arg_type;
3703 struct type *target_type;
3704 enum type_code typecode;
3705 const bfd_byte *val;
3707 enum arm_vfp_cprc_base_type vfp_base_type;
3709 int may_use_core_reg = 1;
3711 arg_type = check_typedef (value_type (args[argnum]));
3712 len = TYPE_LENGTH (arg_type);
3713 target_type = TYPE_TARGET_TYPE (arg_type);
3714 typecode = TYPE_CODE (arg_type);
3715 val = value_contents (args[argnum]);
3717 align = arm_type_align (arg_type);
3718 /* Round alignment up to a whole number of words. */
3719 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3720 /* Different ABIs have different maximum alignments. */
3721 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3723 /* The APCS ABI only requires word alignment. */
3724 align = INT_REGISTER_SIZE;
3728 /* The AAPCS requires at most doubleword alignment. */
3729 if (align > INT_REGISTER_SIZE * 2)
3730 align = INT_REGISTER_SIZE * 2;
3734 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3742 /* Because this is a CPRC it cannot go in a core register or
3743 cause a core register to be skipped for alignment.
3744 Either it goes in VFP registers and the rest of this loop
3745 iteration is skipped for this argument, or it goes on the
3746 stack (and the stack alignment code is correct for this
3748 may_use_core_reg = 0;
3750 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3751 shift = unit_length / 4;
3752 mask = (1 << (shift * vfp_base_count)) - 1;
3753 for (regno = 0; regno < 16; regno += shift)
3754 if (((vfp_regs_free >> regno) & mask) == mask)
3763 vfp_regs_free &= ~(mask << regno);
3764 reg_scaled = regno / shift;
3765 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3766 for (i = 0; i < vfp_base_count; i++)
3770 if (reg_char == 'q')
3771 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3772 val + i * unit_length);
3775 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3776 reg_char, reg_scaled + i);
3777 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3779 regcache_cooked_write (regcache, regnum,
3780 val + i * unit_length);
3787 /* This CPRC could not go in VFP registers, so all VFP
3788 registers are now marked as used. */
3793 /* Push stack padding for doubleword alignment. */
3794 if (nstack & (align - 1))
3796 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3797 nstack += INT_REGISTER_SIZE;
3800 /* Doubleword aligned quantities must go in even register pairs. */
3801 if (may_use_core_reg
3802 && argreg <= ARM_LAST_ARG_REGNUM
3803 && align > INT_REGISTER_SIZE
3807 /* If the argument is a pointer to a function, and it is a
3808 Thumb function, create a LOCAL copy of the value and set
3809 the THUMB bit in it. */
3810 if (TYPE_CODE_PTR == typecode
3811 && target_type != NULL
3812 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3814 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3815 if (arm_pc_is_thumb (gdbarch, regval))
3817 bfd_byte *copy = (bfd_byte *) alloca (len);
3818 store_unsigned_integer (copy, len, byte_order,
3819 MAKE_THUMB_ADDR (regval));
3824 /* Copy the argument to general registers or the stack in
3825 register-sized pieces. Large arguments are split between
3826 registers and stack. */
3829 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3831 = extract_unsigned_integer (val, partial_len, byte_order);
3833 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3835 /* The argument is being passed in a general purpose
3837 if (byte_order == BFD_ENDIAN_BIG)
3838 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3840 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3842 gdbarch_register_name
3844 phex (regval, INT_REGISTER_SIZE));
3845 regcache_cooked_write_unsigned (regcache, argreg, regval);
3850 gdb_byte buf[INT_REGISTER_SIZE];
3852 memset (buf, 0, sizeof (buf));
3853 store_unsigned_integer (buf, partial_len, byte_order, regval);
3855 /* Push the arguments onto the stack. */
3857 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3859 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3860 nstack += INT_REGISTER_SIZE;
3867 /* If we have an odd number of words to push, then decrement the stack
3868 by one word now, so that the first stack argument will be doubleword aligned. */
3875 write_memory (sp, si->data, si->len);
3876 si = pop_stack_item (si);
3879 /* Finally, update the SP register. */
3880 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3886 /* Always align the frame to an 8-byte boundary. This is required on
3887 some platforms and harmless on the rest. */
3890 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3892 /* Align the stack to eight bytes. */
3893 return sp & ~ (CORE_ADDR) 7;
3897 print_fpu_flags (struct ui_file *file, int flags)
3899 if (flags & (1 << 0))
3900 fputs_filtered ("IVO ", file);
3901 if (flags & (1 << 1))
3902 fputs_filtered ("DVZ ", file);
3903 if (flags & (1 << 2))
3904 fputs_filtered ("OFL ", file);
3905 if (flags & (1 << 3))
3906 fputs_filtered ("UFL ", file);
3907 if (flags & (1 << 4))
3908 fputs_filtered ("INX ", file);
3909 fputc_filtered ('\n', file);
3912 /* Print interesting information about the floating point processor
3913 (if present) or emulator. */
3915 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3916 struct frame_info *frame, const char *args)
3918 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3921 type = (status >> 24) & 127;
3922 if (status & (1 << 31))
3923 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3925 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3926 /* i18n: [floating point unit] mask */
3927 fputs_filtered (_("mask: "), file);
3928 print_fpu_flags (file, status >> 16);
3929 /* i18n: [floating point unit] flags */
3930 fputs_filtered (_("flags: "), file);
3931 print_fpu_flags (file, status);
3934 /* Construct the ARM extended floating point type. */
3935 static struct type *
3936 arm_ext_type (struct gdbarch *gdbarch)
3938 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3940 if (!tdep->arm_ext_type)
3942 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3943 floatformats_arm_ext);
3945 return tdep->arm_ext_type;
3948 static struct type *
3949 arm_neon_double_type (struct gdbarch *gdbarch)
3951 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3953 if (tdep->neon_double_type == NULL)
3955 struct type *t, *elem;
3957 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3959 elem = builtin_type (gdbarch)->builtin_uint8;
3960 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3961 elem = builtin_type (gdbarch)->builtin_uint16;
3962 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3963 elem = builtin_type (gdbarch)->builtin_uint32;
3964 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3965 elem = builtin_type (gdbarch)->builtin_uint64;
3966 append_composite_type_field (t, "u64", elem);
3967 elem = builtin_type (gdbarch)->builtin_float;
3968 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3969 elem = builtin_type (gdbarch)->builtin_double;
3970 append_composite_type_field (t, "f64", elem);
3972 TYPE_VECTOR (t) = 1;
3973 TYPE_NAME (t) = "neon_d";
3974 tdep->neon_double_type = t;
3977 return tdep->neon_double_type;
3980 /* FIXME: The vector types are not correctly ordered on big-endian
3981 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3982 bits of d0 - regardless of what unit size is being held in d0. So
3983 the offset of the first uint8 in d0 is 7, but the offset of the
3984 first float is 4. This code works as-is for little-endian
3987 static struct type *
3988 arm_neon_quad_type (struct gdbarch *gdbarch)
3990 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3992 if (tdep->neon_quad_type == NULL)
3994 struct type *t, *elem;
3996 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3998 elem = builtin_type (gdbarch)->builtin_uint8;
3999 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4000 elem = builtin_type (gdbarch)->builtin_uint16;
4001 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4002 elem = builtin_type (gdbarch)->builtin_uint32;
4003 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4004 elem = builtin_type (gdbarch)->builtin_uint64;
4005 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4006 elem = builtin_type (gdbarch)->builtin_float;
4007 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4008 elem = builtin_type (gdbarch)->builtin_double;
4009 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4011 TYPE_VECTOR (t) = 1;
4012 TYPE_NAME (t) = "neon_q";
4013 tdep->neon_quad_type = t;
4016 return tdep->neon_quad_type;
4019 /* Return the GDB type object for the "standard" data type of data in
4022 static struct type *
4023 arm_register_type (struct gdbarch *gdbarch, int regnum)
4025 int num_regs = gdbarch_num_regs (gdbarch);
4027 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4028 && regnum >= num_regs && regnum < num_regs + 32)
4029 return builtin_type (gdbarch)->builtin_float;
4031 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4032 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4033 return arm_neon_quad_type (gdbarch);
4035 /* If the target description has register information, we are only
4036 in this function so that we can override the types of
4037 double-precision registers for NEON. */
4038 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4040 struct type *t = tdesc_register_type (gdbarch, regnum);
4042 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4043 && TYPE_CODE (t) == TYPE_CODE_FLT
4044 && gdbarch_tdep (gdbarch)->have_neon)
4045 return arm_neon_double_type (gdbarch);
4050 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4052 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4053 return builtin_type (gdbarch)->builtin_void;
4055 return arm_ext_type (gdbarch);
4057 else if (regnum == ARM_SP_REGNUM)
4058 return builtin_type (gdbarch)->builtin_data_ptr;
4059 else if (regnum == ARM_PC_REGNUM)
4060 return builtin_type (gdbarch)->builtin_func_ptr;
4061 else if (regnum >= ARRAY_SIZE (arm_register_names))
4062 /* These registers are only supported on targets which supply
4063 an XML description. */
4064 return builtin_type (gdbarch)->builtin_int0;
4066 return builtin_type (gdbarch)->builtin_uint32;
4069 /* Map a DWARF register REGNUM onto the appropriate GDB register
4073 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4075 /* Core integer regs. */
4076 if (reg >= 0 && reg <= 15)
4079 /* Legacy FPA encoding. These were once used in a way which
4080 overlapped with VFP register numbering, so their use is
4081 discouraged, but GDB doesn't support the ARM toolchain
4082 which used them for VFP. */
4083 if (reg >= 16 && reg <= 23)
4084 return ARM_F0_REGNUM + reg - 16;
4086 /* New assignments for the FPA registers. */
4087 if (reg >= 96 && reg <= 103)
4088 return ARM_F0_REGNUM + reg - 96;
4090 /* WMMX register assignments. */
4091 if (reg >= 104 && reg <= 111)
4092 return ARM_WCGR0_REGNUM + reg - 104;
4094 if (reg >= 112 && reg <= 127)
4095 return ARM_WR0_REGNUM + reg - 112;
4097 if (reg >= 192 && reg <= 199)
4098 return ARM_WC0_REGNUM + reg - 192;
4100 /* VFP v2 registers. A double precision value is actually
4101 in d1 rather than s2, but the ABI only defines numbering
4102 for the single precision registers. This will "just work"
4103 in GDB for little endian targets (we'll read eight bytes,
4104 starting in s0 and then progressing to s1), but will be
4105 reversed on big endian targets with VFP. This won't
4106 be a problem for the new Neon quad registers; you're supposed
4107 to use DW_OP_piece for those. */
4108 if (reg >= 64 && reg <= 95)
4112 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4113 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4117 /* VFP v3 / Neon registers. This range is also used for VFP v2
4118 registers, except that it now describes d0 instead of s0. */
4119 if (reg >= 256 && reg <= 287)
4123 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4124 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4131 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4133 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4136 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4138 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4139 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4141 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4142 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4144 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4145 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4147 if (reg < NUM_GREGS)
4148 return SIM_ARM_R0_REGNUM + reg;
4151 if (reg < NUM_FREGS)
4152 return SIM_ARM_FP0_REGNUM + reg;
4155 if (reg < NUM_SREGS)
4156 return SIM_ARM_FPS_REGNUM + reg;
4159 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4162 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4163 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4164 It is thought that this is the floating-point register format on
4165 little-endian systems. */
4168 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4169 void *dbl, int endianess)
4173 if (endianess == BFD_ENDIAN_BIG)
4174 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4176 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4178 floatformat_from_doublest (fmt, &d, dbl);
4182 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4187 floatformat_to_doublest (fmt, ptr, &d);
4188 if (endianess == BFD_ENDIAN_BIG)
4189 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4191 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4195 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4196 of the appropriate mode (as encoded in the PC value), even if this
4197 differs from what would be expected according to the symbol tables. */
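/* Illustrative sketch (an assumption made explicit, not a GDB API): the
   mode is carried in bit 0 of the PC value passed in -- set for Thumb,
   clear for ARM -- which is what the IS_THUMB_ADDR test in the function
   below relies on.  */

static int
example_pc_encodes_thumb (CORE_ADDR pc)
{
  return (pc & 1) != 0;
}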
4200 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4201 struct address_space *aspace,
4204 struct cleanup *old_chain
4205 = make_cleanup_restore_integer (&arm_override_mode);
4207 arm_override_mode = IS_THUMB_ADDR (pc);
4208 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4210 insert_single_step_breakpoint (gdbarch, aspace, pc);
4212 do_cleanups (old_chain);
4215 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4216 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4217 NULL if an error occurs. BUF is freed. */
4220 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4221 int old_len, int new_len)
4224 int bytes_to_read = new_len - old_len;
4226 new_buf = (gdb_byte *) xmalloc (new_len);
4227 memcpy (new_buf + bytes_to_read, buf, old_len);
4229 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4237 /* An IT block is at most the 2-byte IT instruction followed by
4238 four 4-byte instructions. The furthest back we must search to
4239 find an IT block that affects the current instruction is thus
4240 2 + 3 * 4 == 14 bytes. */
4241 #define MAX_IT_BLOCK_PREFIX 14
4243 /* Use a quick scan if there are more than this many bytes of
4245 #define IT_SCAN_THRESHOLD 32
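/* Illustrative helper (hypothetical, not used by GDB): the scans below
   look for candidate IT instructions with this encoding test -- the high
   byte is 0xbf and the low four bits (the condition mask) are
   non-zero.  */

static int
example_thumb_insn_is_it (unsigned short insn)
{
  return (insn & 0xff00) == 0xbf00 && (insn & 0x000f) != 0;
}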
4247 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4248 A breakpoint in an IT block may not be hit, depending on the
4251 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4255 CORE_ADDR boundary, func_start;
4257 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4258 int i, any, last_it, last_it_count;
4260 /* If we are using BKPT breakpoints, none of this is necessary. */
4261 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4264 /* ARM mode does not have this problem. */
4265 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4268 /* We are setting a breakpoint in Thumb code that could potentially
4269 contain an IT block. The first step is to find how much Thumb
4270 code there is; we do not need to read outside of known Thumb
4272 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4274 /* Thumb-2 code must have mapping symbols to have a chance. */
4277 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4279 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4280 && func_start > boundary)
4281 boundary = func_start;
4283 /* Search for a candidate IT instruction. We have to do some fancy
4284 footwork to distinguish a real IT instruction from the second
4285 half of a 32-bit instruction, but there is no need for that if
4286 there's no candidate. */
4287 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4289 /* No room for an IT instruction. */
4292 buf = (gdb_byte *) xmalloc (buf_len);
4293 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4296 for (i = 0; i < buf_len; i += 2)
4298 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4299 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4312 /* OK, the code bytes before this instruction contain at least one
4313 halfword which resembles an IT instruction. We know that it's
4314 Thumb code, but there are still two possibilities. Either the
4315 halfword really is an IT instruction, or it is the second half of
4316 a 32-bit Thumb instruction. The only way we can tell is to
4317 scan forwards from a known instruction boundary. */
4318 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4322 /* There's a lot of code before this instruction. Start with an
4323 optimistic search; it's easy to recognize halfwords that can
4324 not be the start of a 32-bit instruction, and use that to
4325 lock on to the instruction boundaries. */
4326 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4329 buf_len = IT_SCAN_THRESHOLD;
4332 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4334 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4335 if (thumb_insn_size (inst1) == 2)
4342 /* At this point, if DEFINITE, BUF[I] is the first place we
4343 are sure that we know the instruction boundaries, and it is far
4344 enough from BPADDR that we could not miss an IT instruction
4345 affecting BPADDR. If ! DEFINITE, give up - start from a
4346 known boundary. */
4349 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4353 buf_len = bpaddr - boundary;
4359 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4362 buf_len = bpaddr - boundary;
4366 /* Scan forwards. Find the last IT instruction before BPADDR. */
4371 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4373 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4378 else if (inst1 & 0x0002)
4380 else if (inst1 & 0x0004)
4385 i += thumb_insn_size (inst1);
4391 /* There wasn't really an IT instruction after all. */
4394 if (last_it_count < 1)
4395 /* It was too far away. */
4398 /* This really is a trouble spot. Move the breakpoint to the IT
4399 instruction. */
4400 return bpaddr - buf_len + last_it;
4403 /* ARM displaced stepping support.
4405 Generally ARM displaced stepping works as follows:
4407 1. When an instruction is to be single-stepped, it is first decoded by
4408 arm_process_displaced_insn. Depending on the type of instruction, it is
4409 then copied to a scratch location, possibly in a modified form. The
4410 copy_* set of functions performs such modification, as necessary. A
4411 breakpoint is placed after the modified instruction in the scratch space
4412 to return control to GDB. Note in particular that instructions which
4413 modify the PC will no longer do so after modification.
4415 2. The instruction is single-stepped, by setting the PC to the scratch
4416 location address, and resuming. Control returns to GDB when the
4417 breakpoint is hit.
4419 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4420 function used for the current instruction. This function's job is to
4421 put the CPU/memory state back to what it would have been if the
4422 instruction had been executed unmodified in its original location. */
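/* As a rough sketch of that contract (hypothetical names; the real pairs
follow below): a copy_foo routine saves the scratch registers it plans to
use in dsc->tmp[], substitutes those scratch registers for any PC operands
in the instruction it places in dsc->modinsn[], and records &cleanup_foo
in dsc->cleanup; after the single-step, cleanup_foo reads the result back
out of the scratch registers, restores them from dsc->tmp[], and performs
whatever PC/register writes the original instruction would have done, via
displaced_write_reg. */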
4424 /* NOP instruction (mov r0, r0). */
4425 #define ARM_NOP 0xe1a00000
4426 #define THUMB_NOP 0x4600
4428 /* Helper for register reads for displaced stepping. In particular, this
4429 returns the PC as it would be seen by the instruction at its original
4430 location. */
4433 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4437 CORE_ADDR from = dsc->insn_addr;
4439 if (regno == ARM_PC_REGNUM)
4441 /* Compute pipeline offset:
4442 - When executing an ARM instruction, PC reads as the address of the
4443 current instruction plus 8.
4444 - When executing a Thumb instruction, PC reads as the address of the
4445 current instruction plus 4. */
4447 if (dsc->is_thumb)
4448 from += 4;
4449 else
4450 from += 8;
4452 if (debug_displaced)
4453 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4454 (unsigned long) from);
4455 return (ULONGEST) from;
4459 regcache_cooked_read_unsigned (regs, regno, &ret);
4460 if (debug_displaced)
4461 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4462 regno, (unsigned long) ret);
4468 displaced_in_arm_mode (struct regcache *regs)
4471 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4473 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4475 return (ps & t_bit) == 0;
4478 /* Write to the PC as from a branch instruction. */
4481 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4485 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4486 architecture versions < 6. */
4487 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4488 val & ~(ULONGEST) 0x3);
4490 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4491 val & ~(ULONGEST) 0x1);
4494 /* Write to the PC as from a branch-exchange instruction. */
4497 bx_write_pc (struct regcache *regs, ULONGEST val)
4500 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4502 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4506 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4507 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4509 else if ((val & 2) == 0)
4511 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4512 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4516 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4517 mode, align dest to 4 bytes). */
4518 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4519 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4520 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
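/* To summarise the cases above: a VAL of 0x8001 selects Thumb mode with a
PC of 0x8000, a VAL of 0x8000 selects ARM mode with the PC unchanged, and
only a VAL with bits 1:0 equal to 0b10 reaches this fall-back path. */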
4524 /* Write to the PC as if from a load instruction. */
4527 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4530 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4531 bx_write_pc (regs, val);
4533 branch_write_pc (regs, dsc, val);
4536 /* Write to the PC as if from an ALU instruction. */
4539 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4542 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4543 bx_write_pc (regs, val);
4545 branch_write_pc (regs, dsc, val);
4548 /* Helper for writing to registers for displaced stepping. Writing to the PC
4549 has varying effects depending on the instruction which does the write:
4550 this is controlled by the WRITE_PC argument. */
4553 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4554 int regno, ULONGEST val, enum pc_write_style write_pc)
4556 if (regno == ARM_PC_REGNUM)
4558 if (debug_displaced)
4559 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4560 (unsigned long) val);
4563 case BRANCH_WRITE_PC:
4564 branch_write_pc (regs, dsc, val);
4568 bx_write_pc (regs, val);
4572 load_write_pc (regs, dsc, val);
4576 alu_write_pc (regs, dsc, val);
4579 case CANNOT_WRITE_PC:
4580 warning (_("Instruction wrote to PC in an unexpected way when "
4581 "single-stepping"));
4585 internal_error (__FILE__, __LINE__,
4586 _("Invalid argument to displaced_write_reg"));
4589 dsc->wrote_to_pc = 1;
4593 if (debug_displaced)
4594 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4595 regno, (unsigned long) val);
4596 regcache_cooked_write_unsigned (regs, regno, val);
4600 /* This function is used to concisely determine if an instruction INSN
4601 references PC. Register fields of interest in INSN should have the
4602 corresponding fields of BITMASK set to 0b1111. The function
4603 returns 1 if any of these fields in INSN reference the PC
4604 (also 0b1111, r15), else it returns 0. */
4607 insn_references_pc (uint32_t insn, uint32_t bitmask)
4609 uint32_t lowbit = 1;
4611 while (bitmask != 0)
4615 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4621 mask = lowbit * 0xf;
4623 if ((insn & mask) == mask)
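/* For instance (an illustrative use, taken from the ALU-immediate copier
below): arm_copy_alu_imm keeps Rn in bits 16-19 and Rd in bits 12-15, so
it calls insn_references_pc (insn, 0x000ff000ul), which is non-zero
exactly when either of those fields is 0b1111 (r15). */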
4632 /* The simplest copy function. Many instructions have the same effect no
4633 matter what address they are executed at: in those cases, use this. */
4636 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4637 const char *iname, struct displaced_step_closure *dsc)
4639 if (debug_displaced)
4640 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4641 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4644 dsc->modinsn[0] = insn;
4650 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4651 uint16_t insn2, const char *iname,
4652 struct displaced_step_closure *dsc)
4654 if (debug_displaced)
4655 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4656 "opcode/class '%s' unmodified\n", insn1, insn2,
4659 dsc->modinsn[0] = insn1;
4660 dsc->modinsn[1] = insn2;
4666 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4667 modification. */
4669 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4671 struct displaced_step_closure *dsc)
4673 if (debug_displaced)
4674 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4675 "opcode/class '%s' unmodified\n", insn,
4678 dsc->modinsn[0] = insn;
4683 /* Preload instructions with immediate offset. */
4686 cleanup_preload (struct gdbarch *gdbarch,
4687 struct regcache *regs, struct displaced_step_closure *dsc)
4689 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4690 if (!dsc->u.preload.immed)
4691 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4695 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4696 struct displaced_step_closure *dsc, unsigned int rn)
4699 /* Preload instructions:
4701 {pli/pld} [rn, #+/-imm]
4703 {pli/pld} [r0, #+/-imm]. */
4705 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4706 rn_val = displaced_read_reg (regs, dsc, rn);
4707 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4708 dsc->u.preload.immed = 1;
4710 dsc->cleanup = &cleanup_preload;
4714 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4715 struct displaced_step_closure *dsc)
4717 unsigned int rn = bits (insn, 16, 19);
4719 if (!insn_references_pc (insn, 0x000f0000ul))
4720 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4722 if (debug_displaced)
4723 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4724 (unsigned long) insn);
4726 dsc->modinsn[0] = insn & 0xfff0ffff;
4728 install_preload (gdbarch, regs, dsc, rn);
4734 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4735 struct regcache *regs, struct displaced_step_closure *dsc)
4737 unsigned int rn = bits (insn1, 0, 3);
4738 unsigned int u_bit = bit (insn1, 7);
4739 int imm12 = bits (insn2, 0, 11);
4742 if (rn != ARM_PC_REGNUM)
4743 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4745 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
4746 PLD (literal) Encoding T1. */
4747 if (debug_displaced)
4748 fprintf_unfiltered (gdb_stdlog,
4749 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4750 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4756 /* Rewrite instruction {pli/pld} PC imm12 into:
4757 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4761 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4763 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4764 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4766 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4768 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4769 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4770 dsc->u.preload.immed = 0;
4772 /* {pli/pld} [r0, r1] */
4773 dsc->modinsn[0] = insn1 & 0xfff0;
4774 dsc->modinsn[1] = 0xf001;
4777 dsc->cleanup = &cleanup_preload;
4781 /* Preload instructions with register offset. */
4784 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4785 struct displaced_step_closure *dsc, unsigned int rn,
4788 ULONGEST rn_val, rm_val;
4790 /* Preload register-offset instructions:
4792 {pli/pld} [rn, rm {, shift}]
4794 {pli/pld} [r0, r1 {, shift}]. */
4796 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4797 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4798 rn_val = displaced_read_reg (regs, dsc, rn);
4799 rm_val = displaced_read_reg (regs, dsc, rm);
4800 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4801 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4802 dsc->u.preload.immed = 0;
4804 dsc->cleanup = &cleanup_preload;
4808 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4809 struct regcache *regs,
4810 struct displaced_step_closure *dsc)
4812 unsigned int rn = bits (insn, 16, 19);
4813 unsigned int rm = bits (insn, 0, 3);
4816 if (!insn_references_pc (insn, 0x000f000ful))
4817 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4819 if (debug_displaced)
4820 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4821 (unsigned long) insn);
4823 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4825 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4829 /* Copy/cleanup coprocessor load and store instructions. */
4832 cleanup_copro_load_store (struct gdbarch *gdbarch,
4833 struct regcache *regs,
4834 struct displaced_step_closure *dsc)
4836 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4838 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4840 if (dsc->u.ldst.writeback)
4841 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4845 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4846 struct displaced_step_closure *dsc,
4847 int writeback, unsigned int rn)
4851 /* Coprocessor load/store instructions:
4853 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4855 {stc/stc2} [r0, #+/-imm].
4857 ldc/ldc2 are handled identically. */
4859 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4860 rn_val = displaced_read_reg (regs, dsc, rn);
4861 /* PC should be 4-byte aligned. */
4862 rn_val = rn_val & 0xfffffffc;
4863 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4865 dsc->u.ldst.writeback = writeback;
4866 dsc->u.ldst.rn = rn;
4868 dsc->cleanup = &cleanup_copro_load_store;
4872 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4873 struct regcache *regs,
4874 struct displaced_step_closure *dsc)
4876 unsigned int rn = bits (insn, 16, 19);
4878 if (!insn_references_pc (insn, 0x000f0000ul))
4879 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4881 if (debug_displaced)
4882 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4883 "load/store insn %.8lx\n", (unsigned long) insn);
4885 dsc->modinsn[0] = insn & 0xfff0ffff;
4887 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4893 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4894 uint16_t insn2, struct regcache *regs,
4895 struct displaced_step_closure *dsc)
4897 unsigned int rn = bits (insn1, 0, 3);
4899 if (rn != ARM_PC_REGNUM)
4900 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4901 "copro load/store", dsc);
4903 if (debug_displaced)
4904 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4905 "load/store insn %.4x%.4x\n", insn1, insn2);
4907 dsc->modinsn[0] = insn1 & 0xfff0;
4908 dsc->modinsn[1] = insn2;
4911 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4912 don't support writeback, so pass 0. */
4913 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4918 /* Clean up branch instructions (actually perform the branch, by setting
4919 PC). */
4922 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4923 struct displaced_step_closure *dsc)
4925 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4926 int branch_taken = condition_true (dsc->u.branch.cond, status);
4927 enum pc_write_style write_pc = dsc->u.branch.exchange
4928 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4933 if (dsc->u.branch.link)
4935 /* The value of LR should be the address of the next insn after the
4936 current one. In order not to confuse logic handling a later `bx lr'
4937 insn, if the current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4938 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4941 next_insn_addr |= 0x1;
4943 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4947 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4950 /* Copy B/BL/BLX instructions with immediate destinations. */
4953 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4954 struct displaced_step_closure *dsc,
4955 unsigned int cond, int exchange, int link, long offset)
4957 /* Implement "BL<cond> <label>" as:
4959 Preparation: cond <- instruction condition
4960 Insn: mov r0, r0 (nop)
4961 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4963 B<cond> similar, but don't set r14 in cleanup. */
4965 dsc->u.branch.cond = cond;
4966 dsc->u.branch.link = link;
4967 dsc->u.branch.exchange = exchange;
4969 dsc->u.branch.dest = dsc->insn_addr;
4970 if (link && exchange)
4971 /* For BLX, the offset is computed from Align (PC, 4). */
4972 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4974 if (dsc->is_thumb)
4975 dsc->u.branch.dest += 4 + offset;
4976 else
4977 dsc->u.branch.dest += 8 + offset;
4979 dsc->cleanup = &cleanup_branch;
4982 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4983 struct regcache *regs, struct displaced_step_closure *dsc)
4985 unsigned int cond = bits (insn, 28, 31);
4986 int exchange = (cond == 0xf);
4987 int link = exchange || bit (insn, 24);
4990 if (debug_displaced)
4991 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4992 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4993 (unsigned long) insn);
4995 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4996 then arrange the switch into Thumb mode. */
4997 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4999 offset = bits (insn, 0, 23) << 2;
5001 if (bit (offset, 25))
5002 offset = offset | ~0x3ffffff;
5004 dsc->modinsn[0] = ARM_NOP;
5006 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
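/* Worked decode (illustrative): for 0xebfffffe, i.e. "bl ." branching to
itself, bits 0-23 are 0xfffffe, so offset starts as 0x03fffff8; bit 25 is
set, so the sign extension gives offset == -8, and the cleanup ends up
with dest = insn_addr + 8 - 8 == insn_addr, as expected. */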
5011 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5012 uint16_t insn2, struct regcache *regs,
5013 struct displaced_step_closure *dsc)
5015 int link = bit (insn2, 14);
5016 int exchange = link && !bit (insn2, 12);
5019 int j1 = bit (insn2, 13);
5020 int j2 = bit (insn2, 11);
5021 int s = sbits (insn1, 10, 10);
5022 int i1 = !(j1 ^ bit (insn1, 10));
5023 int i2 = !(j2 ^ bit (insn1, 10));
5025 if (!link && !exchange) /* B */
5027 offset = (bits (insn2, 0, 10) << 1);
5028 if (bit (insn2, 12)) /* Encoding T4 */
5030 offset |= (bits (insn1, 0, 9) << 12)
5036 else /* Encoding T3 */
5038 offset |= (bits (insn1, 0, 5) << 12)
5042 cond = bits (insn1, 6, 9);
5047 offset = (bits (insn1, 0, 9) << 12);
5048 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5049 offset |= exchange ?
5050 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5053 if (debug_displaced)
5054 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5055 "%.4x %.4x with offset %.8lx\n",
5056 link ? (exchange) ? "blx" : "bl" : "b",
5057 insn1, insn2, offset);
5059 dsc->modinsn[0] = THUMB_NOP;
5061 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5065 /* Copy B Thumb instructions. */
5067 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5068 struct displaced_step_closure *dsc)
5070 unsigned int cond = 0;
5072 unsigned short bit_12_15 = bits (insn, 12, 15);
5073 CORE_ADDR from = dsc->insn_addr;
5075 if (bit_12_15 == 0xd)
5077 /* offset = SignExtend (imm8:0, 32) */
5078 offset = sbits ((insn << 1), 0, 8);
5079 cond = bits (insn, 8, 11);
5081 else if (bit_12_15 == 0xe) /* Encoding T2 */
5083 offset = sbits ((insn << 1), 0, 11);
5087 if (debug_displaced)
5088 fprintf_unfiltered (gdb_stdlog,
5089 "displaced: copying b immediate insn %.4x "
5090 "with offset %d\n", insn, offset);
5092 dsc->u.branch.cond = cond;
5093 dsc->u.branch.link = 0;
5094 dsc->u.branch.exchange = 0;
5095 dsc->u.branch.dest = from + 4 + offset;
5097 dsc->modinsn[0] = THUMB_NOP;
5099 dsc->cleanup = &cleanup_branch;
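/* Worked decode (illustrative): 0xd0fe is "beq ." (Encoding T1); bits
12-15 are 0xd, cond is 0 (EQ), and sbits ((insn << 1), 0, 8) gives -4, so
dest = from + 4 - 4 == from, i.e. the branch targets itself. */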
5104 /* Copy BX/BLX with register-specified destinations. */
5107 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5108 struct displaced_step_closure *dsc, int link,
5109 unsigned int cond, unsigned int rm)
5111 /* Implement "{BX,BLX}<cond> <reg>" as:
5113 Preparation: cond <- instruction condition
5114 Insn: mov r0, r0 (nop)
5115 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5117 Don't set r14 in cleanup for BX. */
5119 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5121 dsc->u.branch.cond = cond;
5122 dsc->u.branch.link = link;
5124 dsc->u.branch.exchange = 1;
5126 dsc->cleanup = &cleanup_branch;
5130 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5131 struct regcache *regs, struct displaced_step_closure *dsc)
5133 unsigned int cond = bits (insn, 28, 31);
5136 int link = bit (insn, 5);
5137 unsigned int rm = bits (insn, 0, 3);
5139 if (debug_displaced)
5140 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5141 (unsigned long) insn);
5143 dsc->modinsn[0] = ARM_NOP;
5145 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5150 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5151 struct regcache *regs,
5152 struct displaced_step_closure *dsc)
5154 int link = bit (insn, 7);
5155 unsigned int rm = bits (insn, 3, 6);
5157 if (debug_displaced)
5158 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5159 (unsigned short) insn);
5161 dsc->modinsn[0] = THUMB_NOP;
5163 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5169 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5172 cleanup_alu_imm (struct gdbarch *gdbarch,
5173 struct regcache *regs, struct displaced_step_closure *dsc)
5175 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5176 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5177 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5178 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5182 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5183 struct displaced_step_closure *dsc)
5185 unsigned int rn = bits (insn, 16, 19);
5186 unsigned int rd = bits (insn, 12, 15);
5187 unsigned int op = bits (insn, 21, 24);
5188 int is_mov = (op == 0xd);
5189 ULONGEST rd_val, rn_val;
5191 if (!insn_references_pc (insn, 0x000ff000ul))
5192 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5194 if (debug_displaced)
5195 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5196 "%.8lx\n", is_mov ? "move" : "ALU",
5197 (unsigned long) insn);
5199 /* Instruction is of form:
5201 <op><cond> rd, [rn,] #imm
5205 Preparation: tmp1, tmp2 <- r0, r1;
5207 Insn: <op><cond> r0, r1, #imm
5208 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5211 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5212 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5213 rn_val = displaced_read_reg (regs, dsc, rn);
5214 rd_val = displaced_read_reg (regs, dsc, rd);
5215 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5216 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5220 dsc->modinsn[0] = insn & 0xfff00fff;
5222 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5224 dsc->cleanup = &cleanup_alu_imm;
5230 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5231 uint16_t insn2, struct regcache *regs,
5232 struct displaced_step_closure *dsc)
5234 unsigned int op = bits (insn1, 5, 8);
5235 unsigned int rn, rm, rd;
5236 ULONGEST rd_val, rn_val;
5238 rn = bits (insn1, 0, 3); /* Rn */
5239 rm = bits (insn2, 0, 3); /* Rm */
5240 rd = bits (insn2, 8, 11); /* Rd */
5242 /* This routine is only called for the MOV instruction. */
5243 gdb_assert (op == 0x2 && rn == 0xf);
5245 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5246 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5248 if (debug_displaced)
5249 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5250 "ALU", insn1, insn2);
5252 /* Instruction is of form:
5254 <op><cond> rd, [rn,] #imm
5258 Preparation: tmp1, tmp2 <- r0, r1;
5260 Insn: <op><cond> r0, r1, #imm
5261 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5264 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5265 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5266 rn_val = displaced_read_reg (regs, dsc, rn);
5267 rd_val = displaced_read_reg (regs, dsc, rd);
5268 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5269 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5272 dsc->modinsn[0] = insn1;
5273 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5276 dsc->cleanup = &cleanup_alu_imm;
5281 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5284 cleanup_alu_reg (struct gdbarch *gdbarch,
5285 struct regcache *regs, struct displaced_step_closure *dsc)
5290 rd_val = displaced_read_reg (regs, dsc, 0);
5292 for (i = 0; i < 3; i++)
5293 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5295 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5299 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5300 struct displaced_step_closure *dsc,
5301 unsigned int rd, unsigned int rn, unsigned int rm)
5303 ULONGEST rd_val, rn_val, rm_val;
5305 /* Instruction is of form:
5307 <op><cond> rd, [rn,] rm [, <shift>]
5311 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5312 r0, r1, r2 <- rd, rn, rm
5313 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5314 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5317 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5318 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5319 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5320 rd_val = displaced_read_reg (regs, dsc, rd);
5321 rn_val = displaced_read_reg (regs, dsc, rn);
5322 rm_val = displaced_read_reg (regs, dsc, rm);
5323 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5328 dsc->cleanup = &cleanup_alu_reg;
5332 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5333 struct displaced_step_closure *dsc)
5335 unsigned int op = bits (insn, 21, 24);
5336 int is_mov = (op == 0xd);
5338 if (!insn_references_pc (insn, 0x000ff00ful))
5339 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5341 if (debug_displaced)
5342 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5343 is_mov ? "move" : "ALU", (unsigned long) insn);
5346 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5348 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5350 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5356 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5357 struct regcache *regs,
5358 struct displaced_step_closure *dsc)
5362 rm = bits (insn, 3, 6);
5363 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5365 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5366 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5368 if (debug_displaced)
5369 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5370 (unsigned short) insn);
5372 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
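/* An illustrative reading of the line above: clearing bits 0-2 and bit 7
makes r0 the destination, and the 0x10 puts 0x2 in the Rm field (bits
3-6), selecting r2 as the source, which matches the r0/r2 substitution set
up by install_alu_reg below. */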
5374 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5379 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5382 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5383 struct regcache *regs,
5384 struct displaced_step_closure *dsc)
5386 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5389 for (i = 0; i < 4; i++)
5390 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5392 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5396 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5397 struct displaced_step_closure *dsc,
5398 unsigned int rd, unsigned int rn, unsigned int rm,
5402 ULONGEST rd_val, rn_val, rm_val, rs_val;
5404 /* Instruction is of form:
5406 <op><cond> rd, [rn,] rm, <shift> rs
5410 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5411 r0, r1, r2, r3 <- rd, rn, rm, rs
5412 Insn: <op><cond> r0, r1, r2, <shift> r3
5414 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5418 for (i = 0; i < 4; i++)
5419 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5421 rd_val = displaced_read_reg (regs, dsc, rd);
5422 rn_val = displaced_read_reg (regs, dsc, rn);
5423 rm_val = displaced_read_reg (regs, dsc, rm);
5424 rs_val = displaced_read_reg (regs, dsc, rs);
5425 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5426 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5427 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5428 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5430 dsc->cleanup = &cleanup_alu_shifted_reg;
5434 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5435 struct regcache *regs,
5436 struct displaced_step_closure *dsc)
5438 unsigned int op = bits (insn, 21, 24);
5439 int is_mov = (op == 0xd);
5440 unsigned int rd, rn, rm, rs;
5442 if (!insn_references_pc (insn, 0x000fff0ful))
5443 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5445 if (debug_displaced)
5446 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5447 "%.8lx\n", is_mov ? "move" : "ALU",
5448 (unsigned long) insn);
5450 rn = bits (insn, 16, 19);
5451 rm = bits (insn, 0, 3);
5452 rs = bits (insn, 8, 11);
5453 rd = bits (insn, 12, 15);
5456 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5458 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5460 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5465 /* Clean up load instructions. */
5468 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5469 struct displaced_step_closure *dsc)
5471 ULONGEST rt_val, rt_val2 = 0, rn_val;
5473 rt_val = displaced_read_reg (regs, dsc, 0);
5474 if (dsc->u.ldst.xfersize == 8)
5475 rt_val2 = displaced_read_reg (regs, dsc, 1);
5476 rn_val = displaced_read_reg (regs, dsc, 2);
5478 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5479 if (dsc->u.ldst.xfersize > 4)
5480 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5481 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5482 if (!dsc->u.ldst.immed)
5483 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5485 /* Handle register writeback. */
5486 if (dsc->u.ldst.writeback)
5487 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5488 /* Put result in right place. */
5489 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5490 if (dsc->u.ldst.xfersize == 8)
5491 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5494 /* Clean up store instructions. */
5497 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5498 struct displaced_step_closure *dsc)
5500 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5502 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5503 if (dsc->u.ldst.xfersize > 4)
5504 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5505 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5506 if (!dsc->u.ldst.immed)
5507 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5508 if (!dsc->u.ldst.restore_r4)
5509 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5512 if (dsc->u.ldst.writeback)
5513 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5516 /* Copy "extra" load/store instructions. These are halfword/doubleword
5517 transfers, which have a different encoding to byte/word transfers. */
5520 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5521 struct regcache *regs, struct displaced_step_closure *dsc)
5523 unsigned int op1 = bits (insn, 20, 24);
5524 unsigned int op2 = bits (insn, 5, 6);
5525 unsigned int rt = bits (insn, 12, 15);
5526 unsigned int rn = bits (insn, 16, 19);
5527 unsigned int rm = bits (insn, 0, 3);
5528 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5529 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5530 int immed = (op1 & 0x4) != 0;
5532 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5534 if (!insn_references_pc (insn, 0x000ff00ful))
5535 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5537 if (debug_displaced)
5538 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5539 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5540 (unsigned long) insn);
5542 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5545 internal_error (__FILE__, __LINE__,
5546 _("copy_extra_ld_st: instruction decode error"));
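/* Worked example (illustrative): an LDRH (immediate) has op1 bit 0 set
(load), op1 bit 2 set (immediate form) and op2 == 0b01, so
opcode = ((0b01 << 2) | 1 | (0b100 >> 1)) - 4 == 3, and the tables above
give load[3] == 1 and bytesize[3] == 2, i.e. a two-byte load. */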
5548 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5549 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5550 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5552 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5554 rt_val = displaced_read_reg (regs, dsc, rt);
5555 if (bytesize[opcode] == 8)
5556 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5557 rn_val = displaced_read_reg (regs, dsc, rn);
5559 rm_val = displaced_read_reg (regs, dsc, rm);
5561 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5562 if (bytesize[opcode] == 8)
5563 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5564 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5566 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5569 dsc->u.ldst.xfersize = bytesize[opcode];
5570 dsc->u.ldst.rn = rn;
5571 dsc->u.ldst.immed = immed;
5572 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5573 dsc->u.ldst.restore_r4 = 0;
5576 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5578 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5579 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5581 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5583 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5584 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5586 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5591 /* Copy byte/half word/word loads and stores. */
5594 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5595 struct displaced_step_closure *dsc, int load,
5596 int immed, int writeback, int size, int usermode,
5597 int rt, int rm, int rn)
5599 ULONGEST rt_val, rn_val, rm_val = 0;
5601 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5602 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5604 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5606 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5608 rt_val = displaced_read_reg (regs, dsc, rt);
5609 rn_val = displaced_read_reg (regs, dsc, rn);
5611 rm_val = displaced_read_reg (regs, dsc, rm);
5613 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5614 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5616 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5618 dsc->u.ldst.xfersize = size;
5619 dsc->u.ldst.rn = rn;
5620 dsc->u.ldst.immed = immed;
5621 dsc->u.ldst.writeback = writeback;
5623 /* To write PC we can do:
5625 Before this sequence of instructions:
5626 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5627 r2 is the Rn value obtained from displaced_read_reg.
5629 Insn1: push {pc} Write address of STR instruction + offset on stack
5630 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5631 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5632 = addr(Insn1) + offset - addr(Insn3) - 8
5634 Insn4: add r4, r4, #8 r4 = offset - 8
5635 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5637 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5639 Otherwise we don't know what value to write for PC, since the offset is
5640 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5641 of this can be found in Section "Saving from r15" in
5642 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
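/* Illustrative arithmetic for the sequence above, assuming a core that
stores PC+12 for STR (offset == 12) and an original instruction at
from == 0x8000: the value that should be stored is 0x800c. r0 starts out
as from + 8 == 0x8008, Insn1-Insn4 leave r4 == offset - 8 == 4, and Insn5
therefore makes r0 == 0x800c, which Insn6 stores. */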
5644 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5649 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5650 uint16_t insn2, struct regcache *regs,
5651 struct displaced_step_closure *dsc, int size)
5653 unsigned int u_bit = bit (insn1, 7);
5654 unsigned int rt = bits (insn2, 12, 15);
5655 int imm12 = bits (insn2, 0, 11);
5658 if (debug_displaced)
5659 fprintf_unfiltered (gdb_stdlog,
5660 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5661 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5667 /* Rewrite instruction LDR Rt imm12 into:
5669 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5673 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5676 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5677 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5678 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5680 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5682 pc_val = pc_val & 0xfffffffc;
5684 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5685 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5689 dsc->u.ldst.xfersize = size;
5690 dsc->u.ldst.immed = 0;
5691 dsc->u.ldst.writeback = 0;
5692 dsc->u.ldst.restore_r4 = 0;
5694 /* LDR R0, R2, R3 */
5695 dsc->modinsn[0] = 0xf852;
5696 dsc->modinsn[1] = 0x3;
5699 dsc->cleanup = &cleanup_load;
5705 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5706 uint16_t insn2, struct regcache *regs,
5707 struct displaced_step_closure *dsc,
5708 int writeback, int immed)
5710 unsigned int rt = bits (insn2, 12, 15);
5711 unsigned int rn = bits (insn1, 0, 3);
5712 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5713 /* In LDR (register), there is also a register Rm, which is not allowed to
5714 be PC, so we don't have to check it. */
5716 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5717 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5720 if (debug_displaced)
5721 fprintf_unfiltered (gdb_stdlog,
5722 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5723 rt, rn, insn1, insn2);
5725 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5728 dsc->u.ldst.restore_r4 = 0;
5731 /* ldr[b]<cond> rt, [rn, #imm], etc.
5733 ldr[b]<cond> r0, [r2, #imm]. */
5735 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5736 dsc->modinsn[1] = insn2 & 0x0fff;
5739 /* ldr[b]<cond> rt, [rn, rm], etc.
5741 ldr[b]<cond> r0, [r2, r3]. */
5743 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5744 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5754 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5755 struct regcache *regs,
5756 struct displaced_step_closure *dsc,
5757 int load, int size, int usermode)
5759 int immed = !bit (insn, 25);
5760 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5761 unsigned int rt = bits (insn, 12, 15);
5762 unsigned int rn = bits (insn, 16, 19);
5763 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5765 if (!insn_references_pc (insn, 0x000ff00ful))
5766 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5768 if (debug_displaced)
5769 fprintf_unfiltered (gdb_stdlog,
5770 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5771 load ? (size == 1 ? "ldrb" : "ldr")
5772 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5774 (unsigned long) insn);
5776 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5777 usermode, rt, rm, rn);
5779 if (load || rt != ARM_PC_REGNUM)
5781 dsc->u.ldst.restore_r4 = 0;
5784 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5786 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5787 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5789 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5791 {ldr,str}[b]<cond> r0, [r2, r3]. */
5792 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5796 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5797 dsc->u.ldst.restore_r4 = 1;
5798 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5799 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5800 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5801 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5802 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5806 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5808 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5813 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5818 /* Cleanup LDM instructions with fully-populated register list. This is an
5819 unfortunate corner case: it's impossible to implement correctly by modifying
5820 the instruction. The issue is as follows: we have an instruction,
5822 ldm rN, {...pc}
5824 which we must rewrite to avoid loading PC. A possible solution would be to
5825 do the load in two halves, something like (with suitable cleanup
5826 afterwards):
5828 mov r8, rN
5829 ldm[id][ab] r8!, {r0-r7}
5830 str r7, <temp>
5831 ldm[id][ab] r8, {r7-r14}
5832 <bkpt>
5834 but at present there's no suitable place for <temp>, since the scratch space
5835 is overwritten before the cleanup routine is called. For now, we simply
5836 emulate the instruction. */
5839 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5840 struct displaced_step_closure *dsc)
5842 int inc = dsc->u.block.increment;
5843 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5844 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5845 uint32_t regmask = dsc->u.block.regmask;
5846 int regno = inc ? 0 : 15;
5847 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5848 int exception_return = dsc->u.block.load && dsc->u.block.user
5849 && (regmask & 0x8000) != 0;
5850 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5851 int do_transfer = condition_true (dsc->u.block.cond, status);
5852 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5857 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5858 sensible we can do here. Complain loudly. */
5859 if (exception_return)
5860 error (_("Cannot single-step exception return"));
5862 /* We don't handle any stores here for now. */
5863 gdb_assert (dsc->u.block.load != 0);
5865 if (debug_displaced)
5866 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5867 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5868 dsc->u.block.increment ? "inc" : "dec",
5869 dsc->u.block.before ? "before" : "after");
5876 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5879 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5882 xfer_addr += bump_before;
5884 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5885 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5887 xfer_addr += bump_after;
5889 regmask &= ~(1 << regno);
5892 if (dsc->u.block.writeback)
5893 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5897 /* Clean up an STM which included the PC in the register list. */
5900 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5901 struct displaced_step_closure *dsc)
5903 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5904 int store_executed = condition_true (dsc->u.block.cond, status);
5905 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5906 CORE_ADDR stm_insn_addr;
5909 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5911 /* If condition code fails, there's nothing else to do. */
5912 if (!store_executed)
5915 if (dsc->u.block.increment)
5917 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5919 if (dsc->u.block.before)
5924 pc_stored_at = dsc->u.block.xfer_addr;
5926 if (dsc->u.block.before)
5930 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5931 stm_insn_addr = dsc->scratch_base;
5932 offset = pc_val - stm_insn_addr;
5934 if (debug_displaced)
5935 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5936 "STM instruction\n", offset);
5938 /* Rewrite the stored PC to the proper value for the non-displaced original
5939 instruction. */
5940 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5941 dsc->insn_addr + offset);
5944 /* Clean up an LDM which includes the PC in the register list. We clumped all
5945 the registers in the transferred list into a contiguous range r0...rX (to
5946 avoid loading PC directly and losing control of the debugged program), so we
5947 must undo that here. */
5950 cleanup_block_load_pc (struct gdbarch *gdbarch,
5951 struct regcache *regs,
5952 struct displaced_step_closure *dsc)
5954 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5955 int load_executed = condition_true (dsc->u.block.cond, status);
5956 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5957 unsigned int regs_loaded = bitcount (mask);
5958 unsigned int num_to_shuffle = regs_loaded, clobbered;
5960 /* The method employed here will fail if the register list is fully populated
5961 (we need to avoid loading PC directly). */
5962 gdb_assert (num_to_shuffle < 16);
5967 clobbered = (1 << num_to_shuffle) - 1;
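/* Illustrative example: for "ldm r7, {r1, r3, pc}" the displaced copy
loaded three values into r0-r2, so the loop below moves r2 into the PC,
r1 into r3 and r0 into r1, and the loop after it restores whichever of
r0-r2 were not themselves destinations (here r0 and r2) from
dsc->tmp[]. */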
5969 while (num_to_shuffle > 0)
5971 if ((mask & (1 << write_reg)) != 0)
5973 unsigned int read_reg = num_to_shuffle - 1;
5975 if (read_reg != write_reg)
5977 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5978 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5979 if (debug_displaced)
5980 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5981 "loaded register r%d to r%d\n"), read_reg,
5984 else if (debug_displaced)
5985 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5986 "r%d already in the right place\n"),
5989 clobbered &= ~(1 << write_reg);
5997 /* Restore any registers we scribbled over. */
5998 for (write_reg = 0; clobbered != 0; write_reg++)
6000 if ((clobbered & (1 << write_reg)) != 0)
6002 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6004 if (debug_displaced)
6005 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6006 "clobbered register r%d\n"), write_reg);
6007 clobbered &= ~(1 << write_reg);
6011 /* Perform register writeback manually. */
6012 if (dsc->u.block.writeback)
6014 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6016 if (dsc->u.block.increment)
6017 new_rn_val += regs_loaded * 4;
6019 new_rn_val -= regs_loaded * 4;
6021 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6026 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6027 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6030 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6031 struct regcache *regs,
6032 struct displaced_step_closure *dsc)
6034 int load = bit (insn, 20);
6035 int user = bit (insn, 22);
6036 int increment = bit (insn, 23);
6037 int before = bit (insn, 24);
6038 int writeback = bit (insn, 21);
6039 int rn = bits (insn, 16, 19);
6041 /* Block transfers which don't mention PC can be run directly
6042 out-of-line. */
6043 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6044 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6046 if (rn == ARM_PC_REGNUM)
6048 warning (_("displaced: Unpredictable LDM or STM with "
6049 "base register r15"));
6050 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6053 if (debug_displaced)
6054 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6055 "%.8lx\n", (unsigned long) insn);
6057 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6058 dsc->u.block.rn = rn;
6060 dsc->u.block.load = load;
6061 dsc->u.block.user = user;
6062 dsc->u.block.increment = increment;
6063 dsc->u.block.before = before;
6064 dsc->u.block.writeback = writeback;
6065 dsc->u.block.cond = bits (insn, 28, 31);
6067 dsc->u.block.regmask = insn & 0xffff;
6071 if ((insn & 0xffff) == 0xffff)
6073 /* LDM with a fully-populated register list. This case is
6074 particularly tricky. Implement for now by fully emulating the
6075 instruction (which might not behave perfectly in all cases, but
6076 these instructions should be rare enough for that not to matter
6077 in practice). */
6078 dsc->modinsn[0] = ARM_NOP;
6080 dsc->cleanup = &cleanup_block_load_all;
6084 /* LDM of a list of registers which includes PC. Implement by
6085 rewriting the list of registers to be transferred into a
6086 contiguous chunk r0...rX before doing the transfer, then shuffling
6087 registers into the correct places in the cleanup routine. */
6088 unsigned int regmask = insn & 0xffff;
6089 unsigned int num_in_list = bitcount (regmask), new_regmask;
6092 for (i = 0; i < num_in_list; i++)
6093 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6095 /* Writeback makes things complicated. We need to avoid clobbering
6096 the base register with one of the registers in our modified
6097 register list, but just using a different register can't work in
6098 all cases, e.g.:
6100 ldm r14!, {r0-r13,pc}
6102 which would need to be rewritten as:
6104 ldm rN!, {r0-r14}
6106 but that can't work, because there's no free register for N.
6108 Solve this by turning off the writeback bit, and emulating
6109 writeback manually in the cleanup routine. */
6114 new_regmask = (1 << num_in_list) - 1;
6116 if (debug_displaced)
6117 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6118 "{..., pc}: original reg list %.4x, modified "
6119 "list %.4x\n"), rn, writeback ? "!" : "",
6120 (int) insn & 0xffff, new_regmask);
6122 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6124 dsc->cleanup = &cleanup_block_load_pc;
6129 /* STM of a list of registers which includes PC. Run the instruction
6130 as-is, but out of line: this will store the wrong value for the PC,
6131 so we must manually fix up the memory in the cleanup routine.
6132 Doing things this way has the advantage that we can auto-detect
6133 the offset of the PC write (which is architecture-dependent) in
6134 the cleanup routine. */
6135 dsc->modinsn[0] = insn;
6137 dsc->cleanup = &cleanup_block_store_pc;
6144 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6145 struct regcache *regs,
6146 struct displaced_step_closure *dsc)
6148 int rn = bits (insn1, 0, 3);
6149 int load = bit (insn1, 4);
6150 int writeback = bit (insn1, 5);
6152 /* Block transfers which don't mention PC can be run directly
6153 out-of-line. */
6154 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6157 if (rn == ARM_PC_REGNUM)
6159 warning (_("displaced: Unpredictable LDM or STM with "
6160 "base register r15"));
6161 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6162 "unpredictable ldm/stm", dsc);
6165 if (debug_displaced)
6166 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6167 "%.4x%.4x\n", insn1, insn2);
6169 /* Clear bit 13, since it should always be zero. */
6170 dsc->u.block.regmask = (insn2 & 0xdfff);
6171 dsc->u.block.rn = rn;
6173 dsc->u.block.load = load;
6174 dsc->u.block.user = 0;
6175 dsc->u.block.increment = bit (insn1, 7);
6176 dsc->u.block.before = bit (insn1, 8);
6177 dsc->u.block.writeback = writeback;
6178 dsc->u.block.cond = INST_AL;
6179 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6183 if (dsc->u.block.regmask == 0xffff)
6185 /* This cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
6190 unsigned int regmask = dsc->u.block.regmask;
6191 unsigned int num_in_list = bitcount (regmask), new_regmask;
6194 for (i = 0; i < num_in_list; i++)
6195 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6200 new_regmask = (1 << num_in_list) - 1;
6202 if (debug_displaced)
6203 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6204 "{..., pc}: original reg list %.4x, modified "
6205 "list %.4x\n"), rn, writeback ? "!" : "",
6206 (int) dsc->u.block.regmask, new_regmask);
6208 dsc->modinsn[0] = insn1;
6209 dsc->modinsn[1] = (new_regmask & 0xffff);
6212 dsc->cleanup = &cleanup_block_load_pc;
6217 dsc->modinsn[0] = insn1;
6218 dsc->modinsn[1] = insn2;
6220 dsc->cleanup = &cleanup_block_store_pc;
6225 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6226 This is used to avoid a dependency on BFD's bfd_endian enum. */
6229 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6232 return read_memory_unsigned_integer (memaddr, len,
6233 (enum bfd_endian) byte_order);
6236 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6239 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6242 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6245 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6248 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6253 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6256 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6258 return arm_is_thumb (self->regcache);
6261 /* single_step() is called just before we want to resume the inferior,
6262 if we want to single-step it but there is no hardware or kernel
6263 single-step support. We find the target of the coming instructions
6264 and breakpoint them. */
6267 arm_software_single_step (struct frame_info *frame)
6269 struct regcache *regcache = get_current_regcache ();
6270 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6271 struct address_space *aspace = get_regcache_aspace (regcache);
6272 struct arm_get_next_pcs next_pcs_ctx;
6275 VEC (CORE_ADDR) *next_pcs = NULL;
6276 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6278 arm_get_next_pcs_ctor (&next_pcs_ctx,
6279 &arm_get_next_pcs_ops,
6280 gdbarch_byte_order (gdbarch),
6281 gdbarch_byte_order_for_code (gdbarch),
6285 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6287 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6288 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6290 do_cleanups (old_chain);
6295 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6296 for Linux, where some SVC instructions must be treated specially. */
6299 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6300 struct displaced_step_closure *dsc)
6302 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6304 if (debug_displaced)
6305 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6306 "%.8lx\n", (unsigned long) resume_addr);
6308 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6312 /* Common copy routine for the svc instruction. */
6315 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6316 struct displaced_step_closure *dsc)
6318 /* Preparation: none.
6319 Insn: unmodified svc.
6320 Cleanup: pc <- insn_addr + insn_size. */
6322 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6323 instruction. */
6324 dsc->wrote_to_pc = 1;
6326 /* Allow OS-specific code to override SVC handling. */
6327 if (dsc->u.svc.copy_svc_os)
6328 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6331 dsc->cleanup = &cleanup_svc;
6337 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6338 struct regcache *regs, struct displaced_step_closure *dsc)
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6343 (unsigned long) insn);
6345 dsc->modinsn[0] = insn;
6347 return install_svc (gdbarch, regs, dsc);
6351 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6352 struct regcache *regs, struct displaced_step_closure *dsc)
6355 if (debug_displaced)
6356 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6359 dsc->modinsn[0] = insn;
6361 return install_svc (gdbarch, regs, dsc);
6364 /* Copy undefined instructions. */
6367 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6368 struct displaced_step_closure *dsc)
6370 if (debug_displaced)
6371 fprintf_unfiltered (gdb_stdlog,
6372 "displaced: copying undefined insn %.8lx\n",
6373 (unsigned long) insn);
6375 dsc->modinsn[0] = insn;
6381 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6382 struct displaced_step_closure *dsc)
6385 if (debug_displaced)
6386 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6387 "%.4x %.4x\n", (unsigned short) insn1,
6388 (unsigned short) insn2);
6390 dsc->modinsn[0] = insn1;
6391 dsc->modinsn[1] = insn2;
6397 /* Copy unpredictable instructions. */
6400 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6401 struct displaced_step_closure *dsc)
6403 if (debug_displaced)
6404 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6405 "%.8lx\n", (unsigned long) insn);
6407 dsc->modinsn[0] = insn;
6412 /* The decode_* functions are instruction decoding helpers. They mostly follow
6413 the presentation in the ARM ARM. */
6416 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6417 struct regcache *regs,
6418 struct displaced_step_closure *dsc)
6420 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6421 unsigned int rn = bits (insn, 16, 19);
6423 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6424 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6425 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6426 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6427 else if ((op1 & 0x60) == 0x20)
6428 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6429 else if ((op1 & 0x71) == 0x40)
6430 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6432 else if ((op1 & 0x77) == 0x41)
6433 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6434 else if ((op1 & 0x77) == 0x45)
6435 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6436 else if ((op1 & 0x77) == 0x51)
6439 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6441 return arm_copy_unpred (gdbarch, insn, dsc);
6443 else if ((op1 & 0x77) == 0x55)
6444 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6445 else if (op1 == 0x57)
6448 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6449 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6450 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6451 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6452 default: return arm_copy_unpred (gdbarch, insn, dsc);
6454 else if ((op1 & 0x63) == 0x43)
6455 return arm_copy_unpred (gdbarch, insn, dsc);
6456 else if ((op2 & 0x1) == 0x0)
6457 switch (op1 & ~0x80)
6460 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6462 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6463 case 0x71: case 0x75:
6465 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6466 case 0x63: case 0x67: case 0x73: case 0x77:
6467 return arm_copy_unpred (gdbarch, insn, dsc);
6469 return arm_copy_undef (gdbarch, insn, dsc);
6472 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6476 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6477 struct regcache *regs,
6478 struct displaced_step_closure *dsc)
6480 if (bit (insn, 27) == 0)
6481 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6482 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
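/* That is, the switch value below packs insn bits 26..24 into bits 3..1
   and insn bit 20 into bit 0, matching the digit positions marked in the
   pattern above. */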
6483 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6486 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6489 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6491 case 0x4: case 0x5: case 0x6: case 0x7:
6492 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6495 switch ((insn & 0xe00000) >> 21)
6497 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6499 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6502 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6505 return arm_copy_undef (gdbarch, insn, dsc);
6510 int rn_f = (bits (insn, 16, 19) == 0xf);
6511 switch ((insn & 0xe00000) >> 21)
6514 /* ldc/ldc2 imm (undefined for rn == pc). */
6515 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6516 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6519 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6521 case 0x4: case 0x5: case 0x6: case 0x7:
6522 /* ldc/ldc2 lit (undefined for rn != pc). */
6523 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6524 : arm_copy_undef (gdbarch, insn, dsc);
6527 return arm_copy_undef (gdbarch, insn, dsc);
6532 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6535 if (bits (insn, 16, 19) == 0xf)
6537 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6539 return arm_copy_undef (gdbarch, insn, dsc);
6543 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6545 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6549 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6551 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6554 return arm_copy_undef (gdbarch, insn, dsc);
6558 /* Decode miscellaneous instructions in dp/misc encoding space. */
6561 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6562 struct regcache *regs,
6563 struct displaced_step_closure *dsc)
6565 unsigned int op2 = bits (insn, 4, 6);
6566 unsigned int op = bits (insn, 21, 22);
6571 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6574 if (op == 0x1) /* bx. */
6575 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6577 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6579 return arm_copy_undef (gdbarch, insn, dsc);
6583 /* Not really supported. */
6584 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6586 return arm_copy_undef (gdbarch, insn, dsc);
6590 return arm_copy_bx_blx_reg (gdbarch, insn,
6591 regs, dsc); /* blx register. */
6593 return arm_copy_undef (gdbarch, insn, dsc);
6596 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6600 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6602 /* Not really supported. */
6603 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6606 return arm_copy_undef (gdbarch, insn, dsc);
6611 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6612 struct regcache *regs,
6613 struct displaced_step_closure *dsc)
6616 switch (bits (insn, 20, 24))
6619 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6622 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6624 case 0x12: case 0x16:
6625 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6628 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6632 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6634 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6635 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6636 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6637 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6638 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6639 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6640 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6641 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6642 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6643 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6644 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6645 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6646 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6647 /* 2nd arg means "unprivileged". */
6648 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6652 /* Should be unreachable. */
6657 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6658 struct regcache *regs,
6659 struct displaced_step_closure *dsc)
6661 int a = bit (insn, 25), b = bit (insn, 4);
6662 uint32_t op1 = bits (insn, 20, 24);
6664 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6665 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6666 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6667 else if ((!a && (op1 & 0x17) == 0x02)
6668 || (a && (op1 & 0x17) == 0x02 && !b))
6669 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6670 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6671 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6672 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6673 else if ((!a && (op1 & 0x17) == 0x03)
6674 || (a && (op1 & 0x17) == 0x03 && !b))
6675 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6676 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6677 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6678 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6679 else if ((!a && (op1 & 0x17) == 0x06)
6680 || (a && (op1 & 0x17) == 0x06 && !b))
6681 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6682 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6683 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6684 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6685 else if ((!a && (op1 & 0x17) == 0x07)
6686 || (a && (op1 & 0x17) == 0x07 && !b))
6687 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6689 /* Should be unreachable. */
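/* Summary of the dispatch above (a sketch, assuming the last three
   arguments of arm_copy_ldr_str_ldrb_strb are a load flag, the access
   size in bytes and a user-mode/"t"-variant flag, which is how they are
   used here):

     (0, 4, 0) str    (0, 4, 1) strt
     (1, 4, 0) ldr    (1, 4, 1) ldrt
     (0, 1, 0) strb   (0, 1, 1) strbt
     (1, 1, 0) ldrb   (1, 1, 1) ldrbt  */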
6694 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6695 struct displaced_step_closure *dsc)
6697 switch (bits (insn, 20, 24))
6699 case 0x00: case 0x01: case 0x02: case 0x03:
6700 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6702 case 0x04: case 0x05: case 0x06: case 0x07:
6703 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6705 case 0x08: case 0x09: case 0x0a: case 0x0b:
6706 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6707 return arm_copy_unmodified (gdbarch, insn,
6708 "decode/pack/unpack/saturate/reverse", dsc);
6711 if (bits (insn, 5, 7) == 0) /* op2. */
6713 if (bits (insn, 12, 15) == 0xf)
6714 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6716 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6719 return arm_copy_undef (gdbarch, insn, dsc);
6721 case 0x1a: case 0x1b:
6722 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6723 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6725 return arm_copy_undef (gdbarch, insn, dsc);
6727 case 0x1c: case 0x1d:
6728 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6730 if (bits (insn, 0, 3) == 0xf)
6731 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6733 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6736 return arm_copy_undef (gdbarch, insn, dsc);
6738 case 0x1e: case 0x1f:
6739 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6740 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6742 return arm_copy_undef (gdbarch, insn, dsc);
6745 /* Should be unreachable. */
6750 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6751 struct regcache *regs,
6752 struct displaced_step_closure *dsc)
6755 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6757 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6761 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6762 struct regcache *regs,
6763 struct displaced_step_closure *dsc)
6765 unsigned int opcode = bits (insn, 20, 24);
6769 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6770 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6772 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6773 case 0x12: case 0x16:
6774 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6776 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6777 case 0x13: case 0x17:
6778 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6780 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6781 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6782 /* Note: no writeback for these instructions. Bit 25 will always be
6783 zero though (via caller), so the following works OK. */
6784 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6787 /* Should be unreachable. */
6791 /* Decode shifted register instructions. */
6794 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6795 uint16_t insn2, struct regcache *regs,
6796 struct displaced_step_closure *dsc)
6798 /* PC is only allowed to be used in the MOV instruction. */
6800 unsigned int op = bits (insn1, 5, 8);
6801 unsigned int rn = bits (insn1, 0, 3);
6803 if (op == 0x2 && rn == 0xf) /* MOV */
6804 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6806 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6807 "dp (shift reg)", dsc);
6811 /* Decode extension register load/store. Exactly the same as
6812 arm_decode_ext_reg_ld_st. */
6815 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6816 uint16_t insn2, struct regcache *regs,
6817 struct displaced_step_closure *dsc)
6819 unsigned int opcode = bits (insn1, 4, 8);
6823 case 0x04: case 0x05:
6824 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6825 "vfp/neon vmov", dsc);
6827 case 0x08: case 0x0c: /* 01x00 */
6828 case 0x0a: case 0x0e: /* 01x10 */
6829 case 0x12: case 0x16: /* 10x10 */
6830 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6831 "vfp/neon vstm/vpush", dsc);
6833 case 0x09: case 0x0d: /* 01x01 */
6834 case 0x0b: case 0x0f: /* 01x11 */
6835 case 0x13: case 0x17: /* 10x11 */
6836 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6837 "vfp/neon vldm/vpop", dsc);
6839 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6842 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6843 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6846 /* Should be unreachable. */
6851 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6852 struct regcache *regs, struct displaced_step_closure *dsc)
6854 unsigned int op1 = bits (insn, 20, 25);
6855 int op = bit (insn, 4);
6856 unsigned int coproc = bits (insn, 8, 11);
6858 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6859 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6860 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6861 && (coproc & 0xe) != 0xa)
6863 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6864 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6865 && (coproc & 0xe) != 0xa)
6866 /* ldc/ldc2 imm/lit. */
6867 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6868 else if ((op1 & 0x3e) == 0x00)
6869 return arm_copy_undef (gdbarch, insn, dsc);
6870 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6871 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6872 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6873 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6874 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6875 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6876 else if ((op1 & 0x30) == 0x20 && !op)
6878 if ((coproc & 0xe) == 0xa)
6879 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6881 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6883 else if ((op1 & 0x30) == 0x20 && op)
6884 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6885 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6886 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6887 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6888 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6889 else if ((op1 & 0x30) == 0x30)
6890 return arm_copy_svc (gdbarch, insn, regs, dsc);
6892 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6896 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6897 uint16_t insn2, struct regcache *regs,
6898 struct displaced_step_closure *dsc)
6900 unsigned int coproc = bits (insn2, 8, 11);
6901 unsigned int bit_5_8 = bits (insn1, 5, 8);
6902 unsigned int bit_9 = bit (insn1, 9);
6903 unsigned int bit_4 = bit (insn1, 4);
6908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6909 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6911 else if (bit_5_8 == 0) /* UNDEFINED. */
6912 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6915 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6916 if ((coproc & 0xe) == 0xa)
6917 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6919 else /* coproc is not 101x. */
6921 if (bit_4 == 0) /* STC/STC2. */
6922 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6924 else /* LDC/LDC2 {literal, immediate}. */
6925 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6931 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6937 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6938 struct displaced_step_closure *dsc, int rd)
6944 Preparation: Rd <- PC
6950 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6951 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6955 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6956 struct displaced_step_closure *dsc,
6957 int rd, unsigned int imm)
6960 /* Encoding T2: ADDS Rd, #imm */
6961 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6963 install_pc_relative (gdbarch, regs, dsc, rd);
6969 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6970 struct regcache *regs,
6971 struct displaced_step_closure *dsc)
6973 unsigned int rd = bits (insn, 8, 10);
6974 unsigned int imm8 = bits (insn, 0, 7);
6976 if (debug_displaced)
6977 fprintf_unfiltered (gdb_stdlog,
6978 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6981 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6985 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6986 uint16_t insn2, struct regcache *regs,
6987 struct displaced_step_closure *dsc)
6989 unsigned int rd = bits (insn2, 8, 11);
6990 /* The immediate has the same encoding in ADR, ADD and SUB, so simply
6991 extract the raw immediate encoding rather than computing the value. When
6992 generating the ADD or SUB instruction, the immediate can then be ORed
6993 straight into the encoding. */
6994 unsigned int imm_3_8 = insn2 & 0x70ff;
6995 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6997 if (debug_displaced)
6998 fprintf_unfiltered (gdb_stdlog,
6999 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7000 rd, imm_i, imm_3_8, insn1, insn2);
7002 if (bit (insn1, 7)) /* Encoding T2 */
7004 /* Encoding T3: SUB Rd, Rd, #imm */
7005 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7006 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7008 else /* Encoding T3 */
7010 /* Encoding T3: ADD Rd, Rd, #imm */
7011 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7012 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7016 install_pc_relative (gdbarch, regs, dsc, rd);
7022 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7023 struct regcache *regs,
7024 struct displaced_step_closure *dsc)
7026 unsigned int rt = bits (insn1, 8, 10);
7028 int imm8 = (bits (insn1, 0, 7) << 2);
7034 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7036 Insn: LDR R0, [R2, R3];
7037 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7039 if (debug_displaced)
7040 fprintf_unfiltered (gdb_stdlog,
7041 "displaced: copying thumb ldr r%d [pc #%d]\n"
7044 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7045 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7046 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7047 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7048 /* The assembler calculates the required value of the offset from the
7049 Align(PC,4) value of this instruction to the label. */
7050 pc = pc & 0xfffffffc;
7052 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7053 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7056 dsc->u.ldst.xfersize = 4;
7058 dsc->u.ldst.immed = 0;
7059 dsc->u.ldst.writeback = 0;
7060 dsc->u.ldst.restore_r4 = 0;
7062 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7064 dsc->cleanup = &cleanup_load;
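/* Worked example (a sketch): for "ldr r5, [pc, #16]" at address 0x8000,
   the Thumb-state PC reads as 0x8000 + 4, so Align(0x8004, 4) == 0x8004
   is placed in r2 and the scaled offset 16 in r3; the substituted
   "ldr r0, [r2, r3]" therefore loads from 0x8014, and cleanup_load moves
   the loaded value from r0 into r5 and restores r0, r2 and r3. */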
7069 /* Copy Thumb cbnz/cbz instruction. */
7072 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7073 struct regcache *regs,
7074 struct displaced_step_closure *dsc)
7076 int non_zero = bit (insn1, 11);
7077 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7078 CORE_ADDR from = dsc->insn_addr;
7079 int rn = bits (insn1, 0, 2);
7080 int rn_val = displaced_read_reg (regs, dsc, rn);
7082 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7083 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7084 true, set it to INST_AL so cleanup_branch will know the branch is taken;
7085 otherwise leave it as-is and cleanup_branch will do nothing. */
7086 if (dsc->u.branch.cond)
7088 dsc->u.branch.cond = INST_AL;
7089 dsc->u.branch.dest = from + 4 + imm5;
7092 dsc->u.branch.dest = from + 2;
7094 dsc->u.branch.link = 0;
7095 dsc->u.branch.exchange = 0;
7097 if (debug_displaced)
7098 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7099 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7100 rn, rn_val, insn1, dsc->u.branch.dest);
7102 dsc->modinsn[0] = THUMB_NOP;
7104 dsc->cleanup = &cleanup_branch;
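/* Worked example (a sketch): for "cbz r2, <target>" at address FROM with
   an encoded offset of 0x20, the branch is taken only if r2 reads as zero
   at this point; the copied instruction is replaced by a NOP, and the
   fixup stage leaves the PC at FROM + 4 + 0x20 if the branch is taken, or
   at FROM + 2 (the next instruction) if it is not. */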
7108 /* Copy Table Branch Byte/Halfword. */
7110 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7111 uint16_t insn2, struct regcache *regs,
7112 struct displaced_step_closure *dsc)
7114 ULONGEST rn_val, rm_val;
7115 int is_tbh = bit (insn2, 4);
7116 CORE_ADDR halfwords = 0;
7117 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7119 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7120 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7126 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7127 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7133 target_read_memory (rn_val + rm_val, buf, 1);
7134 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7137 if (debug_displaced)
7138 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7139 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7140 (unsigned int) rn_val, (unsigned int) rm_val,
7141 (unsigned int) halfwords);
7143 dsc->u.branch.cond = INST_AL;
7144 dsc->u.branch.link = 0;
7145 dsc->u.branch.exchange = 0;
7146 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7148 dsc->cleanup = &cleanup_branch;
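/* Worked example (a sketch): for "tbb [r0, r1]" at address FROM, the byte
   table entry at r0 + r1 is read here at copy time; if that byte is 0x0a,
   the destination recorded above is FROM + 4 + 2 * 0x0a == FROM + 0x18,
   which cleanup_branch writes to the PC once the scratch copy has been
   stepped. */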
7154 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7155 struct displaced_step_closure *dsc)
7158 int val = displaced_read_reg (regs, dsc, 7);
7159 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7162 val = displaced_read_reg (regs, dsc, 8);
7163 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7166 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7171 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7172 struct regcache *regs,
7173 struct displaced_step_closure *dsc)
7175 dsc->u.block.regmask = insn1 & 0x00ff;
7177 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7180 (1) register list is full, that is, r0-r7 are used.
7181 Prepare: tmp[0] <- r8
7183 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7184 MOV r8, r7; Move value of r7 to r8;
7185 POP {r7}; Store PC value into r7.
7187 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7189 (2) register list is not full; suppose there are N registers in the
7190 register list (excluding PC, 0 <= N <= 7).
7191 Prepare: for each i, 0 - N, tmp[i] <- ri.
7193 POP {r0, r1, ...., rN};
7195 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7196 from tmp[] properly.
7198 if (debug_displaced)
7199 fprintf_unfiltered (gdb_stdlog,
7200 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7201 dsc->u.block.regmask, insn1);
7203 if (dsc->u.block.regmask == 0xff)
7205 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7207 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7208 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7209 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7212 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7216 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7218 unsigned int new_regmask;
7220 for (i = 0; i < num_in_list + 1; i++)
7221 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7223 new_regmask = (1 << (num_in_list + 1)) - 1;
7225 if (debug_displaced)
7226 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7227 "{..., pc}: original reg list %.4x,"
7228 " modified list %.4x\n"),
7229 (int) dsc->u.block.regmask, new_regmask);
7231 dsc->u.block.regmask |= 0x8000;
7232 dsc->u.block.writeback = 0;
7233 dsc->u.block.cond = INST_AL;
7235 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7237 dsc->cleanup = &cleanup_block_load_pc;
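/* Worked example for the non-full case (a sketch): "pop {r1, r4, pc}" has
   regmask 0x12, so num_in_list == 2 and new_regmask == 0x7; the rewritten
   instruction is "pop {r0, r1, r2}", which loads the three stacked words
   into r0-r2, and cleanup_block_load_pc then moves them into r1, r4 and
   the PC while r0-r2 are restored from tmp[]. */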
7244 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7245 struct regcache *regs,
7246 struct displaced_step_closure *dsc)
7248 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7249 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7252 /* 16-bit thumb instructions. */
7253 switch (op_bit_12_15)
7255 /* Shift (immediate), add, subtract, move and compare. */
7256 case 0: case 1: case 2: case 3:
7257 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7258 "shift/add/sub/mov/cmp",
7262 switch (op_bit_10_11)
7264 case 0: /* Data-processing */
7265 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7269 case 1: /* Special data instructions and branch and exchange. */
7271 unsigned short op = bits (insn1, 7, 9);
7272 if (op == 6 || op == 7) /* BX or BLX */
7273 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7274 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7275 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7277 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7281 default: /* LDR (literal) */
7282 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7285 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7286 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7289 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7290 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7291 else /* Generate SP-relative address */
7292 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7294 case 11: /* Misc 16-bit instructions */
7296 switch (bits (insn1, 8, 11))
7298 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7299 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7301 case 12: case 13: /* POP */
7302 if (bit (insn1, 8)) /* PC is in register list. */
7303 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7305 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7307 case 15: /* If-Then, and hints */
7308 if (bits (insn1, 0, 3))
7309 /* If-Then makes up to four following instructions conditional.
7310 The IT instruction itself is not conditional, so handle it as an
7311 ordinary unmodified instruction. */
7312 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7315 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7318 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7323 if (op_bit_10_11 < 2) /* Store multiple registers */
7324 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7325 else /* Load multiple registers */
7326 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7328 case 13: /* Conditional branch and supervisor call */
7329 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7330 err = thumb_copy_b (gdbarch, insn1, dsc);
7332 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7334 case 14: /* Unconditional branch */
7335 err = thumb_copy_b (gdbarch, insn1, dsc);
7342 internal_error (__FILE__, __LINE__,
7343 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7347 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7348 uint16_t insn1, uint16_t insn2,
7349 struct regcache *regs,
7350 struct displaced_step_closure *dsc)
7352 int rt = bits (insn2, 12, 15);
7353 int rn = bits (insn1, 0, 3);
7354 int op1 = bits (insn1, 7, 8);
7356 switch (bits (insn1, 5, 6))
7358 case 0: /* Load byte and memory hints */
7359 if (rt == 0xf) /* PLD/PLI */
7362 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7363 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7365 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7370 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7371 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7374 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7375 "ldrb{reg, immediate}/ldrbt",
7380 case 1: /* Load halfword and memory hints. */
7381 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7382 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7383 "pld/unalloc memhint", dsc);
7387 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7390 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7394 case 2: /* Load word */
7396 int insn2_bit_8_11 = bits (insn2, 8, 11);
7399 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7400 else if (op1 == 0x1) /* Encoding T3 */
7401 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7403 else /* op1 == 0x0 */
7405 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7406 /* LDR (immediate) */
7407 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7408 dsc, bit (insn2, 8), 1);
7409 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7410 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7413 /* LDR (register) */
7414 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7420 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7427 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7428 uint16_t insn2, struct regcache *regs,
7429 struct displaced_step_closure *dsc)
7432 unsigned short op = bit (insn2, 15);
7433 unsigned int op1 = bits (insn1, 11, 12);
7439 switch (bits (insn1, 9, 10))
7444 /* Load/store {dual, exclusive}, table branch. */
7445 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7446 && bits (insn2, 5, 7) == 0)
7447 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7450 /* PC is not allowed to be used in load/store {dual, exclusive}
7452 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7453 "load/store dual/ex", dsc);
7455 else /* load/store multiple */
7457 switch (bits (insn1, 7, 8))
7459 case 0: case 3: /* SRS, RFE */
7460 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7463 case 1: case 2: /* LDM/STM/PUSH/POP */
7464 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7471 /* Data-processing (shift register). */
7472 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7475 default: /* Coprocessor instructions. */
7476 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7481 case 2: /* op1 = 2 */
7482 if (op) /* Branch and misc control. */
7484 if (bit (insn2, 14) /* BLX/BL */
7485 || bit (insn2, 12) /* Unconditional branch */
7486 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7487 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7496 int op = bits (insn1, 4, 8);
7497 int rn = bits (insn1, 0, 3);
7498 if ((op == 0 || op == 0xa) && rn == 0xf)
7499 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7502 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 else /* Data processing (modified immediate) */
7506 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7510 case 3: /* op1 = 3 */
7511 switch (bits (insn1, 9, 10))
7515 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7517 else /* NEON Load/Store and Store single data item */
7518 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7519 "neon elt/struct load/store",
7522 case 1: /* op1 = 3, bits (9, 10) == 1 */
7523 switch (bits (insn1, 7, 8))
7525 case 0: case 1: /* Data processing (register) */
7526 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7529 case 2: /* Multiply and absolute difference */
7530 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7531 "mul/mla/diff", dsc);
7533 case 3: /* Long multiply and divide */
7534 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7539 default: /* Coprocessor instructions */
7540 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7549 internal_error (__FILE__, __LINE__,
7550 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7555 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7556 struct regcache *regs,
7557 struct displaced_step_closure *dsc)
7559 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7561 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7563 if (debug_displaced)
7564 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7565 "at %.8lx\n", insn1, (unsigned long) from);
7568 dsc->insn_size = thumb_insn_size (insn1);
7569 if (thumb_insn_size (insn1) == 4)
7572 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7573 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7576 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7580 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7581 CORE_ADDR to, struct regcache *regs,
7582 struct displaced_step_closure *dsc)
7585 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7588 /* Most displaced instructions use a 1-instruction scratch space, so set this
7589 here and override below if/when necessary. */
7591 dsc->insn_addr = from;
7592 dsc->scratch_base = to;
7593 dsc->cleanup = NULL;
7594 dsc->wrote_to_pc = 0;
7596 if (!displaced_in_arm_mode (regs))
7597 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7601 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7602 if (debug_displaced)
7603 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7604 "at %.8lx\n", (unsigned long) insn,
7605 (unsigned long) from);
7607 if ((insn & 0xf0000000) == 0xf0000000)
7608 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
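/* The switch value below packs insn bits 27..25 into bits 3..1 and insn
   bit 4 into bit 0, giving the coarse instruction-class split handled by
   the cases that follow. */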
7609 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7611 case 0x0: case 0x1: case 0x2: case 0x3:
7612 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7615 case 0x4: case 0x5: case 0x6:
7616 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7620 err = arm_decode_media (gdbarch, insn, dsc);
7623 case 0x8: case 0x9: case 0xa: case 0xb:
7624 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7627 case 0xc: case 0xd: case 0xe: case 0xf:
7628 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7633 internal_error (__FILE__, __LINE__,
7634 _("arm_process_displaced_insn: Instruction decode error"));
7637 /* Actually set up the scratch space for a displaced instruction. */
7640 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7641 CORE_ADDR to, struct displaced_step_closure *dsc)
7643 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7644 unsigned int i, len, offset;
7645 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7646 int size = dsc->is_thumb ? 2 : 4;
7647 const gdb_byte *bkp_insn;
7650 /* Poke modified instruction(s). */
7651 for (i = 0; i < dsc->numinsns; i++)
7653 if (debug_displaced)
7655 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7657 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7660 fprintf_unfiltered (gdb_stdlog, "%.4x",
7661 (unsigned short)dsc->modinsn[i]);
7663 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7664 (unsigned long) to + offset);
7667 write_memory_unsigned_integer (to + offset, size,
7668 byte_order_for_code,
7673 /* Choose the correct breakpoint instruction. */
7676 bkp_insn = tdep->thumb_breakpoint;
7677 len = tdep->thumb_breakpoint_size;
7681 bkp_insn = tdep->arm_breakpoint;
7682 len = tdep->arm_breakpoint_size;
7685 /* Put breakpoint afterwards. */
7686 write_memory (to + offset, bkp_insn, len);
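/* The scratch space at TO now looks like (a sketch):

     to + 0        first (possibly modified) instruction
     ...           further modified instructions, if dsc->numinsns > 1
     to + offset   ARM or Thumb breakpoint

   so that stepping the copied instruction immediately traps back into
   GDB for the fixup phase. */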
7688 if (debug_displaced)
7689 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7690 paddress (gdbarch, from), paddress (gdbarch, to));
7693 /* Entry point for cleaning things up after a displaced instruction has been
7697 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7698 struct displaced_step_closure *dsc,
7699 CORE_ADDR from, CORE_ADDR to,
7700 struct regcache *regs)
7703 dsc->cleanup (gdbarch, regs, dsc);
7705 if (!dsc->wrote_to_pc)
7706 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7707 dsc->insn_addr + dsc->insn_size);
7711 #include "bfd-in2.h"
7712 #include "libcoff.h"
7715 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7717 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7719 if (arm_pc_is_thumb (gdbarch, memaddr))
7721 static asymbol *asym;
7722 static combined_entry_type ce;
7723 static struct coff_symbol_struct csym;
7724 static struct bfd fake_bfd;
7725 static bfd_target fake_target;
7727 if (csym.native == NULL)
7729 /* Create a fake symbol vector containing a Thumb symbol.
7730 This is solely so that the code in print_insn_little_arm()
7731 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7732 the presence of a Thumb symbol and switch to decoding
7733 Thumb instructions. */
7735 fake_target.flavour = bfd_target_coff_flavour;
7736 fake_bfd.xvec = &fake_target;
7737 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7739 csym.symbol.the_bfd = &fake_bfd;
7740 csym.symbol.name = "fake";
7741 asym = (asymbol *) & csym;
7744 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7745 info->symbols = &asym;
7748 info->symbols = NULL;
7750 if (info->endian == BFD_ENDIAN_BIG)
7751 return print_insn_big_arm (memaddr, info);
7753 return print_insn_little_arm (memaddr, info);
7756 /* The following define instruction sequences that will cause ARM
7757 cpu's to take an undefined instruction trap. These are used to
7758 signal a breakpoint to GDB.
7760 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7761 modes. A different instruction is required for each mode. The ARM
7762 CPUs can also be big- or little-endian. Thus four different
7763 instructions are needed to support all cases.
7765 Note: ARMv4 defines several new instructions that will take the
7766 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7767 not in fact add the new instructions. The new undefined
7768 instructions in ARMv4 are all instructions that had no defined
7769 behaviour in earlier chips. There is no guarantee that they will
7770 raise an exception, but they may be treated as NOPs. In practice, it
7771 may only be safe to rely on instructions matching:
7773 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7774 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7775 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7777 Even this may only be true if the condition predicate is true. The
7778 following use a condition predicate of ALWAYS so it is always TRUE.
7780 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7781 and NetBSD all use a software interrupt rather than an undefined
7782 instruction to force a trap. This can be handled by the
7783 ABI-specific code during establishment of the gdbarch vector. */
7785 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7786 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7787 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7788 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
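/* As a cross-check (a sketch of how these encodings line up with the
   pattern above): the little-endian ARM bytes {0xFE,0xDE,0xFF,0xE7} form
   the word 0xE7FFDEFE, i.e. condition AL with bits 27:25 == 0b011 and
   bit 4 == 1; the Thumb bytes {0xbe,0xbe} decode as "bkpt 0xbe". */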
7790 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7791 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7792 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7793 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7795 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7796 the program counter value to determine whether a 16-bit or 32-bit
7797 breakpoint should be used. It returns a pointer to a string of
7798 bytes that encode a breakpoint instruction, stores the length of
7799 the string to *lenptr, and adjusts the program counter (if
7800 necessary) to point to the actual memory location where the
7801 breakpoint should be inserted. */
7803 static const unsigned char *
7804 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7806 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7807 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7809 if (arm_pc_is_thumb (gdbarch, *pcptr))
7811 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7813 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7814 check whether we are replacing a 32-bit instruction. */
7815 if (tdep->thumb2_breakpoint != NULL)
7818 if (target_read_memory (*pcptr, buf, 2) == 0)
7820 unsigned short inst1;
7821 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7822 if (thumb_insn_size (inst1) == 4)
7824 *lenptr = tdep->thumb2_breakpoint_size;
7825 return tdep->thumb2_breakpoint;
7830 *lenptr = tdep->thumb_breakpoint_size;
7831 return tdep->thumb_breakpoint;
7835 *lenptr = tdep->arm_breakpoint_size;
7836 return tdep->arm_breakpoint;
7841 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7844 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7846 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7847 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7848 that this is not confused with a 32-bit ARM breakpoint. */
7852 /* Extract from an array REGBUF containing the (raw) register state a
7853 function return value of type TYPE, and copy that, in virtual
7854 format, into VALBUF. */
7857 arm_extract_return_value (struct type *type, struct regcache *regs,
7860 struct gdbarch *gdbarch = get_regcache_arch (regs);
7861 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7863 if (TYPE_CODE_FLT == TYPE_CODE (type))
7865 switch (gdbarch_tdep (gdbarch)->fp_model)
7869 /* The value is in register F0 in internal format. We need to
7870 extract the raw value and then convert it to the desired
7872 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7874 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7875 convert_from_extended (floatformat_from_type (type), tmpbuf,
7876 valbuf, gdbarch_byte_order (gdbarch));
7880 case ARM_FLOAT_SOFT_FPA:
7881 case ARM_FLOAT_SOFT_VFP:
7882 /* ARM_FLOAT_VFP can arise if this is a variadic function, which
7883 therefore does not use the VFP ABI code. */
7885 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7886 if (TYPE_LENGTH (type) > 4)
7887 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7888 valbuf + INT_REGISTER_SIZE);
7892 internal_error (__FILE__, __LINE__,
7893 _("arm_extract_return_value: "
7894 "Floating point model not supported"));
7898 else if (TYPE_CODE (type) == TYPE_CODE_INT
7899 || TYPE_CODE (type) == TYPE_CODE_CHAR
7900 || TYPE_CODE (type) == TYPE_CODE_BOOL
7901 || TYPE_CODE (type) == TYPE_CODE_PTR
7902 || TYPE_CODE (type) == TYPE_CODE_REF
7903 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7905 /* If the type is a plain integer, then the access is
7906 straightforward. Otherwise we have to play around a bit
7908 int len = TYPE_LENGTH (type);
7909 int regno = ARM_A1_REGNUM;
7914 /* By using store_unsigned_integer we avoid having to do
7915 anything special for small big-endian values. */
7916 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7917 store_unsigned_integer (valbuf,
7918 (len > INT_REGISTER_SIZE
7919 ? INT_REGISTER_SIZE : len),
7921 len -= INT_REGISTER_SIZE;
7922 valbuf += INT_REGISTER_SIZE;
7927 /* For a structure or union the behaviour is as if the value had
7928 been stored to word-aligned memory and then loaded into
7929 registers with 32-bit load instruction(s). */
7930 int len = TYPE_LENGTH (type);
7931 int regno = ARM_A1_REGNUM;
7932 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7936 regcache_cooked_read (regs, regno++, tmpbuf);
7937 memcpy (valbuf, tmpbuf,
7938 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7939 len -= INT_REGISTER_SIZE;
7940 valbuf += INT_REGISTER_SIZE;
7946 /* Will a function return an aggregate type in memory or in a
7947 register? Return 0 if an aggregate type can be returned in a
7948 register, 1 if it must be returned in memory. */
7951 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7953 enum type_code code;
7955 type = check_typedef (type);
7957 /* Simple, non-aggregate types (i.e. not including vectors and
7958 complex) are always returned in a register (or registers). */
7959 code = TYPE_CODE (type);
7960 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7961 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7964 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7966 /* Vector values should be returned using ARM registers if they
7967 are not over 16 bytes. */
7968 return (TYPE_LENGTH (type) > 16);
7971 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7973 /* The AAPCS says all aggregates not larger than a word are returned
7975 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7984 /* All aggregate types that won't fit in a register must be returned
7986 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7989 /* In the ARM ABI, "integer" like aggregate types are returned in
7990 registers. For an aggregate type to be integer like, its size
7991 must be less than or equal to INT_REGISTER_SIZE and the
7992 offset of each addressable subfield must be zero. Note that bit
7993 fields are not addressable, and all addressable subfields of
7994 unions always start at offset zero.
7996 This function is based on the behaviour of GCC 2.95.1.
7997 See: gcc/arm.c: arm_return_in_memory() for details.
7999 Note: All versions of GCC before GCC 2.95.2 do not set up the
8000 parameters correctly for a function returning the following
8001 structure: struct { float f;}; This should be returned in memory,
8002 not a register. Richard Earnshaw sent me a patch, but I do not
8003 know of any way to detect if a function like the above has been
8004 compiled with the correct calling convention. */
8006 /* Assume all other aggregate types can be returned in a register.
8007 Run a check for structures, unions and arrays. */
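/* Illustration (a sketch; this APCS-only path is reached after the AAPCS
   early return above): struct { char a; } has every addressable subfield
   at offset zero and no floating-point member, so it is "integer like"
   and comes back in r0, whereas struct { char a; short b; } places b at
   a non-zero offset with bitsize zero, so the loop below flags it to be
   returned in memory. */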
8010 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8013 /* Need to check if this struct/union is "integer" like. For
8014 this to be true, its size must be less than or equal to
8015 INT_REGISTER_SIZE and the offset of each addressable
8016 subfield must be zero. Note that bit fields are not
8017 addressable, and unions always start at offset zero. If any
8018 of the subfields is a floating point type, the struct/union
8019 cannot be an integer type. */
8021 /* For each field in the object, check:
8022 1) Is it FP? --> yes, nRc = 1;
8023 2) Is it addressable (bitpos != 0) and
8024 not packed (bitsize == 0)?
8028 for (i = 0; i < TYPE_NFIELDS (type); i++)
8030 enum type_code field_type_code;
8033 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8036 /* Is it a floating point type field? */
8037 if (field_type_code == TYPE_CODE_FLT)
8043 /* If bitpos != 0, then we have to care about it. */
8044 if (TYPE_FIELD_BITPOS (type, i) != 0)
8046 /* Bitfields are not addressable. If the field bitsize is
8047 zero, then the field is not packed. Hence it cannot be
8048 a bitfield or any other packed type. */
8049 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8062 /* Write into appropriate registers a function return value of type
8063 TYPE, given in virtual format. */
8066 arm_store_return_value (struct type *type, struct regcache *regs,
8067 const gdb_byte *valbuf)
8069 struct gdbarch *gdbarch = get_regcache_arch (regs);
8070 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8072 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8074 gdb_byte buf[MAX_REGISTER_SIZE];
8076 switch (gdbarch_tdep (gdbarch)->fp_model)
8080 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8081 gdbarch_byte_order (gdbarch));
8082 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8085 case ARM_FLOAT_SOFT_FPA:
8086 case ARM_FLOAT_SOFT_VFP:
8087 /* ARM_FLOAT_VFP can arise if this is a variadic function, which
8088 therefore does not use the VFP ABI code. */
8090 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8091 if (TYPE_LENGTH (type) > 4)
8092 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8093 valbuf + INT_REGISTER_SIZE);
8097 internal_error (__FILE__, __LINE__,
8098 _("arm_store_return_value: Floating "
8099 "point model not supported"));
8103 else if (TYPE_CODE (type) == TYPE_CODE_INT
8104 || TYPE_CODE (type) == TYPE_CODE_CHAR
8105 || TYPE_CODE (type) == TYPE_CODE_BOOL
8106 || TYPE_CODE (type) == TYPE_CODE_PTR
8107 || TYPE_CODE (type) == TYPE_CODE_REF
8108 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8110 if (TYPE_LENGTH (type) <= 4)
8112 /* Values of one word or less are zero/sign-extended and
8114 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8115 LONGEST val = unpack_long (type, valbuf);
8117 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8118 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8122 /* Integral values greater than one word are stored in consecutive
8123 registers starting with r0. This will always be a multiple of
8124 the register size. */
8125 int len = TYPE_LENGTH (type);
8126 int regno = ARM_A1_REGNUM;
8130 regcache_cooked_write (regs, regno++, valbuf);
8131 len -= INT_REGISTER_SIZE;
8132 valbuf += INT_REGISTER_SIZE;
8138 /* For a structure or union the behaviour is as if the value had
8139 been stored to word-aligned memory and then loaded into
8140 registers with 32-bit load instruction(s). */
8141 int len = TYPE_LENGTH (type);
8142 int regno = ARM_A1_REGNUM;
8143 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8147 memcpy (tmpbuf, valbuf,
8148 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8149 regcache_cooked_write (regs, regno++, tmpbuf);
8150 len -= INT_REGISTER_SIZE;
8151 valbuf += INT_REGISTER_SIZE;
8157 /* Handle function return values. */
8159 static enum return_value_convention
8160 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8161 struct type *valtype, struct regcache *regcache,
8162 gdb_byte *readbuf, const gdb_byte *writebuf)
8164 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8165 struct type *func_type = function ? value_type (function) : NULL;
8166 enum arm_vfp_cprc_base_type vfp_base_type;
8169 if (arm_vfp_abi_for_function (gdbarch, func_type)
8170 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8172 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8173 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8175 for (i = 0; i < vfp_base_count; i++)
8177 if (reg_char == 'q')
8180 arm_neon_quad_write (gdbarch, regcache, i,
8181 writebuf + i * unit_length);
8184 arm_neon_quad_read (gdbarch, regcache, i,
8185 readbuf + i * unit_length);
8192 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8193 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8196 regcache_cooked_write (regcache, regnum,
8197 writebuf + i * unit_length);
8199 regcache_cooked_read (regcache, regnum,
8200 readbuf + i * unit_length);
8203 return RETURN_VALUE_REGISTER_CONVENTION;
8206 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8207 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8208 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8210 if (tdep->struct_return == pcc_struct_return
8211 || arm_return_in_memory (gdbarch, valtype))
8212 return RETURN_VALUE_STRUCT_CONVENTION;
8214 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8216 if (arm_return_in_memory (gdbarch, valtype))
8217 return RETURN_VALUE_STRUCT_CONVENTION;
8221 arm_store_return_value (valtype, regcache, writebuf);
8224 arm_extract_return_value (valtype, regcache, readbuf);
8226 return RETURN_VALUE_REGISTER_CONVENTION;
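/* Example (a sketch): with the VFP ABI in effect, a return type of
   struct { float x; float y; } is a VFP co-processor register candidate
   with base type float and count 2, so its members are read from or
   written to s0 and s1 through the "s0"/"s1" user register names built
   above; a plain int instead falls through to the r0-based store/extract
   helpers. */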
8231 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8233 struct gdbarch *gdbarch = get_frame_arch (frame);
8234 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8235 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8237 gdb_byte buf[INT_REGISTER_SIZE];
8239 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8241 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8245 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8249 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8250 return the target PC. Otherwise return 0. */
8253 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8257 CORE_ADDR start_addr;
8259 /* Find the starting address and name of the function containing the PC. */
8260 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8262 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8264 start_addr = arm_skip_bx_reg (frame, pc);
8265 if (start_addr != 0)
8271 /* If PC is in a Thumb call or return stub, return the address of the
8272 target PC, which is in a register. The thunk functions are called
8273 _call_via_xx, where xx is the register name. The possible names
8274 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8275 functions, named __ARM_call_via_r[0-7]. */
8276 if (startswith (name, "_call_via_")
8277 || startswith (name, "__ARM_call_via_"))
8279 /* Use the name suffix to determine which register contains the
8281 static char *table[15] =
8282 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8283 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8286 int offset = strlen (name) - 2;
8288 for (regno = 0; regno <= 14; regno++)
8289 if (strcmp (&name[offset], table[regno]) == 0)
8290 return get_frame_register_unsigned (frame, regno);
8293 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8294 non-interworking calls to foo. We could decode the stubs
8295 to find the target but it's easier to use the symbol table. */
8296 namelen = strlen (name);
8297 if (name[0] == '_' && name[1] == '_'
8298 && ((namelen > 2 + strlen ("_from_thumb")
8299 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8300 || (namelen > 2 + strlen ("_from_arm")
8301 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8304 int target_len = namelen - 2;
8305 struct bound_minimal_symbol minsym;
8306 struct objfile *objfile;
8307 struct obj_section *sec;
8309 if (name[namelen - 1] == 'b')
8310 target_len -= strlen ("_from_thumb");
8312 target_len -= strlen ("_from_arm");
8314 target_name = (char *) alloca (target_len + 1);
8315 memcpy (target_name, name + 2, target_len);
8316 target_name[target_len] = '\0';
8318 sec = find_pc_section (pc);
8319 objfile = (sec == NULL) ? NULL : sec->objfile;
8320 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8321 if (minsym.minsym != NULL)
8322 return BMSYMBOL_VALUE_ADDRESS (minsym);
8327 return 0; /* not a stub */
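/* Example (a sketch): for a non-interworking stub named
   "__memcpy_from_thumb", namelen is 19, the 11-character "_from_thumb"
   suffix is dropped and the leading "__" skipped, leaving the 6-character
   target name "memcpy", which is then resolved via the minimal symbol
   table. */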
8331 set_arm_command (char *args, int from_tty)
8333 printf_unfiltered (_("\
8334 \"set arm\" must be followed by an appropriate subcommand.\n"));
8335 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8339 show_arm_command (char *args, int from_tty)
8341 cmd_show_list (showarmcmdlist, from_tty, "");
8345 arm_update_current_architecture (void)
8347 struct gdbarch_info info;
8349 /* If the current architecture is not ARM, we have nothing to do. */
8350 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8353 /* Update the architecture. */
8354 gdbarch_info_init (&info);
8356 if (!gdbarch_update_p (info))
8357 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8361 set_fp_model_sfunc (char *args, int from_tty,
8362 struct cmd_list_element *c)
8366 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8367 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8369 arm_fp_model = (enum arm_float_model) fp_model;
8373 if (fp_model == ARM_FLOAT_LAST)
8374 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8377 arm_update_current_architecture ();
8381 show_fp_model (struct ui_file *file, int from_tty,
8382 struct cmd_list_element *c, const char *value)
8384 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8386 if (arm_fp_model == ARM_FLOAT_AUTO
8387 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8388 fprintf_filtered (file, _("\
8389 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8390 fp_model_strings[tdep->fp_model]);
8392 fprintf_filtered (file, _("\
8393 The current ARM floating point model is \"%s\".\n"),
8394 fp_model_strings[arm_fp_model]);
8398 arm_set_abi (char *args, int from_tty,
8399 struct cmd_list_element *c)
8403 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8404 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8406 arm_abi_global = (enum arm_abi_kind) arm_abi;
8410 if (arm_abi == ARM_ABI_LAST)
8411 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8414 arm_update_current_architecture ();
8418 arm_show_abi (struct ui_file *file, int from_tty,
8419 struct cmd_list_element *c, const char *value)
8421 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8423 if (arm_abi_global == ARM_ABI_AUTO
8424 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8425 fprintf_filtered (file, _("\
8426 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8427 arm_abi_strings[tdep->arm_abi]);
8429 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8434 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8435 struct cmd_list_element *c, const char *value)
8437 fprintf_filtered (file,
8438 _("The current execution mode assumed "
8439 "(when symbols are unavailable) is \"%s\".\n"),
8440 arm_fallback_mode_string);
8444 arm_show_force_mode (struct ui_file *file, int from_tty,
8445 struct cmd_list_element *c, const char *value)
8447 fprintf_filtered (file,
8448 _("The current execution mode assumed "
8449 "(even when symbols are available) is \"%s\".\n"),
8450 arm_force_mode_string);
8453 /* If the user changes the register disassembly style used for info
8454 register and other commands, we have to also switch the style used
8455 in opcodes for disassembly output. This function is run in the "set
8456 arm disassembly" command, and does that. */
8459 set_disassembly_style_sfunc (char *args, int from_tty,
8460 struct cmd_list_element *c)
8462 set_disassembly_style ();
8465 /* Return the ARM register name corresponding to register I. */
8467 arm_register_name (struct gdbarch *gdbarch, int i)
8469 const int num_regs = gdbarch_num_regs (gdbarch);
8471 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8472 && i >= num_regs && i < num_regs + 32)
8474 static const char *const vfp_pseudo_names[] = {
8475 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8476 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8477 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8478 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8481 return vfp_pseudo_names[i - num_regs];
8484 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8485 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8487 static const char *const neon_pseudo_names[] = {
8488 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8489 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8492 return neon_pseudo_names[i - num_regs - 32];
8495 if (i >= ARRAY_SIZE (arm_register_names))
8496 /* These registers are only supported on targets which supply
8497 an XML description. */
8500 return arm_register_names[i];
8504 set_disassembly_style (void)
8508 /* Find the style that the user wants. */
8509 for (current = 0; current < num_disassembly_options; current++)
8510 if (disassembly_style == valid_disassembly_styles[current])
8512 gdb_assert (current < num_disassembly_options);
8514 /* Synchronize the disassembler. */
8515 set_arm_regname_option (current);
8518 /* Test whether the coff symbol specific value corresponds to a Thumb
8522 coff_sym_is_thumb (int val)
8524 return (val == C_THUMBEXT
8525 || val == C_THUMBSTAT
8526 || val == C_THUMBEXTFUNC
8527 || val == C_THUMBSTATFUNC
8528 || val == C_THUMBLABEL);
8531 /* arm_coff_make_msymbol_special()
8532 arm_elf_make_msymbol_special()
8534 These functions test whether the COFF or ELF symbol corresponds to
8535 an address in thumb code, and set a "special" bit in a minimal
8536 symbol to indicate that it does. */
8539 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8541 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8543 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8544 == ST_BRANCH_TO_THUMB)
8545 MSYMBOL_SET_SPECIAL (msym);
8549 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8551 if (coff_sym_is_thumb (val))
8552 MSYMBOL_SET_SPECIAL (msym);
8556 arm_objfile_data_free (struct objfile *objfile, void *arg)
8558 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8561 for (i = 0; i < objfile->obfd->section_count; i++)
8562 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8566 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8569 const char *name = bfd_asymbol_name (sym);
8570 struct arm_per_objfile *data;
8571 VEC(arm_mapping_symbol_s) **map_p;
8572 struct arm_mapping_symbol new_map_sym;
8574 gdb_assert (name[0] == '$');
8575 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8578 data = (struct arm_per_objfile *) objfile_data (objfile,
8579 arm_objfile_data_key);
8582 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8583 struct arm_per_objfile);
8584 set_objfile_data (objfile, arm_objfile_data_key, data);
8585 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8586 objfile->obfd->section_count,
8587 VEC(arm_mapping_symbol_s) *);
8589 map_p = &data->section_maps[bfd_get_section (sym)->index];
8591 new_map_sym.value = sym->value;
8592 new_map_sym.type = name[1];
8594 /* Assume that most mapping symbols appear in order of increasing
8595 value. If they were randomly distributed, it would be faster to
8596 always push here and then sort at first use. */
8597 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8599 struct arm_mapping_symbol *prev_map_sym;
8601 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8602 if (prev_map_sym->value >= sym->value)
8605 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8606 arm_compare_mapping_symbols);
8607 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8612 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
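/* Illustrative note (added commentary, not from the original source):
   the ARM ELF ABI defines the mapping symbols $a (ARM code), $t (Thumb
   code) and $d (data).  For a section whose mapping symbols sit at
   offsets 0x0 ($a), 0x40 ($t) and 0x80 ($d), the per-section vector
   built above would hold { {0x0, 'a'}, {0x40, 't'}, {0x80, 'd'} },
   kept sorted by value so a later binary search can classify any PC
   that falls between two of them.  */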
8616 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8618 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8619 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8621 /* If necessary, set the T bit. */
8624 ULONGEST val, t_bit;
8625 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8626 t_bit = arm_psr_thumb_bit (gdbarch);
8627 if (arm_pc_is_thumb (gdbarch, pc))
8628 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8631 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8636 /* Read the contents of a NEON quad register, by reading from two
8637 double registers. This is used to implement the quad pseudo
8638 registers, and for argument passing in case the quad registers are
8639 missing; vectors are passed in quad registers when using the VFP
8640 ABI, even if a NEON unit is not present. REGNUM is the index of
8641 the quad register, in [0, 15]. */
8643 static enum register_status
8644 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8645 int regnum, gdb_byte *buf)
8648 gdb_byte reg_buf[8];
8649 int offset, double_regnum;
8650 enum register_status status;
8652 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8653 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8656 /* d0 is always the least significant half of q0. */
8657 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8662 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8663 if (status != REG_VALID)
8665 memcpy (buf + offset, reg_buf, 8);
8667 offset = 8 - offset;
8668 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8669 if (status != REG_VALID)
8671 memcpy (buf + offset, reg_buf, 8);
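/* Sketch of the layout handled above (added commentary): on a
   little-endian target, reading the pseudo register q1 copies d2 into
   buf[0..7] and d3 into buf[8..15], so d2 forms the least significant
   half of the 128-bit value; on a big-endian target the two halves are
   swapped, which is what the offset computation accounts for.  */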
8676 static enum register_status
8677 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8678 int regnum, gdb_byte *buf)
8680 const int num_regs = gdbarch_num_regs (gdbarch);
8682 gdb_byte reg_buf[8];
8683 int offset, double_regnum;
8685 gdb_assert (regnum >= num_regs);
8688 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8689 /* Quad-precision register. */
8690 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8693 enum register_status status;
8695 /* Single-precision register. */
8696 gdb_assert (regnum < 32);
8698 /* s0 is always the least significant half of d0. */
8699 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8700 offset = (regnum & 1) ? 0 : 4;
8702 offset = (regnum & 1) ? 4 : 0;
8704 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8705 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8708 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8709 if (status == REG_VALID)
8710 memcpy (buf, reg_buf + offset, 4);
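/* Sketch of the overlay used above (added commentary): s(2n) and
   s(2n+1) are the two 32-bit halves of d(n).  Reading the pseudo
   register s5, for example, reads the raw register d2 and copies four
   bytes from offset 4 (little-endian) or offset 0 (big-endian) of its
   contents.  */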
8715 /* Store the contents of BUF to a NEON quad register, by writing to
8716 two double registers. This is used to implement the quad pseudo
8717 registers, and for argument passing in case the quad registers are
8718 missing; vectors are passed in quad registers when using the VFP
8719 ABI, even if a NEON unit is not present. REGNUM is the index
8720 of the quad register, in [0, 15]. */
8723 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8724 int regnum, const gdb_byte *buf)
8727 int offset, double_regnum;
8729 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8730 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8733 /* d0 is always the least significant half of q0. */
8734 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8739 regcache_raw_write (regcache, double_regnum, buf + offset);
8740 offset = 8 - offset;
8741 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8745 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8746 int regnum, const gdb_byte *buf)
8748 const int num_regs = gdbarch_num_regs (gdbarch);
8750 gdb_byte reg_buf[8];
8751 int offset, double_regnum;
8753 gdb_assert (regnum >= num_regs);
8756 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8757 /* Quad-precision register. */
8758 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8761 /* Single-precision register. */
8762 gdb_assert (regnum < 32);
8764 /* s0 is always the least significant half of d0. */
8765 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8766 offset = (regnum & 1) ? 0 : 4;
8768 offset = (regnum & 1) ? 4 : 0;
8770 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8771 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8774 regcache_raw_read (regcache, double_regnum, reg_buf);
8775 memcpy (reg_buf + offset, buf, 4);
8776 regcache_raw_write (regcache, double_regnum, reg_buf);
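/* Added note: a single-precision pseudo register is written with a
   read-modify-write of the containing double register, so the
   neighbouring s-register that shares the same d-register is
   preserved.  */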
8780 static struct value *
8781 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8783 const int *reg_p = (const int *) baton;
8784 return value_of_register (*reg_p, frame);
8787 static enum gdb_osabi
8788 arm_elf_osabi_sniffer (bfd *abfd)
8790 unsigned int elfosabi;
8791 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8793 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8795 if (elfosabi == ELFOSABI_ARM)
8796 /* GNU tools use this value. Check note sections in this case, as well. */
8798 bfd_map_over_sections (abfd,
8799 generic_elf_osabi_sniff_abi_tag_sections,
8802 /* Anything else will be handled by the generic ELF sniffer. */
8807 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8808 struct reggroup *group)
8810 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8811 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8812 all_reggroup, of course. */
8813 if (regnum == ARM_FPS_REGNUM)
8814 return (group == float_reggroup
8815 || group == save_reggroup
8816 || group == restore_reggroup
8817 || group == all_reggroup);
8819 return default_register_reggroup_p (gdbarch, regnum, group);
8823 /* For backward-compatibility we allow two 'g' packet lengths with
8824 the remote protocol depending on whether FPA registers are
8825 supplied. M-profile targets do not have FPA registers, but some
8826 stubs already exist in the wild which use a 'g' packet which
8827 supplies them albeit with dummy values. The packet format which
8828 includes FPA registers should be considered deprecated for
8829 M-profile targets. */
8832 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8834 if (gdbarch_tdep (gdbarch)->is_m)
8836 /* If we know from the executable this is an M-profile target,
8837 cater for remote targets whose register set layout is the
8838 same as the FPA layout. */
8839 register_remote_g_packet_guess (gdbarch,
8840 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8841 (16 * INT_REGISTER_SIZE)
8842 + (8 * FP_REGISTER_SIZE)
8843 + (2 * INT_REGISTER_SIZE),
8844 tdesc_arm_with_m_fpa_layout);
8846 /* The regular M-profile layout. */
8847 register_remote_g_packet_guess (gdbarch,
8848 /* r0-r12,sp,lr,pc; xpsr */
8849 (16 * INT_REGISTER_SIZE)
8850 + INT_REGISTER_SIZE,
8853 /* M-profile plus M4F VFP. */
8854 register_remote_g_packet_guess (gdbarch,
8855 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8856 (16 * INT_REGISTER_SIZE)
8857 + (16 * VFP_REGISTER_SIZE)
8858 + (2 * INT_REGISTER_SIZE),
8859 tdesc_arm_with_m_vfp_d16);
8862 /* Otherwise we don't have a useful guess. */
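/* Worked sizes for the guesses above (added commentary, assuming the
   usual 4-byte integer registers, 12-byte FPA registers and 8-byte VFP
   double registers):
     FPA-style layout: 16*4 + 8*12 + 2*4 = 168 bytes,
     plain M-profile:  16*4 + 4          =  68 bytes,
     M-profile + VFP:  16*4 + 16*8 + 2*4 = 200 bytes.  */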
8866 /* Initialize the current architecture based on INFO. If possible,
8867 re-use an architecture from ARCHES, which is a list of
8868 architectures already created during this debugging session.
8870 Called e.g. at program startup, when reading a core file, and when
8871 reading a binary file. */
8873 static struct gdbarch *
8874 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8876 struct gdbarch_tdep *tdep;
8877 struct gdbarch *gdbarch;
8878 struct gdbarch_list *best_arch;
8879 enum arm_abi_kind arm_abi = arm_abi_global;
8880 enum arm_float_model fp_model = arm_fp_model;
8881 struct tdesc_arch_data *tdesc_data = NULL;
8883 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8884 int have_wmmx_registers = 0;
8886 int have_fpa_registers = 1;
8887 const struct target_desc *tdesc = info.target_desc;
8889 /* If we have an object to base this architecture on, try to determine
8892 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8894 int ei_osabi, e_flags;
8896 switch (bfd_get_flavour (info.abfd))
8898 case bfd_target_aout_flavour:
8899 /* Assume it's an old APCS-style ABI. */
8900 arm_abi = ARM_ABI_APCS;
8903 case bfd_target_coff_flavour:
8904 /* Assume it's an old APCS-style ABI. */
8906 arm_abi = ARM_ABI_APCS;
8909 case bfd_target_elf_flavour:
8910 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8911 e_flags = elf_elfheader (info.abfd)->e_flags;
8913 if (ei_osabi == ELFOSABI_ARM)
8915 /* GNU tools used to use this value, but do not for EABI
8916 objects. There's nowhere to tag an EABI version
8917 anyway, so assume APCS. */
8918 arm_abi = ARM_ABI_APCS;
8920 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8922 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8923 int attr_arch, attr_profile;
8927 case EF_ARM_EABI_UNKNOWN:
8928 /* Assume GNU tools. */
8929 arm_abi = ARM_ABI_APCS;
8932 case EF_ARM_EABI_VER4:
8933 case EF_ARM_EABI_VER5:
8934 arm_abi = ARM_ABI_AAPCS;
8935 /* EABI binaries default to VFP float ordering.
8936 They may also contain build attributes that can
8937 be used to identify if the VFP argument-passing
8939 if (fp_model == ARM_FLOAT_AUTO)
8942 switch (bfd_elf_get_obj_attr_int (info.abfd,
8946 case AEABI_VFP_args_base:
8947 /* "The user intended FP parameter/result
8948 passing to conform to AAPCS, base
8950 fp_model = ARM_FLOAT_SOFT_VFP;
8952 case AEABI_VFP_args_vfp:
8953 /* "The user intended FP parameter/result
8954 passing to conform to AAPCS, VFP
8956 fp_model = ARM_FLOAT_VFP;
8958 case AEABI_VFP_args_toolchain:
8959 /* "The user intended FP parameter/result
8960 passing to conform to tool chain-specific
8961 conventions" - we don't know any such
8962 conventions, so leave it as "auto". */
8964 case AEABI_VFP_args_compatible:
8965 /* "Code is compatible with both the base
8966 and VFP variants; the user did not permit
8967 non-variadic functions to pass FP
8968 parameters/results" - leave it as
8972 /* Attribute value not mentioned in the
8973 November 2012 ABI, so leave it as
8978 fp_model = ARM_FLOAT_SOFT_VFP;
8984 /* Leave it as "auto". */
8985 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8990 /* Detect M-profile programs. This only works if the
8991 executable file includes build attributes; GCC does
8992 copy them to the executable, but e.g. RealView does
8994 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8996 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8998 Tag_CPU_arch_profile);
8999 /* GCC specifies the profile for v6-M; RealView only
9000 specifies the profile for architectures starting with
9001 V7 (as opposed to architectures with a tag
9002 numerically greater than TAG_CPU_ARCH_V7). */
9003 if (!tdesc_has_registers (tdesc)
9004 && (attr_arch == TAG_CPU_ARCH_V6_M
9005 || attr_arch == TAG_CPU_ARCH_V6S_M
9006 || attr_profile == 'M'))
9011 if (fp_model == ARM_FLOAT_AUTO)
9013 int e_flags = elf_elfheader (info.abfd)->e_flags;
9015 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9018 /* Leave it as "auto". Strictly speaking this case
9019 means FPA, but almost nobody uses that now, and
9020 many toolchains fail to set the appropriate bits
9021 for the floating-point model they use. */
9023 case EF_ARM_SOFT_FLOAT:
9024 fp_model = ARM_FLOAT_SOFT_FPA;
9026 case EF_ARM_VFP_FLOAT:
9027 fp_model = ARM_FLOAT_VFP;
9029 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9030 fp_model = ARM_FLOAT_SOFT_VFP;
9035 if (e_flags & EF_ARM_BE8)
9036 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9041 /* Leave it as "auto". */
9046 /* Check any target description for validity. */
9047 if (tdesc_has_registers (tdesc))
9049 /* For most registers we require GDB's default names; but also allow
9050 the numeric names for sp / lr / pc, as a convenience. */
9051 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9052 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9053 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9055 const struct tdesc_feature *feature;
9058 feature = tdesc_find_feature (tdesc,
9059 "org.gnu.gdb.arm.core");
9060 if (feature == NULL)
9062 feature = tdesc_find_feature (tdesc,
9063 "org.gnu.gdb.arm.m-profile");
9064 if (feature == NULL)
9070 tdesc_data = tdesc_data_alloc ();
9073 for (i = 0; i < ARM_SP_REGNUM; i++)
9074 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9075 arm_register_names[i]);
9076 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9079 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9082 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9086 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9087 ARM_PS_REGNUM, "xpsr");
9089 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9090 ARM_PS_REGNUM, "cpsr");
9094 tdesc_data_cleanup (tdesc_data);
9098 feature = tdesc_find_feature (tdesc,
9099 "org.gnu.gdb.arm.fpa");
9100 if (feature != NULL)
9103 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9104 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9105 arm_register_names[i]);
9108 tdesc_data_cleanup (tdesc_data);
9113 have_fpa_registers = 0;
9115 feature = tdesc_find_feature (tdesc,
9116 "org.gnu.gdb.xscale.iwmmxt");
9117 if (feature != NULL)
9119 static const char *const iwmmxt_names[] = {
9120 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9121 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9122 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9123 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9127 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9129 &= tdesc_numbered_register (feature, tdesc_data, i,
9130 iwmmxt_names[i - ARM_WR0_REGNUM]);
9132 /* Check for the control registers, but do not fail if they
9134 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9135 tdesc_numbered_register (feature, tdesc_data, i,
9136 iwmmxt_names[i - ARM_WR0_REGNUM]);
9138 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9140 &= tdesc_numbered_register (feature, tdesc_data, i,
9141 iwmmxt_names[i - ARM_WR0_REGNUM]);
9145 tdesc_data_cleanup (tdesc_data);
9149 have_wmmx_registers = 1;
9152 /* If we have a VFP unit, check whether the single precision registers
9153 are present. If not, then we will synthesize them as pseudo
9155 feature = tdesc_find_feature (tdesc,
9156 "org.gnu.gdb.arm.vfp");
9157 if (feature != NULL)
9159 static const char *const vfp_double_names[] = {
9160 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9161 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9162 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9163 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9166 /* Require the double precision registers. There must be either
9169 for (i = 0; i < 32; i++)
9171 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9173 vfp_double_names[i]);
9177 if (!valid_p && i == 16)
9180 /* Also require FPSCR. */
9181 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9182 ARM_FPSCR_REGNUM, "fpscr");
9185 tdesc_data_cleanup (tdesc_data);
9189 if (tdesc_unnumbered_register (feature, "s0") == 0)
9190 have_vfp_pseudos = 1;
9192 vfp_register_count = i;
9194 /* If we have VFP, also check for NEON. The architecture allows
9195 NEON without VFP (integer vector operations only), but GDB
9196 does not support that. */
9197 feature = tdesc_find_feature (tdesc,
9198 "org.gnu.gdb.arm.neon");
9199 if (feature != NULL)
9201 /* NEON requires 32 double-precision registers. */
9204 tdesc_data_cleanup (tdesc_data);
9208 /* If there are quad registers defined by the stub, use
9209 their type; otherwise (normally) provide them with
9210 the default type. */
9211 if (tdesc_unnumbered_register (feature, "q0") == 0)
9212 have_neon_pseudos = 1;
9219 /* If there is already a candidate, use it. */
9220 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9222 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9224 if (arm_abi != ARM_ABI_AUTO
9225 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9228 if (fp_model != ARM_FLOAT_AUTO
9229 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9232 /* There are various other properties in tdep that we do not
9233 need to check here: those derived from a target description,
9234 since gdbarches with a different target description are
9235 automatically disqualified. */
9237 /* Do check is_m, though, since it might come from the binary. */
9238 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9241 /* Found a match. */
9245 if (best_arch != NULL)
9247 if (tdesc_data != NULL)
9248 tdesc_data_cleanup (tdesc_data);
9249 return best_arch->gdbarch;
9252 tdep = XCNEW (struct gdbarch_tdep);
9253 gdbarch = gdbarch_alloc (&info, tdep);
9255 /* Record additional information about the architecture we are defining.
9256 These are gdbarch discriminators, like the OSABI. */
9257 tdep->arm_abi = arm_abi;
9258 tdep->fp_model = fp_model;
9260 tdep->have_fpa_registers = have_fpa_registers;
9261 tdep->have_wmmx_registers = have_wmmx_registers;
9262 gdb_assert (vfp_register_count == 0
9263 || vfp_register_count == 16
9264 || vfp_register_count == 32);
9265 tdep->vfp_register_count = vfp_register_count;
9266 tdep->have_vfp_pseudos = have_vfp_pseudos;
9267 tdep->have_neon_pseudos = have_neon_pseudos;
9268 tdep->have_neon = have_neon;
9270 arm_register_g_packet_guesses (gdbarch);
9273 switch (info.byte_order_for_code)
9275 case BFD_ENDIAN_BIG:
9276 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9277 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9278 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9279 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9283 case BFD_ENDIAN_LITTLE:
9284 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9285 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9286 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9287 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9292 internal_error (__FILE__, __LINE__,
9293 _("arm_gdbarch_init: bad byte order for float format"));
9296 /* On ARM targets char defaults to unsigned. */
9297 set_gdbarch_char_signed (gdbarch, 0);
9299 /* Note: for displaced stepping, this includes the breakpoint, and one word
9300 of additional scratch space. This setting isn't used for anything besides
9301 displaced stepping at present. */
9302 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9304 /* This should be low enough for everything. */
9305 tdep->lowest_pc = 0x20;
9306 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9308 /* The default, for both APCS and AAPCS, is to return small
9309 structures in registers. */
9310 tdep->struct_return = reg_struct_return;
9312 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9313 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9315 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9317 /* Frame handling. */
9318 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9319 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9320 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9322 frame_base_set_default (gdbarch, &arm_normal_base);
9324 /* Address manipulation. */
9325 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9327 /* Advance PC across function entry code. */
9328 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9330 /* Detect whether PC is at a point where the stack has been destroyed. */
9331 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9333 /* Skip trampolines. */
9334 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9336 /* The stack grows downward. */
9337 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9339 /* Breakpoint manipulation. */
9340 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9341 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9342 arm_remote_breakpoint_from_pc);
9344 /* Information about registers, etc. */
9345 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9346 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9347 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9348 set_gdbarch_register_type (gdbarch, arm_register_type);
9349 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9351 /* This "info float" is FPA-specific. Use the generic version if we
9353 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9354 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9356 /* Internal <-> external register number maps. */
9357 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9358 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9360 set_gdbarch_register_name (gdbarch, arm_register_name);
9362 /* Returning results. */
9363 set_gdbarch_return_value (gdbarch, arm_return_value);
9366 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9368 /* Minsymbol frobbing. */
9369 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9370 set_gdbarch_coff_make_msymbol_special (gdbarch,
9371 arm_coff_make_msymbol_special);
9372 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9374 /* Thumb-2 IT block support. */
9375 set_gdbarch_adjust_breakpoint_address (gdbarch,
9376 arm_adjust_breakpoint_address);
9378 /* Virtual tables. */
9379 set_gdbarch_vbit_in_delta (gdbarch, 1);
9381 /* Hook in the ABI-specific overrides, if they have been registered. */
9382 gdbarch_init_osabi (info, gdbarch);
9384 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9386 /* Add some default predicates. */
9388 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9389 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9390 dwarf2_append_unwinders (gdbarch);
9391 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9392 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9393 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9395 /* Now we have tuned the configuration, set a few final things,
9396 based on what the OS ABI has told us. */
9398 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9399 binaries are always marked. */
9400 if (tdep->arm_abi == ARM_ABI_AUTO)
9401 tdep->arm_abi = ARM_ABI_APCS;
9403 /* Watchpoints are not steppable. */
9404 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9406 /* We used to default to FPA for generic ARM, but almost nobody
9407 uses that now, and we now provide a way for the user to force
9408 the model. So default to the most useful variant. */
9409 if (tdep->fp_model == ARM_FLOAT_AUTO)
9410 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9412 if (tdep->jb_pc >= 0)
9413 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9415 /* Floating point sizes and format. */
9416 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9417 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9419 set_gdbarch_double_format
9420 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9421 set_gdbarch_long_double_format
9422 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9426 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9427 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
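/* Added note: the "littlebyte_bigword" format selected above for FPA
   and soft-FPA reflects the historical FPA double layout, in which the
   most significant 32-bit word is stored first while the bytes within
   each word remain little-endian.  */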
9430 if (have_vfp_pseudos)
9432 /* NOTE: These are the only pseudo registers used by
9433 the ARM target at the moment. If more are added, a
9434 little more care in numbering will be needed. */
9436 int num_pseudos = 32;
9437 if (have_neon_pseudos)
9439 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9440 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9441 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9446 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9448 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9450 /* Override tdesc_register_type to adjust the types of VFP
9451 registers for NEON. */
9452 set_gdbarch_register_type (gdbarch, arm_register_type);
9455 /* Add standard register aliases. We add aliases even for those
9456 names which are used by the current architecture - it's simpler,
9457 and does no harm, since nothing ever lists user registers. */
9458 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9459 user_reg_add (gdbarch, arm_register_aliases[i].name,
9460 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9466 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9468 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9473 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9474 (unsigned long) tdep->lowest_pc);
9477 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9480 _initialize_arm_tdep (void)
9482 struct ui_file *stb;
9484 const char *setname;
9485 const char *setdesc;
9486 const char *const *regnames;
9488 static char *helptext;
9489 char regdesc[1024], *rdptr = regdesc;
9490 size_t rest = sizeof (regdesc);
9492 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9494 arm_objfile_data_key
9495 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9497 /* Add ourselves to objfile event chain. */
9498 observer_attach_new_objfile (arm_exidx_new_objfile);
9500 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9502 /* Register an ELF OS ABI sniffer for ARM binaries. */
9503 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9504 bfd_target_elf_flavour,
9505 arm_elf_osabi_sniffer);
9507 /* Initialize the standard target descriptions. */
9508 initialize_tdesc_arm_with_m ();
9509 initialize_tdesc_arm_with_m_fpa_layout ();
9510 initialize_tdesc_arm_with_m_vfp_d16 ();
9511 initialize_tdesc_arm_with_iwmmxt ();
9512 initialize_tdesc_arm_with_vfpv2 ();
9513 initialize_tdesc_arm_with_vfpv3 ();
9514 initialize_tdesc_arm_with_neon ();
9516 /* Get the number of possible sets of register names defined in opcodes. */
9517 num_disassembly_options = get_arm_regname_num_options ();
9519 /* Add root prefix command for all "set arm"/"show arm" commands. */
9520 add_prefix_cmd ("arm", no_class, set_arm_command,
9521 _("Various ARM-specific commands."),
9522 &setarmcmdlist, "set arm ", 0, &setlist);
9524 add_prefix_cmd ("arm", no_class, show_arm_command,
9525 _("Various ARM-specific commands."),
9526 &showarmcmdlist, "show arm ", 0, &showlist);
9528 /* Sync the opcode insn printer with our register viewer. */
9529 parse_arm_disassembler_option ("reg-names-std");
9531 /* Initialize the array that will be passed to
9532 add_setshow_enum_cmd(). */
9533 valid_disassembly_styles = XNEWVEC (const char *,
9534 num_disassembly_options + 1);
9535 for (i = 0; i < num_disassembly_options; i++)
9537 get_arm_regnames (i, &setname, &setdesc, ®names);
9538 valid_disassembly_styles[i] = setname;
9539 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9542 /* When we find the default names, tell the disassembler to use
9544 if (!strcmp (setname, "std"))
9546 disassembly_style = setname;
9547 set_arm_regname_option (i);
9550 /* Mark the end of valid options. */
9551 valid_disassembly_styles[num_disassembly_options] = NULL;
9553 /* Create the help text. */
9554 stb = mem_fileopen ();
9555 fprintf_unfiltered (stb, "%s%s%s",
9556 _("The valid values are:\n"),
9558 _("The default is \"std\"."));
9559 helptext = ui_file_xstrdup (stb, NULL);
9560 ui_file_delete (stb);
9562 add_setshow_enum_cmd("disassembler", no_class,
9563 valid_disassembly_styles, &disassembly_style,
9564 _("Set the disassembly style."),
9565 _("Show the disassembly style."),
9567 set_disassembly_style_sfunc,
9568 NULL, /* FIXME: i18n: The disassembly style is
9570 &setarmcmdlist, &showarmcmdlist);
9572 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9573 _("Set usage of ARM 32-bit mode."),
9574 _("Show usage of ARM 32-bit mode."),
9575 _("When off, a 26-bit PC will be used."),
9577 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9579 &setarmcmdlist, &showarmcmdlist);
9581 /* Add a command to allow the user to force the FPU model. */
9582 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9583 _("Set the floating point type."),
9584 _("Show the floating point type."),
9585 _("auto - Determine the FP typefrom the OS-ABI.\n\
9586 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9587 fpa - FPA co-processor (GCC compiled).\n\
9588 softvfp - Software FP with pure-endian doubles.\n\
9589 vfp - VFP co-processor."),
9590 set_fp_model_sfunc, show_fp_model,
9591 &setarmcmdlist, &showarmcmdlist);
9593 /* Add a command to allow the user to force the ABI. */
9594 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9597 NULL, arm_set_abi, arm_show_abi,
9598 &setarmcmdlist, &showarmcmdlist);
9600 /* Add two commands to allow the user to force the assumed
9602 add_setshow_enum_cmd ("fallback-mode", class_support,
9603 arm_mode_strings, &arm_fallback_mode_string,
9604 _("Set the mode assumed when symbols are unavailable."),
9605 _("Show the mode assumed when symbols are unavailable."),
9606 NULL, NULL, arm_show_fallback_mode,
9607 &setarmcmdlist, &showarmcmdlist);
9608 add_setshow_enum_cmd ("force-mode", class_support,
9609 arm_mode_strings, &arm_force_mode_string,
9610 _("Set the mode assumed even when symbols are available."),
9611 _("Show the mode assumed even when symbols are available."),
9612 NULL, NULL, arm_show_force_mode,
9613 &setarmcmdlist, &showarmcmdlist);
9615 /* Debugging flag. */
9616 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9617 _("Set ARM debugging."),
9618 _("Show ARM debugging."),
9619 _("When on, arm-specific debugging is enabled."),
9621 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9622 &setdebuglist, &showdebuglist);
9625 /* ARM-reversible process record data structures. */
9627 #define ARM_INSN_SIZE_BYTES 4
9628 #define THUMB_INSN_SIZE_BYTES 2
9629 #define THUMB2_INSN_SIZE_BYTES 4
9632 /* Position of the bit within a 32-bit ARM instruction
9633 that defines whether the instruction is a load or store. */
9634 #define INSN_S_L_BIT_NUM 20
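/* Illustrative example (added commentary): in the single data transfer
   encoding, bit 20 is the L bit, so for "ldr r0, [r1]" (0xe5910000)
   bit (insn, INSN_S_L_BIT_NUM) is 1, while for "str r0, [r1]"
   (0xe5810000) it is 0.  */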
9636 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9639 unsigned int reg_len = LENGTH; \
9642 REGS = XNEWVEC (uint32_t, reg_len); \
9643 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9648 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9651 unsigned int mem_len = LENGTH; \
9654 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9655 memcpy(&MEMS->len, &RECORD_BUF[0], \
9656 sizeof(struct arm_mem_r) * LENGTH); \
9661 /* Checks whether the insn has already been recorded (boolean expression). */
9662 #define INSN_RECORDED(ARM_RECORD) \
9663 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
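/* Added commentary on the conventions used by the record helpers below:
   record_buf[] collects the numbers of the registers an insn will
   modify, and record_buf_mem[] collects (length, address) pairs for the
   memory it will write; REG_ALLOC and MEM_ALLOC then copy those arrays
   into the current record so the process-record framework can save the
   old values before the insn executes.  */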
9665 /* ARM memory record structure. */
9668 uint32_t len; /* Record length. */
9669 uint32_t addr; /* Memory address. */
9672 /* An ARM instruction record contains the opcode and execution state
9673 of the current insn (before entry to decode_insn ()), and the list
9674 of to-be-modified registers and memory blocks (on return from
9675 decode_insn ()). */
9677 typedef struct insn_decode_record_t
9679 struct gdbarch *gdbarch;
9680 struct regcache *regcache;
9681 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9682 uint32_t arm_insn; /* Should accommodate thumb. */
9683 uint32_t cond; /* Condition code. */
9684 uint32_t opcode; /* Insn opcode. */
9685 uint32_t decode; /* Insn decode bits. */
9686 uint32_t mem_rec_count; /* No of mem records. */
9687 uint32_t reg_rec_count; /* No of reg records. */
9688 uint32_t *arm_regs; /* Registers to be saved for this record. */
9689 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9690 } insn_decode_record;
9693 /* Checks ARM SBZ and SBO mandatory fields. */
9696 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9698 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9717 enum arm_record_result
9719 ARM_RECORD_SUCCESS = 0,
9720 ARM_RECORD_FAILURE = 1
9727 } arm_record_strx_t;
9738 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9739 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9742 struct regcache *reg_cache = arm_insn_r->regcache;
9743 ULONGEST u_regval[2]= {0};
9745 uint32_t reg_src1 = 0, reg_src2 = 0;
9746 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9748 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9749 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9751 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9753 /* 1) Handle misc store, immediate offset. */
9754 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9755 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9756 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9757 regcache_raw_read_unsigned (reg_cache, reg_src1,
9759 if (ARM_PC_REGNUM == reg_src1)
9761 /* If R15 (PC) was used as Rn, the value read is the current PC+8. */
9762 u_regval[0] = u_regval[0] + 8;
9764 offset_8 = (immed_high << 4) | immed_low;
9765 /* Calculate target store address. */
9766 if (14 == arm_insn_r->opcode)
9768 tgt_mem_addr = u_regval[0] + offset_8;
9772 tgt_mem_addr = u_regval[0] - offset_8;
9774 if (ARM_RECORD_STRH == str_type)
9776 record_buf_mem[0] = 2;
9777 record_buf_mem[1] = tgt_mem_addr;
9778 arm_insn_r->mem_rec_count = 1;
9780 else if (ARM_RECORD_STRD == str_type)
9782 record_buf_mem[0] = 4;
9783 record_buf_mem[1] = tgt_mem_addr;
9784 record_buf_mem[2] = 4;
9785 record_buf_mem[3] = tgt_mem_addr + 4;
9786 arm_insn_r->mem_rec_count = 2;
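/* Worked example for the immediate-offset case above (added for
   illustration): for "strh r2, [r3, #0x34]" the immediate is split as
   immed_high = 0x3 and immed_low = 0x4, so offset_8 = (0x3 << 4) | 0x4
   = 0x34 and the single memory record covers 2 bytes at R3 + 0x34.  */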
9789 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9791 /* 2) Store, register offset. */
9793 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9795 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9796 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9797 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9800 /* If R15 (PC) was used as Rn, the value read is the current PC+8. */
9801 u_regval[0] = u_regval[0] + 8;
9803 /* Calculate target store address, Rn +/- Rm, register offset. */
9804 if (12 == arm_insn_r->opcode)
9806 tgt_mem_addr = u_regval[0] + u_regval[1];
9810 tgt_mem_addr = u_regval[1] - u_regval[0];
9812 if (ARM_RECORD_STRH == str_type)
9814 record_buf_mem[0] = 2;
9815 record_buf_mem[1] = tgt_mem_addr;
9816 arm_insn_r->mem_rec_count = 1;
9818 else if (ARM_RECORD_STRD == str_type)
9820 record_buf_mem[0] = 4;
9821 record_buf_mem[1] = tgt_mem_addr;
9822 record_buf_mem[2] = 4;
9823 record_buf_mem[3] = tgt_mem_addr + 4;
9824 arm_insn_r->mem_rec_count = 2;
9827 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9828 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9830 /* 3) Store, immediate pre-indexed. */
9831 /* 5) Store, immediate post-indexed. */
9832 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9833 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9834 offset_8 = (immed_high << 4) | immed_low;
9835 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9836 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9837 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9838 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9840 tgt_mem_addr = u_regval[0] + offset_8;
9844 tgt_mem_addr = u_regval[0] - offset_8;
9846 if (ARM_RECORD_STRH == str_type)
9848 record_buf_mem[0] = 2;
9849 record_buf_mem[1] = tgt_mem_addr;
9850 arm_insn_r->mem_rec_count = 1;
9852 else if (ARM_RECORD_STRD == str_type)
9854 record_buf_mem[0] = 4;
9855 record_buf_mem[1] = tgt_mem_addr;
9856 record_buf_mem[2] = 4;
9857 record_buf_mem[3] = tgt_mem_addr + 4;
9858 arm_insn_r->mem_rec_count = 2;
9860 /* Record Rn also as it changes. */
9861 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9862 arm_insn_r->reg_rec_count = 1;
9864 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9865 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9867 /* 4) Store, register pre-indexed. */
9868 /* 6) Store, register post-indexed. */
9869 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9870 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9871 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9872 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9873 /* Calculate target store address, Rn +/- Rm, register offset. */
9874 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9876 tgt_mem_addr = u_regval[0] + u_regval[1];
9880 tgt_mem_addr = u_regval[1] - u_regval[0];
9882 if (ARM_RECORD_STRH == str_type)
9884 record_buf_mem[0] = 2;
9885 record_buf_mem[1] = tgt_mem_addr;
9886 arm_insn_r->mem_rec_count = 1;
9888 else if (ARM_RECORD_STRD == str_type)
9890 record_buf_mem[0] = 4;
9891 record_buf_mem[1] = tgt_mem_addr;
9892 record_buf_mem[2] = 4;
9893 record_buf_mem[3] = tgt_mem_addr + 4;
9894 arm_insn_r->mem_rec_count = 2;
9896 /* Record Rn also as it changes. */
9897 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9898 arm_insn_r->reg_rec_count = 1;
9903 /* Handling ARM extension space insns. */
9906 arm_record_extension_space (insn_decode_record *arm_insn_r)
9908 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9909 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9910 uint32_t record_buf[8], record_buf_mem[8];
9911 uint32_t reg_src1 = 0;
9912 struct regcache *reg_cache = arm_insn_r->regcache;
9913 ULONGEST u_regval = 0;
9915 gdb_assert (!INSN_RECORDED(arm_insn_r));
9916 /* Handle unconditional insn extension space. */
9918 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9919 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9920 if (arm_insn_r->cond)
9922 /* PLD has no effect on architectural state; it just affects the caches. */
9924 if (5 == ((opcode1 & 0xE0) >> 5))
9927 record_buf[0] = ARM_PS_REGNUM;
9928 record_buf[1] = ARM_LR_REGNUM;
9929 arm_insn_r->reg_rec_count = 2;
9931 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9935 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9936 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9939 /* Undefined instruction on ARM V5; need to handle if later
9940 versions define it. */
9943 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9944 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9945 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9947 /* Handle arithmetic insn extension space. */
9948 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9949 && !INSN_RECORDED(arm_insn_r))
9951 /* Handle MLA(S) and MUL(S). */
9952 if (0 <= insn_op1 && 3 >= insn_op1)
9954 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9955 record_buf[1] = ARM_PS_REGNUM;
9956 arm_insn_r->reg_rec_count = 2;
9958 else if (4 <= insn_op1 && 15 >= insn_op1)
9960 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9961 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9962 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9963 record_buf[2] = ARM_PS_REGNUM;
9964 arm_insn_r->reg_rec_count = 3;
9968 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9969 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9970 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9972 /* Handle control insn extension space. */
9974 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9975 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9977 if (!bit (arm_insn_r->arm_insn,25))
9979 if (!bits (arm_insn_r->arm_insn, 4, 7))
9981 if ((0 == insn_op1) || (2 == insn_op1))
9984 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9985 arm_insn_r->reg_rec_count = 1;
9987 else if (1 == insn_op1)
9989 /* CPSR is going to be changed. */
9990 record_buf[0] = ARM_PS_REGNUM;
9991 arm_insn_r->reg_rec_count = 1;
9993 else if (3 == insn_op1)
9995 /* SPSR is going to be changed. */
9996 /* We need to get SPSR value, which is yet to be done. */
10000 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10005 record_buf[0] = ARM_PS_REGNUM;
10006 arm_insn_r->reg_rec_count = 1;
10008 else if (3 == insn_op1)
10011 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10012 arm_insn_r->reg_rec_count = 1;
10015 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10018 record_buf[0] = ARM_PS_REGNUM;
10019 record_buf[1] = ARM_LR_REGNUM;
10020 arm_insn_r->reg_rec_count = 2;
10022 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10024 /* QADD, QSUB, QDADD, QDSUB */
10025 record_buf[0] = ARM_PS_REGNUM;
10026 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10027 arm_insn_r->reg_rec_count = 2;
10029 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10032 record_buf[0] = ARM_PS_REGNUM;
10033 record_buf[1] = ARM_LR_REGNUM;
10034 arm_insn_r->reg_rec_count = 2;
10036 /* Save SPSR also; how? */
10039 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10040 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10041 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10042 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10045 if (0 == insn_op1 || 1 == insn_op1)
10047 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10048 /* We don't do optimization for SMULW<y> where we
10050 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10051 record_buf[1] = ARM_PS_REGNUM;
10052 arm_insn_r->reg_rec_count = 2;
10054 else if (2 == insn_op1)
10057 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10058 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10059 arm_insn_r->reg_rec_count = 2;
10061 else if (3 == insn_op1)
10064 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10065 arm_insn_r->reg_rec_count = 1;
10071 /* MSR : immediate form. */
10074 /* CPSR is going to be changed. */
10075 record_buf[0] = ARM_PS_REGNUM;
10076 arm_insn_r->reg_rec_count = 1;
10078 else if (3 == insn_op1)
10080 /* SPSR is going to be changed. */
10081 /* We need to get the SPSR value, which is yet to be done. */
10087 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10088 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10089 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10091 /* Handle load/store insn extension space. */
10093 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10094 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10095 && !INSN_RECORDED(arm_insn_r))
10100 /* These insns change registers and memory as well. */
10101 /* SWP or SWPB insn. */
10102 /* Get memory address given by Rn. */
10103 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10104 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10105 /* SWP insn: swaps a word. */
10106 if (8 == arm_insn_r->opcode)
10108 record_buf_mem[0] = 4;
10112 /* SWPB insn: swaps only a byte. */
10113 record_buf_mem[0] = 1;
10115 record_buf_mem[1] = u_regval;
10116 arm_insn_r->mem_rec_count = 1;
10117 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10118 arm_insn_r->reg_rec_count = 1;
10120 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10123 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10126 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10129 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10130 record_buf[1] = record_buf[0] + 1;
10131 arm_insn_r->reg_rec_count = 2;
10133 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10136 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10139 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10141 /* LDRH, LDRSB, LDRSH. */
10142 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10143 arm_insn_r->reg_rec_count = 1;
10148 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10149 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10150 && !INSN_RECORDED(arm_insn_r))
10153 /* Handle coprocessor insn extension space. */
10156 /* To be done for ARMv5 and later; as of now we return -1. */
10160 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10161 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10166 /* Handling opcode 000 insns. */
10169 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10171 struct regcache *reg_cache = arm_insn_r->regcache;
10172 uint32_t record_buf[8], record_buf_mem[8];
10173 ULONGEST u_regval[2] = {0};
10175 uint32_t reg_src1 = 0, reg_dest = 0;
10176 uint32_t opcode1 = 0;
10178 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10179 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10180 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10182 /* Data processing insn /multiply insn. */
10183 if (9 == arm_insn_r->decode
10184 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10185 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10187 /* Handle multiply instructions. */
10188 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10189 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10191 /* Handle MLA and MUL. */
10192 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10193 record_buf[1] = ARM_PS_REGNUM;
10194 arm_insn_r->reg_rec_count = 2;
10196 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10198 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10199 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10200 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10201 record_buf[2] = ARM_PS_REGNUM;
10202 arm_insn_r->reg_rec_count = 3;
10205 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10206 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10208 /* Handle misc load insns, as the 20th bit (L = 1). */
10209 /* The LDR insn is capable of branching: if
10210 MOV LR, PC is preceded by an LDR insn having Rn as R15,
10211 that sequence emulates a branch-and-link insn, and hence we
10212 need to save CPSR and PC as well. I am not sure this is the right
10213 place; an opcode = 010 LDR insn makes this happen if R15 was used as the destination register. */
10215 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10216 if (15 != reg_dest)
10218 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10219 arm_insn_r->reg_rec_count = 1;
10223 record_buf[0] = reg_dest;
10224 record_buf[1] = ARM_PS_REGNUM;
10225 arm_insn_r->reg_rec_count = 2;
10228 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10229 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10230 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10231 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10233 /* Handle MSR insn. */
10234 if (9 == arm_insn_r->opcode)
10236 /* CPSR is going to be changed. */
10237 record_buf[0] = ARM_PS_REGNUM;
10238 arm_insn_r->reg_rec_count = 1;
10242 /* SPSR is going to be changed. */
10243 /* How to read SPSR value? */
10247 else if (9 == arm_insn_r->decode
10248 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10249 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10251 /* Handling SWP, SWPB. */
10252 /* These insns change registers and memory as well. */
10253 /* SWP or SWPB insn. */
10255 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10256 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10257 /* SWP insn: swaps a word. */
10258 if (8 == arm_insn_r->opcode)
10260 record_buf_mem[0] = 4;
10264 /* SWPB insn: swaps only a byte. */
10265 record_buf_mem[0] = 1;
10267 record_buf_mem[1] = u_regval[0];
10268 arm_insn_r->mem_rec_count = 1;
10269 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10270 arm_insn_r->reg_rec_count = 1;
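/* Worked example for the SWP/SWPB case above (added for illustration):
   for "swp r0, r1, [r2]" both the old contents of R0 and the 4 bytes at
   the address held in R2 are recorded, since the swap overwrites
   both.  */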
10272 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10273 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10275 /* Handle BLX, branch and link/exchange. */
10276 if (9 == arm_insn_r->opcode)
10278 /* The T bit of CPSR is set from bit[0] of Rm,
10279 and R14 stores the return address. */
10280 record_buf[0] = ARM_PS_REGNUM;
10281 record_buf[1] = ARM_LR_REGNUM;
10282 arm_insn_r->reg_rec_count = 2;
10285 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10287 /* Handle enhanced software breakpoint insn, BKPT. */
10288 /* CPSR is changed to be executed in ARM state, disabling normal
10289 interrupts, entering abort mode. */
10290 /* The PC is set according to the high vector configuration. */
10291 /* If the user hits the breakpoint and types reverse, in
10292 that case we need to go back with the previous CPSR and
10293 program counter. */
10294 record_buf[0] = ARM_PS_REGNUM;
10295 record_buf[1] = ARM_LR_REGNUM;
10296 arm_insn_r->reg_rec_count = 2;
10298 /* Save SPSR also; how? */
10301 else if (11 == arm_insn_r->decode
10302 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10304 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10306 /* Handle str(x) insn */
10307 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10310 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10311 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10313 /* Handle BX, branch and exchange. */
10314 /* The T bit of CPSR is set from bit[0] of Rm. */
10315 record_buf[0] = ARM_PS_REGNUM;
10316 arm_insn_r->reg_rec_count = 1;
10318 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10319 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10320 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10322 /* Count leading zeros: CLZ. */
10323 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10324 arm_insn_r->reg_rec_count = 1;
10326 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10327 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10328 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10329 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10332 /* Handle MRS insn. */
10333 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10334 arm_insn_r->reg_rec_count = 1;
10336 else if (arm_insn_r->opcode <= 15)
10338 /* Normal data processing insns. */
10339 /* In all 11 shifter-operand addressing modes, the insn modifies the
10340 destination register, which is specified by bits 12-15. */
10341 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10342 record_buf[1] = ARM_PS_REGNUM;
10343 arm_insn_r->reg_rec_count = 2;
10350 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10351 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10355 /* Handling opcode 001 insns. */
10358 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10360 uint32_t record_buf[8], record_buf_mem[8];
10362 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10363 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10365 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10366 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10367 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10370 /* Handle MSR insn. */
10371 if (9 == arm_insn_r->opcode)
10373 /* CPSR is going to be changed. */
10374 record_buf[0] = ARM_PS_REGNUM;
10375 arm_insn_r->reg_rec_count = 1;
10379 /* SPSR is going to be changed. */
10382 else if (arm_insn_r->opcode <= 15)
10384 /* Normal data processing insns. */
10385 /* In all 11 shifter-operand addressing modes, the insn modifies the
10386 destination register, which is specified by bits 12-15. */
10387 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10388 record_buf[1] = ARM_PS_REGNUM;
10389 arm_insn_r->reg_rec_count = 2;
10396 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10397 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10402 arm_record_media (insn_decode_record *arm_insn_r)
10404 uint32_t record_buf[8];
10406 switch (bits (arm_insn_r->arm_insn, 22, 24))
10409 /* Parallel addition and subtraction, signed */
10411 /* Parallel addition and subtraction, unsigned */
10414 /* Packing, unpacking, saturation and reversal */
10416 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10418 record_buf[arm_insn_r->reg_rec_count++] = rd;
10424 /* Signed multiplies */
10426 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10427 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10429 record_buf[arm_insn_r->reg_rec_count++] = rd;
10431 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10432 else if (op1 == 0x4)
10433 record_buf[arm_insn_r->reg_rec_count++]
10434 = bits (arm_insn_r->arm_insn, 12, 15);
10440 if (bit (arm_insn_r->arm_insn, 21)
10441 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10444 record_buf[arm_insn_r->reg_rec_count++]
10445 = bits (arm_insn_r->arm_insn, 12, 15);
10447 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10448 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10450 /* USAD8 and USADA8 */
10451 record_buf[arm_insn_r->reg_rec_count++]
10452 = bits (arm_insn_r->arm_insn, 16, 19);
10459 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10460 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10462 /* Permanently UNDEFINED */
10467 /* BFC, BFI and UBFX */
10468 record_buf[arm_insn_r->reg_rec_count++]
10469 = bits (arm_insn_r->arm_insn, 12, 15);
10478 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10483 /* Handle ARM mode instructions with opcode 010. */
10486 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10488 struct regcache *reg_cache = arm_insn_r->regcache;
10490 uint32_t reg_base, reg_dest;
10491 uint32_t offset_12, tgt_mem_addr;
10492 uint32_t record_buf[8], record_buf_mem[8];
10493 unsigned char wback;
10496 /* Calculate wback. */
10497 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10498 || (bit (arm_insn_r->arm_insn, 21) == 1);
10500 arm_insn_r->reg_rec_count = 0;
10501 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10503 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10505 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
and LDRT. */
10508 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10509 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10511 /* The LDR instruction is capable of doing branching. If a MOV LR, PC
10512 precedes an LDR instruction that loads into R15, the pair
10513 emulates a branch and link instruction, and hence we need to save
10514 CPSR and PC as well. */
10515 if (ARM_PC_REGNUM == reg_dest)
10516 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10518 /* If wback is true, also save the base register, which is going to be
written back. */
if (wback)
10521 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10525 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10527 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10528 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10530 /* Handle bit U. */
10531 if (bit (arm_insn_r->arm_insn, 23))
10533 /* U == 1: Add the offset. */
10534 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10538 /* U == 0: subtract the offset. */
10539 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10542 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
bytes. */
10544 if (bit (arm_insn_r->arm_insn, 22))
10546 /* STRB and STRBT: 1 byte. */
10547 record_buf_mem[0] = 1;
10551 /* STR and STRT: 4 bytes. */
10552 record_buf_mem[0] = 4;
10555 /* Handle bit P. */
10556 if (bit (arm_insn_r->arm_insn, 24))
10557 record_buf_mem[1] = tgt_mem_addr;
10559 record_buf_mem[1] = (uint32_t) u_regval;
10561 arm_insn_r->mem_rec_count = 1;
10563 /* If wback is true, also save the base register, which is going to be
written back. */
if (wback)
10566 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10569 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10570 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10574 /* Handling opcode 011 insns. */
10577 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10579 struct regcache *reg_cache = arm_insn_r->regcache;
10581 uint32_t shift_imm = 0;
10582 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10583 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10584 uint32_t record_buf[8], record_buf_mem[8];
10587 ULONGEST u_regval[2];
10589 if (bit (arm_insn_r->arm_insn, 4))
10590 return arm_record_media (arm_insn_r);
10592 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10593 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10595 /* Handle enhanced store insns and the LDRD DSP insn; the ordering below
10596 follows the addressing modes of the store insns. */
10600 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10602 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10603 /* The LDR insn is capable of branching: if
10604 a MOV LR, PC precedes an LDR insn that loads into R15,
10605 the pair emulates a branch and link insn, and hence we
10606 need to save CPSR and PC as well. */
10607 if (15 != reg_dest)
10609 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10610 arm_insn_r->reg_rec_count = 1;
10614 record_buf[0] = reg_dest;
10615 record_buf[1] = ARM_PS_REGNUM;
10616 arm_insn_r->reg_rec_count = 2;
10621 if (! bits (arm_insn_r->arm_insn, 4, 11))
10623 /* Store insn, register offset and register pre-indexed,
10624 register post-indexed. */
10626 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10628 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10629 regcache_raw_read_unsigned (reg_cache, reg_src1
10631 regcache_raw_read_unsigned (reg_cache, reg_src2
10633 if (15 == reg_src2)
10635 /* If R15 was used as Rn, the value read is the current PC + 8. */
10636 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10637 u_regval[0] = u_regval[0] + 8;
10639 /* Calculate target store address, Rn +/- Rm, register offset. */
10641 if (bit (arm_insn_r->arm_insn, 23))
10643 tgt_mem_addr = u_regval[0] + u_regval[1];
10647 tgt_mem_addr = u_regval[1] - u_regval[0];
10650 switch (arm_insn_r->opcode)
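/* STR and STRT variants store a full word (4 bytes). */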
10664 record_buf_mem[0] = 4;
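/* STRB and STRBT variants store a single byte. */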
10679 record_buf_mem[0] = 1;
10683 gdb_assert_not_reached ("no decoding pattern found");
10686 record_buf_mem[1] = tgt_mem_addr;
10687 arm_insn_r->mem_rec_count = 1;
10689 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10690 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10691 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10692 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10693 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10694 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10697 /* Rn is going to be changed in pre-indexed mode and
10698 post-indexed mode as well. */
10699 record_buf[0] = reg_src2;
10700 arm_insn_r->reg_rec_count = 1;
10705 /* Store insn, scaled register offset; scaled pre-indexed. */
10706 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10708 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10710 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10711 /* Get shift_imm. */
10712 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10713 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10714 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10715 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10716 /* Offset_12 used as shift. */
10720 /* Offset_12 used as index. */
10721 offset_12 = u_regval[0] << shift_imm;
10725 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10731 if (bit (u_regval[0], 31))
10733 offset_12 = 0xFFFFFFFF;
10742 /* This is arithmetic shift. */
10743 offset_12 = s_word >> shift_imm;
10750 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10752 /* Get C flag value and shift it by 31. */
10753 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10754 | (u_regval[0]) >> 1);
/* Rotate right (ROR) by shift_imm bits. */
10758 offset_12 = ((u_regval[0] >> shift_imm)
10760 | (u_regval[0] << (sizeof (uint32_t) * 8 - shift_imm)));
10765 gdb_assert_not_reached ("no decoding pattern found");
10769 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10771 if (bit (arm_insn_r->arm_insn, 23))
10773 tgt_mem_addr = u_regval[1] + offset_12;
10777 tgt_mem_addr = u_regval[1] - offset_12;
10780 switch (arm_insn_r->opcode)
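/* STR and STRT variants store a full word (4 bytes). */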
10794 record_buf_mem[0] = 4;
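/* STRB and STRBT variants store a single byte. */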
10809 record_buf_mem[0] = 1;
10813 gdb_assert_not_reached ("no decoding pattern found");
10816 record_buf_mem[1] = tgt_mem_addr;
10817 arm_insn_r->mem_rec_count = 1;
10819 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10820 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10821 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10822 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10823 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10824 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10827 /* Rn is going to be changed in register scaled pre-indexed
10828 mode, and in scaled post-indexed mode. */
10829 record_buf[0] = reg_src2;
10830 arm_insn_r->reg_rec_count = 1;
10835 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10836 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10840 /* Handle ARM mode instructions with opcode 100. */
10843 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10845 struct regcache *reg_cache = arm_insn_r->regcache;
10846 uint32_t register_count = 0, register_bits;
10847 uint32_t reg_base, addr_mode;
10848 uint32_t record_buf[24], record_buf_mem[48];
10852 /* Fetch the list of registers. */
10853 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10854 arm_insn_r->reg_rec_count = 0;
10856 /* Fetch the base register that contains the address we are loading data
from. */
10858 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10860 /* Calculate wback. */
10861 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10863 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10865 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10867 /* Find out which registers are going to be loaded from memory. */
10868 while (register_bits)
10870 if (register_bits & 0x00000001)
10871 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10872 register_bits = register_bits >> 1;
10877 /* If wback is true, also save the base register, which is going to be
written back. */
if (wback)
10880 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10882 /* Save the CPSR register. */
10883 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10887 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10889 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10891 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10893 /* Find out how many registers are going to be stored to memory. */
10894 while (register_bits)
10896 if (register_bits & 0x00000001)
10898 register_bits = register_bits >> 1;
10903 /* STMDA (STMED): Decrement after. */
10905 record_buf_mem[1] = (uint32_t) u_regval
10906 - register_count * INT_REGISTER_SIZE + 4;
10908 /* STM (STMIA, STMEA): Increment after. */
10910 record_buf_mem[1] = (uint32_t) u_regval;
10912 /* STMDB (STMFD): Decrement before. */
10914 record_buf_mem[1] = (uint32_t) u_regval
10915 - register_count * INT_REGISTER_SIZE;
10917 /* STMIB (STMFA): Increment before. */
10919 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10922 gdb_assert_not_reached ("no decoding pattern found");
10926 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10927 arm_insn_r->mem_rec_count = 1;
10929 /* If wback is true, also save the base register, which is going to be
written back. */
if (wback)
10932 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10935 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10936 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10940 /* Handling opcode 101 insns. */
10943 arm_record_b_bl (insn_decode_record *arm_insn_r)
10945 uint32_t record_buf[8];
10947 /* Handle B, BL, BLX(1) insns. */
10948 /* B simply branches so we do nothing here. */
10949 /* Note: BLX(1) doesn't fall here; instead it falls into the
10950 extension space. */
10951 if (bit (arm_insn_r->arm_insn, 24))
10953 record_buf[0] = ARM_LR_REGNUM;
10954 arm_insn_r->reg_rec_count = 1;
10957 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10963 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10965 printf_unfiltered (_("Process record does not support instruction "
10966 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10967 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10972 /* Record handler for vector data transfer instructions. */
10975 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10977 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10978 uint32_t record_buf[4];
10980 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10981 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10982 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10983 bit_l = bit (arm_insn_r->arm_insn, 20);
10984 bit_c = bit (arm_insn_r->arm_insn, 8);
10986 /* Handle VMOV instruction. */
10987 if (bit_l && bit_c)
10989 record_buf[0] = reg_t;
10990 arm_insn_r->reg_rec_count = 1;
10992 else if (bit_l && !bit_c)
10994 /* Handle VMOV instruction. */
10995 if (bits_a == 0x00)
10997 record_buf[0] = reg_t;
10998 arm_insn_r->reg_rec_count = 1;
11000 /* Handle VMRS instruction. */
11001 else if (bits_a == 0x07)
if (reg_t == 15)
11004 reg_t = ARM_PS_REGNUM;
11006 record_buf[0] = reg_t;
11007 arm_insn_r->reg_rec_count = 1;
11010 else if (!bit_l && !bit_c)
11012 /* Handle VMOV instruction. */
11013 if (bits_a == 0x00)
11015 record_buf[0] = ARM_D0_REGNUM + reg_v;
11017 arm_insn_r->reg_rec_count = 1;
11019 /* Handle VMSR instruction. */
11020 else if (bits_a == 0x07)
11022 record_buf[0] = ARM_FPSCR_REGNUM;
11023 arm_insn_r->reg_rec_count = 1;
11026 else if (!bit_l && bit_c)
11028 /* Handle VMOV instruction. */
11029 if (!(bits_a & 0x04))
11031 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
+ ARM_D0_REGNUM;
11033 arm_insn_r->reg_rec_count = 1;
11035 /* Handle VDUP instruction. */
11038 if (bit (arm_insn_r->arm_insn, 21))
11040 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11041 record_buf[0] = reg_v + ARM_D0_REGNUM;
11042 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11043 arm_insn_r->reg_rec_count = 2;
11047 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11048 record_buf[0] = reg_v + ARM_D0_REGNUM;
11049 arm_insn_r->reg_rec_count = 1;
11054 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11058 /* Record handler for extension register load/store instructions. */
11061 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11063 uint32_t opcode, single_reg;
11064 uint8_t op_vldm_vstm;
11065 uint32_t record_buf[8], record_buf_mem[128];
11066 ULONGEST u_regval = 0;
11068 struct regcache *reg_cache = arm_insn_r->regcache;
11070 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11071 single_reg = !bit (arm_insn_r->arm_insn, 8);
11072 op_vldm_vstm = opcode & 0x1b;
11074 /* Handle VMOV instructions. */
11075 if ((opcode & 0x1e) == 0x04)
11077 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11079 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11080 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11081 arm_insn_r->reg_rec_count = 2;
11085 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11086 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11090 /* The first S register number m is REG_M:M (M is bit 5),
11091 the corresponding D register number is REG_M:M / 2, which
is REG_M. */
11093 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11094 /* The second S register number is REG_M:M + 1, the
11095 corresponding D register number is (REG_M:M + 1) / 2.
11096 IOW, if bit M is 1, the first and second S registers
11097 are mapped to different D registers, otherwise, they are
11098 in the same D register. */
11101 record_buf[arm_insn_r->reg_rec_count++]
11102 = ARM_D0_REGNUM + reg_m + 1;
11107 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11108 arm_insn_r->reg_rec_count = 1;
11112 /* Handle VSTM and VPUSH instructions. */
11113 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11114 || op_vldm_vstm == 0x12)
11116 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11117 uint32_t memory_index = 0;
11119 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11120 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11121 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11122 imm_off32 = imm_off8 << 2;
11123 memory_count = imm_off8;
11125 if (bit (arm_insn_r->arm_insn, 23))
11126 start_address = u_regval;
11128 start_address = u_regval - imm_off32;
11130 if (bit (arm_insn_r->arm_insn, 21))
11132 record_buf[0] = reg_rn;
11133 arm_insn_r->reg_rec_count = 1;
11136 while (memory_count > 0)
11140 record_buf_mem[memory_index] = 4;
11141 record_buf_mem[memory_index + 1] = start_address;
11142 start_address = start_address + 4;
11143 memory_index = memory_index + 2;
11147 record_buf_mem[memory_index] = 4;
11148 record_buf_mem[memory_index + 1] = start_address;
11149 record_buf_mem[memory_index + 2] = 4;
11150 record_buf_mem[memory_index + 3] = start_address + 4;
11151 start_address = start_address + 8;
11152 memory_index = memory_index + 4;
11156 arm_insn_r->mem_rec_count = (memory_index >> 1);
11158 /* Handle VLDM instructions. */
11159 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11160 || op_vldm_vstm == 0x13)
11162 uint32_t reg_count, reg_vd;
11163 uint32_t reg_index = 0;
11164 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11166 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11167 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11169 /* REG_VD is the first D register number. If the instruction
11170 loads memory to S registers (SINGLE_REG is TRUE), the register
11171 number is (REG_VD << 1 | bit D), so the corresponding D
11172 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11174 reg_vd = reg_vd | (bit_d << 4);
11176 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11177 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11179 /* If the instruction loads memory to D register, REG_COUNT should
11180 be divided by 2, according to the ARM Architecture Reference
11181 Manual. If the instruction loads memory to S register, divide by
11182 2 as well because two S registers are mapped to D register. */
11183 reg_count = reg_count / 2;
11184 if (single_reg && bit_d)
11186 /* Increase the register count if the S register list starts from
11187 an odd number (bit d is one). */
reg_count++;
11191 while (reg_count > 0)
11193 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11196 arm_insn_r->reg_rec_count = reg_index;
11198 /* VSTR Vector store register. */
11199 else if ((opcode & 0x13) == 0x10)
11201 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11202 uint32_t memory_index = 0;
11204 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11205 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11206 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11207 imm_off32 = imm_off8 << 2;
11209 if (bit (arm_insn_r->arm_insn, 23))
11210 start_address = u_regval + imm_off32;
11212 start_address = u_regval - imm_off32;
11216 record_buf_mem[memory_index] = 4;
11217 record_buf_mem[memory_index + 1] = start_address;
11218 arm_insn_r->mem_rec_count = 1;
11222 record_buf_mem[memory_index] = 4;
11223 record_buf_mem[memory_index + 1] = start_address;
11224 record_buf_mem[memory_index + 2] = 4;
11225 record_buf_mem[memory_index + 3] = start_address + 4;
11226 arm_insn_r->mem_rec_count = 2;
11229 /* VLDR Vector load register. */
11230 else if ((opcode & 0x13) == 0x11)
11232 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11236 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11237 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11241 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11242 /* Record register D rather than pseudo register S. */
11243 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11245 arm_insn_r->reg_rec_count = 1;
11248 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11249 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11253 /* Record handler for arm/thumb mode VFP data processing instructions. */
11256 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11258 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11259 uint32_t record_buf[4];
11260 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11261 enum insn_types curr_insn_type = INSN_INV;
11263 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11264 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11265 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11266 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11267 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11268 bit_d = bit (arm_insn_r->arm_insn, 22);
11269 opc1 = opc1 & ~0x04;  /* Discard the D bit (insn bit 22) from opc1. */
11271 /* Handle VMLA, VMLS. */
11274 if (bit (arm_insn_r->arm_insn, 10))
11276 if (bit (arm_insn_r->arm_insn, 6))
11277 curr_insn_type = INSN_T0;
11279 curr_insn_type = INSN_T1;
11284 curr_insn_type = INSN_T1;
11286 curr_insn_type = INSN_T2;
11289 /* Handle VNMLA, VNMLS, VNMUL. */
11290 else if (opc1 == 0x01)
11293 curr_insn_type = INSN_T1;
11295 curr_insn_type = INSN_T2;
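/* Handle VMUL. */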
11298 else if (opc1 == 0x02 && !(opc3 & 0x01))
11300 if (bit (arm_insn_r->arm_insn, 10))
11302 if (bit (arm_insn_r->arm_insn, 6))
11303 curr_insn_type = INSN_T0;
11305 curr_insn_type = INSN_T1;
11310 curr_insn_type = INSN_T1;
11312 curr_insn_type = INSN_T2;
11315 /* Handle VADD, VSUB. */
11316 else if (opc1 == 0x03)
11318 if (!bit (arm_insn_r->arm_insn, 9))
11320 if (bit (arm_insn_r->arm_insn, 6))
11321 curr_insn_type = INSN_T0;
11323 curr_insn_type = INSN_T1;
11328 curr_insn_type = INSN_T1;
11330 curr_insn_type = INSN_T2;
/* Handle VDIV. */
11334 else if (opc1 == 0x08)
11337 curr_insn_type = INSN_T1;
11339 curr_insn_type = INSN_T2;
11341 /* Handle all other vfp data processing instructions. */
11342 else if (opc1 == 0x0b)
11345 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11347 if (bit (arm_insn_r->arm_insn, 4))
11349 if (bit (arm_insn_r->arm_insn, 6))
11350 curr_insn_type = INSN_T0;
11352 curr_insn_type = INSN_T1;
11357 curr_insn_type = INSN_T1;
11359 curr_insn_type = INSN_T2;
11362 /* Handle VNEG and VABS. */
11363 else if ((opc2 == 0x01 && opc3 == 0x01)
11364 || (opc2 == 0x00 && opc3 == 0x03))
11366 if (!bit (arm_insn_r->arm_insn, 11))
11368 if (bit (arm_insn_r->arm_insn, 6))
11369 curr_insn_type = INSN_T0;
11371 curr_insn_type = INSN_T1;
11376 curr_insn_type = INSN_T1;
11378 curr_insn_type = INSN_T2;
11381 /* Handle VSQRT. */
11382 else if (opc2 == 0x01 && opc3 == 0x03)
11385 curr_insn_type = INSN_T1;
11387 curr_insn_type = INSN_T2;
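/* Handle VCVT between double-precision and single-precision. */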
11390 else if (opc2 == 0x07 && opc3 == 0x03)
11393 curr_insn_type = INSN_T1;
11395 curr_insn_type = INSN_T2;
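/* Handle VCVT between floating-point and integer, and between
floating-point and fixed-point. */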
11397 else if (opc3 & 0x01)
11400 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11402 if (!bit (arm_insn_r->arm_insn, 18))
11403 curr_insn_type = INSN_T2;
11407 curr_insn_type = INSN_T1;
11409 curr_insn_type = INSN_T2;
11413 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11416 curr_insn_type = INSN_T1;
11418 curr_insn_type = INSN_T2;
11420 /* Handle VCVTB, VCVTT. */
11421 else if ((opc2 & 0x0e) == 0x02)
11422 curr_insn_type = INSN_T2;
11423 /* Handle VCMP, VCMPE. */
11424 else if ((opc2 & 0x0e) == 0x04)
11425 curr_insn_type = INSN_T3;
11429 switch (curr_insn_type)
11432 reg_vd = reg_vd | (bit_d << 4);
11433 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11434 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11435 arm_insn_r->reg_rec_count = 2;
11439 reg_vd = reg_vd | (bit_d << 4);
11440 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11441 arm_insn_r->reg_rec_count = 1;
11445 reg_vd = (reg_vd << 1) | bit_d;
11446 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11447 arm_insn_r->reg_rec_count = 1;
11451 record_buf[0] = ARM_FPSCR_REGNUM;
11452 arm_insn_r->reg_rec_count = 1;
11456 gdb_assert_not_reached ("no decoding pattern found");
11460 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11464 /* Handling opcode 110 insns. */
11467 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11469 uint32_t op1, op1_ebit, coproc;
11471 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11472 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11473 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11475 if ((coproc & 0x0e) == 0x0a)
11477 /* Handle extension register ld/st instructions. */
11479 return arm_record_exreg_ld_st_insn (arm_insn_r);
11481 /* 64-bit transfers between arm core and extension registers. */
11482 if ((op1 & 0x3e) == 0x04)
11483 return arm_record_exreg_ld_st_insn (arm_insn_r);
11487 /* Handle coprocessor ld/st instructions. */
11492 return arm_record_unsupported_insn (arm_insn_r);
11495 return arm_record_unsupported_insn (arm_insn_r);
11498 /* Move to coprocessor from two arm core registers. */
11500 return arm_record_unsupported_insn (arm_insn_r);
11502 /* Move to two arm core registers from coprocessor. */
11507 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11508 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11509 arm_insn_r->reg_rec_count = 2;
11511 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11515 return arm_record_unsupported_insn (arm_insn_r);
11518 /* Handling opcode 111 insns. */
11521 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11523 uint32_t op, op1_sbit, op1_ebit, coproc;
11524 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11525 struct regcache *reg_cache = arm_insn_r->regcache;
11527 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11528 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11529 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11530 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11531 op = bit (arm_insn_r->arm_insn, 4);
11533 /* Handle arm SWI/SVC system call instructions. */
11536 if (tdep->arm_syscall_record != NULL)
11538 ULONGEST svc_operand, svc_number;
11540 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11542 if (svc_operand) /* OABI. */
11543 svc_number = svc_operand - 0x900000;
11545 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11547 return tdep->arm_syscall_record (reg_cache, svc_number);
11551 printf_unfiltered (_("no syscall record support\n"));
11556 if ((coproc & 0x0e) == 0x0a)
11558 /* VFP data-processing instructions. */
11559 if (!op1_sbit && !op)
11560 return arm_record_vfp_data_proc_insn (arm_insn_r);
11562 /* Advanced SIMD, VFP instructions. */
11563 if (!op1_sbit && op)
11564 return arm_record_vdata_transfer_insn (arm_insn_r);
11568 /* Coprocessor data operations. */
11569 if (!op1_sbit && !op)
11570 return arm_record_unsupported_insn (arm_insn_r);
11572 /* Move to Coprocessor from ARM core register. */
11573 if (!op1_sbit && !op1_ebit && op)
11574 return arm_record_unsupported_insn (arm_insn_r);
11576 /* Move to arm core register from coprocessor. */
11577 if (!op1_sbit && op1_ebit && op)
11579 uint32_t record_buf[1];
11581 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11582 if (record_buf[0] == 15)
11583 record_buf[0] = ARM_PS_REGNUM;
11585 arm_insn_r->reg_rec_count = 1;
11586 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11592 return arm_record_unsupported_insn (arm_insn_r);
11595 /* Handling opcode 000 insns. */
11598 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11600 uint32_t record_buf[8];
11601 uint32_t reg_src1 = 0;
11603 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11605 record_buf[0] = ARM_PS_REGNUM;
11606 record_buf[1] = reg_src1;
11607 thumb_insn_r->reg_rec_count = 2;
11609 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11615 /* Handling opcode 001 insns. */
11618 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11620 uint32_t record_buf[8];
11621 uint32_t reg_src1 = 0;
11623 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11625 record_buf[0] = ARM_PS_REGNUM;
11626 record_buf[1] = reg_src1;
11627 thumb_insn_r->reg_rec_count = 2;
11629 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11634 /* Handling opcode 010 insns. */
11637 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11639 struct regcache *reg_cache = thumb_insn_r->regcache;
11640 uint32_t record_buf[8], record_buf_mem[8];
11642 uint32_t reg_src1 = 0, reg_src2 = 0;
11643 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11645 ULONGEST u_regval[2] = {0};
11647 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11649 if (bit (thumb_insn_r->arm_insn, 12))
11651 /* Handle load/store register offset. */
11652 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11653 if (opcode2 >= 12 && opcode2 <= 15)
11655 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11656 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11657 record_buf[0] = reg_src1;
11658 thumb_insn_r->reg_rec_count = 1;
11660 else if (opcode2 >= 8 && opcode2 <= 10)
11662 /* STR(2), STRB(2), STRH(2). */
11663 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11664 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11665 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11666 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
if (8 == opcode2)
11668 record_buf_mem[0] = 4; /* STR (2). */
11669 else if (10 == opcode2)
11670 record_buf_mem[0] = 1; /* STRB (2). */
11671 else if (9 == opcode2)
11672 record_buf_mem[0] = 2; /* STRH (2). */
11673 record_buf_mem[1] = u_regval[0] + u_regval[1];
11674 thumb_insn_r->mem_rec_count = 1;
11677 else if (bit (thumb_insn_r->arm_insn, 11))
11679 /* Handle load from literal pool. */
11681 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11682 record_buf[0] = reg_src1;
11683 thumb_insn_r->reg_rec_count = 1;
11687 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11688 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11689 if ((3 == opcode2) && (!opcode3))
11691 /* Branch with exchange. */
11692 record_buf[0] = ARM_PS_REGNUM;
11693 thumb_insn_r->reg_rec_count = 1;
11697 /* Format 8; special data processing insns. */
11698 record_buf[0] = ARM_PS_REGNUM;
11699 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11700 | bits (thumb_insn_r->arm_insn, 0, 2));
11701 thumb_insn_r->reg_rec_count = 2;
11706 /* Format 5; data processing insns. */
11707 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11708 if (bit (thumb_insn_r->arm_insn, 7))
11710 reg_src1 = reg_src1 + 8;
11712 record_buf[0] = ARM_PS_REGNUM;
11713 record_buf[1] = reg_src1;
11714 thumb_insn_r->reg_rec_count = 2;
11717 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11718 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11724 /* Handling opcode 011 insns. */
11727 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11729 struct regcache *reg_cache = thumb_insn_r->regcache;
11730 uint32_t record_buf[8], record_buf_mem[8];
11732 uint32_t reg_src1 = 0;
11733 uint32_t opcode = 0, immed_5 = 0;
11735 ULONGEST u_regval = 0;
11737 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
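/* LDR(1), LDRB(1): record the destination register. */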
11742 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11743 record_buf[0] = reg_src1;
11744 thumb_insn_r->reg_rec_count = 1;
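/* STR(1): record the address and size of the word that will be stored. */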
11749 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11750 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11751 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11752 record_buf_mem[0] = 4;
11753 record_buf_mem[1] = u_regval + (immed_5 * 4);
11754 thumb_insn_r->mem_rec_count = 1;
11757 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11758 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11764 /* Handling opcode 100 insns. */
11767 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11769 struct regcache *reg_cache = thumb_insn_r->regcache;
11770 uint32_t record_buf[8], record_buf_mem[8];
11772 uint32_t reg_src1 = 0;
11773 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11775 ULONGEST u_regval = 0;
11777 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
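/* LDR(4), the SP-relative load: record the destination register. */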
11782 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11783 record_buf[0] = reg_src1;
11784 thumb_insn_r->reg_rec_count = 1;
11786 else if (1 == opcode)
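/* LDRH(1): halfword load with immediate offset. */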
11789 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11790 record_buf[0] = reg_src1;
11791 thumb_insn_r->reg_rec_count = 1;
11793 else if (2 == opcode)
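/* STR(3), the SP-relative store: record the word written on the stack. */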
11796 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11797 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11798 record_buf_mem[0] = 4;
11799 record_buf_mem[1] = u_regval + (immed_8 * 4);
11800 thumb_insn_r->mem_rec_count = 1;
11802 else if (0 == opcode)
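/* STRH(1): halfword store with immediate offset. */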
11805 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11806 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11807 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11808 record_buf_mem[0] = 2;
11809 record_buf_mem[1] = u_regval + (immed_5 * 2);
11810 thumb_insn_r->mem_rec_count = 1;
11813 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11814 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11820 /* Handling opcode 101 insns. */
11823 thumb_record_misc (insn_decode_record *thumb_insn_r)
11825 struct regcache *reg_cache = thumb_insn_r->regcache;
11827 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11828 uint32_t register_bits = 0, register_count = 0;
11829 uint32_t index = 0, start_address = 0;
11830 uint32_t record_buf[24], record_buf_mem[48];
11833 ULONGEST u_regval = 0;
11835 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11836 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11837 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
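/* POP: record the registers in the list, plus CPSR and SP. */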
11842 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11843 while (register_bits)
11845 if (register_bits & 0x00000001)
11846 record_buf[index++] = register_count;
11847 register_bits = register_bits >> 1;
11850 record_buf[index++] = ARM_PS_REGNUM;
11851 record_buf[index++] = ARM_SP_REGNUM;
11852 thumb_insn_r->reg_rec_count = index;
11854 else if (10 == opcode2)
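/* PUSH: the stacked memory words and SP are modified. */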
11857 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11858 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11859 while (register_bits)
11861 if (register_bits & 0x00000001)
11863 register_bits = register_bits >> 1;
11865 start_address = u_regval - \
11866 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11867 thumb_insn_r->mem_rec_count = register_count;
11868 while (register_count)
11870 record_buf_mem[(register_count * 2) - 1] = start_address;
11871 record_buf_mem[(register_count * 2) - 2] = 4;
11872 start_address = start_address + 4;
11875 record_buf[0] = ARM_SP_REGNUM;
11876 thumb_insn_r->reg_rec_count = 1;
11878 else if (0x1E == opcode1)
11881 /* Handle enhanced software breakpoint insn, BKPT. */
11882 /* CPSR is changed so that execution continues in ARM state with normal
11883 interrupts disabled, entering abort mode. */
11884 /* The PC is set according to the high vector configuration. */
11885 /* If the user hits the breakpoint and then runs in reverse, we need to go back to
11886 the previous CPSR and program counter. */
11887 record_buf[0] = ARM_PS_REGNUM;
11888 record_buf[1] = ARM_LR_REGNUM;
11889 thumb_insn_r->reg_rec_count = 2;
11890 /* We need to save SPSR value, which is not yet done. */
11891 printf_unfiltered (_("Process record does not support instruction "
11892 "0x%0x at address %s.\n"),
11893 thumb_insn_r->arm_insn,
11894 paddress (thumb_insn_r->gdbarch,
11895 thumb_insn_r->this_addr));
11898 else if ((0 == opcode) || (1 == opcode))
11900 /* ADD(5), ADD(6). */
11901 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11902 record_buf[0] = reg_src1;
11903 thumb_insn_r->reg_rec_count = 1;
11905 else if (2 == opcode)
11907 /* ADD(7), SUB(4). */
11908 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11909 record_buf[0] = ARM_SP_REGNUM;
11910 thumb_insn_r->reg_rec_count = 1;
11913 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11914 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11920 /* Handling opcode 110 insns. */
11923 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11925 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11926 struct regcache *reg_cache = thumb_insn_r->regcache;
11928 uint32_t ret = 0;  /* Function return value: -1: record failure; 0: success. */
11929 uint32_t reg_src1 = 0;
11930 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11931 uint32_t index = 0, start_address = 0;
11932 uint32_t record_buf[24], record_buf_mem[48];
11934 ULONGEST u_regval = 0;
11936 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11937 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
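/* LDMIA: record the registers in the list plus the base register. */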
11943 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11945 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11946 while (register_bits)
11948 if (register_bits & 0x00000001)
11949 record_buf[index++] = register_count;
11950 register_bits = register_bits >> 1;
11953 record_buf[index++] = reg_src1;
11954 thumb_insn_r->reg_rec_count = index;
11956 else if (0 == opcode2)
11958 /* STMIA: store multiple registers, increment after. */
11959 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11961 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11962 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11963 while (register_bits)
11965 if (register_bits & 0x00000001)
11967 register_bits = register_bits >> 1;
11969 start_address = u_regval;
11970 thumb_insn_r->mem_rec_count = register_count;
11971 while (register_count)
11973 record_buf_mem[(register_count * 2) - 1] = start_address;
11974 record_buf_mem[(register_count * 2) - 2] = 4;
11975 start_address = start_address + 4;
11979 else if (0x1F == opcode1)
11981 /* Handle arm syscall insn. */
11982 if (tdep->arm_syscall_record != NULL)
11984 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11985 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11989 printf_unfiltered (_("no syscall record support\n"));
11994 /* B(1), the conditional branch, is automatically taken care of in process_record,
11995 as the PC is saved there. */
11997 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11998 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12004 /* Handling opcode 111 insns. */
12007 thumb_record_branch (insn_decode_record *thumb_insn_r)
12009 uint32_t record_buf[8];
12010 uint32_t bits_h = 0;
12012 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12014 if (2 == bits_h || 3 == bits_h)
12017 record_buf[0] = ARM_LR_REGNUM;
12018 thumb_insn_r->reg_rec_count = 1;
12020 else if (1 == bits_h)
12023 record_buf[0] = ARM_PS_REGNUM;
12024 record_buf[1] = ARM_LR_REGNUM;
12025 thumb_insn_r->reg_rec_count = 2;
12028 /* B(2) is automatically taken care of in process_record, as the PC is
saved there. */
12031 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12036 /* Handler for thumb2 load/store multiple instructions. */
12039 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12041 struct regcache *reg_cache = thumb2_insn_r->regcache;
12043 uint32_t reg_rn, op;
12044 uint32_t register_bits = 0, register_count = 0;
12045 uint32_t index = 0, start_address = 0;
12046 uint32_t record_buf[24], record_buf_mem[48];
12048 ULONGEST u_regval = 0;
12050 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12051 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12053 if (0 == op || 3 == op)
12055 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12057 /* Handle RFE instruction. */
12058 record_buf[0] = ARM_PS_REGNUM;
12059 thumb2_insn_r->reg_rec_count = 1;
12063 /* Handle SRS instruction after reading banked SP. */
12064 return arm_record_unsupported_insn (thumb2_insn_r);
12067 else if (1 == op || 2 == op)
12069 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12071 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12072 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12073 while (register_bits)
12075 if (register_bits & 0x00000001)
12076 record_buf[index++] = register_count;
12079 register_bits = register_bits >> 1;
12081 record_buf[index++] = reg_rn;
12082 record_buf[index++] = ARM_PS_REGNUM;
12083 thumb2_insn_r->reg_rec_count = index;
12087 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12088 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12089 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12090 while (register_bits)
12092 if (register_bits & 0x00000001)
12095 register_bits = register_bits >> 1;
12100 /* Start address calculation for STM/STMIA/STMEA (increment after). */
12101 start_address = u_regval;
12105 /* Start address calculation for STMDB/STMFD (decrement before). */
12106 start_address = u_regval - register_count * 4;
12109 thumb2_insn_r->mem_rec_count = register_count;
12110 while (register_count)
12112 record_buf_mem[register_count * 2 - 1] = start_address;
12113 record_buf_mem[register_count * 2 - 2] = 4;
12114 start_address = start_address + 4;
12117 record_buf[0] = reg_rn;
12118 record_buf[1] = ARM_PS_REGNUM;
12119 thumb2_insn_r->reg_rec_count = 2;
12123 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12125 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12127 return ARM_RECORD_SUCCESS;
12130 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12134 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12136 struct regcache *reg_cache = thumb2_insn_r->regcache;
12138 uint32_t reg_rd, reg_rn, offset_imm;
12139 uint32_t reg_dest1, reg_dest2;
12140 uint32_t address, offset_addr;
12141 uint32_t record_buf[8], record_buf_mem[8];
12142 uint32_t op1, op2, op3;
12144 ULONGEST u_regval[2];
12146 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12147 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12148 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12150 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12152 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12154 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12155 record_buf[0] = reg_dest1;
12156 record_buf[1] = ARM_PS_REGNUM;
12157 thumb2_insn_r->reg_rec_count = 2;
12160 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12162 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12163 record_buf[2] = reg_dest2;
12164 thumb2_insn_r->reg_rec_count = 3;
12169 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12170 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12172 if (0 == op1 && 0 == op2)
12174 /* Handle STREX. */
12175 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12176 address = u_regval[0] + (offset_imm * 4);
12177 record_buf_mem[0] = 4;
12178 record_buf_mem[1] = address;
12179 thumb2_insn_r->mem_rec_count = 1;
12180 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12181 record_buf[0] = reg_rd;
12182 thumb2_insn_r->reg_rec_count = 1;
12184 else if (1 == op1 && 0 == op2)
12186 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12187 record_buf[0] = reg_rd;
12188 thumb2_insn_r->reg_rec_count = 1;
12189 address = u_regval[0];
12190 record_buf_mem[1] = address;
12194 /* Handle STREXB. */
12195 record_buf_mem[0] = 1;
12196 thumb2_insn_r->mem_rec_count = 1;
12200 /* Handle STREXH. */
12201 record_buf_mem[0] = 2;
12202 thumb2_insn_r->mem_rec_count = 1;
12206 /* Handle STREXD. */
12207 address = u_regval[0];
12208 record_buf_mem[0] = 4;
12209 record_buf_mem[2] = 4;
12210 record_buf_mem[3] = address + 4;
12211 thumb2_insn_r->mem_rec_count = 2;
12216 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12218 if (bit (thumb2_insn_r->arm_insn, 24))
12220 if (bit (thumb2_insn_r->arm_insn, 23))
12221 offset_addr = u_regval[0] + (offset_imm * 4);
12223 offset_addr = u_regval[0] - (offset_imm * 4);
12225 address = offset_addr;
12228 address = u_regval[0];
12230 record_buf_mem[0] = 4;
12231 record_buf_mem[1] = address;
12232 record_buf_mem[2] = 4;
12233 record_buf_mem[3] = address + 4;
12234 thumb2_insn_r->mem_rec_count = 2;
12235 record_buf[0] = reg_rn;
12236 thumb2_insn_r->reg_rec_count = 1;
12240 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12242 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12244 return ARM_RECORD_SUCCESS;
12247 /* Handler for thumb2 data processing (shift register and modified immediate)
12251 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12253 uint32_t reg_rd, op;
12254 uint32_t record_buf[8];
12256 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12257 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12259 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12261 record_buf[0] = ARM_PS_REGNUM;
12262 thumb2_insn_r->reg_rec_count = 1;
12266 record_buf[0] = reg_rd;
12267 record_buf[1] = ARM_PS_REGNUM;
12268 thumb2_insn_r->reg_rec_count = 2;
12271 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12273 return ARM_RECORD_SUCCESS;
12276 /* Generic handler for thumb2 instructions which affect the destination and PS
registers. */
12280 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12283 uint32_t record_buf[8];
12285 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12287 record_buf[0] = reg_rd;
12288 record_buf[1] = ARM_PS_REGNUM;
12289 thumb2_insn_r->reg_rec_count = 2;
12291 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12293 return ARM_RECORD_SUCCESS;
12296 /* Handler for thumb2 branch and miscellaneous control instructions. */
12299 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12301 uint32_t op, op1, op2;
12302 uint32_t record_buf[8];
12304 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12305 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12306 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12308 /* Handle MSR insn. */
12309 if (!(op1 & 0x2) && 0x38 == op)
12313 /* CPSR is going to be changed. */
12314 record_buf[0] = ARM_PS_REGNUM;
12315 thumb2_insn_r->reg_rec_count = 1;
12319 arm_record_unsupported_insn (thumb2_insn_r);
12323 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12326 record_buf[0] = ARM_PS_REGNUM;
12327 record_buf[1] = ARM_LR_REGNUM;
12328 thumb2_insn_r->reg_rec_count = 2;
12331 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12333 return ARM_RECORD_SUCCESS;
12336 /* Handler for thumb2 store single data item instructions. */
12339 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12341 struct regcache *reg_cache = thumb2_insn_r->regcache;
12343 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12344 uint32_t address, offset_addr;
12345 uint32_t record_buf[8], record_buf_mem[8];
12348 ULONGEST u_regval[2];
12350 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12351 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12352 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12353 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12355 if (bit (thumb2_insn_r->arm_insn, 23))
12358 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12359 offset_addr = u_regval[0] + offset_imm;
12360 address = offset_addr;
12365 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12367 /* Handle STRB (register). */
12368 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12369 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12370 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12371 offset_addr = u_regval[1] << shift_imm;
12372 address = u_regval[0] + offset_addr;
12376 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12377 if (bit (thumb2_insn_r->arm_insn, 10))
12379 if (bit (thumb2_insn_r->arm_insn, 9))
12380 offset_addr = u_regval[0] + offset_imm;
12382 offset_addr = u_regval[0] - offset_imm;
12384 address = offset_addr;
12387 address = u_regval[0];
12393 /* Store byte instructions. */
12396 record_buf_mem[0] = 1;
12398 /* Store half word instructions. */
12401 record_buf_mem[0] = 2;
12403 /* Store word instructions. */
12406 record_buf_mem[0] = 4;
12410 gdb_assert_not_reached ("no decoding pattern found");
12414 record_buf_mem[1] = address;
12415 thumb2_insn_r->mem_rec_count = 1;
12416 record_buf[0] = reg_rn;
12417 thumb2_insn_r->reg_rec_count = 1;
12419 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12421 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12423 return ARM_RECORD_SUCCESS;
12426 /* Handler for thumb2 load memory hints instructions. */
12429 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12431 uint32_t record_buf[8];
12432 uint32_t reg_rt, reg_rn;
12434 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12435 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12437 if (ARM_PC_REGNUM != reg_rt)
12439 record_buf[0] = reg_rt;
12440 record_buf[1] = reg_rn;
12441 record_buf[2] = ARM_PS_REGNUM;
12442 thumb2_insn_r->reg_rec_count = 3;
12444 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12446 return ARM_RECORD_SUCCESS;
12449 return ARM_RECORD_FAILURE;
12452 /* Handler for thumb2 load word instructions. */
12455 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12457 uint32_t record_buf[8];
12459 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12460 record_buf[1] = ARM_PS_REGNUM;
12461 thumb2_insn_r->reg_rec_count = 2;
12463 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12465 return ARM_RECORD_SUCCESS;
12468 /* Handler for thumb2 long multiply, long multiply accumulate, and
12469 divide instructions. */
12472 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12474 uint32_t opcode1 = 0, opcode2 = 0;
12475 uint32_t record_buf[8];
12477 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12478 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12480 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12482 /* Handle SMULL, UMULL, SMLAL, UMLAL and their flag-setting variants:
12483 SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12484 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12485 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12486 record_buf[2] = ARM_PS_REGNUM;
12487 thumb2_insn_r->reg_rec_count = 3;
12489 else if (1 == opcode1 || 3 == opcode1)
12491 /* Handle SDIV and UDIV. */
12492 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12493 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12494 record_buf[2] = ARM_PS_REGNUM;
12495 thumb2_insn_r->reg_rec_count = 3;
12498 return ARM_RECORD_FAILURE;
12500 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12502 return ARM_RECORD_SUCCESS;
12505 /* Record handler for thumb32 coprocessor instructions. */
12508 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12510 if (bit (thumb2_insn_r->arm_insn, 25))
12511 return arm_record_coproc_data_proc (thumb2_insn_r);
12513 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12516 /* Record handler for Advanced SIMD structure load/store instructions. */
12519 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12521 struct regcache *reg_cache = thumb2_insn_r->regcache;
12522 uint32_t l_bit, a_bit, b_bits;
12523 uint32_t record_buf[128], record_buf_mem[128];
12524 uint32_t reg_rn, reg_vd, address, f_ebytes, f_elem;
12525 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12528 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12529 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12530 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12531 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12532 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12533 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12534 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12535 f_elem = 8 / f_ebytes;
12539 ULONGEST u_regval = 0;
12540 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12541 address = u_regval;
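/* Handle VST1. */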
12546 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12548 if (b_bits == 0x07)
12550 else if (b_bits == 0x0a)
12552 else if (b_bits == 0x06)
12554 else if (b_bits == 0x02)
12559 for (index_r = 0; index_r < bf_regs; index_r++)
12561 for (index_e = 0; index_e < f_elem; index_e++)
12563 record_buf_mem[index_m++] = f_ebytes;
12564 record_buf_mem[index_m++] = address;
12565 address = address + f_ebytes;
12566 thumb2_insn_r->mem_rec_count += 1;
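/* Handle VST2. */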
12571 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12573 if (b_bits == 0x09 || b_bits == 0x08)
12575 else if (b_bits == 0x03)
12580 for (index_r = 0; index_r < bf_regs; index_r++)
12581 for (index_e = 0; index_e < f_elem; index_e++)
12583 for (loop_t = 0; loop_t < 2; loop_t++)
12585 record_buf_mem[index_m++] = f_ebytes;
12586 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12587 thumb2_insn_r->mem_rec_count += 1;
12589 address = address + (2 * f_ebytes);
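/* Handle VST3. */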
12593 else if ((b_bits & 0x0e) == 0x04)
12595 for (index_e = 0; index_e < f_elem; index_e++)
12597 for (loop_t = 0; loop_t < 3; loop_t++)
12599 record_buf_mem[index_m++] = f_ebytes;
12600 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12601 thumb2_insn_r->mem_rec_count += 1;
12603 address = address + (3 * f_ebytes);
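/* Handle VST4. */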
12607 else if (!(b_bits & 0x0e))
12609 for (index_e = 0; index_e < f_elem; index_e++)
12611 for (loop_t = 0; loop_t < 4; loop_t++)
12613 record_buf_mem[index_m++] = f_ebytes;
12614 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12615 thumb2_insn_r->mem_rec_count += 1;
12617 address = address + (4 * f_ebytes);
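/* Handle stores of a single element to one lane; the element size
comes from bits 10-11. */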
12623 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12625 if (bft_size == 0x00)
12627 else if (bft_size == 0x01)
12629 else if (bft_size == 0x02)
12635 if (!(b_bits & 0x0b) || b_bits == 0x08)
12636 thumb2_insn_r->mem_rec_count = 1;
12638 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12639 thumb2_insn_r->mem_rec_count = 2;
12641 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12642 thumb2_insn_r->mem_rec_count = 3;
12644 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12645 thumb2_insn_r->mem_rec_count = 4;
12647 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
/* Record (length, address) pairs, one pair per element that will be stored. */
12649 record_buf_mem[index_m * 2] = f_ebytes;
12650 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
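/* Loads (L bit set): record the destination D registers. */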
12659 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12660 thumb2_insn_r->reg_rec_count = 1;
12662 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12663 thumb2_insn_r->reg_rec_count = 2;
12665 else if ((b_bits & 0x0e) == 0x04)
12666 thumb2_insn_r->reg_rec_count = 3;
12668 else if (!(b_bits & 0x0e))
12669 thumb2_insn_r->reg_rec_count = 4;
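/* Single-element loads (to one lane or to all lanes), VLD1-VLD4. */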
12674 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12675 thumb2_insn_r->reg_rec_count = 1;
12677 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12678 thumb2_insn_r->reg_rec_count = 2;
12680 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12681 thumb2_insn_r->reg_rec_count = 3;
12683 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12684 thumb2_insn_r->reg_rec_count = 4;
12686 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12687 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12691 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12693 record_buf[index_r] = reg_rn;
12694 thumb2_insn_r->reg_rec_count += 1;
12697 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12699 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12704 /* Decodes thumb2 instruction type and invokes its record handler. */
12706 static unsigned int
12707 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12709 uint32_t op, op1, op2;
12711 op = bit (thumb2_insn_r->arm_insn, 15);
12712 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12713 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12717 if (!(op2 & 0x64))
12719 /* Load/store multiple instruction. */
12720 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12722 else if (!((op2 & 0x64) ^ 0x04))
12724 /* Load/store (dual/exclusive) and table branch instruction. */
12725 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12727 else if (!((op2 & 0x20) ^ 0x20))
12729 /* Data-processing (shifted register). */
12730 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12732 else if (op2 & 0x40)
12734 /* Co-processor instructions. */
12735 return thumb2_record_coproc_insn (thumb2_insn_r);
12738 else if (op1 == 0x02)
12742 /* Branches and miscellaneous control instructions. */
12743 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12745 else if (op2 & 0x20)
12747 /* Data-processing (plain binary immediate) instruction. */
12748 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12752 /* Data-processing (modified immediate). */
12753 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12756 else if (op1 == 0x03)
12758 if (!(op2 & 0x71))
12760 /* Store single data item. */
12761 return thumb2_record_str_single_data (thumb2_insn_r);
12763 else if (!((op2 & 0x71) ^ 0x10))
12765 /* Advanced SIMD or structure load/store instructions. */
12766 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12768 else if (!((op2 & 0x67) ^ 0x01))
12770 /* Load byte, memory hints instruction. */
12771 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12773 else if (!((op2 & 0x67) ^ 0x03))
12775 /* Load halfword, memory hints instruction. */
12776 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12778 else if (!((op2 & 0x67) ^ 0x05))
12780 /* Load word instruction. */
12781 return thumb2_record_ld_word (thumb2_insn_r);
12783 else if (!((op2 & 0x70) ^ 0x20))
12785 /* Data-processing (register) instruction. */
12786 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12788 else if (!((op2 & 0x78) ^ 0x30))
12790 /* Multiply, multiply accumulate, abs diff instruction. */
12791 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12793 else if (!((op2 & 0x78) ^ 0x38))
12795 /* Long multiply, long multiply accumulate, and divide. */
12796 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12798 else if (op2 & 0x40)
12800 /* Co-processor instructions. */
12801 return thumb2_record_coproc_insn (thumb2_insn_r);
12808 /* Extract an arm/thumb/thumb2 insn depending on its size, and return 0 on success
12809 or a positive value on failure. */
12812 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12814 gdb_byte buf[insn_size];
12816 memset (&buf[0], 0, insn_size);
12818 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12820 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12822 gdbarch_byte_order_for_code (insn_record->gdbarch));
12826 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12828 /* Decode an arm/thumb insn depending on condition codes and opcodes, and
dispatch it to the matching record handler. */
12832 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12833 uint32_t insn_size)
12836 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm
instruction. */
12838 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12840 arm_record_data_proc_misc_ld_str, /* 000. */
12841 arm_record_data_proc_imm, /* 001. */
12842 arm_record_ld_st_imm_offset, /* 010. */
12843 arm_record_ld_st_reg_offset, /* 011. */
12844 arm_record_ld_st_multiple, /* 100. */
12845 arm_record_b_bl, /* 101. */
12846 arm_record_asimd_vfp_coproc, /* 110. */
12847 arm_record_coproc_data_proc /* 111. */
12850 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12852 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12854 thumb_record_shift_add_sub, /* 000. */
12855 thumb_record_add_sub_cmp_mov, /* 001. */
12856 thumb_record_ld_st_reg_offset, /* 010. */
12857 thumb_record_ld_st_imm_offset, /* 011. */
12858 thumb_record_ld_st_stack, /* 100. */
12859 thumb_record_misc, /* 101. */
12860 thumb_record_ldm_stm_swi, /* 110. */
12861 thumb_record_branch /* 111. */
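
  /* For example, the ARM instruction LDR r0, [r1, #4] (0xe5910004) has
     bits 27:25 equal to 0b010 and is dispatched to
     arm_record_ld_st_imm_offset, while the Thumb instruction PUSH {r4, lr}
     (0xb510) has bits 15:13 equal to 0b101 and is dispatched to
     thumb_record_misc.  */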
  uint32_t ret = 0;    /* Return value: -1 on failure, 0 on success.  */
  uint32_t insn_id = 0;

  if (extract_arm_insn (arm_record, insn_size))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record->gdbarch,
                                       arm_record->this_addr), insn_size);
        }
      return -1;
    }
  else if (ARM_RECORD == record_type)
    {
      arm_record->cond = bits (arm_record->arm_insn, 28, 31);
      insn_id = bits (arm_record->arm_insn, 25, 27);

      if (arm_record->cond == 0xf)
        ret = arm_record_extension_space (arm_record);
      else
        {
          /* The insn is not in the unconditional (extension) space, so
             decode it through the opcode table.  */
          ret = arm_handle_insn[insn_id] (arm_record);
        }
      if (ret != ARM_RECORD_SUCCESS)
        {
          arm_record_unsupported_insn (arm_record);
          ret = -1;
        }
    }
  else if (THUMB_RECORD == record_type)
    {
      /* Thumb instructions do not have condition codes, so set cond to -1.  */
      arm_record->cond = -1;
      insn_id = bits (arm_record->arm_insn, 13, 15);
      ret = thumb_handle_insn[insn_id] (arm_record);
      if (ret != ARM_RECORD_SUCCESS)
        {
          arm_record_unsupported_insn (arm_record);
          ret = -1;
        }
    }
  else if (THUMB2_RECORD == record_type)
    {
      /* Thumb instructions do not have condition codes, so set cond to -1.  */
      arm_record->cond = -1;
      /* Swap the first half of the 32-bit thumb instruction with the second
         half, so that the first halfword ends up in bits 31:16.  */
      arm_record->arm_insn
        = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
      ret = thumb2_record_decode_insn_handler (arm_record);
      if (ret != ARM_RECORD_SUCCESS)
        {
          arm_record_unsupported_insn (arm_record);
          ret = -1;
        }
    }
  else
    {
      /* Unknown record type; this should never happen.  */
      gdb_assert_not_reached ("not a valid instruction, could not decode");
    }

  return ret;
}
/* Clean up the local record registers and memory allocations.  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}
/* Parse the current instruction and record the values of the registers and
   memory that will be changed by it into "record_arch_list".
   Return -1 if something goes wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
                    CORE_ADDR insn_addr)
{
  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* Return value: -1 on record failure, 0 on success.  */
  ULONGEST t_bit = 0, insn_id = 0;
  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;

  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
                          "addr = %s\n",
                          paddress (gdbarch, arm_record.this_addr));
    }
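
  /* Only the first two bytes are fetched here: at this point it is not yet
     known whether the instruction is a 16-bit Thumb, a 32-bit Thumb-2 or a
     32-bit ARM one, so decode_insn re-reads the instruction with the
     appropriate size once the instruction set state has been examined.  */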
  if (extract_arm_insn (&arm_record, 2))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record.gdbarch,
                                       arm_record.this_addr), 2);
        }
      return -1;
    }

  /* Check whether the insn is a thumb or an arm one.  */
  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
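
  /* The T bit of the status register selects the instruction set state:
     bit 5 of the CPSR on A/R-profile cores and bit 24 of the XPSR on
     M-profile cores, which is what arm_psr_thumb_bit abstracts away.  */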
  if (!(u_regval & t_bit))
    {
      /* We are decoding an arm insn.  */
      ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* Is it a 32-bit thumb2 insn (bits 15:11 = 0b11101/0b11110/0b11111)?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
        {
          ret = decode_insn (&arm_record, THUMB2_RECORD,
                             THUMB2_INSN_SIZE_BYTES);
        }
      else
        {
          /* We are decoding a 16-bit thumb insn.  */
          ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
        }
    }
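
  /* On success, transfer everything the per-instruction handlers collected
     in arm_record.arm_regs and arm_record.arm_mems to the full record list;
     the PC is always recorded since every instruction changes it.  */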
  if (0 == ret)
    {
      /* Record registers.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_reg
                  (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
                ret = -1;
            }
        }
      /* Record memories.  */
      if (arm_record.arm_mems)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_mem
                  ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
                   arm_record.arm_mems[no_of_rec].len))
                ret = -1;
            }
        }
      if (record_full_arch_list_add_end ())
        ret = -1;
    }

  deallocate_reg_mem (&arm_record);