1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
94 struct arm_mapping_symbol
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100 DEF_VEC_O(arm_mapping_symbol_s);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s) **section_maps;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
221 /* This is used to keep the bfd arch_info in sync with the disassembly
223 static void set_disassembly_style_sfunc(char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 struct regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
262 /* The register used to hold the frame pointer for this frame. */
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
274 /* Architecture version for displaced stepping. This effects the behaviour of
275 certain instructions, and really should not be hard-wired. */
277 #define DISPLACED_STEPPING_ARCH_VERSION 5
279 /* Set to true if the 32-bit mode is in use. */
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
286 arm_psr_thumb_bit (struct gdbarch *gdbarch)
288 if (gdbarch_tdep (gdbarch)->is_m)
294 /* Determine if the processor is currently executing in Thumb mode. */
297 arm_is_thumb (struct regcache *regcache)
300 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
304 return (cpsr & t_bit) != 0;
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info *frame)
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
321 return (cpsr & t_bit) != 0;
324 /* Callback for VEC_lower_bound. */
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
330 return lhs->value < rhs->value;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
338 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
340 struct obj_section *sec;
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
359 struct arm_mapping_symbol *map_sym;
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
400 struct bound_minimal_symbol sym;
402 struct displaced_step_closure* dsc
403 = get_displaced_step_closure_by_addr(memaddr);
405 /* If checking the mode of displaced instruction in copy area, the mode
406 should be determined by instruction on the original address. */
410 fprintf_unfiltered (gdb_stdlog,
411 "displaced: check mode of %.8lx instead of %.8lx\n",
412 (unsigned long) dsc->insn_addr,
413 (unsigned long) memaddr);
414 memaddr = dsc->insn_addr;
417 /* If bit 0 of the address is set, assume this is a Thumb address. */
418 if (IS_THUMB_ADDR (memaddr))
421 /* If the user wants to override the symbol table, let him. */
422 if (strcmp (arm_force_mode_string, "arm") == 0)
424 if (strcmp (arm_force_mode_string, "thumb") == 0)
427 /* ARM v6-M and v7-M are always in Thumb mode. */
428 if (gdbarch_tdep (gdbarch)->is_m)
431 /* If there are mapping symbols, consult them. */
432 type = arm_find_mapping_symbol (memaddr, NULL);
436 /* Thumb functions have a "special" bit set in minimal symbols. */
437 sym = lookup_minimal_symbol_by_pc (memaddr);
439 return (MSYMBOL_IS_SPECIAL (sym.minsym));
441 /* If the user wants to override the fallback mode, let them. */
442 if (strcmp (arm_fallback_mode_string, "arm") == 0)
444 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
447 /* If we couldn't find any symbol, but we're talking to a running
448 target, then trust the current value of $cpsr. This lets
449 "display/i $pc" always show the correct mode (though if there is
450 a symbol table we will not reach here, so it still may not be
451 displayed in the mode it will be executed). */
452 if (target_has_registers)
453 return arm_frame_is_thumb (get_current_frame ());
455 /* Otherwise we're out of luck; we assume ARM. */
459 /* Determine if the address specified equals any of these magic return
460 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
463 From ARMv6-M Reference Manual B1.5.8
464 Table B1-5 Exception return behavior
466 EXC_RETURN Return To Return Stack
467 0xFFFFFFF1 Handler mode Main
468 0xFFFFFFF9 Thread mode Main
469 0xFFFFFFFD Thread mode Process
471 From ARMv7-M Reference Manual B1.5.8
472 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
474 EXC_RETURN Return To Return Stack
475 0xFFFFFFF1 Handler mode Main
476 0xFFFFFFF9 Thread mode Main
477 0xFFFFFFFD Thread mode Process
479 Table B1-9 EXC_RETURN definition of exception return behavior, with
482 EXC_RETURN Return To Return Stack Frame Type
483 0xFFFFFFE1 Handler mode Main Extended
484 0xFFFFFFE9 Thread mode Main Extended
485 0xFFFFFFED Thread mode Process Extended
486 0xFFFFFFF1 Handler mode Main Basic
487 0xFFFFFFF9 Thread mode Main Basic
488 0xFFFFFFFD Thread mode Process Basic
490 For more details see "B1.5.8 Exception return behavior"
491 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
494 arm_m_addr_is_magic (CORE_ADDR addr)
498 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
499 the exception return behavior. */
506 /* Address is magic. */
510 /* Address is not magic. */
515 /* Remove useless bits from addresses in a running program. */
517 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
519 /* On M-profile devices, do not strip the low bit from EXC_RETURN
520 (the magic exception return address). */
521 if (gdbarch_tdep (gdbarch)->is_m
522 && arm_m_addr_is_magic (val))
526 return UNMAKE_THUMB_ADDR (val);
528 return (val & 0x03fffffc);
531 /* Return 1 if PC is the start of a compiler helper function which
532 can be safely ignored during prologue skipping. IS_THUMB is true
533 if the function is known to be a Thumb function due to the way it
536 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
538 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
539 struct bound_minimal_symbol msym;
541 msym = lookup_minimal_symbol_by_pc (pc);
542 if (msym.minsym != NULL
543 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
544 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
546 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
548 /* The GNU linker's Thumb call stub to foo is named
550 if (strstr (name, "_from_thumb") != NULL)
553 /* On soft-float targets, __truncdfsf2 is called to convert promoted
554 arguments to their argument types in non-prototyped
556 if (startswith (name, "__truncdfsf2"))
558 if (startswith (name, "__aeabi_d2f"))
561 /* Internal functions related to thread-local storage. */
562 if (startswith (name, "__tls_get_addr"))
564 if (startswith (name, "__aeabi_read_tp"))
569 /* If we run against a stripped glibc, we may be unable to identify
570 special functions by name. Check for one important case,
571 __aeabi_read_tp, by comparing the *code* against the default
572 implementation (this is hand-written ARM assembler in glibc). */
575 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
576 == 0xe3e00a0f /* mov r0, #0xffff0fff */
577 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
578 == 0xe240f01f) /* sub pc, r0, #31 */
585 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
586 the first 16-bit of instruction, and INSN2 is the second 16-bit of
588 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
589 ((bits ((insn1), 0, 3) << 12) \
590 | (bits ((insn1), 10, 10) << 11) \
591 | (bits ((insn2), 12, 14) << 8) \
592 | bits ((insn2), 0, 7))
594 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
595 the 32-bit instruction. */
596 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
597 ((bits ((insn), 16, 19) << 12) \
598 | bits ((insn), 0, 11))
600 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
603 thumb_expand_immediate (unsigned int imm)
605 unsigned int count = imm >> 7;
613 return (imm & 0xff) | ((imm & 0xff) << 16);
615 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
617 return (imm & 0xff) | ((imm & 0xff) << 8)
618 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
621 return (0x80 | (imm & 0x7f)) << (32 - count);
624 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
625 epilogue, 0 otherwise. */
628 thumb_instruction_restores_sp (unsigned short insn)
630 return (insn == 0x46bd /* mov sp, r7 */
631 || (insn & 0xff80) == 0xb000 /* add sp, imm */
632 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
635 /* Analyze a Thumb prologue, looking for a recognizable stack frame
636 and frame pointer. Scan until we encounter a store that could
637 clobber the stack frame unexpectedly, or an unknown instruction.
638 Return the last address which is definitely safe to skip for an
639 initial breakpoint. */
642 thumb_analyze_prologue (struct gdbarch *gdbarch,
643 CORE_ADDR start, CORE_ADDR limit,
644 struct arm_prologue_cache *cache)
646 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
647 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
650 struct pv_area *stack;
651 struct cleanup *back_to;
653 CORE_ADDR unrecognized_pc = 0;
655 for (i = 0; i < 16; i++)
656 regs[i] = pv_register (i, 0);
657 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
658 back_to = make_cleanup_free_pv_area (stack);
660 while (start < limit)
664 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
666 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
671 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
674 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
675 whether to save LR (R14). */
676 mask = (insn & 0xff) | ((insn & 0x100) << 6);
678 /* Calculate offsets of saved R0-R7 and LR. */
679 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
680 if (mask & (1 << regno))
682 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
684 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
687 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
689 offset = (insn & 0x7f) << 2; /* get scaled offset */
690 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
693 else if (thumb_instruction_restores_sp (insn))
695 /* Don't scan past the epilogue. */
698 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
699 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
701 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
702 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
703 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
705 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
706 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
707 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
709 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
710 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
711 && pv_is_constant (regs[bits (insn, 3, 5)]))
712 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
713 regs[bits (insn, 6, 8)]);
714 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
715 && pv_is_constant (regs[bits (insn, 3, 6)]))
717 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
718 int rm = bits (insn, 3, 6);
719 regs[rd] = pv_add (regs[rd], regs[rm]);
721 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
723 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
724 int src_reg = (insn & 0x78) >> 3;
725 regs[dst_reg] = regs[src_reg];
727 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
729 /* Handle stores to the stack. Normally pushes are used,
730 but with GCC -mtpcs-frame, there may be other stores
731 in the prologue to create the frame. */
732 int regno = (insn >> 8) & 0x7;
735 offset = (insn & 0xff) << 2;
736 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
738 if (pv_area_store_would_trash (stack, addr))
741 pv_area_store (stack, addr, 4, regs[regno]);
743 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
745 int rd = bits (insn, 0, 2);
746 int rn = bits (insn, 3, 5);
749 offset = bits (insn, 6, 10) << 2;
750 addr = pv_add_constant (regs[rn], offset);
752 if (pv_area_store_would_trash (stack, addr))
755 pv_area_store (stack, addr, 4, regs[rd]);
757 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
758 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
759 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
760 /* Ignore stores of argument registers to the stack. */
762 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
763 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
764 /* Ignore block loads from the stack, potentially copying
765 parameters from memory. */
767 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
768 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
769 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
770 /* Similarly ignore single loads from the stack. */
772 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
773 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
774 /* Skip register copies, i.e. saves to another register
775 instead of the stack. */
777 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
778 /* Recognize constant loads; even with small stacks these are necessary
780 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
781 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
783 /* Constant pool loads, for the same reason. */
784 unsigned int constant;
787 loc = start + 4 + bits (insn, 0, 7) * 4;
788 constant = read_memory_unsigned_integer (loc, 4, byte_order);
789 regs[bits (insn, 8, 10)] = pv_constant (constant);
791 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
793 unsigned short inst2;
795 inst2 = read_code_unsigned_integer (start + 2, 2,
796 byte_order_for_code);
798 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
800 /* BL, BLX. Allow some special function calls when
801 skipping the prologue; GCC generates these before
802 storing arguments to the stack. */
804 int j1, j2, imm1, imm2;
806 imm1 = sbits (insn, 0, 10);
807 imm2 = bits (inst2, 0, 10);
808 j1 = bit (inst2, 13);
809 j2 = bit (inst2, 11);
811 offset = ((imm1 << 12) + (imm2 << 1));
812 offset ^= ((!j2) << 22) | ((!j1) << 23);
814 nextpc = start + 4 + offset;
815 /* For BLX make sure to clear the low bits. */
816 if (bit (inst2, 12) == 0)
817 nextpc = nextpc & 0xfffffffc;
819 if (!skip_prologue_function (gdbarch, nextpc,
820 bit (inst2, 12) != 0))
824 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
826 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
828 pv_t addr = regs[bits (insn, 0, 3)];
831 if (pv_area_store_would_trash (stack, addr))
834 /* Calculate offsets of saved registers. */
835 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
836 if (inst2 & (1 << regno))
838 addr = pv_add_constant (addr, -4);
839 pv_area_store (stack, addr, 4, regs[regno]);
843 regs[bits (insn, 0, 3)] = addr;
846 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
848 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
850 int regno1 = bits (inst2, 12, 15);
851 int regno2 = bits (inst2, 8, 11);
852 pv_t addr = regs[bits (insn, 0, 3)];
854 offset = inst2 & 0xff;
856 addr = pv_add_constant (addr, offset);
858 addr = pv_add_constant (addr, -offset);
860 if (pv_area_store_would_trash (stack, addr))
863 pv_area_store (stack, addr, 4, regs[regno1]);
864 pv_area_store (stack, pv_add_constant (addr, 4),
868 regs[bits (insn, 0, 3)] = addr;
871 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
872 && (inst2 & 0x0c00) == 0x0c00
873 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
875 int regno = bits (inst2, 12, 15);
876 pv_t addr = regs[bits (insn, 0, 3)];
878 offset = inst2 & 0xff;
880 addr = pv_add_constant (addr, offset);
882 addr = pv_add_constant (addr, -offset);
884 if (pv_area_store_would_trash (stack, addr))
887 pv_area_store (stack, addr, 4, regs[regno]);
890 regs[bits (insn, 0, 3)] = addr;
893 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
894 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
896 int regno = bits (inst2, 12, 15);
899 offset = inst2 & 0xfff;
900 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
902 if (pv_area_store_would_trash (stack, addr))
905 pv_area_store (stack, addr, 4, regs[regno]);
908 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
909 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
910 /* Ignore stores of argument registers to the stack. */
913 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
914 && (inst2 & 0x0d00) == 0x0c00
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 /* Ignore stores of argument registers to the stack. */
919 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
921 && (inst2 & 0x8000) == 0x0000
922 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
923 /* Ignore block loads from the stack, potentially copying
924 parameters from memory. */
927 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 /* Similarly ignore dual loads from the stack. */
933 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
934 && (inst2 & 0x0d00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 /* Similarly ignore single loads from the stack. */
939 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
940 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
941 /* Similarly ignore single loads from the stack. */
944 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
945 && (inst2 & 0x8000) == 0x0000)
947 unsigned int imm = ((bits (insn, 10, 10) << 11)
948 | (bits (inst2, 12, 14) << 8)
949 | bits (inst2, 0, 7));
951 regs[bits (inst2, 8, 11)]
952 = pv_add_constant (regs[bits (insn, 0, 3)],
953 thumb_expand_immediate (imm));
956 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
957 && (inst2 & 0x8000) == 0x0000)
959 unsigned int imm = ((bits (insn, 10, 10) << 11)
960 | (bits (inst2, 12, 14) << 8)
961 | bits (inst2, 0, 7));
963 regs[bits (inst2, 8, 11)]
964 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
967 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
968 && (inst2 & 0x8000) == 0x0000)
970 unsigned int imm = ((bits (insn, 10, 10) << 11)
971 | (bits (inst2, 12, 14) << 8)
972 | bits (inst2, 0, 7));
974 regs[bits (inst2, 8, 11)]
975 = pv_add_constant (regs[bits (insn, 0, 3)],
976 - (CORE_ADDR) thumb_expand_immediate (imm));
979 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
980 && (inst2 & 0x8000) == 0x0000)
982 unsigned int imm = ((bits (insn, 10, 10) << 11)
983 | (bits (inst2, 12, 14) << 8)
984 | bits (inst2, 0, 7));
986 regs[bits (inst2, 8, 11)]
987 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
990 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
992 unsigned int imm = ((bits (insn, 10, 10) << 11)
993 | (bits (inst2, 12, 14) << 8)
994 | bits (inst2, 0, 7));
996 regs[bits (inst2, 8, 11)]
997 = pv_constant (thumb_expand_immediate (imm));
1000 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1003 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1005 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1008 else if (insn == 0xea5f /* mov.w Rd,Rm */
1009 && (inst2 & 0xf0f0) == 0)
1011 int dst_reg = (inst2 & 0x0f00) >> 8;
1012 int src_reg = inst2 & 0xf;
1013 regs[dst_reg] = regs[src_reg];
1016 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1018 /* Constant pool loads. */
1019 unsigned int constant;
1022 offset = bits (inst2, 0, 11);
1024 loc = start + 4 + offset;
1026 loc = start + 4 - offset;
1028 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1029 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1032 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1034 /* Constant pool loads. */
1035 unsigned int constant;
1038 offset = bits (inst2, 0, 7) << 2;
1040 loc = start + 4 + offset;
1042 loc = start + 4 - offset;
1044 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1045 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1047 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1048 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1051 else if (thumb2_instruction_changes_pc (insn, inst2))
1053 /* Don't scan past anything that might change control flow. */
1058 /* The optimizer might shove anything into the prologue,
1059 so we just skip what we don't recognize. */
1060 unrecognized_pc = start;
1065 else if (thumb_instruction_changes_pc (insn))
1067 /* Don't scan past anything that might change control flow. */
1072 /* The optimizer might shove anything into the prologue,
1073 so we just skip what we don't recognize. */
1074 unrecognized_pc = start;
1081 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1082 paddress (gdbarch, start));
1084 if (unrecognized_pc == 0)
1085 unrecognized_pc = start;
1089 do_cleanups (back_to);
1090 return unrecognized_pc;
1093 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1095 /* Frame pointer is fp. Frame size is constant. */
1096 cache->framereg = ARM_FP_REGNUM;
1097 cache->framesize = -regs[ARM_FP_REGNUM].k;
1099 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1101 /* Frame pointer is r7. Frame size is constant. */
1102 cache->framereg = THUMB_FP_REGNUM;
1103 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1107 /* Try the stack pointer... this is a bit desperate. */
1108 cache->framereg = ARM_SP_REGNUM;
1109 cache->framesize = -regs[ARM_SP_REGNUM].k;
1112 for (i = 0; i < 16; i++)
1113 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1114 cache->saved_regs[i].addr = offset;
1116 do_cleanups (back_to);
1117 return unrecognized_pc;
1121 /* Try to analyze the instructions starting from PC, which load symbol
1122 __stack_chk_guard. Return the address of instruction after loading this
1123 symbol, set the dest register number to *BASEREG, and set the size of
1124 instructions for loading symbol in OFFSET. Return 0 if instructions are
1128 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1129 unsigned int *destreg, int *offset)
1131 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1132 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1133 unsigned int low, high, address;
1138 unsigned short insn1
1139 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1141 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1143 *destreg = bits (insn1, 8, 10);
1145 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1146 address = read_memory_unsigned_integer (address, 4,
1147 byte_order_for_code);
1149 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1151 unsigned short insn2
1152 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1154 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1157 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1159 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1161 /* movt Rd, #const */
1162 if ((insn1 & 0xfbc0) == 0xf2c0)
1164 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1165 *destreg = bits (insn2, 8, 11);
1167 address = (high << 16 | low);
1174 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1176 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1178 address = bits (insn, 0, 11) + pc + 8;
1179 address = read_memory_unsigned_integer (address, 4,
1180 byte_order_for_code);
1182 *destreg = bits (insn, 12, 15);
1185 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1187 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1190 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1192 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1194 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1195 *destreg = bits (insn, 12, 15);
1197 address = (high << 16 | low);
1205 /* Try to skip a sequence of instructions used for stack protector. If PC
1206 points to the first instruction of this sequence, return the address of
1207 first instruction after this sequence, otherwise, return original PC.
1209 On arm, this sequence of instructions is composed of mainly three steps,
1210 Step 1: load symbol __stack_chk_guard,
1211 Step 2: load from address of __stack_chk_guard,
1212 Step 3: store it to somewhere else.
1214 Usually, instructions on step 2 and step 3 are the same on various ARM
1215 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1216 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1217 instructions in step 1 vary from different ARM architectures. On ARMv7,
1220 movw Rn, #:lower16:__stack_chk_guard
1221 movt Rn, #:upper16:__stack_chk_guard
1228 .word __stack_chk_guard
1230 Since ldr/str is a very popular instruction, we can't use them as
1231 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1232 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1233 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1236 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1238 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1239 unsigned int basereg;
1240 struct bound_minimal_symbol stack_chk_guard;
1242 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1245 /* Try to parse the instructions in Step 1. */
1246 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1251 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1252 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1253 Otherwise, this sequence cannot be for stack protector. */
1254 if (stack_chk_guard.minsym == NULL
1255 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1260 unsigned int destreg;
1262 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1264 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1265 if ((insn & 0xf800) != 0x6800)
1267 if (bits (insn, 3, 5) != basereg)
1269 destreg = bits (insn, 0, 2);
1271 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1272 byte_order_for_code);
1273 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1274 if ((insn & 0xf800) != 0x6000)
1276 if (destreg != bits (insn, 0, 2))
1281 unsigned int destreg;
1283 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1285 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1286 if ((insn & 0x0e500000) != 0x04100000)
1288 if (bits (insn, 16, 19) != basereg)
1290 destreg = bits (insn, 12, 15);
1291 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1292 insn = read_code_unsigned_integer (pc + offset + 4,
1293 4, byte_order_for_code);
1294 if ((insn & 0x0e500000) != 0x04000000)
1296 if (bits (insn, 12, 15) != destreg)
1299 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1302 return pc + offset + 4;
1304 return pc + offset + 8;
1307 /* Advance the PC across any function entry prologue instructions to
1308 reach some "real" code.
1310 The APCS (ARM Procedure Call Standard) defines the following
1314 [stmfd sp!, {a1,a2,a3,a4}]
1315 stmfd sp!, {...,fp,ip,lr,pc}
1316 [stfe f7, [sp, #-12]!]
1317 [stfe f6, [sp, #-12]!]
1318 [stfe f5, [sp, #-12]!]
1319 [stfe f4, [sp, #-12]!]
1320 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1323 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1325 CORE_ADDR func_addr, limit_pc;
1327 /* See if we can determine the end of the prologue via the symbol table.
1328 If so, then return either PC, or the PC after the prologue, whichever
1330 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1332 CORE_ADDR post_prologue_pc
1333 = skip_prologue_using_sal (gdbarch, func_addr);
1334 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1336 if (post_prologue_pc)
1338 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1341 /* GCC always emits a line note before the prologue and another
1342 one after, even if the two are at the same address or on the
1343 same line. Take advantage of this so that we do not need to
1344 know every instruction that might appear in the prologue. We
1345 will have producer information for most binaries; if it is
1346 missing (e.g. for -gstabs), assuming the GNU tools. */
1347 if (post_prologue_pc
1349 || COMPUNIT_PRODUCER (cust) == NULL
1350 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1351 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1352 return post_prologue_pc;
1354 if (post_prologue_pc != 0)
1356 CORE_ADDR analyzed_limit;
1358 /* For non-GCC compilers, make sure the entire line is an
1359 acceptable prologue; GDB will round this function's
1360 return value up to the end of the following line so we
1361 can not skip just part of a line (and we do not want to).
1363 RealView does not treat the prologue specially, but does
1364 associate prologue code with the opening brace; so this
1365 lets us skip the first line if we think it is the opening
1367 if (arm_pc_is_thumb (gdbarch, func_addr))
1368 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1369 post_prologue_pc, NULL);
1371 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1372 post_prologue_pc, NULL);
1374 if (analyzed_limit != post_prologue_pc)
1377 return post_prologue_pc;
1381 /* Can't determine prologue from the symbol table, need to examine
1384 /* Find an upper limit on the function prologue using the debug
1385 information. If the debug information could not be used to provide
1386 that bound, then use an arbitrary large number as the upper bound. */
1387 /* Like arm_scan_prologue, stop no later than pc + 64. */
1388 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1390 limit_pc = pc + 64; /* Magic. */
1393 /* Check if this is Thumb code. */
1394 if (arm_pc_is_thumb (gdbarch, pc))
1395 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1397 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1401 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1402 This function decodes a Thumb function prologue to determine:
1403 1) the size of the stack frame
1404 2) which registers are saved on it
1405 3) the offsets of saved regs
1406 4) the offset from the stack pointer to the frame pointer
1408 A typical Thumb function prologue would create this stack frame
1409 (offsets relative to FP)
1410 old SP -> 24 stack parameters
1413 R7 -> 0 local variables (16 bytes)
1414 SP -> -12 additional stack space (12 bytes)
1415 The frame size would thus be 36 bytes, and the frame offset would be
1416 12 bytes. The frame register is R7.
1418 The comments for thumb_skip_prolog() describe the algorithm we use
1419 to detect the end of the prolog. */
1423 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1424 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1426 CORE_ADDR prologue_start;
1427 CORE_ADDR prologue_end;
1429 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1432 /* See comment in arm_scan_prologue for an explanation of
1434 if (prologue_end > prologue_start + 64)
1436 prologue_end = prologue_start + 64;
1440 /* We're in the boondocks: we have no idea where the start of the
1444 prologue_end = std::min (prologue_end, prev_pc);
1446 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1449 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1453 arm_instruction_restores_sp (unsigned int insn)
1455 if (bits (insn, 28, 31) != INST_NV)
1457 if ((insn & 0x0df0f000) == 0x0080d000
1458 /* ADD SP (register or immediate). */
1459 || (insn & 0x0df0f000) == 0x0040d000
1460 /* SUB SP (register or immediate). */
1461 || (insn & 0x0ffffff0) == 0x01a0d000
1463 || (insn & 0x0fff0000) == 0x08bd0000
1465 || (insn & 0x0fff0000) == 0x049d0000)
1466 /* POP of a single register. */
1473 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1474 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1475 fill it in. Return the first address not recognized as a prologue
1478 We recognize all the instructions typically found in ARM prologues,
1479 plus harmless instructions which can be skipped (either for analysis
1480 purposes, or a more restrictive set that can be skipped when finding
1481 the end of the prologue). */
1484 arm_analyze_prologue (struct gdbarch *gdbarch,
1485 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1486 struct arm_prologue_cache *cache)
1488 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1490 CORE_ADDR offset, current_pc;
1491 pv_t regs[ARM_FPS_REGNUM];
1492 struct pv_area *stack;
1493 struct cleanup *back_to;
1494 CORE_ADDR unrecognized_pc = 0;
1496 /* Search the prologue looking for instructions that set up the
1497 frame pointer, adjust the stack pointer, and save registers.
1499 Be careful, however, and if it doesn't look like a prologue,
1500 don't try to scan it. If, for instance, a frameless function
1501 begins with stmfd sp!, then we will tell ourselves there is
1502 a frame, which will confuse stack traceback, as well as "finish"
1503 and other operations that rely on a knowledge of the stack
1506 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1507 regs[regno] = pv_register (regno, 0);
1508 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1509 back_to = make_cleanup_free_pv_area (stack);
1511 for (current_pc = prologue_start;
1512 current_pc < prologue_end;
1516 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1518 if (insn == 0xe1a0c00d) /* mov ip, sp */
1520 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1523 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1524 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1526 unsigned imm = insn & 0xff; /* immediate value */
1527 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1528 int rd = bits (insn, 12, 15);
1529 imm = (imm >> rot) | (imm << (32 - rot));
1530 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1533 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1534 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1536 unsigned imm = insn & 0xff; /* immediate value */
1537 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1538 int rd = bits (insn, 12, 15);
1539 imm = (imm >> rot) | (imm << (32 - rot));
1540 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1543 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1546 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1548 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1549 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1550 regs[bits (insn, 12, 15)]);
1553 else if ((insn & 0xffff0000) == 0xe92d0000)
1554 /* stmfd sp!, {..., fp, ip, lr, pc}
1556 stmfd sp!, {a1, a2, a3, a4} */
1558 int mask = insn & 0xffff;
1560 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1563 /* Calculate offsets of saved registers. */
1564 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1565 if (mask & (1 << regno))
1568 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1569 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1572 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1573 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1574 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1576 /* No need to add this to saved_regs -- it's just an arg reg. */
1579 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1580 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1581 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1583 /* No need to add this to saved_regs -- it's just an arg reg. */
1586 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1588 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1590 /* No need to add this to saved_regs -- it's just arg regs. */
1593 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1595 unsigned imm = insn & 0xff; /* immediate value */
1596 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1597 imm = (imm >> rot) | (imm << (32 - rot));
1598 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1600 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1602 unsigned imm = insn & 0xff; /* immediate value */
1603 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1604 imm = (imm >> rot) | (imm << (32 - rot));
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1607 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1609 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1611 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1614 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1615 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1616 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1618 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1620 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1622 int n_saved_fp_regs;
1623 unsigned int fp_start_reg, fp_bound_reg;
1625 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1628 if ((insn & 0x800) == 0x800) /* N0 is set */
1630 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1631 n_saved_fp_regs = 3;
1633 n_saved_fp_regs = 1;
1637 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1638 n_saved_fp_regs = 2;
1640 n_saved_fp_regs = 4;
1643 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1644 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1645 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1647 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1648 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1649 regs[fp_start_reg++]);
1652 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1654 /* Allow some special function calls when skipping the
1655 prologue; GCC generates these before storing arguments to
1657 CORE_ADDR dest = BranchDest (current_pc, insn);
1659 if (skip_prologue_function (gdbarch, dest, 0))
1664 else if ((insn & 0xf0000000) != 0xe0000000)
1665 break; /* Condition not true, exit early. */
1666 else if (arm_instruction_changes_pc (insn))
1667 /* Don't scan past anything that might change control flow. */
1669 else if (arm_instruction_restores_sp (insn))
1671 /* Don't scan past the epilogue. */
1674 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1676 /* Ignore block loads from the stack, potentially copying
1677 parameters from memory. */
1679 else if ((insn & 0xfc500000) == 0xe4100000
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Similarly ignore single loads from the stack. */
1683 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1684 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1685 register instead of the stack. */
1689 /* The optimizer might shove anything into the prologue, if
1690 we build up cache (cache != NULL) from scanning prologue,
1691 we just skip what we don't recognize and scan further to
1692 make cache as complete as possible. However, if we skip
1693 prologue, we'll stop immediately on unrecognized
1695 unrecognized_pc = current_pc;
1703 if (unrecognized_pc == 0)
1704 unrecognized_pc = current_pc;
1708 int framereg, framesize;
1710 /* The frame size is just the distance from the frame register
1711 to the original stack pointer. */
1712 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1714 /* Frame pointer is fp. */
1715 framereg = ARM_FP_REGNUM;
1716 framesize = -regs[ARM_FP_REGNUM].k;
1720 /* Try the stack pointer... this is a bit desperate. */
1721 framereg = ARM_SP_REGNUM;
1722 framesize = -regs[ARM_SP_REGNUM].k;
1725 cache->framereg = framereg;
1726 cache->framesize = framesize;
1728 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1729 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1730 cache->saved_regs[regno].addr = offset;
1734 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1735 paddress (gdbarch, unrecognized_pc));
1737 do_cleanups (back_to);
1738 return unrecognized_pc;
1742 arm_scan_prologue (struct frame_info *this_frame,
1743 struct arm_prologue_cache *cache)
1745 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1746 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1747 CORE_ADDR prologue_start, prologue_end;
1748 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1749 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1751 /* Assume there is no frame until proven otherwise. */
1752 cache->framereg = ARM_SP_REGNUM;
1753 cache->framesize = 0;
1755 /* Check for Thumb prologue. */
1756 if (arm_frame_is_thumb (this_frame))
1758 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1762 /* Find the function prologue. If we can't find the function in
1763 the symbol table, peek in the stack frame to find the PC. */
1764 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1767 /* One way to find the end of the prologue (which works well
1768 for unoptimized code) is to do the following:
1770 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1773 prologue_end = prev_pc;
1774 else if (sal.end < prologue_end)
1775 prologue_end = sal.end;
1777 This mechanism is very accurate so long as the optimizer
1778 doesn't move any instructions from the function body into the
1779 prologue. If this happens, sal.end will be the last
1780 instruction in the first hunk of prologue code just before
1781 the first instruction that the scheduler has moved from
1782 the body to the prologue.
1784 In order to make sure that we scan all of the prologue
1785 instructions, we use a slightly less accurate mechanism which
1786 may scan more than necessary. To help compensate for this
1787 lack of accuracy, the prologue scanning loop below contains
1788 several clauses which'll cause the loop to terminate early if
1789 an implausible prologue instruction is encountered.
1795 is a suitable endpoint since it accounts for the largest
1796 possible prologue plus up to five instructions inserted by
1799 if (prologue_end > prologue_start + 64)
1801 prologue_end = prologue_start + 64; /* See above. */
1806 /* We have no symbol information. Our only option is to assume this
1807 function has a standard stack frame and the normal frame register.
1808 Then, we can find the value of our frame pointer on entrance to
1809 the callee (or at the present moment if this is the innermost frame).
1810 The value stored there should be the address of the stmfd + 8. */
1811 CORE_ADDR frame_loc;
1812 ULONGEST return_value;
1814 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1815 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1820 prologue_start = gdbarch_addr_bits_remove
1821 (gdbarch, return_value) - 8;
1822 prologue_end = prologue_start + 64; /* See above. */
1826 if (prev_pc < prologue_end)
1827 prologue_end = prev_pc;
1829 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1832 static struct arm_prologue_cache *
1833 arm_make_prologue_cache (struct frame_info *this_frame)
1836 struct arm_prologue_cache *cache;
1837 CORE_ADDR unwound_fp;
1839 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1840 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1842 arm_scan_prologue (this_frame, cache);
1844 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1845 if (unwound_fp == 0)
1848 cache->prev_sp = unwound_fp + cache->framesize;
1850 /* Calculate actual addresses of saved registers using offsets
1851 determined by arm_scan_prologue. */
1852 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1853 if (trad_frame_addr_p (cache->saved_regs, reg))
1854 cache->saved_regs[reg].addr += cache->prev_sp;
1859 /* Implementation of the stop_reason hook for arm_prologue frames. */
1861 static enum unwind_stop_reason
1862 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1865 struct arm_prologue_cache *cache;
1868 if (*this_cache == NULL)
1869 *this_cache = arm_make_prologue_cache (this_frame);
1870 cache = (struct arm_prologue_cache *) *this_cache;
1872 /* This is meant to halt the backtrace at "_start". */
1873 pc = get_frame_pc (this_frame);
1874 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1875 return UNWIND_OUTERMOST;
1877 /* If we've hit a wall, stop. */
1878 if (cache->prev_sp == 0)
1879 return UNWIND_OUTERMOST;
1881 return UNWIND_NO_REASON;
1884 /* Our frame ID for a normal frame is the current function's starting PC
1885 and the caller's SP when we were called. */
1888 arm_prologue_this_id (struct frame_info *this_frame,
1890 struct frame_id *this_id)
1892 struct arm_prologue_cache *cache;
1896 if (*this_cache == NULL)
1897 *this_cache = arm_make_prologue_cache (this_frame);
1898 cache = (struct arm_prologue_cache *) *this_cache;
1900 /* Use function start address as part of the frame ID. If we cannot
1901 identify the start address (due to missing symbol information),
1902 fall back to just using the current PC. */
1903 pc = get_frame_pc (this_frame);
1904 func = get_frame_func (this_frame);
1908 id = frame_id_build (cache->prev_sp, func);
1912 static struct value *
1913 arm_prologue_prev_register (struct frame_info *this_frame,
1917 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1918 struct arm_prologue_cache *cache;
1920 if (*this_cache == NULL)
1921 *this_cache = arm_make_prologue_cache (this_frame);
1922 cache = (struct arm_prologue_cache *) *this_cache;
1924 /* If we are asked to unwind the PC, then we need to return the LR
1925 instead. The prologue may save PC, but it will point into this
1926 frame's prologue, not the next frame's resume location. Also
1927 strip the saved T bit. A valid LR may have the low bit set, but
1928 a valid PC never does. */
1929 if (prev_regnum == ARM_PC_REGNUM)
1933 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1934 return frame_unwind_got_constant (this_frame, prev_regnum,
1935 arm_addr_bits_remove (gdbarch, lr));
1938 /* SP is generally not saved to the stack, but this frame is
1939 identified by the next frame's stack pointer at the time of the call.
1940 The value was already reconstructed into PREV_SP. */
1941 if (prev_regnum == ARM_SP_REGNUM)
1942 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1944 /* The CPSR may have been changed by the call instruction and by the
1945 called function. The only bit we can reconstruct is the T bit,
1946 by checking the low bit of LR as of the call. This is a reliable
1947 indicator of Thumb-ness except for some ARM v4T pre-interworking
1948 Thumb code, which could get away with a clear low bit as long as
1949 the called function did not use bx. Guess that all other
1950 bits are unchanged; the condition flags are presumably lost,
1951 but the processor status is likely valid. */
1952 if (prev_regnum == ARM_PS_REGNUM)
1955 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1957 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1958 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1959 if (IS_THUMB_ADDR (lr))
1963 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1966 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1970 struct frame_unwind arm_prologue_unwind = {
1972 arm_prologue_unwind_stop_reason,
1973 arm_prologue_this_id,
1974 arm_prologue_prev_register,
1976 default_frame_sniffer
1979 /* Maintain a list of ARM exception table entries per objfile, similar to the
1980 list of mapping symbols. We only cache entries for standard ARM-defined
1981 personality routines; the cache will contain only the frame unwinding
1982 instructions associated with the entry (not the descriptors). */
1984 static const struct objfile_data *arm_exidx_data_key;
1986 struct arm_exidx_entry
1991 typedef struct arm_exidx_entry arm_exidx_entry_s;
1992 DEF_VEC_O(arm_exidx_entry_s);
1994 struct arm_exidx_data
1996 VEC(arm_exidx_entry_s) **section_maps;
2000 arm_exidx_data_free (struct objfile *objfile, void *arg)
2002 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2005 for (i = 0; i < objfile->obfd->section_count; i++)
2006 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2010 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2011 const struct arm_exidx_entry *rhs)
2013 return lhs->addr < rhs->addr;
2016 static struct obj_section *
2017 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2019 struct obj_section *osect;
2021 ALL_OBJFILE_OSECTIONS (objfile, osect)
2022 if (bfd_get_section_flags (objfile->obfd,
2023 osect->the_bfd_section) & SEC_ALLOC)
2025 bfd_vma start, size;
2026 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2027 size = bfd_get_section_size (osect->the_bfd_section);
2029 if (start <= vma && vma < start + size)
2036 /* Parse contents of exception table and exception index sections
2037 of OBJFILE, and fill in the exception table entry cache.
2039 For each entry that refers to a standard ARM-defined personality
2040 routine, extract the frame unwinding instructions (from either
2041 the index or the table section). The unwinding instructions
2043 - extracting them from the rest of the table data
2044 - converting to host endianness
2045 - appending the implicit 0xb0 ("Finish") code
2047 The extracted and normalized instructions are stored for later
2048 retrieval by the arm_find_exidx_entry routine. */
2051 arm_exidx_new_objfile (struct objfile *objfile)
2053 struct cleanup *cleanups;
2054 struct arm_exidx_data *data;
2055 asection *exidx, *extab;
2056 bfd_vma exidx_vma = 0, extab_vma = 0;
2057 bfd_size_type exidx_size = 0, extab_size = 0;
2058 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2061 /* If we've already touched this file, do nothing. */
2062 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2064 cleanups = make_cleanup (null_cleanup, NULL);
2066 /* Read contents of exception table and index. */
2067 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2070 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2071 exidx_size = bfd_get_section_size (exidx);
2072 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2073 make_cleanup (xfree, exidx_data);
2075 if (!bfd_get_section_contents (objfile->obfd, exidx,
2076 exidx_data, 0, exidx_size))
2078 do_cleanups (cleanups);
2083 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2086 extab_vma = bfd_section_vma (objfile->obfd, extab);
2087 extab_size = bfd_get_section_size (extab);
2088 extab_data = (gdb_byte *) xmalloc (extab_size);
2089 make_cleanup (xfree, extab_data);
2091 if (!bfd_get_section_contents (objfile->obfd, extab,
2092 extab_data, 0, extab_size))
2094 do_cleanups (cleanups);
2099 /* Allocate exception table data structure. */
2100 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2101 set_objfile_data (objfile, arm_exidx_data_key, data);
2102 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2103 objfile->obfd->section_count,
2104 VEC(arm_exidx_entry_s) *);
2106 /* Fill in exception table. */
2107 for (i = 0; i < exidx_size / 8; i++)
2109 struct arm_exidx_entry new_exidx_entry;
2110 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2111 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2112 bfd_vma addr = 0, word = 0;
2113 int n_bytes = 0, n_words = 0;
2114 struct obj_section *sec;
2115 gdb_byte *entry = NULL;
2117 /* Extract address of start of function. */
2118 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2119 idx += exidx_vma + i * 8;
2121 /* Find section containing function and compute section offset. */
2122 sec = arm_obj_section_from_vma (objfile, idx);
2125 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2127 /* Determine address of exception table entry. */
2130 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2132 else if ((val & 0xff000000) == 0x80000000)
2134 /* Exception table entry embedded in .ARM.exidx
2135 -- must be short form. */
2139 else if (!(val & 0x80000000))
2141 /* Exception table entry in .ARM.extab. */
2142 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2143 addr += exidx_vma + i * 8 + 4;
2145 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2147 word = bfd_h_get_32 (objfile->obfd,
2148 extab_data + addr - extab_vma);
2151 if ((word & 0xff000000) == 0x80000000)
2156 else if ((word & 0xff000000) == 0x81000000
2157 || (word & 0xff000000) == 0x82000000)
2161 n_words = ((word >> 16) & 0xff);
2163 else if (!(word & 0x80000000))
2166 struct obj_section *pers_sec;
2167 int gnu_personality = 0;
2169 /* Custom personality routine. */
2170 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2171 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2173 /* Check whether we've got one of the variants of the
2174 GNU personality routines. */
2175 pers_sec = arm_obj_section_from_vma (objfile, pers);
2178 static const char *personality[] =
2180 "__gcc_personality_v0",
2181 "__gxx_personality_v0",
2182 "__gcj_personality_v0",
2183 "__gnu_objc_personality_v0",
2187 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2190 for (k = 0; personality[k]; k++)
2191 if (lookup_minimal_symbol_by_pc_name
2192 (pc, personality[k], objfile))
2194 gnu_personality = 1;
2199 /* If so, the next word contains a word count in the high
2200 byte, followed by the same unwind instructions as the
2201 pre-defined forms. */
2203 && addr + 4 <= extab_vma + extab_size)
2205 word = bfd_h_get_32 (objfile->obfd,
2206 extab_data + addr - extab_vma);
2209 n_words = ((word >> 24) & 0xff);
2215 /* Sanity check address. */
2217 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2218 n_words = n_bytes = 0;
2220 /* The unwind instructions reside in WORD (only the N_BYTES least
2221 significant bytes are valid), followed by N_WORDS words in the
2222 extab section starting at ADDR. */
2223 if (n_bytes || n_words)
2226 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2227 n_bytes + n_words * 4 + 1);
2230 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2234 word = bfd_h_get_32 (objfile->obfd,
2235 extab_data + addr - extab_vma);
2238 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2239 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2240 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2241 *p++ = (gdb_byte) (word & 0xff);
2244 /* Implied "Finish" to terminate the list. */
2248 /* Push entry onto vector. They are guaranteed to always
2249 appear in order of increasing addresses. */
2250 new_exidx_entry.addr = idx;
2251 new_exidx_entry.entry = entry;
2252 VEC_safe_push (arm_exidx_entry_s,
2253 data->section_maps[sec->the_bfd_section->index],
2257 do_cleanups (cleanups);
2260 /* Search for the exception table entry covering MEMADDR. If one is found,
2261 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2262 set *START to the start of the region covered by this entry. */
2265 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2267 struct obj_section *sec;
2269 sec = find_pc_section (memaddr);
2272 struct arm_exidx_data *data;
2273 VEC(arm_exidx_entry_s) *map;
2274 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2277 data = ((struct arm_exidx_data *)
2278 objfile_data (sec->objfile, arm_exidx_data_key));
2281 map = data->section_maps[sec->the_bfd_section->index];
2282 if (!VEC_empty (arm_exidx_entry_s, map))
2284 struct arm_exidx_entry *map_sym;
2286 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2287 arm_compare_exidx_entries);
2289 /* VEC_lower_bound finds the earliest ordered insertion
2290 point. If the following symbol starts at this exact
2291 address, we use that; otherwise, the preceding
2292 exception table entry covers this address. */
2293 if (idx < VEC_length (arm_exidx_entry_s, map))
2295 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2296 if (map_sym->addr == map_key.addr)
2299 *start = map_sym->addr + obj_section_addr (sec);
2300 return map_sym->entry;
2306 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2308 *start = map_sym->addr + obj_section_addr (sec);
2309 return map_sym->entry;
2318 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2319 instruction list from the ARM exception table entry ENTRY, allocate and
2320 return a prologue cache structure describing how to unwind this frame.
2322 Return NULL if the unwinding instruction list contains a "spare",
2323 "reserved" or "refuse to unwind" instruction as defined in section
2324 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2325 for the ARM Architecture" document. */
2327 static struct arm_prologue_cache *
2328 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2333 struct arm_prologue_cache *cache;
2334 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2335 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2341 /* Whenever we reload SP, we actually have to retrieve its
2342 actual value in the current frame. */
2345 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2347 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2348 vsp = get_frame_register_unsigned (this_frame, reg);
2352 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2353 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2359 /* Decode next unwind instruction. */
2362 if ((insn & 0xc0) == 0)
2364 int offset = insn & 0x3f;
2365 vsp += (offset << 2) + 4;
2367 else if ((insn & 0xc0) == 0x40)
2369 int offset = insn & 0x3f;
2370 vsp -= (offset << 2) + 4;
2372 else if ((insn & 0xf0) == 0x80)
2374 int mask = ((insn & 0xf) << 8) | *entry++;
2377 /* The special case of an all-zero mask identifies
2378 "Refuse to unwind". We return NULL to fall back
2379 to the prologue analyzer. */
2383 /* Pop registers r4..r15 under mask. */
2384 for (i = 0; i < 12; i++)
2385 if (mask & (1 << i))
2387 cache->saved_regs[4 + i].addr = vsp;
2391 /* Special-case popping SP -- we need to reload vsp. */
2392 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2395 else if ((insn & 0xf0) == 0x90)
2397 int reg = insn & 0xf;
2399 /* Reserved cases. */
2400 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2403 /* Set SP from another register and mark VSP for reload. */
2404 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2407 else if ((insn & 0xf0) == 0xa0)
2409 int count = insn & 0x7;
2410 int pop_lr = (insn & 0x8) != 0;
2413 /* Pop r4..r[4+count]. */
2414 for (i = 0; i <= count; i++)
2416 cache->saved_regs[4 + i].addr = vsp;
2420 /* If indicated by flag, pop LR as well. */
2423 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2427 else if (insn == 0xb0)
2429 /* We could only have updated PC by popping into it; if so, it
2430 will show up as address. Otherwise, copy LR into PC. */
2431 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2432 cache->saved_regs[ARM_PC_REGNUM]
2433 = cache->saved_regs[ARM_LR_REGNUM];
2438 else if (insn == 0xb1)
2440 int mask = *entry++;
2443 /* All-zero mask and mask >= 16 is "spare". */
2444 if (mask == 0 || mask >= 16)
2447 /* Pop r0..r3 under mask. */
2448 for (i = 0; i < 4; i++)
2449 if (mask & (1 << i))
2451 cache->saved_regs[i].addr = vsp;
2455 else if (insn == 0xb2)
2457 ULONGEST offset = 0;
2462 offset |= (*entry & 0x7f) << shift;
2465 while (*entry++ & 0x80);
2467 vsp += 0x204 + (offset << 2);
2469 else if (insn == 0xb3)
2471 int start = *entry >> 4;
2472 int count = (*entry++) & 0xf;
2475 /* Only registers D0..D15 are valid here. */
2476 if (start + count >= 16)
2479 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2480 for (i = 0; i <= count; i++)
2482 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2486 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2489 else if ((insn & 0xf8) == 0xb8)
2491 int count = insn & 0x7;
2494 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2495 for (i = 0; i <= count; i++)
2497 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2501 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2504 else if (insn == 0xc6)
2506 int start = *entry >> 4;
2507 int count = (*entry++) & 0xf;
2510 /* Only registers WR0..WR15 are valid. */
2511 if (start + count >= 16)
2514 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2515 for (i = 0; i <= count; i++)
2517 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2521 else if (insn == 0xc7)
2523 int mask = *entry++;
2526 /* All-zero mask and mask >= 16 is "spare". */
2527 if (mask == 0 || mask >= 16)
2530 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2531 for (i = 0; i < 4; i++)
2532 if (mask & (1 << i))
2534 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2538 else if ((insn & 0xf8) == 0xc0)
2540 int count = insn & 0x7;
2543 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2544 for (i = 0; i <= count; i++)
2546 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2550 else if (insn == 0xc8)
2552 int start = *entry >> 4;
2553 int count = (*entry++) & 0xf;
2556 /* Only registers D0..D31 are valid. */
2557 if (start + count >= 16)
2560 /* Pop VFP double-precision registers
2561 D[16+start]..D[16+start+count]. */
2562 for (i = 0; i <= count; i++)
2564 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2568 else if (insn == 0xc9)
2570 int start = *entry >> 4;
2571 int count = (*entry++) & 0xf;
2574 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2575 for (i = 0; i <= count; i++)
2577 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2581 else if ((insn & 0xf8) == 0xd0)
2583 int count = insn & 0x7;
2586 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2587 for (i = 0; i <= count; i++)
2589 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2595 /* Everything else is "spare". */
2600 /* If we restore SP from a register, assume this was the frame register.
2601 Otherwise just fall back to SP as frame register. */
2602 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2603 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2605 cache->framereg = ARM_SP_REGNUM;
2607 /* Determine offset to previous frame. */
2609 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2611 /* We already got the previous SP. */
2612 cache->prev_sp = vsp;
2617 /* Unwinding via ARM exception table entries. Note that the sniffer
2618 already computes a filled-in prologue cache, which is then used
2619 with the same arm_prologue_this_id and arm_prologue_prev_register
2620 routines also used for prologue-parsing based unwinding. */
2623 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2624 struct frame_info *this_frame,
2625 void **this_prologue_cache)
2627 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2628 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2629 CORE_ADDR addr_in_block, exidx_region, func_start;
2630 struct arm_prologue_cache *cache;
2633 /* See if we have an ARM exception table entry covering this address. */
2634 addr_in_block = get_frame_address_in_block (this_frame);
2635 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2639 /* The ARM exception table does not describe unwind information
2640 for arbitrary PC values, but is guaranteed to be correct only
2641 at call sites. We have to decide here whether we want to use
2642 ARM exception table information for this frame, or fall back
2643 to using prologue parsing. (Note that if we have DWARF CFI,
2644 this sniffer isn't even called -- CFI is always preferred.)
2646 Before we make this decision, however, we check whether we
2647 actually have *symbol* information for the current frame.
2648 If not, prologue parsing would not work anyway, so we might
2649 as well use the exception table and hope for the best. */
2650 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2654 /* If the next frame is "normal", we are at a call site in this
2655 frame, so exception information is guaranteed to be valid. */
2656 if (get_next_frame (this_frame)
2657 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2660 /* We also assume exception information is valid if we're currently
2661 blocked in a system call. The system library is supposed to
2662 ensure this, so that e.g. pthread cancellation works. */
2663 if (arm_frame_is_thumb (this_frame))
2667 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2668 2, byte_order_for_code, &insn)
2669 && (insn & 0xff00) == 0xdf00 /* svc */)
2676 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2677 4, byte_order_for_code, &insn)
2678 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2682 /* Bail out if we don't know that exception information is valid. */
2686 /* The ARM exception index does not mark the *end* of the region
2687 covered by the entry, and some functions will not have any entry.
2688 To correctly recognize the end of the covered region, the linker
2689 should have inserted dummy records with a CANTUNWIND marker.
2691 Unfortunately, current versions of GNU ld do not reliably do
2692 this, and thus we may have found an incorrect entry above.
2693 As a (temporary) sanity check, we only use the entry if it
2694 lies *within* the bounds of the function. Note that this check
2695 might reject perfectly valid entries that just happen to cover
2696 multiple functions; therefore this check ought to be removed
2697 once the linker is fixed. */
2698 if (func_start > exidx_region)
2702 /* Decode the list of unwinding instructions into a prologue cache.
2703 Note that this may fail due to e.g. a "refuse to unwind" code. */
2704 cache = arm_exidx_fill_cache (this_frame, entry);
2708 *this_prologue_cache = cache;
2712 struct frame_unwind arm_exidx_unwind = {
2714 default_frame_unwind_stop_reason,
2715 arm_prologue_this_id,
2716 arm_prologue_prev_register,
2718 arm_exidx_unwind_sniffer
2721 static struct arm_prologue_cache *
2722 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2724 struct arm_prologue_cache *cache;
2727 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2728 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2730 /* Still rely on the offset calculated from prologue. */
2731 arm_scan_prologue (this_frame, cache);
2733 /* Since we are in epilogue, the SP has been restored. */
2734 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2736 /* Calculate actual addresses of saved registers using offsets
2737 determined by arm_scan_prologue. */
2738 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2739 if (trad_frame_addr_p (cache->saved_regs, reg))
2740 cache->saved_regs[reg].addr += cache->prev_sp;
2745 /* Implementation of function hook 'this_id' in
2746 'struct frame_uwnind' for epilogue unwinder. */
2749 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2751 struct frame_id *this_id)
2753 struct arm_prologue_cache *cache;
2756 if (*this_cache == NULL)
2757 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2758 cache = (struct arm_prologue_cache *) *this_cache;
2760 /* Use function start address as part of the frame ID. If we cannot
2761 identify the start address (due to missing symbol information),
2762 fall back to just using the current PC. */
2763 pc = get_frame_pc (this_frame);
2764 func = get_frame_func (this_frame);
2768 (*this_id) = frame_id_build (cache->prev_sp, pc);
2771 /* Implementation of function hook 'prev_register' in
2772 'struct frame_uwnind' for epilogue unwinder. */
2774 static struct value *
2775 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2776 void **this_cache, int regnum)
2778 if (*this_cache == NULL)
2779 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2781 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2784 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2786 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2789 /* Implementation of function hook 'sniffer' in
2790 'struct frame_uwnind' for epilogue unwinder. */
2793 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2794 struct frame_info *this_frame,
2795 void **this_prologue_cache)
2797 if (frame_relative_level (this_frame) == 0)
2799 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2800 CORE_ADDR pc = get_frame_pc (this_frame);
2802 if (arm_frame_is_thumb (this_frame))
2803 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2805 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2811 /* Frame unwinder from epilogue. */
2813 static const struct frame_unwind arm_epilogue_frame_unwind =
2816 default_frame_unwind_stop_reason,
2817 arm_epilogue_frame_this_id,
2818 arm_epilogue_frame_prev_register,
2820 arm_epilogue_frame_sniffer,
2823 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2824 trampoline, return the target PC. Otherwise return 0.
2826 void call0a (char c, short s, int i, long l) {}
2830 (*pointer_to_call0a) (c, s, i, l);
2833 Instead of calling a stub library function _call_via_xx (xx is
2834 the register name), GCC may inline the trampoline in the object
2835 file as below (register r2 has the address of call0a).
2838 .type main, %function
2847 The trampoline 'bx r2' doesn't belong to main. */
2850 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2852 /* The heuristics of recognizing such trampoline is that FRAME is
2853 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2854 if (arm_frame_is_thumb (frame))
2858 if (target_read_memory (pc, buf, 2) == 0)
2860 struct gdbarch *gdbarch = get_frame_arch (frame);
2861 enum bfd_endian byte_order_for_code
2862 = gdbarch_byte_order_for_code (gdbarch);
2864 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2866 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2869 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2871 /* Clear the LSB so that gdb core sets step-resume
2872 breakpoint at the right address. */
2873 return UNMAKE_THUMB_ADDR (dest);
2881 static struct arm_prologue_cache *
2882 arm_make_stub_cache (struct frame_info *this_frame)
2884 struct arm_prologue_cache *cache;
2886 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2887 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2889 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2894 /* Our frame ID for a stub frame is the current SP and LR. */
2897 arm_stub_this_id (struct frame_info *this_frame,
2899 struct frame_id *this_id)
2901 struct arm_prologue_cache *cache;
2903 if (*this_cache == NULL)
2904 *this_cache = arm_make_stub_cache (this_frame);
2905 cache = (struct arm_prologue_cache *) *this_cache;
2907 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2911 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2912 struct frame_info *this_frame,
2913 void **this_prologue_cache)
2915 CORE_ADDR addr_in_block;
2917 CORE_ADDR pc, start_addr;
2920 addr_in_block = get_frame_address_in_block (this_frame);
2921 pc = get_frame_pc (this_frame);
2922 if (in_plt_section (addr_in_block)
2923 /* We also use the stub winder if the target memory is unreadable
2924 to avoid having the prologue unwinder trying to read it. */
2925 || target_read_memory (pc, dummy, 4) != 0)
2928 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2929 && arm_skip_bx_reg (this_frame, pc) != 0)
2935 struct frame_unwind arm_stub_unwind = {
2937 default_frame_unwind_stop_reason,
2939 arm_prologue_prev_register,
2941 arm_stub_unwind_sniffer
2944 /* Put here the code to store, into CACHE->saved_regs, the addresses
2945 of the saved registers of frame described by THIS_FRAME. CACHE is
2948 static struct arm_prologue_cache *
2949 arm_m_exception_cache (struct frame_info *this_frame)
2951 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2952 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2953 struct arm_prologue_cache *cache;
2954 CORE_ADDR unwound_sp;
2957 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2958 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2960 unwound_sp = get_frame_register_unsigned (this_frame,
2963 /* The hardware saves eight 32-bit words, comprising xPSR,
2964 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2965 "B1.5.6 Exception entry behavior" in
2966 "ARMv7-M Architecture Reference Manual". */
2967 cache->saved_regs[0].addr = unwound_sp;
2968 cache->saved_regs[1].addr = unwound_sp + 4;
2969 cache->saved_regs[2].addr = unwound_sp + 8;
2970 cache->saved_regs[3].addr = unwound_sp + 12;
2971 cache->saved_regs[12].addr = unwound_sp + 16;
2972 cache->saved_regs[14].addr = unwound_sp + 20;
2973 cache->saved_regs[15].addr = unwound_sp + 24;
2974 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2976 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2977 aligner between the top of the 32-byte stack frame and the
2978 previous context's stack pointer. */
2979 cache->prev_sp = unwound_sp + 32;
2980 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2981 && (xpsr & (1 << 9)) != 0)
2982 cache->prev_sp += 4;
2987 /* Implementation of function hook 'this_id' in
2988 'struct frame_uwnind'. */
2991 arm_m_exception_this_id (struct frame_info *this_frame,
2993 struct frame_id *this_id)
2995 struct arm_prologue_cache *cache;
2997 if (*this_cache == NULL)
2998 *this_cache = arm_m_exception_cache (this_frame);
2999 cache = (struct arm_prologue_cache *) *this_cache;
3001 /* Our frame ID for a stub frame is the current SP and LR. */
3002 *this_id = frame_id_build (cache->prev_sp,
3003 get_frame_pc (this_frame));
3006 /* Implementation of function hook 'prev_register' in
3007 'struct frame_uwnind'. */
3009 static struct value *
3010 arm_m_exception_prev_register (struct frame_info *this_frame,
3014 struct arm_prologue_cache *cache;
3016 if (*this_cache == NULL)
3017 *this_cache = arm_m_exception_cache (this_frame);
3018 cache = (struct arm_prologue_cache *) *this_cache;
3020 /* The value was already reconstructed into PREV_SP. */
3021 if (prev_regnum == ARM_SP_REGNUM)
3022 return frame_unwind_got_constant (this_frame, prev_regnum,
3025 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3029 /* Implementation of function hook 'sniffer' in
3030 'struct frame_uwnind'. */
3033 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3034 struct frame_info *this_frame,
3035 void **this_prologue_cache)
3037 CORE_ADDR this_pc = get_frame_pc (this_frame);
3039 /* No need to check is_m; this sniffer is only registered for
3040 M-profile architectures. */
3042 /* Check if exception frame returns to a magic PC value. */
3043 return arm_m_addr_is_magic (this_pc);
3046 /* Frame unwinder for M-profile exceptions. */
3048 struct frame_unwind arm_m_exception_unwind =
3051 default_frame_unwind_stop_reason,
3052 arm_m_exception_this_id,
3053 arm_m_exception_prev_register,
3055 arm_m_exception_unwind_sniffer
3059 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3061 struct arm_prologue_cache *cache;
3063 if (*this_cache == NULL)
3064 *this_cache = arm_make_prologue_cache (this_frame);
3065 cache = (struct arm_prologue_cache *) *this_cache;
3067 return cache->prev_sp - cache->framesize;
3070 struct frame_base arm_normal_base = {
3071 &arm_prologue_unwind,
3072 arm_normal_frame_base,
3073 arm_normal_frame_base,
3074 arm_normal_frame_base
3077 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3078 dummy frame. The frame ID's base needs to match the TOS value
3079 saved by save_dummy_frame_tos() and returned from
3080 arm_push_dummy_call, and the PC needs to match the dummy frame's
3083 static struct frame_id
3084 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3086 return frame_id_build (get_frame_register_unsigned (this_frame,
3088 get_frame_pc (this_frame));
3091 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3092 be used to construct the previous frame's ID, after looking up the
3093 containing function). */
3096 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3099 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3100 return arm_addr_bits_remove (gdbarch, pc);
3104 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3106 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3109 static struct value *
3110 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3113 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3115 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3120 /* The PC is normally copied from the return column, which
3121 describes saves of LR. However, that version may have an
3122 extra bit set to indicate Thumb state. The bit is not
3124 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3125 return frame_unwind_got_constant (this_frame, regnum,
3126 arm_addr_bits_remove (gdbarch, lr));
3129 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3130 cpsr = get_frame_register_unsigned (this_frame, regnum);
3131 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3132 if (IS_THUMB_ADDR (lr))
3136 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3139 internal_error (__FILE__, __LINE__,
3140 _("Unexpected register %d"), regnum);
3145 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3146 struct dwarf2_frame_state_reg *reg,
3147 struct frame_info *this_frame)
3153 reg->how = DWARF2_FRAME_REG_FN;
3154 reg->loc.fn = arm_dwarf2_prev_register;
3157 reg->how = DWARF2_FRAME_REG_CFA;
3162 /* Implement the stack_frame_destroyed_p gdbarch method. */
3165 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3167 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3168 unsigned int insn, insn2;
3169 int found_return = 0, found_stack_adjust = 0;
3170 CORE_ADDR func_start, func_end;
3174 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3177 /* The epilogue is a sequence of instructions along the following lines:
3179 - add stack frame size to SP or FP
3180 - [if frame pointer used] restore SP from FP
3181 - restore registers from SP [may include PC]
3182 - a return-type instruction [if PC wasn't already restored]
3184 In a first pass, we scan forward from the current PC and verify the
3185 instructions we find as compatible with this sequence, ending in a
3188 However, this is not sufficient to distinguish indirect function calls
3189 within a function from indirect tail calls in the epilogue in some cases.
3190 Therefore, if we didn't already find any SP-changing instruction during
3191 forward scan, we add a backward scanning heuristic to ensure we actually
3192 are in the epilogue. */
3195 while (scan_pc < func_end && !found_return)
3197 if (target_read_memory (scan_pc, buf, 2))
3201 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3203 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3205 else if (insn == 0x46f7) /* mov pc, lr */
3207 else if (thumb_instruction_restores_sp (insn))
3209 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3212 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3214 if (target_read_memory (scan_pc, buf, 2))
3218 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3220 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3222 if (insn2 & 0x8000) /* <registers> include PC. */
3225 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3226 && (insn2 & 0x0fff) == 0x0b04)
3228 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3231 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3232 && (insn2 & 0x0e00) == 0x0a00)
3244 /* Since any instruction in the epilogue sequence, with the possible
3245 exception of return itself, updates the stack pointer, we need to
3246 scan backwards for at most one instruction. Try either a 16-bit or
3247 a 32-bit instruction. This is just a heuristic, so we do not worry
3248 too much about false positives. */
3250 if (pc - 4 < func_start)
3252 if (target_read_memory (pc - 4, buf, 4))
3255 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3256 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3258 if (thumb_instruction_restores_sp (insn2))
3259 found_stack_adjust = 1;
3260 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3261 found_stack_adjust = 1;
3262 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3263 && (insn2 & 0x0fff) == 0x0b04)
3264 found_stack_adjust = 1;
3265 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3266 && (insn2 & 0x0e00) == 0x0a00)
3267 found_stack_adjust = 1;
3269 return found_stack_adjust;
3273 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3275 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3278 CORE_ADDR func_start, func_end;
3280 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3283 /* We are in the epilogue if the previous instruction was a stack
3284 adjustment and the next instruction is a possible return (bx, mov
3285 pc, or pop). We could have to scan backwards to find the stack
3286 adjustment, or forwards to find the return, but this is a decent
3287 approximation. First scan forwards. */
3290 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3291 if (bits (insn, 28, 31) != INST_NV)
3293 if ((insn & 0x0ffffff0) == 0x012fff10)
3296 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3299 else if ((insn & 0x0fff0000) == 0x08bd0000
3300 && (insn & 0x0000c000) != 0)
3301 /* POP (LDMIA), including PC or LR. */
3308 /* Scan backwards. This is just a heuristic, so do not worry about
3309 false positives from mode changes. */
3311 if (pc < func_start + 4)
3314 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3315 if (arm_instruction_restores_sp (insn))
3321 /* Implement the stack_frame_destroyed_p gdbarch method. */
3324 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3326 if (arm_pc_is_thumb (gdbarch, pc))
3327 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3329 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3332 /* When arguments must be pushed onto the stack, they go on in reverse
3333 order. The code below implements a FILO (stack) to do this. */
3338 struct stack_item *prev;
3342 static struct stack_item *
3343 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3345 struct stack_item *si;
3346 si = XNEW (struct stack_item);
3347 si->data = (gdb_byte *) xmalloc (len);
3350 memcpy (si->data, contents, len);
3354 static struct stack_item *
3355 pop_stack_item (struct stack_item *si)
3357 struct stack_item *dead = si;
3365 /* Return the alignment (in bytes) of the given type. */
3368 arm_type_align (struct type *t)
3374 t = check_typedef (t);
3375 switch (TYPE_CODE (t))
3378 /* Should never happen. */
3379 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3383 case TYPE_CODE_ENUM:
3387 case TYPE_CODE_RANGE:
3389 case TYPE_CODE_RVALUE_REF:
3390 case TYPE_CODE_CHAR:
3391 case TYPE_CODE_BOOL:
3392 return TYPE_LENGTH (t);
3394 case TYPE_CODE_ARRAY:
3395 if (TYPE_VECTOR (t))
3397 /* Use the natural alignment for vector types (the same for
3398 scalar type), but the maximum alignment is 64-bit. */
3399 if (TYPE_LENGTH (t) > 8)
3402 return TYPE_LENGTH (t);
3405 return arm_type_align (TYPE_TARGET_TYPE (t));
3406 case TYPE_CODE_COMPLEX:
3407 return arm_type_align (TYPE_TARGET_TYPE (t));
3409 case TYPE_CODE_STRUCT:
3410 case TYPE_CODE_UNION:
3412 for (n = 0; n < TYPE_NFIELDS (t); n++)
3414 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3422 /* Possible base types for a candidate for passing and returning in
3425 enum arm_vfp_cprc_base_type
3434 /* The length of one element of base type B. */
3437 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3441 case VFP_CPRC_SINGLE:
3443 case VFP_CPRC_DOUBLE:
3445 case VFP_CPRC_VEC64:
3447 case VFP_CPRC_VEC128:
3450 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3455 /* The character ('s', 'd' or 'q') for the type of VFP register used
3456 for passing base type B. */
3459 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3463 case VFP_CPRC_SINGLE:
3465 case VFP_CPRC_DOUBLE:
3467 case VFP_CPRC_VEC64:
3469 case VFP_CPRC_VEC128:
3472 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3477 /* Determine whether T may be part of a candidate for passing and
3478 returning in VFP registers, ignoring the limit on the total number
3479 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3480 classification of the first valid component found; if it is not
3481 VFP_CPRC_UNKNOWN, all components must have the same classification
3482 as *BASE_TYPE. If it is found that T contains a type not permitted
3483 for passing and returning in VFP registers, a type differently
3484 classified from *BASE_TYPE, or two types differently classified
3485 from each other, return -1, otherwise return the total number of
3486 base-type elements found (possibly 0 in an empty structure or
3487 array). Vector types are not currently supported, matching the
3488 generic AAPCS support. */
3491 arm_vfp_cprc_sub_candidate (struct type *t,
3492 enum arm_vfp_cprc_base_type *base_type)
3494 t = check_typedef (t);
3495 switch (TYPE_CODE (t))
3498 switch (TYPE_LENGTH (t))
3501 if (*base_type == VFP_CPRC_UNKNOWN)
3502 *base_type = VFP_CPRC_SINGLE;
3503 else if (*base_type != VFP_CPRC_SINGLE)
3508 if (*base_type == VFP_CPRC_UNKNOWN)
3509 *base_type = VFP_CPRC_DOUBLE;
3510 else if (*base_type != VFP_CPRC_DOUBLE)
3519 case TYPE_CODE_COMPLEX:
3520 /* Arguments of complex T where T is one of the types float or
3521 double get treated as if they are implemented as:
3530 switch (TYPE_LENGTH (t))
3533 if (*base_type == VFP_CPRC_UNKNOWN)
3534 *base_type = VFP_CPRC_SINGLE;
3535 else if (*base_type != VFP_CPRC_SINGLE)
3540 if (*base_type == VFP_CPRC_UNKNOWN)
3541 *base_type = VFP_CPRC_DOUBLE;
3542 else if (*base_type != VFP_CPRC_DOUBLE)
3551 case TYPE_CODE_ARRAY:
3553 if (TYPE_VECTOR (t))
3555 /* A 64-bit or 128-bit containerized vector type are VFP
3557 switch (TYPE_LENGTH (t))
3560 if (*base_type == VFP_CPRC_UNKNOWN)
3561 *base_type = VFP_CPRC_VEC64;
3564 if (*base_type == VFP_CPRC_UNKNOWN)
3565 *base_type = VFP_CPRC_VEC128;
3576 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3580 if (TYPE_LENGTH (t) == 0)
3582 gdb_assert (count == 0);
3585 else if (count == 0)
3587 unitlen = arm_vfp_cprc_unit_length (*base_type);
3588 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3589 return TYPE_LENGTH (t) / unitlen;
3594 case TYPE_CODE_STRUCT:
3599 for (i = 0; i < TYPE_NFIELDS (t); i++)
3603 if (!field_is_static (&TYPE_FIELD (t, i)))
3604 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3606 if (sub_count == -1)
3610 if (TYPE_LENGTH (t) == 0)
3612 gdb_assert (count == 0);
3615 else if (count == 0)
3617 unitlen = arm_vfp_cprc_unit_length (*base_type);
3618 if (TYPE_LENGTH (t) != unitlen * count)
3623 case TYPE_CODE_UNION:
3628 for (i = 0; i < TYPE_NFIELDS (t); i++)
3630 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3632 if (sub_count == -1)
3634 count = (count > sub_count ? count : sub_count);
3636 if (TYPE_LENGTH (t) == 0)
3638 gdb_assert (count == 0);
3641 else if (count == 0)
3643 unitlen = arm_vfp_cprc_unit_length (*base_type);
3644 if (TYPE_LENGTH (t) != unitlen * count)
3656 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3657 if passed to or returned from a non-variadic function with the VFP
3658 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3659 *BASE_TYPE to the base type for T and *COUNT to the number of
3660 elements of that base type before returning. */
3663 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3666 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3667 int c = arm_vfp_cprc_sub_candidate (t, &b);
3668 if (c <= 0 || c > 4)
3675 /* Return 1 if the VFP ABI should be used for passing arguments to and
3676 returning values from a function of type FUNC_TYPE, 0
3680 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3682 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3683 /* Variadic functions always use the base ABI. Assume that functions
3684 without debug info are not variadic. */
3685 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3687 /* The VFP ABI is only supported as a variant of AAPCS. */
3688 if (tdep->arm_abi != ARM_ABI_AAPCS)
3690 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3693 /* We currently only support passing parameters in integer registers, which
3694 conforms with GCC's default model, and VFP argument passing following
3695 the VFP variant of AAPCS. Several other variants exist and
3696 we should probably support some of them based on the selected ABI. */
3699 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3700 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3701 struct value **args, CORE_ADDR sp, int struct_return,
3702 CORE_ADDR struct_addr)
3704 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3708 struct stack_item *si = NULL;
3711 unsigned vfp_regs_free = (1 << 16) - 1;
3713 /* Determine the type of this function and whether the VFP ABI
3715 ftype = check_typedef (value_type (function));
3716 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3717 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3718 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3720 /* Set the return address. For the ARM, the return breakpoint is
3721 always at BP_ADDR. */
3722 if (arm_pc_is_thumb (gdbarch, bp_addr))
3724 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3726 /* Walk through the list of args and determine how large a temporary
3727 stack is required. Need to take care here as structs may be
3728 passed on the stack, and we have to push them. */
3731 argreg = ARM_A1_REGNUM;
3734 /* The struct_return pointer occupies the first parameter
3735 passing register. */
3739 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3740 gdbarch_register_name (gdbarch, argreg),
3741 paddress (gdbarch, struct_addr));
3742 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3746 for (argnum = 0; argnum < nargs; argnum++)
3749 struct type *arg_type;
3750 struct type *target_type;
3751 enum type_code typecode;
3752 const bfd_byte *val;
3754 enum arm_vfp_cprc_base_type vfp_base_type;
3756 int may_use_core_reg = 1;
3758 arg_type = check_typedef (value_type (args[argnum]));
3759 len = TYPE_LENGTH (arg_type);
3760 target_type = TYPE_TARGET_TYPE (arg_type);
3761 typecode = TYPE_CODE (arg_type);
3762 val = value_contents (args[argnum]);
3764 align = arm_type_align (arg_type);
3765 /* Round alignment up to a whole number of words. */
3766 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3767 /* Different ABIs have different maximum alignments. */
3768 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3770 /* The APCS ABI only requires word alignment. */
3771 align = INT_REGISTER_SIZE;
3775 /* The AAPCS requires at most doubleword alignment. */
3776 if (align > INT_REGISTER_SIZE * 2)
3777 align = INT_REGISTER_SIZE * 2;
3781 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3789 /* Because this is a CPRC it cannot go in a core register or
3790 cause a core register to be skipped for alignment.
3791 Either it goes in VFP registers and the rest of this loop
3792 iteration is skipped for this argument, or it goes on the
3793 stack (and the stack alignment code is correct for this
3795 may_use_core_reg = 0;
3797 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3798 shift = unit_length / 4;
3799 mask = (1 << (shift * vfp_base_count)) - 1;
3800 for (regno = 0; regno < 16; regno += shift)
3801 if (((vfp_regs_free >> regno) & mask) == mask)
3810 vfp_regs_free &= ~(mask << regno);
3811 reg_scaled = regno / shift;
3812 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3813 for (i = 0; i < vfp_base_count; i++)
3817 if (reg_char == 'q')
3818 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3819 val + i * unit_length);
3822 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3823 reg_char, reg_scaled + i);
3824 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3826 regcache_cooked_write (regcache, regnum,
3827 val + i * unit_length);
3834 /* This CPRC could not go in VFP registers, so all VFP
3835 registers are now marked as used. */
3840 /* Push stack padding for dowubleword alignment. */
3841 if (nstack & (align - 1))
3843 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3844 nstack += INT_REGISTER_SIZE;
3847 /* Doubleword aligned quantities must go in even register pairs. */
3848 if (may_use_core_reg
3849 && argreg <= ARM_LAST_ARG_REGNUM
3850 && align > INT_REGISTER_SIZE
3854 /* If the argument is a pointer to a function, and it is a
3855 Thumb function, create a LOCAL copy of the value and set
3856 the THUMB bit in it. */
3857 if (TYPE_CODE_PTR == typecode
3858 && target_type != NULL
3859 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3861 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3862 if (arm_pc_is_thumb (gdbarch, regval))
3864 bfd_byte *copy = (bfd_byte *) alloca (len);
3865 store_unsigned_integer (copy, len, byte_order,
3866 MAKE_THUMB_ADDR (regval));
3871 /* Copy the argument to general registers or the stack in
3872 register-sized pieces. Large arguments are split between
3873 registers and stack. */
3876 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3878 = extract_unsigned_integer (val, partial_len, byte_order);
3880 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3882 /* The argument is being passed in a general purpose
3884 if (byte_order == BFD_ENDIAN_BIG)
3885 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3887 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3889 gdbarch_register_name
3891 phex (regval, INT_REGISTER_SIZE));
3892 regcache_cooked_write_unsigned (regcache, argreg, regval);
3897 gdb_byte buf[INT_REGISTER_SIZE];
3899 memset (buf, 0, sizeof (buf));
3900 store_unsigned_integer (buf, partial_len, byte_order, regval);
3902 /* Push the arguments onto the stack. */
3904 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3906 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3907 nstack += INT_REGISTER_SIZE;
3914 /* If we have an odd number of words to push, then decrement the stack
3915 by one word now, so first stack argument will be dword aligned. */
3922 write_memory (sp, si->data, si->len);
3923 si = pop_stack_item (si);
3926 /* Finally, update teh SP register. */
3927 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3933 /* Always align the frame to an 8-byte boundary. This is required on
3934 some platforms and harmless on the rest. */
3937 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3939 /* Align the stack to eight bytes. */
3940 return sp & ~ (CORE_ADDR) 7;
3944 print_fpu_flags (struct ui_file *file, int flags)
3946 if (flags & (1 << 0))
3947 fputs_filtered ("IVO ", file);
3948 if (flags & (1 << 1))
3949 fputs_filtered ("DVZ ", file);
3950 if (flags & (1 << 2))
3951 fputs_filtered ("OFL ", file);
3952 if (flags & (1 << 3))
3953 fputs_filtered ("UFL ", file);
3954 if (flags & (1 << 4))
3955 fputs_filtered ("INX ", file);
3956 fputc_filtered ('\n', file);
3959 /* Print interesting information about the floating point processor
3960 (if present) or emulator. */
3962 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3963 struct frame_info *frame, const char *args)
3965 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3968 type = (status >> 24) & 127;
3969 if (status & (1 << 31))
3970 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3972 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3973 /* i18n: [floating point unit] mask */
3974 fputs_filtered (_("mask: "), file);
3975 print_fpu_flags (file, status >> 16);
3976 /* i18n: [floating point unit] flags */
3977 fputs_filtered (_("flags: "), file);
3978 print_fpu_flags (file, status);
3981 /* Construct the ARM extended floating point type. */
3982 static struct type *
3983 arm_ext_type (struct gdbarch *gdbarch)
3985 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3987 if (!tdep->arm_ext_type)
3989 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3990 floatformats_arm_ext);
3992 return tdep->arm_ext_type;
3995 static struct type *
3996 arm_neon_double_type (struct gdbarch *gdbarch)
3998 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4000 if (tdep->neon_double_type == NULL)
4002 struct type *t, *elem;
4004 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4006 elem = builtin_type (gdbarch)->builtin_uint8;
4007 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4008 elem = builtin_type (gdbarch)->builtin_uint16;
4009 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4010 elem = builtin_type (gdbarch)->builtin_uint32;
4011 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4012 elem = builtin_type (gdbarch)->builtin_uint64;
4013 append_composite_type_field (t, "u64", elem);
4014 elem = builtin_type (gdbarch)->builtin_float;
4015 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4016 elem = builtin_type (gdbarch)->builtin_double;
4017 append_composite_type_field (t, "f64", elem);
4019 TYPE_VECTOR (t) = 1;
4020 TYPE_NAME (t) = "neon_d";
4021 tdep->neon_double_type = t;
4024 return tdep->neon_double_type;
4027 /* FIXME: The vector types are not correctly ordered on big-endian
4028 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4029 bits of d0 - regardless of what unit size is being held in d0. So
4030 the offset of the first uint8 in d0 is 7, but the offset of the
4031 first float is 4. This code works as-is for little-endian
4034 static struct type *
4035 arm_neon_quad_type (struct gdbarch *gdbarch)
4037 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4039 if (tdep->neon_quad_type == NULL)
4041 struct type *t, *elem;
4043 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4045 elem = builtin_type (gdbarch)->builtin_uint8;
4046 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4047 elem = builtin_type (gdbarch)->builtin_uint16;
4048 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4049 elem = builtin_type (gdbarch)->builtin_uint32;
4050 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4051 elem = builtin_type (gdbarch)->builtin_uint64;
4052 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4053 elem = builtin_type (gdbarch)->builtin_float;
4054 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4055 elem = builtin_type (gdbarch)->builtin_double;
4056 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4058 TYPE_VECTOR (t) = 1;
4059 TYPE_NAME (t) = "neon_q";
4060 tdep->neon_quad_type = t;
4063 return tdep->neon_quad_type;
4066 /* Return the GDB type object for the "standard" data type of data in
4069 static struct type *
4070 arm_register_type (struct gdbarch *gdbarch, int regnum)
4072 int num_regs = gdbarch_num_regs (gdbarch);
4074 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4075 && regnum >= num_regs && regnum < num_regs + 32)
4076 return builtin_type (gdbarch)->builtin_float;
4078 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4079 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4080 return arm_neon_quad_type (gdbarch);
4082 /* If the target description has register information, we are only
4083 in this function so that we can override the types of
4084 double-precision registers for NEON. */
4085 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4087 struct type *t = tdesc_register_type (gdbarch, regnum);
4089 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4090 && TYPE_CODE (t) == TYPE_CODE_FLT
4091 && gdbarch_tdep (gdbarch)->have_neon)
4092 return arm_neon_double_type (gdbarch);
4097 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4099 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4100 return builtin_type (gdbarch)->builtin_void;
4102 return arm_ext_type (gdbarch);
4104 else if (regnum == ARM_SP_REGNUM)
4105 return builtin_type (gdbarch)->builtin_data_ptr;
4106 else if (regnum == ARM_PC_REGNUM)
4107 return builtin_type (gdbarch)->builtin_func_ptr;
4108 else if (regnum >= ARRAY_SIZE (arm_register_names))
4109 /* These registers are only supported on targets which supply
4110 an XML description. */
4111 return builtin_type (gdbarch)->builtin_int0;
4113 return builtin_type (gdbarch)->builtin_uint32;
4116 /* Map a DWARF register REGNUM onto the appropriate GDB register
4120 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4122 /* Core integer regs. */
4123 if (reg >= 0 && reg <= 15)
4126 /* Legacy FPA encoding. These were once used in a way which
4127 overlapped with VFP register numbering, so their use is
4128 discouraged, but GDB doesn't support the ARM toolchain
4129 which used them for VFP. */
4130 if (reg >= 16 && reg <= 23)
4131 return ARM_F0_REGNUM + reg - 16;
4133 /* New assignments for the FPA registers. */
4134 if (reg >= 96 && reg <= 103)
4135 return ARM_F0_REGNUM + reg - 96;
4137 /* WMMX register assignments. */
4138 if (reg >= 104 && reg <= 111)
4139 return ARM_WCGR0_REGNUM + reg - 104;
4141 if (reg >= 112 && reg <= 127)
4142 return ARM_WR0_REGNUM + reg - 112;
4144 if (reg >= 192 && reg <= 199)
4145 return ARM_WC0_REGNUM + reg - 192;
4147 /* VFP v2 registers. A double precision value is actually
4148 in d1 rather than s2, but the ABI only defines numbering
4149 for the single precision registers. This will "just work"
4150 in GDB for little endian targets (we'll read eight bytes,
4151 starting in s0 and then progressing to s1), but will be
4152 reversed on big endian targets with VFP. This won't
4153 be a problem for the new Neon quad registers; you're supposed
4154 to use DW_OP_piece for those. */
4155 if (reg >= 64 && reg <= 95)
4159 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4160 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4164 /* VFP v3 / Neon registers. This range is also used for VFP v2
4165 registers, except that it now describes d0 instead of s0. */
4166 if (reg >= 256 && reg <= 287)
4170 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4171 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4178 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4180 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4183 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4185 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4186 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4188 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4189 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4191 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4192 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4194 if (reg < NUM_GREGS)
4195 return SIM_ARM_R0_REGNUM + reg;
4198 if (reg < NUM_FREGS)
4199 return SIM_ARM_FP0_REGNUM + reg;
4202 if (reg < NUM_SREGS)
4203 return SIM_ARM_FPS_REGNUM + reg;
4206 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4209 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4210 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4211 NULL if an error occurs. BUF is freed. */
4214 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4215 int old_len, int new_len)
4218 int bytes_to_read = new_len - old_len;
4220 new_buf = (gdb_byte *) xmalloc (new_len);
4221 memcpy (new_buf + bytes_to_read, buf, old_len);
4223 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4231 /* An IT block is at most the 2-byte IT instruction followed by
4232 four 4-byte instructions. The furthest back we must search to
4233 find an IT block that affects the current instruction is thus
4234 2 + 3 * 4 == 14 bytes. */
4235 #define MAX_IT_BLOCK_PREFIX 14
4237 /* Use a quick scan if there are more than this many bytes of
4239 #define IT_SCAN_THRESHOLD 32
4241 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4242 A breakpoint in an IT block may not be hit, depending on the
4245 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4249 CORE_ADDR boundary, func_start;
4251 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4252 int i, any, last_it, last_it_count;
4254 /* If we are using BKPT breakpoints, none of this is necessary. */
4255 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4258 /* ARM mode does not have this problem. */
4259 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4262 /* We are setting a breakpoint in Thumb code that could potentially
4263 contain an IT block. The first step is to find how much Thumb
4264 code there is; we do not need to read outside of known Thumb
4266 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4268 /* Thumb-2 code must have mapping symbols to have a chance. */
4271 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4273 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4274 && func_start > boundary)
4275 boundary = func_start;
4277 /* Search for a candidate IT instruction. We have to do some fancy
4278 footwork to distinguish a real IT instruction from the second
4279 half of a 32-bit instruction, but there is no need for that if
4280 there's no candidate. */
4281 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4283 /* No room for an IT instruction. */
4286 buf = (gdb_byte *) xmalloc (buf_len);
4287 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4290 for (i = 0; i < buf_len; i += 2)
4292 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4293 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4306 /* OK, the code bytes before this instruction contain at least one
4307 halfword which resembles an IT instruction. We know that it's
4308 Thumb code, but there are still two possibilities. Either the
4309 halfword really is an IT instruction, or it is the second half of
4310 a 32-bit Thumb instruction. The only way we can tell is to
4311 scan forwards from a known instruction boundary. */
4312 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4316 /* There's a lot of code before this instruction. Start with an
4317 optimistic search; it's easy to recognize halfwords that can
4318 not be the start of a 32-bit instruction, and use that to
4319 lock on to the instruction boundaries. */
4320 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4323 buf_len = IT_SCAN_THRESHOLD;
4326 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4328 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4329 if (thumb_insn_size (inst1) == 2)
4336 /* At this point, if DEFINITE, BUF[I] is the first place we
4337 are sure that we know the instruction boundaries, and it is far
4338 enough from BPADDR that we could not miss an IT instruction
4339 affecting BPADDR. If ! DEFINITE, give up - start from a
4343 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4347 buf_len = bpaddr - boundary;
4353 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4356 buf_len = bpaddr - boundary;
4360 /* Scan forwards. Find the last IT instruction before BPADDR. */
4365 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4367 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4372 else if (inst1 & 0x0002)
4374 else if (inst1 & 0x0004)
4379 i += thumb_insn_size (inst1);
4385 /* There wasn't really an IT instruction after all. */
4388 if (last_it_count < 1)
4389 /* It was too far away. */
4392 /* This really is a trouble spot. Move the breakpoint to the IT
4394 return bpaddr - buf_len + last_it;
4397 /* ARM displaced stepping support.
4399 Generally ARM displaced stepping works as follows:
4401 1. When an instruction is to be single-stepped, it is first decoded by
4402 arm_process_displaced_insn. Depending on the type of instruction, it is
4403 then copied to a scratch location, possibly in a modified form. The
4404 copy_* set of functions performs such modification, as necessary. A
4405 breakpoint is placed after the modified instruction in the scratch space
4406 to return control to GDB. Note in particular that instructions which
4407 modify the PC will no longer do so after modification.
4409 2. The instruction is single-stepped, by setting the PC to the scratch
4410 location address, and resuming. Control returns to GDB when the
4413 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4414 function used for the current instruction. This function's job is to
4415 put the CPU/memory state back to what it would have been if the
4416 instruction had been executed unmodified in its original location. */
4418 /* NOP instruction (mov r0, r0). */
4419 #define ARM_NOP 0xe1a00000
4420 #define THUMB_NOP 0x4600
4422 /* Helper for register reads for displaced stepping. In particular, this
4423 returns the PC as it would be seen by the instruction at its original
4427 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4431 CORE_ADDR from = dsc->insn_addr;
4433 if (regno == ARM_PC_REGNUM)
4435 /* Compute pipeline offset:
4436 - When executing an ARM instruction, PC reads as the address of the
4437 current instruction plus 8.
4438 - When executing a Thumb instruction, PC reads as the address of the
4439 current instruction plus 4. */
4446 if (debug_displaced)
4447 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4448 (unsigned long) from);
4449 return (ULONGEST) from;
4453 regcache_cooked_read_unsigned (regs, regno, &ret);
4454 if (debug_displaced)
4455 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4456 regno, (unsigned long) ret);
4462 displaced_in_arm_mode (struct regcache *regs)
4465 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4467 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4469 return (ps & t_bit) == 0;
4472 /* Write to the PC as from a branch instruction. */
4475 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4479 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4480 architecture versions < 6. */
4481 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4482 val & ~(ULONGEST) 0x3);
4484 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4485 val & ~(ULONGEST) 0x1);
4488 /* Write to the PC as from a branch-exchange instruction. */
4491 bx_write_pc (struct regcache *regs, ULONGEST val)
4494 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4496 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4500 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4501 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4503 else if ((val & 2) == 0)
4505 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4506 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4510 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4511 mode, align dest to 4 bytes). */
4512 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4513 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4514 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4518 /* Write to the PC as if from a load instruction. */
4521 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4524 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4525 bx_write_pc (regs, val);
4527 branch_write_pc (regs, dsc, val);
4530 /* Write to the PC as if from an ALU instruction. */
4533 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4536 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4537 bx_write_pc (regs, val);
4539 branch_write_pc (regs, dsc, val);
4542 /* Helper for writing to registers for displaced stepping. Writing to the PC
4543 has a varying effects depending on the instruction which does the write:
4544 this is controlled by the WRITE_PC argument. */
4547 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4548 int regno, ULONGEST val, enum pc_write_style write_pc)
4550 if (regno == ARM_PC_REGNUM)
4552 if (debug_displaced)
4553 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4554 (unsigned long) val);
4557 case BRANCH_WRITE_PC:
4558 branch_write_pc (regs, dsc, val);
4562 bx_write_pc (regs, val);
4566 load_write_pc (regs, dsc, val);
4570 alu_write_pc (regs, dsc, val);
4573 case CANNOT_WRITE_PC:
4574 warning (_("Instruction wrote to PC in an unexpected way when "
4575 "single-stepping"));
4579 internal_error (__FILE__, __LINE__,
4580 _("Invalid argument to displaced_write_reg"));
4583 dsc->wrote_to_pc = 1;
4587 if (debug_displaced)
4588 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4589 regno, (unsigned long) val);
4590 regcache_cooked_write_unsigned (regs, regno, val);
4594 /* This function is used to concisely determine if an instruction INSN
4595 references PC. Register fields of interest in INSN should have the
4596 corresponding fields of BITMASK set to 0b1111. The function
4597 returns return 1 if any of these fields in INSN reference the PC
4598 (also 0b1111, r15), else it returns 0. */
4601 insn_references_pc (uint32_t insn, uint32_t bitmask)
4603 uint32_t lowbit = 1;
4605 while (bitmask != 0)
4609 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4615 mask = lowbit * 0xf;
4617 if ((insn & mask) == mask)
4626 /* The simplest copy function. Many instructions have the same effect no
4627 matter what address they are executed at: in those cases, use this. */
4630 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4631 const char *iname, struct displaced_step_closure *dsc)
4633 if (debug_displaced)
4634 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4635 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4638 dsc->modinsn[0] = insn;
4644 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4645 uint16_t insn2, const char *iname,
4646 struct displaced_step_closure *dsc)
4648 if (debug_displaced)
4649 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4650 "opcode/class '%s' unmodified\n", insn1, insn2,
4653 dsc->modinsn[0] = insn1;
4654 dsc->modinsn[1] = insn2;
4660 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4663 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4665 struct displaced_step_closure *dsc)
4667 if (debug_displaced)
4668 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4669 "opcode/class '%s' unmodified\n", insn,
4672 dsc->modinsn[0] = insn;
4677 /* Preload instructions with immediate offset. */
4680 cleanup_preload (struct gdbarch *gdbarch,
4681 struct regcache *regs, struct displaced_step_closure *dsc)
4683 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4684 if (!dsc->u.preload.immed)
4685 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4689 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4690 struct displaced_step_closure *dsc, unsigned int rn)
4693 /* Preload instructions:
4695 {pli/pld} [rn, #+/-imm]
4697 {pli/pld} [r0, #+/-imm]. */
4699 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4700 rn_val = displaced_read_reg (regs, dsc, rn);
4701 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4702 dsc->u.preload.immed = 1;
4704 dsc->cleanup = &cleanup_preload;
4708 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4709 struct displaced_step_closure *dsc)
4711 unsigned int rn = bits (insn, 16, 19);
4713 if (!insn_references_pc (insn, 0x000f0000ul))
4714 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4716 if (debug_displaced)
4717 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4718 (unsigned long) insn);
4720 dsc->modinsn[0] = insn & 0xfff0ffff;
4722 install_preload (gdbarch, regs, dsc, rn);
4728 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4729 struct regcache *regs, struct displaced_step_closure *dsc)
4731 unsigned int rn = bits (insn1, 0, 3);
4732 unsigned int u_bit = bit (insn1, 7);
4733 int imm12 = bits (insn2, 0, 11);
4736 if (rn != ARM_PC_REGNUM)
4737 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4739 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4740 PLD (literal) Encoding T1. */
4741 if (debug_displaced)
4742 fprintf_unfiltered (gdb_stdlog,
4743 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4744 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4750 /* Rewrite instruction {pli/pld} PC imm12 into:
4751 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4755 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4758 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4760 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4762 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4763 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4764 dsc->u.preload.immed = 0;
4766 /* {pli/pld} [r0, r1] */
4767 dsc->modinsn[0] = insn1 & 0xfff0;
4768 dsc->modinsn[1] = 0xf001;
4771 dsc->cleanup = &cleanup_preload;
4775 /* Preload instructions with register offset. */
4778 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4779 struct displaced_step_closure *dsc, unsigned int rn,
4782 ULONGEST rn_val, rm_val;
4784 /* Preload register-offset instructions:
4786 {pli/pld} [rn, rm {, shift}]
4788 {pli/pld} [r0, r1 {, shift}]. */
4790 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4791 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4792 rn_val = displaced_read_reg (regs, dsc, rn);
4793 rm_val = displaced_read_reg (regs, dsc, rm);
4794 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4795 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4796 dsc->u.preload.immed = 0;
4798 dsc->cleanup = &cleanup_preload;
4802 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4803 struct regcache *regs,
4804 struct displaced_step_closure *dsc)
4806 unsigned int rn = bits (insn, 16, 19);
4807 unsigned int rm = bits (insn, 0, 3);
4810 if (!insn_references_pc (insn, 0x000f000ful))
4811 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4813 if (debug_displaced)
4814 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4815 (unsigned long) insn);
4817 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4819 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4823 /* Copy/cleanup coprocessor load and store instructions. */
4826 cleanup_copro_load_store (struct gdbarch *gdbarch,
4827 struct regcache *regs,
4828 struct displaced_step_closure *dsc)
4830 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4832 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4834 if (dsc->u.ldst.writeback)
4835 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4839 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4840 struct displaced_step_closure *dsc,
4841 int writeback, unsigned int rn)
4845 /* Coprocessor load/store instructions:
4847 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4849 {stc/stc2} [r0, #+/-imm].
4851 ldc/ldc2 are handled identically. */
4853 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4854 rn_val = displaced_read_reg (regs, dsc, rn);
4855 /* PC should be 4-byte aligned. */
4856 rn_val = rn_val & 0xfffffffc;
4857 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4859 dsc->u.ldst.writeback = writeback;
4860 dsc->u.ldst.rn = rn;
4862 dsc->cleanup = &cleanup_copro_load_store;
4866 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4867 struct regcache *regs,
4868 struct displaced_step_closure *dsc)
4870 unsigned int rn = bits (insn, 16, 19);
4872 if (!insn_references_pc (insn, 0x000f0000ul))
4873 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4875 if (debug_displaced)
4876 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4877 "load/store insn %.8lx\n", (unsigned long) insn);
4879 dsc->modinsn[0] = insn & 0xfff0ffff;
4881 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4887 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4888 uint16_t insn2, struct regcache *regs,
4889 struct displaced_step_closure *dsc)
4891 unsigned int rn = bits (insn1, 0, 3);
4893 if (rn != ARM_PC_REGNUM)
4894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4895 "copro load/store", dsc);
4897 if (debug_displaced)
4898 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4899 "load/store insn %.4x%.4x\n", insn1, insn2);
4901 dsc->modinsn[0] = insn1 & 0xfff0;
4902 dsc->modinsn[1] = insn2;
4905 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4906 doesn't support writeback, so pass 0. */
4907 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4912 /* Clean up branch instructions (actually perform the branch, by setting
4916 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4917 struct displaced_step_closure *dsc)
4919 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4920 int branch_taken = condition_true (dsc->u.branch.cond, status);
4921 enum pc_write_style write_pc = dsc->u.branch.exchange
4922 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4927 if (dsc->u.branch.link)
4929 /* The value of LR should be the next insn of current one. In order
4930 not to confuse logic hanlding later insn `bx lr', if current insn mode
4931 is Thumb, the bit 0 of LR value should be set to 1. */
4932 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4935 next_insn_addr |= 0x1;
4937 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4941 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4944 /* Copy B/BL/BLX instructions with immediate destinations. */
4947 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4948 struct displaced_step_closure *dsc,
4949 unsigned int cond, int exchange, int link, long offset)
4951 /* Implement "BL<cond> <label>" as:
4953 Preparation: cond <- instruction condition
4954 Insn: mov r0, r0 (nop)
4955 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4957 B<cond> similar, but don't set r14 in cleanup. */
4959 dsc->u.branch.cond = cond;
4960 dsc->u.branch.link = link;
4961 dsc->u.branch.exchange = exchange;
4963 dsc->u.branch.dest = dsc->insn_addr;
4964 if (link && exchange)
4965 /* For BLX, offset is computed from the Align (PC, 4). */
4966 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4969 dsc->u.branch.dest += 4 + offset;
4971 dsc->u.branch.dest += 8 + offset;
4973 dsc->cleanup = &cleanup_branch;
4976 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4977 struct regcache *regs, struct displaced_step_closure *dsc)
4979 unsigned int cond = bits (insn, 28, 31);
4980 int exchange = (cond == 0xf);
4981 int link = exchange || bit (insn, 24);
4984 if (debug_displaced)
4985 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4986 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4987 (unsigned long) insn);
4989 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4990 then arrange the switch into Thumb mode. */
4991 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4993 offset = bits (insn, 0, 23) << 2;
4995 if (bit (offset, 25))
4996 offset = offset | ~0x3ffffff;
4998 dsc->modinsn[0] = ARM_NOP;
5000 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5005 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5006 uint16_t insn2, struct regcache *regs,
5007 struct displaced_step_closure *dsc)
5009 int link = bit (insn2, 14);
5010 int exchange = link && !bit (insn2, 12);
5013 int j1 = bit (insn2, 13);
5014 int j2 = bit (insn2, 11);
5015 int s = sbits (insn1, 10, 10);
5016 int i1 = !(j1 ^ bit (insn1, 10));
5017 int i2 = !(j2 ^ bit (insn1, 10));
5019 if (!link && !exchange) /* B */
5021 offset = (bits (insn2, 0, 10) << 1);
5022 if (bit (insn2, 12)) /* Encoding T4 */
5024 offset |= (bits (insn1, 0, 9) << 12)
5030 else /* Encoding T3 */
5032 offset |= (bits (insn1, 0, 5) << 12)
5036 cond = bits (insn1, 6, 9);
5041 offset = (bits (insn1, 0, 9) << 12);
5042 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5043 offset |= exchange ?
5044 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5047 if (debug_displaced)
5048 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5049 "%.4x %.4x with offset %.8lx\n",
5050 link ? (exchange) ? "blx" : "bl" : "b",
5051 insn1, insn2, offset);
5053 dsc->modinsn[0] = THUMB_NOP;
5055 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5059 /* Copy B Thumb instructions. */
5061 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5062 struct displaced_step_closure *dsc)
5064 unsigned int cond = 0;
5066 unsigned short bit_12_15 = bits (insn, 12, 15);
5067 CORE_ADDR from = dsc->insn_addr;
5069 if (bit_12_15 == 0xd)
5071 /* offset = SignExtend (imm8:0, 32) */
5072 offset = sbits ((insn << 1), 0, 8);
5073 cond = bits (insn, 8, 11);
5075 else if (bit_12_15 == 0xe) /* Encoding T2 */
5077 offset = sbits ((insn << 1), 0, 11);
5081 if (debug_displaced)
5082 fprintf_unfiltered (gdb_stdlog,
5083 "displaced: copying b immediate insn %.4x "
5084 "with offset %d\n", insn, offset);
5086 dsc->u.branch.cond = cond;
5087 dsc->u.branch.link = 0;
5088 dsc->u.branch.exchange = 0;
5089 dsc->u.branch.dest = from + 4 + offset;
5091 dsc->modinsn[0] = THUMB_NOP;
5093 dsc->cleanup = &cleanup_branch;
5098 /* Copy BX/BLX with register-specified destinations. */
5101 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5102 struct displaced_step_closure *dsc, int link,
5103 unsigned int cond, unsigned int rm)
5105 /* Implement {BX,BLX}<cond> <reg>" as:
5107 Preparation: cond <- instruction condition
5108 Insn: mov r0, r0 (nop)
5109 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5111 Don't set r14 in cleanup for BX. */
5113 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5115 dsc->u.branch.cond = cond;
5116 dsc->u.branch.link = link;
5118 dsc->u.branch.exchange = 1;
5120 dsc->cleanup = &cleanup_branch;
5124 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5125 struct regcache *regs, struct displaced_step_closure *dsc)
5127 unsigned int cond = bits (insn, 28, 31);
5130 int link = bit (insn, 5);
5131 unsigned int rm = bits (insn, 0, 3);
5133 if (debug_displaced)
5134 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5135 (unsigned long) insn);
5137 dsc->modinsn[0] = ARM_NOP;
5139 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5144 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5145 struct regcache *regs,
5146 struct displaced_step_closure *dsc)
5148 int link = bit (insn, 7);
5149 unsigned int rm = bits (insn, 3, 6);
5151 if (debug_displaced)
5152 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5153 (unsigned short) insn);
5155 dsc->modinsn[0] = THUMB_NOP;
5157 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5163 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5166 cleanup_alu_imm (struct gdbarch *gdbarch,
5167 struct regcache *regs, struct displaced_step_closure *dsc)
5169 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5170 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5171 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5172 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5176 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5177 struct displaced_step_closure *dsc)
5179 unsigned int rn = bits (insn, 16, 19);
5180 unsigned int rd = bits (insn, 12, 15);
5181 unsigned int op = bits (insn, 21, 24);
5182 int is_mov = (op == 0xd);
5183 ULONGEST rd_val, rn_val;
5185 if (!insn_references_pc (insn, 0x000ff000ul))
5186 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5188 if (debug_displaced)
5189 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5190 "%.8lx\n", is_mov ? "move" : "ALU",
5191 (unsigned long) insn);
5193 /* Instruction is of form:
5195 <op><cond> rd, [rn,] #imm
5199 Preparation: tmp1, tmp2 <- r0, r1;
5201 Insn: <op><cond> r0, r1, #imm
5202 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5205 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5206 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5207 rn_val = displaced_read_reg (regs, dsc, rn);
5208 rd_val = displaced_read_reg (regs, dsc, rd);
5209 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5210 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5214 dsc->modinsn[0] = insn & 0xfff00fff;
5216 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5218 dsc->cleanup = &cleanup_alu_imm;
5224 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5225 uint16_t insn2, struct regcache *regs,
5226 struct displaced_step_closure *dsc)
5228 unsigned int op = bits (insn1, 5, 8);
5229 unsigned int rn, rm, rd;
5230 ULONGEST rd_val, rn_val;
5232 rn = bits (insn1, 0, 3); /* Rn */
5233 rm = bits (insn2, 0, 3); /* Rm */
5234 rd = bits (insn2, 8, 11); /* Rd */
5236 /* This routine is only called for instruction MOV. */
5237 gdb_assert (op == 0x2 && rn == 0xf);
5239 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5240 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5242 if (debug_displaced)
5243 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5244 "ALU", insn1, insn2);
5246 /* Instruction is of form:
5248 <op><cond> rd, [rn,] #imm
5252 Preparation: tmp1, tmp2 <- r0, r1;
5254 Insn: <op><cond> r0, r1, #imm
5255 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5258 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5259 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5260 rn_val = displaced_read_reg (regs, dsc, rn);
5261 rd_val = displaced_read_reg (regs, dsc, rd);
5262 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5263 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5266 dsc->modinsn[0] = insn1;
5267 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5270 dsc->cleanup = &cleanup_alu_imm;
5275 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5278 cleanup_alu_reg (struct gdbarch *gdbarch,
5279 struct regcache *regs, struct displaced_step_closure *dsc)
5284 rd_val = displaced_read_reg (regs, dsc, 0);
5286 for (i = 0; i < 3; i++)
5287 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5289 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5293 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5294 struct displaced_step_closure *dsc,
5295 unsigned int rd, unsigned int rn, unsigned int rm)
5297 ULONGEST rd_val, rn_val, rm_val;
5299 /* Instruction is of form:
5301 <op><cond> rd, [rn,] rm [, <shift>]
5305 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5306 r0, r1, r2 <- rd, rn, rm
5307 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5308 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5311 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5312 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5313 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5314 rd_val = displaced_read_reg (regs, dsc, rd);
5315 rn_val = displaced_read_reg (regs, dsc, rn);
5316 rm_val = displaced_read_reg (regs, dsc, rm);
5317 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5318 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5319 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5322 dsc->cleanup = &cleanup_alu_reg;
5326 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5327 struct displaced_step_closure *dsc)
5329 unsigned int op = bits (insn, 21, 24);
5330 int is_mov = (op == 0xd);
5332 if (!insn_references_pc (insn, 0x000ff00ful))
5333 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5335 if (debug_displaced)
5336 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5337 is_mov ? "move" : "ALU", (unsigned long) insn);
5340 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5342 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5344 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5350 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5351 struct regcache *regs,
5352 struct displaced_step_closure *dsc)
5356 rm = bits (insn, 3, 6);
5357 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5359 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5360 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5362 if (debug_displaced)
5363 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5364 (unsigned short) insn);
5366 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5368 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5373 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5376 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5377 struct regcache *regs,
5378 struct displaced_step_closure *dsc)
5380 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5383 for (i = 0; i < 4; i++)
5384 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5386 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5390 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5391 struct displaced_step_closure *dsc,
5392 unsigned int rd, unsigned int rn, unsigned int rm,
5396 ULONGEST rd_val, rn_val, rm_val, rs_val;
5398 /* Instruction is of form:
5400 <op><cond> rd, [rn,] rm, <shift> rs
5404 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5405 r0, r1, r2, r3 <- rd, rn, rm, rs
5406 Insn: <op><cond> r0, r1, r2, <shift> r3
5408 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5412 for (i = 0; i < 4; i++)
5413 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5415 rd_val = displaced_read_reg (regs, dsc, rd);
5416 rn_val = displaced_read_reg (regs, dsc, rn);
5417 rm_val = displaced_read_reg (regs, dsc, rm);
5418 rs_val = displaced_read_reg (regs, dsc, rs);
5419 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5420 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5421 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5422 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5424 dsc->cleanup = &cleanup_alu_shifted_reg;
5428 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5429 struct regcache *regs,
5430 struct displaced_step_closure *dsc)
5432 unsigned int op = bits (insn, 21, 24);
5433 int is_mov = (op == 0xd);
5434 unsigned int rd, rn, rm, rs;
5436 if (!insn_references_pc (insn, 0x000fff0ful))
5437 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5439 if (debug_displaced)
5440 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5441 "%.8lx\n", is_mov ? "move" : "ALU",
5442 (unsigned long) insn);
5444 rn = bits (insn, 16, 19);
5445 rm = bits (insn, 0, 3);
5446 rs = bits (insn, 8, 11);
5447 rd = bits (insn, 12, 15);
5450 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5452 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5454 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5459 /* Clean up load instructions. */
5462 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5463 struct displaced_step_closure *dsc)
5465 ULONGEST rt_val, rt_val2 = 0, rn_val;
5467 rt_val = displaced_read_reg (regs, dsc, 0);
5468 if (dsc->u.ldst.xfersize == 8)
5469 rt_val2 = displaced_read_reg (regs, dsc, 1);
5470 rn_val = displaced_read_reg (regs, dsc, 2);
5472 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5473 if (dsc->u.ldst.xfersize > 4)
5474 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5475 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5476 if (!dsc->u.ldst.immed)
5477 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5479 /* Handle register writeback. */
5480 if (dsc->u.ldst.writeback)
5481 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5482 /* Put result in right place. */
5483 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5484 if (dsc->u.ldst.xfersize == 8)
5485 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5488 /* Clean up store instructions. */
5491 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5492 struct displaced_step_closure *dsc)
5494 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5496 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5497 if (dsc->u.ldst.xfersize > 4)
5498 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5499 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5500 if (!dsc->u.ldst.immed)
5501 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5502 if (!dsc->u.ldst.restore_r4)
5503 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5506 if (dsc->u.ldst.writeback)
5507 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5510 /* Copy "extra" load/store instructions. These are halfword/doubleword
5511 transfers, which have a different encoding to byte/word transfers. */
5514 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5515 struct regcache *regs, struct displaced_step_closure *dsc)
5517 unsigned int op1 = bits (insn, 20, 24);
5518 unsigned int op2 = bits (insn, 5, 6);
5519 unsigned int rt = bits (insn, 12, 15);
5520 unsigned int rn = bits (insn, 16, 19);
5521 unsigned int rm = bits (insn, 0, 3);
5522 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5523 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5524 int immed = (op1 & 0x4) != 0;
5526 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5528 if (!insn_references_pc (insn, 0x000ff00ful))
5529 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5531 if (debug_displaced)
5532 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5533 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5534 (unsigned long) insn);
5536 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5539 internal_error (__FILE__, __LINE__,
5540 _("copy_extra_ld_st: instruction decode error"));
5542 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5543 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5544 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5546 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5548 rt_val = displaced_read_reg (regs, dsc, rt);
5549 if (bytesize[opcode] == 8)
5550 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5551 rn_val = displaced_read_reg (regs, dsc, rn);
5553 rm_val = displaced_read_reg (regs, dsc, rm);
5555 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5556 if (bytesize[opcode] == 8)
5557 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5558 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5560 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5563 dsc->u.ldst.xfersize = bytesize[opcode];
5564 dsc->u.ldst.rn = rn;
5565 dsc->u.ldst.immed = immed;
5566 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5567 dsc->u.ldst.restore_r4 = 0;
5570 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5572 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5573 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5575 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5577 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5578 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5580 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5585 /* Copy byte/half word/word loads and stores. */
5588 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5589 struct displaced_step_closure *dsc, int load,
5590 int immed, int writeback, int size, int usermode,
5591 int rt, int rm, int rn)
5593 ULONGEST rt_val, rn_val, rm_val = 0;
5595 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5596 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5598 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5600 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5602 rt_val = displaced_read_reg (regs, dsc, rt);
5603 rn_val = displaced_read_reg (regs, dsc, rn);
5605 rm_val = displaced_read_reg (regs, dsc, rm);
5607 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5608 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5610 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5612 dsc->u.ldst.xfersize = size;
5613 dsc->u.ldst.rn = rn;
5614 dsc->u.ldst.immed = immed;
5615 dsc->u.ldst.writeback = writeback;
5617 /* To write PC we can do:
5619 Before this sequence of instructions:
5620 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5621 r2 is the Rn value got from dispalced_read_reg.
5623 Insn1: push {pc} Write address of STR instruction + offset on stack
5624 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5625 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5626 = addr(Insn1) + offset - addr(Insn3) - 8
5628 Insn4: add r4, r4, #8 r4 = offset - 8
5629 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5631 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5633 Otherwise we don't know what value to write for PC, since the offset is
5634 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5635 of this can be found in Section "Saving from r15" in
5636 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5638 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5643 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5644 uint16_t insn2, struct regcache *regs,
5645 struct displaced_step_closure *dsc, int size)
5647 unsigned int u_bit = bit (insn1, 7);
5648 unsigned int rt = bits (insn2, 12, 15);
5649 int imm12 = bits (insn2, 0, 11);
5652 if (debug_displaced)
5653 fprintf_unfiltered (gdb_stdlog,
5654 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5655 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5661 /* Rewrite instruction LDR Rt imm12 into:
5663 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5667 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5670 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5671 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5672 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5674 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5676 pc_val = pc_val & 0xfffffffc;
5678 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5679 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5683 dsc->u.ldst.xfersize = size;
5684 dsc->u.ldst.immed = 0;
5685 dsc->u.ldst.writeback = 0;
5686 dsc->u.ldst.restore_r4 = 0;
5688 /* LDR R0, R2, R3 */
5689 dsc->modinsn[0] = 0xf852;
5690 dsc->modinsn[1] = 0x3;
5693 dsc->cleanup = &cleanup_load;
5699 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5700 uint16_t insn2, struct regcache *regs,
5701 struct displaced_step_closure *dsc,
5702 int writeback, int immed)
5704 unsigned int rt = bits (insn2, 12, 15);
5705 unsigned int rn = bits (insn1, 0, 3);
5706 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5707 /* In LDR (register), there is also a register Rm, which is not allowed to
5708 be PC, so we don't have to check it. */
5710 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5711 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5714 if (debug_displaced)
5715 fprintf_unfiltered (gdb_stdlog,
5716 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5717 rt, rn, insn1, insn2);
5719 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5722 dsc->u.ldst.restore_r4 = 0;
5725 /* ldr[b]<cond> rt, [rn, #imm], etc.
5727 ldr[b]<cond> r0, [r2, #imm]. */
5729 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5730 dsc->modinsn[1] = insn2 & 0x0fff;
5733 /* ldr[b]<cond> rt, [rn, rm], etc.
5735 ldr[b]<cond> r0, [r2, r3]. */
5737 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5738 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5748 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5749 struct regcache *regs,
5750 struct displaced_step_closure *dsc,
5751 int load, int size, int usermode)
5753 int immed = !bit (insn, 25);
5754 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5755 unsigned int rt = bits (insn, 12, 15);
5756 unsigned int rn = bits (insn, 16, 19);
5757 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5759 if (!insn_references_pc (insn, 0x000ff00ful))
5760 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5762 if (debug_displaced)
5763 fprintf_unfiltered (gdb_stdlog,
5764 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5765 load ? (size == 1 ? "ldrb" : "ldr")
5766 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5768 (unsigned long) insn);
5770 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5771 usermode, rt, rm, rn);
5773 if (load || rt != ARM_PC_REGNUM)
5775 dsc->u.ldst.restore_r4 = 0;
5778 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5780 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5781 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5783 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5785 {ldr,str}[b]<cond> r0, [r2, r3]. */
5786 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5790 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5791 dsc->u.ldst.restore_r4 = 1;
5792 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5793 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5794 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5795 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5796 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5800 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5802 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5807 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5812 /* Cleanup LDM instructions with fully-populated register list. This is an
5813 unfortunate corner case: it's impossible to implement correctly by modifying
5814 the instruction. The issue is as follows: we have an instruction,
5818 which we must rewrite to avoid loading PC. A possible solution would be to
5819 do the load in two halves, something like (with suitable cleanup
5823 ldm[id][ab] r8!, {r0-r7}
5825 ldm[id][ab] r8, {r7-r14}
5828 but at present there's no suitable place for <temp>, since the scratch space
5829 is overwritten before the cleanup routine is called. For now, we simply
5830 emulate the instruction. */
5833 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5834 struct displaced_step_closure *dsc)
5836 int inc = dsc->u.block.increment;
5837 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5838 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5839 uint32_t regmask = dsc->u.block.regmask;
5840 int regno = inc ? 0 : 15;
5841 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5842 int exception_return = dsc->u.block.load && dsc->u.block.user
5843 && (regmask & 0x8000) != 0;
5844 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5845 int do_transfer = condition_true (dsc->u.block.cond, status);
5846 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5851 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5852 sensible we can do here. Complain loudly. */
5853 if (exception_return)
5854 error (_("Cannot single-step exception return"));
5856 /* We don't handle any stores here for now. */
5857 gdb_assert (dsc->u.block.load != 0);
5859 if (debug_displaced)
5860 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5861 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5862 dsc->u.block.increment ? "inc" : "dec",
5863 dsc->u.block.before ? "before" : "after");
5870 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5873 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5876 xfer_addr += bump_before;
5878 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5879 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5881 xfer_addr += bump_after;
5883 regmask &= ~(1 << regno);
5886 if (dsc->u.block.writeback)
5887 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5891 /* Clean up an STM which included the PC in the register list. */
5894 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5895 struct displaced_step_closure *dsc)
5897 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5898 int store_executed = condition_true (dsc->u.block.cond, status);
5899 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5900 CORE_ADDR stm_insn_addr;
5903 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5905 /* If condition code fails, there's nothing else to do. */
5906 if (!store_executed)
5909 if (dsc->u.block.increment)
5911 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5913 if (dsc->u.block.before)
5918 pc_stored_at = dsc->u.block.xfer_addr;
5920 if (dsc->u.block.before)
5924 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5925 stm_insn_addr = dsc->scratch_base;
5926 offset = pc_val - stm_insn_addr;
5928 if (debug_displaced)
5929 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5930 "STM instruction\n", offset);
5932 /* Rewrite the stored PC to the proper value for the non-displaced original
5934 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5935 dsc->insn_addr + offset);
5938 /* Clean up an LDM which includes the PC in the register list. We clumped all
5939 the registers in the transferred list into a contiguous range r0...rX (to
5940 avoid loading PC directly and losing control of the debugged program), so we
5941 must undo that here. */
5944 cleanup_block_load_pc (struct gdbarch *gdbarch,
5945 struct regcache *regs,
5946 struct displaced_step_closure *dsc)
5948 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5949 int load_executed = condition_true (dsc->u.block.cond, status);
5950 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5951 unsigned int regs_loaded = bitcount (mask);
5952 unsigned int num_to_shuffle = regs_loaded, clobbered;
5954 /* The method employed here will fail if the register list is fully populated
5955 (we need to avoid loading PC directly). */
5956 gdb_assert (num_to_shuffle < 16);
5961 clobbered = (1 << num_to_shuffle) - 1;
5963 while (num_to_shuffle > 0)
5965 if ((mask & (1 << write_reg)) != 0)
5967 unsigned int read_reg = num_to_shuffle - 1;
5969 if (read_reg != write_reg)
5971 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5972 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5973 if (debug_displaced)
5974 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5975 "loaded register r%d to r%d\n"), read_reg,
5978 else if (debug_displaced)
5979 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5980 "r%d already in the right place\n"),
5983 clobbered &= ~(1 << write_reg);
5991 /* Restore any registers we scribbled over. */
5992 for (write_reg = 0; clobbered != 0; write_reg++)
5994 if ((clobbered & (1 << write_reg)) != 0)
5996 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5998 if (debug_displaced)
5999 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6000 "clobbered register r%d\n"), write_reg);
6001 clobbered &= ~(1 << write_reg);
6005 /* Perform register writeback manually. */
6006 if (dsc->u.block.writeback)
6008 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6010 if (dsc->u.block.increment)
6011 new_rn_val += regs_loaded * 4;
6013 new_rn_val -= regs_loaded * 4;
6015 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6020 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6021 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6024 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6025 struct regcache *regs,
6026 struct displaced_step_closure *dsc)
6028 int load = bit (insn, 20);
6029 int user = bit (insn, 22);
6030 int increment = bit (insn, 23);
6031 int before = bit (insn, 24);
6032 int writeback = bit (insn, 21);
6033 int rn = bits (insn, 16, 19);
6035 /* Block transfers which don't mention PC can be run directly
6037 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6038 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6040 if (rn == ARM_PC_REGNUM)
6042 warning (_("displaced: Unpredictable LDM or STM with "
6043 "base register r15"));
6044 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6047 if (debug_displaced)
6048 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6049 "%.8lx\n", (unsigned long) insn);
6051 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6052 dsc->u.block.rn = rn;
6054 dsc->u.block.load = load;
6055 dsc->u.block.user = user;
6056 dsc->u.block.increment = increment;
6057 dsc->u.block.before = before;
6058 dsc->u.block.writeback = writeback;
6059 dsc->u.block.cond = bits (insn, 28, 31);
6061 dsc->u.block.regmask = insn & 0xffff;
6065 if ((insn & 0xffff) == 0xffff)
6067 /* LDM with a fully-populated register list. This case is
6068 particularly tricky. Implement for now by fully emulating the
6069 instruction (which might not behave perfectly in all cases, but
6070 these instructions should be rare enough for that not to matter
6072 dsc->modinsn[0] = ARM_NOP;
6074 dsc->cleanup = &cleanup_block_load_all;
6078 /* LDM of a list of registers which includes PC. Implement by
6079 rewriting the list of registers to be transferred into a
6080 contiguous chunk r0...rX before doing the transfer, then shuffling
6081 registers into the correct places in the cleanup routine. */
6082 unsigned int regmask = insn & 0xffff;
6083 unsigned int num_in_list = bitcount (regmask), new_regmask;
6086 for (i = 0; i < num_in_list; i++)
6087 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6089 /* Writeback makes things complicated. We need to avoid clobbering
6090 the base register with one of the registers in our modified
6091 register list, but just using a different register can't work in
6094 ldm r14!, {r0-r13,pc}
6096 which would need to be rewritten as:
6100 but that can't work, because there's no free register for N.
6102 Solve this by turning off the writeback bit, and emulating
6103 writeback manually in the cleanup routine. */
6108 new_regmask = (1 << num_in_list) - 1;
6110 if (debug_displaced)
6111 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6112 "{..., pc}: original reg list %.4x, modified "
6113 "list %.4x\n"), rn, writeback ? "!" : "",
6114 (int) insn & 0xffff, new_regmask);
6116 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6118 dsc->cleanup = &cleanup_block_load_pc;
6123 /* STM of a list of registers which includes PC. Run the instruction
6124 as-is, but out of line: this will store the wrong value for the PC,
6125 so we must manually fix up the memory in the cleanup routine.
6126 Doing things this way has the advantage that we can auto-detect
6127 the offset of the PC write (which is architecture-dependent) in
6128 the cleanup routine. */
6129 dsc->modinsn[0] = insn;
6131 dsc->cleanup = &cleanup_block_store_pc;
6138 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6139 struct regcache *regs,
6140 struct displaced_step_closure *dsc)
6142 int rn = bits (insn1, 0, 3);
6143 int load = bit (insn1, 4);
6144 int writeback = bit (insn1, 5);
6146 /* Block transfers which don't mention PC can be run directly
6148 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6149 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6151 if (rn == ARM_PC_REGNUM)
6153 warning (_("displaced: Unpredictable LDM or STM with "
6154 "base register r15"));
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6156 "unpredictable ldm/stm", dsc);
6159 if (debug_displaced)
6160 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6161 "%.4x%.4x\n", insn1, insn2);
6163 /* Clear bit 13, since it should be always zero. */
6164 dsc->u.block.regmask = (insn2 & 0xdfff);
6165 dsc->u.block.rn = rn;
6167 dsc->u.block.load = load;
6168 dsc->u.block.user = 0;
6169 dsc->u.block.increment = bit (insn1, 7);
6170 dsc->u.block.before = bit (insn1, 8);
6171 dsc->u.block.writeback = writeback;
6172 dsc->u.block.cond = INST_AL;
6173 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6177 if (dsc->u.block.regmask == 0xffff)
6179 /* This branch is impossible to happen. */
6184 unsigned int regmask = dsc->u.block.regmask;
6185 unsigned int num_in_list = bitcount (regmask), new_regmask;
6188 for (i = 0; i < num_in_list; i++)
6189 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6194 new_regmask = (1 << num_in_list) - 1;
6196 if (debug_displaced)
6197 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6198 "{..., pc}: original reg list %.4x, modified "
6199 "list %.4x\n"), rn, writeback ? "!" : "",
6200 (int) dsc->u.block.regmask, new_regmask);
6202 dsc->modinsn[0] = insn1;
6203 dsc->modinsn[1] = (new_regmask & 0xffff);
6206 dsc->cleanup = &cleanup_block_load_pc;
6211 dsc->modinsn[0] = insn1;
6212 dsc->modinsn[1] = insn2;
6214 dsc->cleanup = &cleanup_block_store_pc;
6219 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6220 This is used to avoid a dependency on BFD's bfd_endian enum. */
6223 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6226 return read_memory_unsigned_integer (memaddr, len,
6227 (enum bfd_endian) byte_order);
6230 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6233 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6236 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6239 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6242 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6247 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6250 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6252 return arm_is_thumb (self->regcache);
6255 /* single_step() is called just before we want to resume the inferior,
6256 if we want to single-step it but there is no hardware or kernel
6257 single-step support. We find the target of the coming instructions
6258 and breakpoint them. */
6260 std::vector<CORE_ADDR>
6261 arm_software_single_step (struct regcache *regcache)
6263 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6264 struct arm_get_next_pcs next_pcs_ctx;
6266 arm_get_next_pcs_ctor (&next_pcs_ctx,
6267 &arm_get_next_pcs_ops,
6268 gdbarch_byte_order (gdbarch),
6269 gdbarch_byte_order_for_code (gdbarch),
6273 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6275 for (CORE_ADDR &pc_ref : next_pcs)
6276 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6281 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6282 for Linux, where some SVC instructions must be treated specially. */
6285 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6286 struct displaced_step_closure *dsc)
6288 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6290 if (debug_displaced)
6291 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6292 "%.8lx\n", (unsigned long) resume_addr);
6294 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6298 /* Common copy routine for svc instruciton. */
6301 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6302 struct displaced_step_closure *dsc)
6304 /* Preparation: none.
6305 Insn: unmodified svc.
6306 Cleanup: pc <- insn_addr + insn_size. */
6308 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6310 dsc->wrote_to_pc = 1;
6312 /* Allow OS-specific code to override SVC handling. */
6313 if (dsc->u.svc.copy_svc_os)
6314 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6317 dsc->cleanup = &cleanup_svc;
6323 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6324 struct regcache *regs, struct displaced_step_closure *dsc)
6327 if (debug_displaced)
6328 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6329 (unsigned long) insn);
6331 dsc->modinsn[0] = insn;
6333 return install_svc (gdbarch, regs, dsc);
6337 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6338 struct regcache *regs, struct displaced_step_closure *dsc)
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6345 dsc->modinsn[0] = insn;
6347 return install_svc (gdbarch, regs, dsc);
6350 /* Copy undefined instructions. */
6353 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6354 struct displaced_step_closure *dsc)
6356 if (debug_displaced)
6357 fprintf_unfiltered (gdb_stdlog,
6358 "displaced: copying undefined insn %.8lx\n",
6359 (unsigned long) insn);
6361 dsc->modinsn[0] = insn;
6367 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6368 struct displaced_step_closure *dsc)
6371 if (debug_displaced)
6372 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6373 "%.4x %.4x\n", (unsigned short) insn1,
6374 (unsigned short) insn2);
6376 dsc->modinsn[0] = insn1;
6377 dsc->modinsn[1] = insn2;
6383 /* Copy unpredictable instructions. */
6386 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6387 struct displaced_step_closure *dsc)
6389 if (debug_displaced)
6390 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6391 "%.8lx\n", (unsigned long) insn);
6393 dsc->modinsn[0] = insn;
6398 /* The decode_* functions are instruction decoding helpers. They mostly follow
6399 the presentation in the ARM ARM. */
6402 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6403 struct regcache *regs,
6404 struct displaced_step_closure *dsc)
6406 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6407 unsigned int rn = bits (insn, 16, 19);
6409 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6410 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6411 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6412 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6413 else if ((op1 & 0x60) == 0x20)
6414 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6415 else if ((op1 & 0x71) == 0x40)
6416 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6418 else if ((op1 & 0x77) == 0x41)
6419 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6420 else if ((op1 & 0x77) == 0x45)
6421 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6422 else if ((op1 & 0x77) == 0x51)
6425 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6427 return arm_copy_unpred (gdbarch, insn, dsc);
6429 else if ((op1 & 0x77) == 0x55)
6430 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6431 else if (op1 == 0x57)
6434 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6435 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6436 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6437 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6438 default: return arm_copy_unpred (gdbarch, insn, dsc);
6440 else if ((op1 & 0x63) == 0x43)
6441 return arm_copy_unpred (gdbarch, insn, dsc);
6442 else if ((op2 & 0x1) == 0x0)
6443 switch (op1 & ~0x80)
6446 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6448 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6449 case 0x71: case 0x75:
6451 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6452 case 0x63: case 0x67: case 0x73: case 0x77:
6453 return arm_copy_unpred (gdbarch, insn, dsc);
6455 return arm_copy_undef (gdbarch, insn, dsc);
6458 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6462 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6463 struct regcache *regs,
6464 struct displaced_step_closure *dsc)
6466 if (bit (insn, 27) == 0)
6467 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6468 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6469 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6472 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6475 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6477 case 0x4: case 0x5: case 0x6: case 0x7:
6478 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6481 switch ((insn & 0xe00000) >> 21)
6483 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6485 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6488 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6491 return arm_copy_undef (gdbarch, insn, dsc);
6496 int rn_f = (bits (insn, 16, 19) == 0xf);
6497 switch ((insn & 0xe00000) >> 21)
6500 /* ldc/ldc2 imm (undefined for rn == pc). */
6501 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6502 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6505 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6507 case 0x4: case 0x5: case 0x6: case 0x7:
6508 /* ldc/ldc2 lit (undefined for rn != pc). */
6509 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6510 : arm_copy_undef (gdbarch, insn, dsc);
6513 return arm_copy_undef (gdbarch, insn, dsc);
6518 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6521 if (bits (insn, 16, 19) == 0xf)
6523 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6525 return arm_copy_undef (gdbarch, insn, dsc);
6529 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6531 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6535 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6537 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6540 return arm_copy_undef (gdbarch, insn, dsc);
6544 /* Decode miscellaneous instructions in dp/misc encoding space. */
6547 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6548 struct regcache *regs,
6549 struct displaced_step_closure *dsc)
6551 unsigned int op2 = bits (insn, 4, 6);
6552 unsigned int op = bits (insn, 21, 22);
6557 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6560 if (op == 0x1) /* bx. */
6561 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6563 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6565 return arm_copy_undef (gdbarch, insn, dsc);
6569 /* Not really supported. */
6570 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6572 return arm_copy_undef (gdbarch, insn, dsc);
6576 return arm_copy_bx_blx_reg (gdbarch, insn,
6577 regs, dsc); /* blx register. */
6579 return arm_copy_undef (gdbarch, insn, dsc);
6582 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6586 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6588 /* Not really supported. */
6589 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6592 return arm_copy_undef (gdbarch, insn, dsc);
6597 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6598 struct regcache *regs,
6599 struct displaced_step_closure *dsc)
6602 switch (bits (insn, 20, 24))
6605 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6608 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6610 case 0x12: case 0x16:
6611 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6614 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6618 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6620 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6621 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6622 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6623 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6624 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6625 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6626 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6627 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6628 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6629 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6630 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6631 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6632 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6633 /* 2nd arg means "unprivileged". */
6634 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6638 /* Should be unreachable. */
6643 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6644 struct regcache *regs,
6645 struct displaced_step_closure *dsc)
6647 int a = bit (insn, 25), b = bit (insn, 4);
6648 uint32_t op1 = bits (insn, 20, 24);
6650 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6651 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6652 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6653 else if ((!a && (op1 & 0x17) == 0x02)
6654 || (a && (op1 & 0x17) == 0x02 && !b))
6655 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6656 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6657 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6658 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6659 else if ((!a && (op1 & 0x17) == 0x03)
6660 || (a && (op1 & 0x17) == 0x03 && !b))
6661 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6662 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6663 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6664 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6665 else if ((!a && (op1 & 0x17) == 0x06)
6666 || (a && (op1 & 0x17) == 0x06 && !b))
6667 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6668 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6669 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6670 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6671 else if ((!a && (op1 & 0x17) == 0x07)
6672 || (a && (op1 & 0x17) == 0x07 && !b))
6673 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6675 /* Should be unreachable. */
6680 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6681 struct displaced_step_closure *dsc)
6683 switch (bits (insn, 20, 24))
6685 case 0x00: case 0x01: case 0x02: case 0x03:
6686 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6688 case 0x04: case 0x05: case 0x06: case 0x07:
6689 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6691 case 0x08: case 0x09: case 0x0a: case 0x0b:
6692 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6693 return arm_copy_unmodified (gdbarch, insn,
6694 "decode/pack/unpack/saturate/reverse", dsc);
6697 if (bits (insn, 5, 7) == 0) /* op2. */
6699 if (bits (insn, 12, 15) == 0xf)
6700 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6702 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6705 return arm_copy_undef (gdbarch, insn, dsc);
6707 case 0x1a: case 0x1b:
6708 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6709 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6711 return arm_copy_undef (gdbarch, insn, dsc);
6713 case 0x1c: case 0x1d:
6714 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6716 if (bits (insn, 0, 3) == 0xf)
6717 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6719 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6722 return arm_copy_undef (gdbarch, insn, dsc);
6724 case 0x1e: case 0x1f:
6725 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6726 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6728 return arm_copy_undef (gdbarch, insn, dsc);
6731 /* Should be unreachable. */
6736 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6737 struct regcache *regs,
6738 struct displaced_step_closure *dsc)
6741 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6743 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6747 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6748 struct regcache *regs,
6749 struct displaced_step_closure *dsc)
6751 unsigned int opcode = bits (insn, 20, 24);
6755 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6756 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6758 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6759 case 0x12: case 0x16:
6760 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6762 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6763 case 0x13: case 0x17:
6764 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6766 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6767 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6768 /* Note: no writeback for these instructions. Bit 25 will always be
6769 zero though (via caller), so the following works OK. */
6770 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6773 /* Should be unreachable. */
6777 /* Decode shifted register instructions. */
6780 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6781 uint16_t insn2, struct regcache *regs,
6782 struct displaced_step_closure *dsc)
6784 /* PC is only allowed to be used in instruction MOV. */
6786 unsigned int op = bits (insn1, 5, 8);
6787 unsigned int rn = bits (insn1, 0, 3);
6789 if (op == 0x2 && rn == 0xf) /* MOV */
6790 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6792 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6793 "dp (shift reg)", dsc);
6797 /* Decode extension register load/store. Exactly the same as
6798 arm_decode_ext_reg_ld_st. */
6801 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6802 uint16_t insn2, struct regcache *regs,
6803 struct displaced_step_closure *dsc)
6805 unsigned int opcode = bits (insn1, 4, 8);
6809 case 0x04: case 0x05:
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6811 "vfp/neon vmov", dsc);
6813 case 0x08: case 0x0c: /* 01x00 */
6814 case 0x0a: case 0x0e: /* 01x10 */
6815 case 0x12: case 0x16: /* 10x10 */
6816 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6817 "vfp/neon vstm/vpush", dsc);
6819 case 0x09: case 0x0d: /* 01x01 */
6820 case 0x0b: case 0x0f: /* 01x11 */
6821 case 0x13: case 0x17: /* 10x11 */
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6823 "vfp/neon vldm/vpop", dsc);
6825 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6828 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6829 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6832 /* Should be unreachable. */
6837 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6838 struct regcache *regs, struct displaced_step_closure *dsc)
6840 unsigned int op1 = bits (insn, 20, 25);
6841 int op = bit (insn, 4);
6842 unsigned int coproc = bits (insn, 8, 11);
6844 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6845 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6846 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6847 && (coproc & 0xe) != 0xa)
6849 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6850 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6851 && (coproc & 0xe) != 0xa)
6852 /* ldc/ldc2 imm/lit. */
6853 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6854 else if ((op1 & 0x3e) == 0x00)
6855 return arm_copy_undef (gdbarch, insn, dsc);
6856 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6857 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6858 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6859 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6860 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6861 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6862 else if ((op1 & 0x30) == 0x20 && !op)
6864 if ((coproc & 0xe) == 0xa)
6865 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6867 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6869 else if ((op1 & 0x30) == 0x20 && op)
6870 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6871 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6872 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6873 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6874 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6875 else if ((op1 & 0x30) == 0x30)
6876 return arm_copy_svc (gdbarch, insn, regs, dsc);
6878 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6882 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6883 uint16_t insn2, struct regcache *regs,
6884 struct displaced_step_closure *dsc)
6886 unsigned int coproc = bits (insn2, 8, 11);
6887 unsigned int bit_5_8 = bits (insn1, 5, 8);
6888 unsigned int bit_9 = bit (insn1, 9);
6889 unsigned int bit_4 = bit (insn1, 4);
6894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6895 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6897 else if (bit_5_8 == 0) /* UNDEFINED. */
6898 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6901 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6902 if ((coproc & 0xe) == 0xa)
6903 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6905 else /* coproc is not 101x. */
6907 if (bit_4 == 0) /* STC/STC2. */
6908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6910 else /* LDC/LDC2 {literal, immeidate}. */
6911 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6917 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6923 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6924 struct displaced_step_closure *dsc, int rd)
6930 Preparation: Rd <- PC
6936 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6937 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6941 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6942 struct displaced_step_closure *dsc,
6943 int rd, unsigned int imm)
6946 /* Encoding T2: ADDS Rd, #imm */
6947 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6949 install_pc_relative (gdbarch, regs, dsc, rd);
6955 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6956 struct regcache *regs,
6957 struct displaced_step_closure *dsc)
6959 unsigned int rd = bits (insn, 8, 10);
6960 unsigned int imm8 = bits (insn, 0, 7);
6962 if (debug_displaced)
6963 fprintf_unfiltered (gdb_stdlog,
6964 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6967 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6971 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6972 uint16_t insn2, struct regcache *regs,
6973 struct displaced_step_closure *dsc)
6975 unsigned int rd = bits (insn2, 8, 11);
6976 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6977 extract raw immediate encoding rather than computing immediate. When
6978 generating ADD or SUB instruction, we can simply perform OR operation to
6979 set immediate into ADD. */
6980 unsigned int imm_3_8 = insn2 & 0x70ff;
6981 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6983 if (debug_displaced)
6984 fprintf_unfiltered (gdb_stdlog,
6985 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6986 rd, imm_i, imm_3_8, insn1, insn2);
6988 if (bit (insn1, 7)) /* Encoding T2 */
6990 /* Encoding T3: SUB Rd, Rd, #imm */
6991 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6992 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6994 else /* Encoding T3 */
6996 /* Encoding T3: ADD Rd, Rd, #imm */
6997 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6998 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7002 install_pc_relative (gdbarch, regs, dsc, rd);
7008 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7009 struct regcache *regs,
7010 struct displaced_step_closure *dsc)
7012 unsigned int rt = bits (insn1, 8, 10);
7014 int imm8 = (bits (insn1, 0, 7) << 2);
7020 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7022 Insn: LDR R0, [R2, R3];
7023 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7025 if (debug_displaced)
7026 fprintf_unfiltered (gdb_stdlog,
7027 "displaced: copying thumb ldr r%d [pc #%d]\n"
7030 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7031 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7032 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7033 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7034 /* The assembler calculates the required value of the offset from the
7035 Align(PC,4) value of this instruction to the label. */
7036 pc = pc & 0xfffffffc;
7038 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7039 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7042 dsc->u.ldst.xfersize = 4;
7044 dsc->u.ldst.immed = 0;
7045 dsc->u.ldst.writeback = 0;
7046 dsc->u.ldst.restore_r4 = 0;
7048 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7050 dsc->cleanup = &cleanup_load;
7055 /* Copy Thumb cbnz/cbz insruction. */
7058 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7059 struct regcache *regs,
7060 struct displaced_step_closure *dsc)
7062 int non_zero = bit (insn1, 11);
7063 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7064 CORE_ADDR from = dsc->insn_addr;
7065 int rn = bits (insn1, 0, 2);
7066 int rn_val = displaced_read_reg (regs, dsc, rn);
7068 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7069 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7070 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7071 condition is false, let it be, cleanup_branch will do nothing. */
7072 if (dsc->u.branch.cond)
7074 dsc->u.branch.cond = INST_AL;
7075 dsc->u.branch.dest = from + 4 + imm5;
7078 dsc->u.branch.dest = from + 2;
7080 dsc->u.branch.link = 0;
7081 dsc->u.branch.exchange = 0;
7083 if (debug_displaced)
7084 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7085 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7086 rn, rn_val, insn1, dsc->u.branch.dest);
7088 dsc->modinsn[0] = THUMB_NOP;
7090 dsc->cleanup = &cleanup_branch;
7094 /* Copy Table Branch Byte/Halfword */
7096 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7097 uint16_t insn2, struct regcache *regs,
7098 struct displaced_step_closure *dsc)
7100 ULONGEST rn_val, rm_val;
7101 int is_tbh = bit (insn2, 4);
7102 CORE_ADDR halfwords = 0;
7103 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7105 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7106 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7112 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7113 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7119 target_read_memory (rn_val + rm_val, buf, 1);
7120 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7123 if (debug_displaced)
7124 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7125 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7126 (unsigned int) rn_val, (unsigned int) rm_val,
7127 (unsigned int) halfwords);
7129 dsc->u.branch.cond = INST_AL;
7130 dsc->u.branch.link = 0;
7131 dsc->u.branch.exchange = 0;
7132 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7134 dsc->cleanup = &cleanup_branch;
7140 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7141 struct displaced_step_closure *dsc)
7144 int val = displaced_read_reg (regs, dsc, 7);
7145 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7148 val = displaced_read_reg (regs, dsc, 8);
7149 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7152 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7157 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7158 struct regcache *regs,
7159 struct displaced_step_closure *dsc)
7161 dsc->u.block.regmask = insn1 & 0x00ff;
7163 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7166 (1) register list is full, that is, r0-r7 are used.
7167 Prepare: tmp[0] <- r8
7169 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7170 MOV r8, r7; Move value of r7 to r8;
7171 POP {r7}; Store PC value into r7.
7173 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7175 (2) register list is not full, supposing there are N registers in
7176 register list (except PC, 0 <= N <= 7).
7177 Prepare: for each i, 0 - N, tmp[i] <- ri.
7179 POP {r0, r1, ...., rN};
7181 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7182 from tmp[] properly.
7184 if (debug_displaced)
7185 fprintf_unfiltered (gdb_stdlog,
7186 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7187 dsc->u.block.regmask, insn1);
7189 if (dsc->u.block.regmask == 0xff)
7191 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7193 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7194 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7195 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7198 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7202 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7204 unsigned int new_regmask;
7206 for (i = 0; i < num_in_list + 1; i++)
7207 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7209 new_regmask = (1 << (num_in_list + 1)) - 1;
7211 if (debug_displaced)
7212 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7213 "{..., pc}: original reg list %.4x,"
7214 " modified list %.4x\n"),
7215 (int) dsc->u.block.regmask, new_regmask);
7217 dsc->u.block.regmask |= 0x8000;
7218 dsc->u.block.writeback = 0;
7219 dsc->u.block.cond = INST_AL;
7221 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7223 dsc->cleanup = &cleanup_block_load_pc;
7230 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7231 struct regcache *regs,
7232 struct displaced_step_closure *dsc)
7234 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7235 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7238 /* 16-bit thumb instructions. */
7239 switch (op_bit_12_15)
7241 /* Shift (imme), add, subtract, move and compare. */
7242 case 0: case 1: case 2: case 3:
7243 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7244 "shift/add/sub/mov/cmp",
7248 switch (op_bit_10_11)
7250 case 0: /* Data-processing */
7251 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7255 case 1: /* Special data instructions and branch and exchange. */
7257 unsigned short op = bits (insn1, 7, 9);
7258 if (op == 6 || op == 7) /* BX or BLX */
7259 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7260 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7261 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7263 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7267 default: /* LDR (literal) */
7268 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7271 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7272 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7275 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7276 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7277 else /* Generate SP-relative address */
7278 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7280 case 11: /* Misc 16-bit instructions */
7282 switch (bits (insn1, 8, 11))
7284 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7285 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7287 case 12: case 13: /* POP */
7288 if (bit (insn1, 8)) /* PC is in register list. */
7289 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7291 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7293 case 15: /* If-Then, and hints */
7294 if (bits (insn1, 0, 3))
7295 /* If-Then makes up to four following instructions conditional.
7296 IT instruction itself is not conditional, so handle it as a
7297 common unmodified instruction. */
7298 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7301 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7304 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7309 if (op_bit_10_11 < 2) /* Store multiple registers */
7310 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7311 else /* Load multiple registers */
7312 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7314 case 13: /* Conditional branch and supervisor call */
7315 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7316 err = thumb_copy_b (gdbarch, insn1, dsc);
7318 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7320 case 14: /* Unconditional branch */
7321 err = thumb_copy_b (gdbarch, insn1, dsc);
7328 internal_error (__FILE__, __LINE__,
7329 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7333 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7334 uint16_t insn1, uint16_t insn2,
7335 struct regcache *regs,
7336 struct displaced_step_closure *dsc)
7338 int rt = bits (insn2, 12, 15);
7339 int rn = bits (insn1, 0, 3);
7340 int op1 = bits (insn1, 7, 8);
7342 switch (bits (insn1, 5, 6))
7344 case 0: /* Load byte and memory hints */
7345 if (rt == 0xf) /* PLD/PLI */
7348 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7349 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7351 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7356 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7357 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7360 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7361 "ldrb{reg, immediate}/ldrbt",
7366 case 1: /* Load halfword and memory hints. */
7367 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7368 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7369 "pld/unalloc memhint", dsc);
7373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7380 case 2: /* Load word */
7382 int insn2_bit_8_11 = bits (insn2, 8, 11);
7385 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7386 else if (op1 == 0x1) /* Encoding T3 */
7387 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7389 else /* op1 == 0x0 */
7391 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7392 /* LDR (immediate) */
7393 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7394 dsc, bit (insn2, 8), 1);
7395 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7396 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7399 /* LDR (register) */
7400 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7406 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7413 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7414 uint16_t insn2, struct regcache *regs,
7415 struct displaced_step_closure *dsc)
7418 unsigned short op = bit (insn2, 15);
7419 unsigned int op1 = bits (insn1, 11, 12);
7425 switch (bits (insn1, 9, 10))
7430 /* Load/store {dual, execlusive}, table branch. */
7431 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7432 && bits (insn2, 5, 7) == 0)
7433 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7436 /* PC is not allowed to use in load/store {dual, exclusive}
7438 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7439 "load/store dual/ex", dsc);
7441 else /* load/store multiple */
7443 switch (bits (insn1, 7, 8))
7445 case 0: case 3: /* SRS, RFE */
7446 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7449 case 1: case 2: /* LDM/STM/PUSH/POP */
7450 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7457 /* Data-processing (shift register). */
7458 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7461 default: /* Coprocessor instructions. */
7462 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7467 case 2: /* op1 = 2 */
7468 if (op) /* Branch and misc control. */
7470 if (bit (insn2, 14) /* BLX/BL */
7471 || bit (insn2, 12) /* Unconditional branch */
7472 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7473 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7480 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7482 int op = bits (insn1, 4, 8);
7483 int rn = bits (insn1, 0, 3);
7484 if ((op == 0 || op == 0xa) && rn == 0xf)
7485 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7488 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7491 else /* Data processing (modified immeidate) */
7492 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7496 case 3: /* op1 = 3 */
7497 switch (bits (insn1, 9, 10))
7501 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7503 else /* NEON Load/Store and Store single data item */
7504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 "neon elt/struct load/store",
7508 case 1: /* op1 = 3, bits (9, 10) == 1 */
7509 switch (bits (insn1, 7, 8))
7511 case 0: case 1: /* Data processing (register) */
7512 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7515 case 2: /* Multiply and absolute difference */
7516 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7517 "mul/mua/diff", dsc);
7519 case 3: /* Long multiply and divide */
7520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7525 default: /* Coprocessor instructions */
7526 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7535 internal_error (__FILE__, __LINE__,
7536 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7541 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7542 struct regcache *regs,
7543 struct displaced_step_closure *dsc)
7545 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7547 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7549 if (debug_displaced)
7550 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7551 "at %.8lx\n", insn1, (unsigned long) from);
7554 dsc->insn_size = thumb_insn_size (insn1);
7555 if (thumb_insn_size (insn1) == 4)
7558 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7559 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7562 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7566 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7567 CORE_ADDR to, struct regcache *regs,
7568 struct displaced_step_closure *dsc)
7571 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7574 /* Most displaced instructions use a 1-instruction scratch space, so set this
7575 here and override below if/when necessary. */
7577 dsc->insn_addr = from;
7578 dsc->scratch_base = to;
7579 dsc->cleanup = NULL;
7580 dsc->wrote_to_pc = 0;
7582 if (!displaced_in_arm_mode (regs))
7583 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7587 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7588 if (debug_displaced)
7589 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7590 "at %.8lx\n", (unsigned long) insn,
7591 (unsigned long) from);
7593 if ((insn & 0xf0000000) == 0xf0000000)
7594 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7595 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7597 case 0x0: case 0x1: case 0x2: case 0x3:
7598 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7601 case 0x4: case 0x5: case 0x6:
7602 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7606 err = arm_decode_media (gdbarch, insn, dsc);
7609 case 0x8: case 0x9: case 0xa: case 0xb:
7610 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7613 case 0xc: case 0xd: case 0xe: case 0xf:
7614 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7619 internal_error (__FILE__, __LINE__,
7620 _("arm_process_displaced_insn: Instruction decode error"));
7623 /* Actually set up the scratch space for a displaced instruction. */
7626 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7627 CORE_ADDR to, struct displaced_step_closure *dsc)
7629 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7630 unsigned int i, len, offset;
7631 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7632 int size = dsc->is_thumb? 2 : 4;
7633 const gdb_byte *bkp_insn;
7636 /* Poke modified instruction(s). */
7637 for (i = 0; i < dsc->numinsns; i++)
7639 if (debug_displaced)
7641 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7643 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7646 fprintf_unfiltered (gdb_stdlog, "%.4x",
7647 (unsigned short)dsc->modinsn[i]);
7649 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7650 (unsigned long) to + offset);
7653 write_memory_unsigned_integer (to + offset, size,
7654 byte_order_for_code,
7659 /* Choose the correct breakpoint instruction. */
7662 bkp_insn = tdep->thumb_breakpoint;
7663 len = tdep->thumb_breakpoint_size;
7667 bkp_insn = tdep->arm_breakpoint;
7668 len = tdep->arm_breakpoint_size;
7671 /* Put breakpoint afterwards. */
7672 write_memory (to + offset, bkp_insn, len);
7674 if (debug_displaced)
7675 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7676 paddress (gdbarch, from), paddress (gdbarch, to));
7679 /* Entry point for cleaning things up after a displaced instruction has been
7683 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7684 struct displaced_step_closure *dsc,
7685 CORE_ADDR from, CORE_ADDR to,
7686 struct regcache *regs)
7689 dsc->cleanup (gdbarch, regs, dsc);
7691 if (!dsc->wrote_to_pc)
7692 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7693 dsc->insn_addr + dsc->insn_size);
7697 #include "bfd-in2.h"
7698 #include "libcoff.h"
7701 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7703 gdb_disassembler *di
7704 = static_cast<gdb_disassembler *>(info->application_data);
7705 struct gdbarch *gdbarch = di->arch ();
7707 if (arm_pc_is_thumb (gdbarch, memaddr))
7709 static asymbol *asym;
7710 static combined_entry_type ce;
7711 static struct coff_symbol_struct csym;
7712 static struct bfd fake_bfd;
7713 static bfd_target fake_target;
7715 if (csym.native == NULL)
7717 /* Create a fake symbol vector containing a Thumb symbol.
7718 This is solely so that the code in print_insn_little_arm()
7719 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7720 the presence of a Thumb symbol and switch to decoding
7721 Thumb instructions. */
7723 fake_target.flavour = bfd_target_coff_flavour;
7724 fake_bfd.xvec = &fake_target;
7725 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7727 csym.symbol.the_bfd = &fake_bfd;
7728 csym.symbol.name = "fake";
7729 asym = (asymbol *) & csym;
7732 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7733 info->symbols = &asym;
7736 info->symbols = NULL;
7738 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7739 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7740 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7741 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7742 in default_print_insn. */
7743 if (exec_bfd != NULL)
7744 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7746 return default_print_insn (memaddr, info);
7749 /* The following define instruction sequences that will cause ARM
7750 cpu's to take an undefined instruction trap. These are used to
7751 signal a breakpoint to GDB.
7753 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7754 modes. A different instruction is required for each mode. The ARM
7755 cpu's can also be big or little endian. Thus four different
7756 instructions are needed to support all cases.
7758 Note: ARMv4 defines several new instructions that will take the
7759 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7760 not in fact add the new instructions. The new undefined
7761 instructions in ARMv4 are all instructions that had no defined
7762 behaviour in earlier chips. There is no guarantee that they will
7763 raise an exception, but may be treated as NOP's. In practice, it
7764 may only safe to rely on instructions matching:
7766 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7767 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7768 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7770 Even this may only true if the condition predicate is true. The
7771 following use a condition predicate of ALWAYS so it is always TRUE.
7773 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7774 and NetBSD all use a software interrupt rather than an undefined
7775 instruction to force a trap. This can be handled by by the
7776 abi-specific code during establishment of the gdbarch vector. */
7778 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7779 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7780 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7781 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7783 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7784 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7785 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7786 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7788 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7791 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7793 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7794 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7796 if (arm_pc_is_thumb (gdbarch, *pcptr))
7798 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7800 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7801 check whether we are replacing a 32-bit instruction. */
7802 if (tdep->thumb2_breakpoint != NULL)
7806 if (target_read_memory (*pcptr, buf, 2) == 0)
7808 unsigned short inst1;
7810 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7811 if (thumb_insn_size (inst1) == 4)
7812 return ARM_BP_KIND_THUMB2;
7816 return ARM_BP_KIND_THUMB;
7819 return ARM_BP_KIND_ARM;
7823 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7825 static const gdb_byte *
7826 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7828 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7832 case ARM_BP_KIND_ARM:
7833 *size = tdep->arm_breakpoint_size;
7834 return tdep->arm_breakpoint;
7835 case ARM_BP_KIND_THUMB:
7836 *size = tdep->thumb_breakpoint_size;
7837 return tdep->thumb_breakpoint;
7838 case ARM_BP_KIND_THUMB2:
7839 *size = tdep->thumb2_breakpoint_size;
7840 return tdep->thumb2_breakpoint;
7842 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7846 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7849 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7850 struct regcache *regcache,
7855 /* Check the memory pointed by PC is readable. */
7856 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7858 struct arm_get_next_pcs next_pcs_ctx;
7860 arm_get_next_pcs_ctor (&next_pcs_ctx,
7861 &arm_get_next_pcs_ops,
7862 gdbarch_byte_order (gdbarch),
7863 gdbarch_byte_order_for_code (gdbarch),
7867 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7869 /* If MEMADDR is the next instruction of current pc, do the
7870 software single step computation, and get the thumb mode by
7871 the destination address. */
7872 for (CORE_ADDR pc : next_pcs)
7874 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7876 if (IS_THUMB_ADDR (pc))
7878 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7879 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7882 return ARM_BP_KIND_ARM;
7887 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7890 /* Extract from an array REGBUF containing the (raw) register state a
7891 function return value of type TYPE, and copy that, in virtual
7892 format, into VALBUF. */
7895 arm_extract_return_value (struct type *type, struct regcache *regs,
7898 struct gdbarch *gdbarch = get_regcache_arch (regs);
7899 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7901 if (TYPE_CODE_FLT == TYPE_CODE (type))
7903 switch (gdbarch_tdep (gdbarch)->fp_model)
7907 /* The value is in register F0 in internal format. We need to
7908 extract the raw value and then convert it to the desired
7910 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7912 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7913 convert_typed_floating (tmpbuf, arm_ext_type (gdbarch),
7918 case ARM_FLOAT_SOFT_FPA:
7919 case ARM_FLOAT_SOFT_VFP:
7920 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7921 not using the VFP ABI code. */
7923 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7924 if (TYPE_LENGTH (type) > 4)
7925 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7926 valbuf + INT_REGISTER_SIZE);
7930 internal_error (__FILE__, __LINE__,
7931 _("arm_extract_return_value: "
7932 "Floating point model not supported"));
7936 else if (TYPE_CODE (type) == TYPE_CODE_INT
7937 || TYPE_CODE (type) == TYPE_CODE_CHAR
7938 || TYPE_CODE (type) == TYPE_CODE_BOOL
7939 || TYPE_CODE (type) == TYPE_CODE_PTR
7940 || TYPE_IS_REFERENCE (type)
7941 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7943 /* If the type is a plain integer, then the access is
7944 straight-forward. Otherwise we have to play around a bit
7946 int len = TYPE_LENGTH (type);
7947 int regno = ARM_A1_REGNUM;
7952 /* By using store_unsigned_integer we avoid having to do
7953 anything special for small big-endian values. */
7954 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7955 store_unsigned_integer (valbuf,
7956 (len > INT_REGISTER_SIZE
7957 ? INT_REGISTER_SIZE : len),
7959 len -= INT_REGISTER_SIZE;
7960 valbuf += INT_REGISTER_SIZE;
7965 /* For a structure or union the behaviour is as if the value had
7966 been stored to word-aligned memory and then loaded into
7967 registers with 32-bit load instruction(s). */
7968 int len = TYPE_LENGTH (type);
7969 int regno = ARM_A1_REGNUM;
7970 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7974 regcache_cooked_read (regs, regno++, tmpbuf);
7975 memcpy (valbuf, tmpbuf,
7976 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7977 len -= INT_REGISTER_SIZE;
7978 valbuf += INT_REGISTER_SIZE;
7984 /* Will a function return an aggregate type in memory or in a
7985 register? Return 0 if an aggregate type can be returned in a
7986 register, 1 if it must be returned in memory. */
7989 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7991 enum type_code code;
7993 type = check_typedef (type);
7995 /* Simple, non-aggregate types (ie not including vectors and
7996 complex) are always returned in a register (or registers). */
7997 code = TYPE_CODE (type);
7998 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7999 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8002 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8004 /* Vector values should be returned using ARM registers if they
8005 are not over 16 bytes. */
8006 return (TYPE_LENGTH (type) > 16);
8009 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8011 /* The AAPCS says all aggregates not larger than a word are returned
8013 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8022 /* All aggregate types that won't fit in a register must be returned
8024 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8027 /* In the ARM ABI, "integer" like aggregate types are returned in
8028 registers. For an aggregate type to be integer like, its size
8029 must be less than or equal to INT_REGISTER_SIZE and the
8030 offset of each addressable subfield must be zero. Note that bit
8031 fields are not addressable, and all addressable subfields of
8032 unions always start at offset zero.
8034 This function is based on the behaviour of GCC 2.95.1.
8035 See: gcc/arm.c: arm_return_in_memory() for details.
8037 Note: All versions of GCC before GCC 2.95.2 do not set up the
8038 parameters correctly for a function returning the following
8039 structure: struct { float f;}; This should be returned in memory,
8040 not a register. Richard Earnshaw sent me a patch, but I do not
8041 know of any way to detect if a function like the above has been
8042 compiled with the correct calling convention. */
8044 /* Assume all other aggregate types can be returned in a register.
8045 Run a check for structures, unions and arrays. */
8048 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8051 /* Need to check if this struct/union is "integer" like. For
8052 this to be true, its size must be less than or equal to
8053 INT_REGISTER_SIZE and the offset of each addressable
8054 subfield must be zero. Note that bit fields are not
8055 addressable, and unions always start at offset zero. If any
8056 of the subfields is a floating point type, the struct/union
8057 cannot be an integer type. */
8059 /* For each field in the object, check:
8060 1) Is it FP? --> yes, nRc = 1;
8061 2) Is it addressable (bitpos != 0) and
8062 not packed (bitsize == 0)?
8066 for (i = 0; i < TYPE_NFIELDS (type); i++)
8068 enum type_code field_type_code;
8071 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8074 /* Is it a floating point type field? */
8075 if (field_type_code == TYPE_CODE_FLT)
8081 /* If bitpos != 0, then we have to care about it. */
8082 if (TYPE_FIELD_BITPOS (type, i) != 0)
8084 /* Bitfields are not addressable. If the field bitsize is
8085 zero, then the field is not packed. Hence it cannot be
8086 a bitfield or any other packed type. */
8087 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8100 /* Write into appropriate registers a function return value of type
8101 TYPE, given in virtual format. */
8104 arm_store_return_value (struct type *type, struct regcache *regs,
8105 const gdb_byte *valbuf)
8107 struct gdbarch *gdbarch = get_regcache_arch (regs);
8108 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8110 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8112 gdb_byte buf[FP_REGISTER_SIZE];
8114 switch (gdbarch_tdep (gdbarch)->fp_model)
8118 convert_typed_floating (valbuf, type, buf, arm_ext_type (gdbarch));
8119 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8122 case ARM_FLOAT_SOFT_FPA:
8123 case ARM_FLOAT_SOFT_VFP:
8124 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8125 not using the VFP ABI code. */
8127 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8128 if (TYPE_LENGTH (type) > 4)
8129 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8130 valbuf + INT_REGISTER_SIZE);
8134 internal_error (__FILE__, __LINE__,
8135 _("arm_store_return_value: Floating "
8136 "point model not supported"));
8140 else if (TYPE_CODE (type) == TYPE_CODE_INT
8141 || TYPE_CODE (type) == TYPE_CODE_CHAR
8142 || TYPE_CODE (type) == TYPE_CODE_BOOL
8143 || TYPE_CODE (type) == TYPE_CODE_PTR
8144 || TYPE_IS_REFERENCE (type)
8145 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8147 if (TYPE_LENGTH (type) <= 4)
8149 /* Values of one word or less are zero/sign-extended and
8151 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8152 LONGEST val = unpack_long (type, valbuf);
8154 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8155 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8159 /* Integral values greater than one word are stored in consecutive
8160 registers starting with r0. This will always be a multiple of
8161 the regiser size. */
8162 int len = TYPE_LENGTH (type);
8163 int regno = ARM_A1_REGNUM;
8167 regcache_cooked_write (regs, regno++, valbuf);
8168 len -= INT_REGISTER_SIZE;
8169 valbuf += INT_REGISTER_SIZE;
8175 /* For a structure or union the behaviour is as if the value had
8176 been stored to word-aligned memory and then loaded into
8177 registers with 32-bit load instruction(s). */
8178 int len = TYPE_LENGTH (type);
8179 int regno = ARM_A1_REGNUM;
8180 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8184 memcpy (tmpbuf, valbuf,
8185 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8186 regcache_cooked_write (regs, regno++, tmpbuf);
8187 len -= INT_REGISTER_SIZE;
8188 valbuf += INT_REGISTER_SIZE;
8194 /* Handle function return values. */
8196 static enum return_value_convention
8197 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8198 struct type *valtype, struct regcache *regcache,
8199 gdb_byte *readbuf, const gdb_byte *writebuf)
8201 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8202 struct type *func_type = function ? value_type (function) : NULL;
8203 enum arm_vfp_cprc_base_type vfp_base_type;
8206 if (arm_vfp_abi_for_function (gdbarch, func_type)
8207 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8209 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8210 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8212 for (i = 0; i < vfp_base_count; i++)
8214 if (reg_char == 'q')
8217 arm_neon_quad_write (gdbarch, regcache, i,
8218 writebuf + i * unit_length);
8221 arm_neon_quad_read (gdbarch, regcache, i,
8222 readbuf + i * unit_length);
8229 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8230 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8233 regcache_cooked_write (regcache, regnum,
8234 writebuf + i * unit_length);
8236 regcache_cooked_read (regcache, regnum,
8237 readbuf + i * unit_length);
8240 return RETURN_VALUE_REGISTER_CONVENTION;
8243 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8244 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8245 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8247 if (tdep->struct_return == pcc_struct_return
8248 || arm_return_in_memory (gdbarch, valtype))
8249 return RETURN_VALUE_STRUCT_CONVENTION;
8251 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8253 if (arm_return_in_memory (gdbarch, valtype))
8254 return RETURN_VALUE_STRUCT_CONVENTION;
8258 arm_store_return_value (valtype, regcache, writebuf);
8261 arm_extract_return_value (valtype, regcache, readbuf);
8263 return RETURN_VALUE_REGISTER_CONVENTION;
8268 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8270 struct gdbarch *gdbarch = get_frame_arch (frame);
8271 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8272 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8274 gdb_byte buf[INT_REGISTER_SIZE];
8276 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8278 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8282 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8286 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8287 return the target PC. Otherwise return 0. */
8290 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8294 CORE_ADDR start_addr;
8296 /* Find the starting address and name of the function containing the PC. */
8297 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8299 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8301 start_addr = arm_skip_bx_reg (frame, pc);
8302 if (start_addr != 0)
8308 /* If PC is in a Thumb call or return stub, return the address of the
8309 target PC, which is in a register. The thunk functions are called
8310 _call_via_xx, where x is the register name. The possible names
8311 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8312 functions, named __ARM_call_via_r[0-7]. */
8313 if (startswith (name, "_call_via_")
8314 || startswith (name, "__ARM_call_via_"))
8316 /* Use the name suffix to determine which register contains the
8318 static const char *table[15] =
8319 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8320 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8323 int offset = strlen (name) - 2;
8325 for (regno = 0; regno <= 14; regno++)
8326 if (strcmp (&name[offset], table[regno]) == 0)
8327 return get_frame_register_unsigned (frame, regno);
8330 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8331 non-interworking calls to foo. We could decode the stubs
8332 to find the target but it's easier to use the symbol table. */
8333 namelen = strlen (name);
8334 if (name[0] == '_' && name[1] == '_'
8335 && ((namelen > 2 + strlen ("_from_thumb")
8336 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8337 || (namelen > 2 + strlen ("_from_arm")
8338 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8341 int target_len = namelen - 2;
8342 struct bound_minimal_symbol minsym;
8343 struct objfile *objfile;
8344 struct obj_section *sec;
8346 if (name[namelen - 1] == 'b')
8347 target_len -= strlen ("_from_thumb");
8349 target_len -= strlen ("_from_arm");
8351 target_name = (char *) alloca (target_len + 1);
8352 memcpy (target_name, name + 2, target_len);
8353 target_name[target_len] = '\0';
8355 sec = find_pc_section (pc);
8356 objfile = (sec == NULL) ? NULL : sec->objfile;
8357 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8358 if (minsym.minsym != NULL)
8359 return BMSYMBOL_VALUE_ADDRESS (minsym);
8364 return 0; /* not a stub */
8368 set_arm_command (char *args, int from_tty)
8370 printf_unfiltered (_("\
8371 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8372 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8376 show_arm_command (char *args, int from_tty)
8378 cmd_show_list (showarmcmdlist, from_tty, "");
8382 arm_update_current_architecture (void)
8384 struct gdbarch_info info;
8386 /* If the current architecture is not ARM, we have nothing to do. */
8387 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8390 /* Update the architecture. */
8391 gdbarch_info_init (&info);
8393 if (!gdbarch_update_p (info))
8394 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8398 set_fp_model_sfunc (char *args, int from_tty,
8399 struct cmd_list_element *c)
8403 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8404 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8406 arm_fp_model = (enum arm_float_model) fp_model;
8410 if (fp_model == ARM_FLOAT_LAST)
8411 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8414 arm_update_current_architecture ();
8418 show_fp_model (struct ui_file *file, int from_tty,
8419 struct cmd_list_element *c, const char *value)
8421 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8423 if (arm_fp_model == ARM_FLOAT_AUTO
8424 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8425 fprintf_filtered (file, _("\
8426 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8427 fp_model_strings[tdep->fp_model]);
8429 fprintf_filtered (file, _("\
8430 The current ARM floating point model is \"%s\".\n"),
8431 fp_model_strings[arm_fp_model]);
8435 arm_set_abi (char *args, int from_tty,
8436 struct cmd_list_element *c)
8440 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8441 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8443 arm_abi_global = (enum arm_abi_kind) arm_abi;
8447 if (arm_abi == ARM_ABI_LAST)
8448 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8451 arm_update_current_architecture ();
8455 arm_show_abi (struct ui_file *file, int from_tty,
8456 struct cmd_list_element *c, const char *value)
8458 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8460 if (arm_abi_global == ARM_ABI_AUTO
8461 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8462 fprintf_filtered (file, _("\
8463 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8464 arm_abi_strings[tdep->arm_abi]);
8466 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8471 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8472 struct cmd_list_element *c, const char *value)
8474 fprintf_filtered (file,
8475 _("The current execution mode assumed "
8476 "(when symbols are unavailable) is \"%s\".\n"),
8477 arm_fallback_mode_string);
8481 arm_show_force_mode (struct ui_file *file, int from_tty,
8482 struct cmd_list_element *c, const char *value)
8484 fprintf_filtered (file,
8485 _("The current execution mode assumed "
8486 "(even when symbols are available) is \"%s\".\n"),
8487 arm_force_mode_string);
8490 /* If the user changes the register disassembly style used for info
8491 register and other commands, we have to also switch the style used
8492 in opcodes for disassembly output. This function is run in the "set
8493 arm disassembly" command, and does that. */
8496 set_disassembly_style_sfunc (char *args, int from_tty,
8497 struct cmd_list_element *c)
8499 /* Convert the short style name into the long style name (eg, reg-names-*)
8500 before calling the generic set_disassembler_options() function. */
8501 std::string long_name = std::string ("reg-names-") + disassembly_style;
8502 set_disassembler_options (&long_name[0]);
8506 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8507 struct cmd_list_element *c, const char *value)
8509 struct gdbarch *gdbarch = get_current_arch ();
8510 char *options = get_disassembler_options (gdbarch);
8511 const char *style = "";
8515 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8516 if (CONST_STRNEQ (opt, "reg-names-"))
8518 style = &opt[strlen ("reg-names-")];
8519 len = strcspn (style, ",");
8522 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8525 /* Return the ARM register name corresponding to register I. */
8527 arm_register_name (struct gdbarch *gdbarch, int i)
8529 const int num_regs = gdbarch_num_regs (gdbarch);
8531 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8532 && i >= num_regs && i < num_regs + 32)
8534 static const char *const vfp_pseudo_names[] = {
8535 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8536 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8537 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8538 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8541 return vfp_pseudo_names[i - num_regs];
8544 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8545 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8547 static const char *const neon_pseudo_names[] = {
8548 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8549 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8552 return neon_pseudo_names[i - num_regs - 32];
8555 if (i >= ARRAY_SIZE (arm_register_names))
8556 /* These registers are only supported on targets which supply
8557 an XML description. */
8560 return arm_register_names[i];
8563 /* Test whether the coff symbol specific value corresponds to a Thumb
8567 coff_sym_is_thumb (int val)
8569 return (val == C_THUMBEXT
8570 || val == C_THUMBSTAT
8571 || val == C_THUMBEXTFUNC
8572 || val == C_THUMBSTATFUNC
8573 || val == C_THUMBLABEL);
8576 /* arm_coff_make_msymbol_special()
8577 arm_elf_make_msymbol_special()
8579 These functions test whether the COFF or ELF symbol corresponds to
8580 an address in thumb code, and set a "special" bit in a minimal
8581 symbol to indicate that it does. */
8584 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8586 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8588 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8589 == ST_BRANCH_TO_THUMB)
8590 MSYMBOL_SET_SPECIAL (msym);
8594 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8596 if (coff_sym_is_thumb (val))
8597 MSYMBOL_SET_SPECIAL (msym);
8601 arm_objfile_data_free (struct objfile *objfile, void *arg)
8603 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8606 for (i = 0; i < objfile->obfd->section_count; i++)
8607 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8611 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8614 const char *name = bfd_asymbol_name (sym);
8615 struct arm_per_objfile *data;
8616 VEC(arm_mapping_symbol_s) **map_p;
8617 struct arm_mapping_symbol new_map_sym;
8619 gdb_assert (name[0] == '$');
8620 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8623 data = (struct arm_per_objfile *) objfile_data (objfile,
8624 arm_objfile_data_key);
8627 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8628 struct arm_per_objfile);
8629 set_objfile_data (objfile, arm_objfile_data_key, data);
8630 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8631 objfile->obfd->section_count,
8632 VEC(arm_mapping_symbol_s) *);
8634 map_p = &data->section_maps[bfd_get_section (sym)->index];
8636 new_map_sym.value = sym->value;
8637 new_map_sym.type = name[1];
8639 /* Assume that most mapping symbols appear in order of increasing
8640 value. If they were randomly distributed, it would be faster to
8641 always push here and then sort at first use. */
8642 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8644 struct arm_mapping_symbol *prev_map_sym;
8646 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8647 if (prev_map_sym->value >= sym->value)
8650 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8651 arm_compare_mapping_symbols);
8652 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8657 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8661 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8663 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8664 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8666 /* If necessary, set the T bit. */
8669 ULONGEST val, t_bit;
8670 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8671 t_bit = arm_psr_thumb_bit (gdbarch);
8672 if (arm_pc_is_thumb (gdbarch, pc))
8673 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8676 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8681 /* Read the contents of a NEON quad register, by reading from two
8682 double registers. This is used to implement the quad pseudo
8683 registers, and for argument passing in case the quad registers are
8684 missing; vectors are passed in quad registers when using the VFP
8685 ABI, even if a NEON unit is not present. REGNUM is the index of
8686 the quad register, in [0, 15]. */
8688 static enum register_status
8689 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8690 int regnum, gdb_byte *buf)
8693 gdb_byte reg_buf[8];
8694 int offset, double_regnum;
8695 enum register_status status;
8697 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8698 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8701 /* d0 is always the least significant half of q0. */
8702 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8707 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8708 if (status != REG_VALID)
8710 memcpy (buf + offset, reg_buf, 8);
8712 offset = 8 - offset;
8713 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8714 if (status != REG_VALID)
8716 memcpy (buf + offset, reg_buf, 8);
8721 static enum register_status
8722 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8723 int regnum, gdb_byte *buf)
8725 const int num_regs = gdbarch_num_regs (gdbarch);
8727 gdb_byte reg_buf[8];
8728 int offset, double_regnum;
8730 gdb_assert (regnum >= num_regs);
8733 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8734 /* Quad-precision register. */
8735 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8738 enum register_status status;
8740 /* Single-precision register. */
8741 gdb_assert (regnum < 32);
8743 /* s0 is always the least significant half of d0. */
8744 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8745 offset = (regnum & 1) ? 0 : 4;
8747 offset = (regnum & 1) ? 4 : 0;
8749 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8750 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8753 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8754 if (status == REG_VALID)
8755 memcpy (buf, reg_buf + offset, 4);
8760 /* Store the contents of BUF to a NEON quad register, by writing to
8761 two double registers. This is used to implement the quad pseudo
8762 registers, and for argument passing in case the quad registers are
8763 missing; vectors are passed in quad registers when using the VFP
8764 ABI, even if a NEON unit is not present. REGNUM is the index
8765 of the quad register, in [0, 15]. */
8768 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8769 int regnum, const gdb_byte *buf)
8772 int offset, double_regnum;
8774 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8775 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8778 /* d0 is always the least significant half of q0. */
8779 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8784 regcache_raw_write (regcache, double_regnum, buf + offset);
8785 offset = 8 - offset;
8786 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8790 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8791 int regnum, const gdb_byte *buf)
8793 const int num_regs = gdbarch_num_regs (gdbarch);
8795 gdb_byte reg_buf[8];
8796 int offset, double_regnum;
8798 gdb_assert (regnum >= num_regs);
8801 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8802 /* Quad-precision register. */
8803 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8806 /* Single-precision register. */
8807 gdb_assert (regnum < 32);
8809 /* s0 is always the least significant half of d0. */
8810 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8811 offset = (regnum & 1) ? 0 : 4;
8813 offset = (regnum & 1) ? 4 : 0;
8815 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8816 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8819 regcache_raw_read (regcache, double_regnum, reg_buf);
8820 memcpy (reg_buf + offset, buf, 4);
8821 regcache_raw_write (regcache, double_regnum, reg_buf);
8825 static struct value *
8826 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8828 const int *reg_p = (const int *) baton;
8829 return value_of_register (*reg_p, frame);
8832 static enum gdb_osabi
8833 arm_elf_osabi_sniffer (bfd *abfd)
8835 unsigned int elfosabi;
8836 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8838 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8840 if (elfosabi == ELFOSABI_ARM)
8841 /* GNU tools use this value. Check note sections in this case,
8843 bfd_map_over_sections (abfd,
8844 generic_elf_osabi_sniff_abi_tag_sections,
8847 /* Anything else will be handled by the generic ELF sniffer. */
8852 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8853 struct reggroup *group)
8855 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8856 this, FPS register belongs to save_regroup, restore_reggroup, and
8857 all_reggroup, of course. */
8858 if (regnum == ARM_FPS_REGNUM)
8859 return (group == float_reggroup
8860 || group == save_reggroup
8861 || group == restore_reggroup
8862 || group == all_reggroup);
8864 return default_register_reggroup_p (gdbarch, regnum, group);
8868 /* For backward-compatibility we allow two 'g' packet lengths with
8869 the remote protocol depending on whether FPA registers are
8870 supplied. M-profile targets do not have FPA registers, but some
8871 stubs already exist in the wild which use a 'g' packet which
8872 supplies them albeit with dummy values. The packet format which
8873 includes FPA registers should be considered deprecated for
8874 M-profile targets. */
8877 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8879 if (gdbarch_tdep (gdbarch)->is_m)
8881 /* If we know from the executable this is an M-profile target,
8882 cater for remote targets whose register set layout is the
8883 same as the FPA layout. */
8884 register_remote_g_packet_guess (gdbarch,
8885 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8886 (16 * INT_REGISTER_SIZE)
8887 + (8 * FP_REGISTER_SIZE)
8888 + (2 * INT_REGISTER_SIZE),
8889 tdesc_arm_with_m_fpa_layout);
8891 /* The regular M-profile layout. */
8892 register_remote_g_packet_guess (gdbarch,
8893 /* r0-r12,sp,lr,pc; xpsr */
8894 (16 * INT_REGISTER_SIZE)
8895 + INT_REGISTER_SIZE,
8898 /* M-profile plus M4F VFP. */
8899 register_remote_g_packet_guess (gdbarch,
8900 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8901 (16 * INT_REGISTER_SIZE)
8902 + (16 * VFP_REGISTER_SIZE)
8903 + (2 * INT_REGISTER_SIZE),
8904 tdesc_arm_with_m_vfp_d16);
8907 /* Otherwise we don't have a useful guess. */
8910 /* Implement the code_of_frame_writable gdbarch method. */
8913 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8915 if (gdbarch_tdep (gdbarch)->is_m
8916 && get_frame_type (frame) == SIGTRAMP_FRAME)
8918 /* M-profile exception frames return to some magic PCs, where
8919 isn't writable at all. */
8927 /* Initialize the current architecture based on INFO. If possible,
8928 re-use an architecture from ARCHES, which is a list of
8929 architectures already created during this debugging session.
8931 Called e.g. at program startup, when reading a core file, and when
8932 reading a binary file. */
8934 static struct gdbarch *
8935 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8937 struct gdbarch_tdep *tdep;
8938 struct gdbarch *gdbarch;
8939 struct gdbarch_list *best_arch;
8940 enum arm_abi_kind arm_abi = arm_abi_global;
8941 enum arm_float_model fp_model = arm_fp_model;
8942 struct tdesc_arch_data *tdesc_data = NULL;
8944 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8945 int have_wmmx_registers = 0;
8947 int have_fpa_registers = 1;
8948 const struct target_desc *tdesc = info.target_desc;
8950 /* If we have an object to base this architecture on, try to determine
8953 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8955 int ei_osabi, e_flags;
8957 switch (bfd_get_flavour (info.abfd))
8959 case bfd_target_coff_flavour:
8960 /* Assume it's an old APCS-style ABI. */
8962 arm_abi = ARM_ABI_APCS;
8965 case bfd_target_elf_flavour:
8966 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8967 e_flags = elf_elfheader (info.abfd)->e_flags;
8969 if (ei_osabi == ELFOSABI_ARM)
8971 /* GNU tools used to use this value, but do not for EABI
8972 objects. There's nowhere to tag an EABI version
8973 anyway, so assume APCS. */
8974 arm_abi = ARM_ABI_APCS;
8976 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8978 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8979 int attr_arch, attr_profile;
8983 case EF_ARM_EABI_UNKNOWN:
8984 /* Assume GNU tools. */
8985 arm_abi = ARM_ABI_APCS;
8988 case EF_ARM_EABI_VER4:
8989 case EF_ARM_EABI_VER5:
8990 arm_abi = ARM_ABI_AAPCS;
8991 /* EABI binaries default to VFP float ordering.
8992 They may also contain build attributes that can
8993 be used to identify if the VFP argument-passing
8995 if (fp_model == ARM_FLOAT_AUTO)
8998 switch (bfd_elf_get_obj_attr_int (info.abfd,
9002 case AEABI_VFP_args_base:
9003 /* "The user intended FP parameter/result
9004 passing to conform to AAPCS, base
9006 fp_model = ARM_FLOAT_SOFT_VFP;
9008 case AEABI_VFP_args_vfp:
9009 /* "The user intended FP parameter/result
9010 passing to conform to AAPCS, VFP
9012 fp_model = ARM_FLOAT_VFP;
9014 case AEABI_VFP_args_toolchain:
9015 /* "The user intended FP parameter/result
9016 passing to conform to tool chain-specific
9017 conventions" - we don't know any such
9018 conventions, so leave it as "auto". */
9020 case AEABI_VFP_args_compatible:
9021 /* "Code is compatible with both the base
9022 and VFP variants; the user did not permit
9023 non-variadic functions to pass FP
9024 parameters/results" - leave it as
9028 /* Attribute value not mentioned in the
9029 November 2012 ABI, so leave it as
9034 fp_model = ARM_FLOAT_SOFT_VFP;
9040 /* Leave it as "auto". */
9041 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9046 /* Detect M-profile programs. This only works if the
9047 executable file includes build attributes; GCC does
9048 copy them to the executable, but e.g. RealView does
9050 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9052 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9054 Tag_CPU_arch_profile);
9055 /* GCC specifies the profile for v6-M; RealView only
9056 specifies the profile for architectures starting with
9057 V7 (as opposed to architectures with a tag
9058 numerically greater than TAG_CPU_ARCH_V7). */
9059 if (!tdesc_has_registers (tdesc)
9060 && (attr_arch == TAG_CPU_ARCH_V6_M
9061 || attr_arch == TAG_CPU_ARCH_V6S_M
9062 || attr_profile == 'M'))
9067 if (fp_model == ARM_FLOAT_AUTO)
9069 int e_flags = elf_elfheader (info.abfd)->e_flags;
9071 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9074 /* Leave it as "auto". Strictly speaking this case
9075 means FPA, but almost nobody uses that now, and
9076 many toolchains fail to set the appropriate bits
9077 for the floating-point model they use. */
9079 case EF_ARM_SOFT_FLOAT:
9080 fp_model = ARM_FLOAT_SOFT_FPA;
9082 case EF_ARM_VFP_FLOAT:
9083 fp_model = ARM_FLOAT_VFP;
9085 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9086 fp_model = ARM_FLOAT_SOFT_VFP;
9091 if (e_flags & EF_ARM_BE8)
9092 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9097 /* Leave it as "auto". */
9102 /* Check any target description for validity. */
9103 if (tdesc_has_registers (tdesc))
9105 /* For most registers we require GDB's default names; but also allow
9106 the numeric names for sp / lr / pc, as a convenience. */
9107 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9108 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9109 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9111 const struct tdesc_feature *feature;
9114 feature = tdesc_find_feature (tdesc,
9115 "org.gnu.gdb.arm.core");
9116 if (feature == NULL)
9118 feature = tdesc_find_feature (tdesc,
9119 "org.gnu.gdb.arm.m-profile");
9120 if (feature == NULL)
9126 tdesc_data = tdesc_data_alloc ();
9129 for (i = 0; i < ARM_SP_REGNUM; i++)
9130 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9131 arm_register_names[i]);
9132 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9135 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9138 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9142 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9143 ARM_PS_REGNUM, "xpsr");
9145 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9146 ARM_PS_REGNUM, "cpsr");
9150 tdesc_data_cleanup (tdesc_data);
9154 feature = tdesc_find_feature (tdesc,
9155 "org.gnu.gdb.arm.fpa");
9156 if (feature != NULL)
9159 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9160 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9161 arm_register_names[i]);
9164 tdesc_data_cleanup (tdesc_data);
9169 have_fpa_registers = 0;
9171 feature = tdesc_find_feature (tdesc,
9172 "org.gnu.gdb.xscale.iwmmxt");
9173 if (feature != NULL)
9175 static const char *const iwmmxt_names[] = {
9176 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9177 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9178 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9179 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9183 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9185 &= tdesc_numbered_register (feature, tdesc_data, i,
9186 iwmmxt_names[i - ARM_WR0_REGNUM]);
9188 /* Check for the control registers, but do not fail if they
9190 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9191 tdesc_numbered_register (feature, tdesc_data, i,
9192 iwmmxt_names[i - ARM_WR0_REGNUM]);
9194 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9196 &= tdesc_numbered_register (feature, tdesc_data, i,
9197 iwmmxt_names[i - ARM_WR0_REGNUM]);
9201 tdesc_data_cleanup (tdesc_data);
9205 have_wmmx_registers = 1;
9208 /* If we have a VFP unit, check whether the single precision registers
9209 are present. If not, then we will synthesize them as pseudo
9211 feature = tdesc_find_feature (tdesc,
9212 "org.gnu.gdb.arm.vfp");
9213 if (feature != NULL)
9215 static const char *const vfp_double_names[] = {
9216 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9217 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9218 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9219 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9222 /* Require the double precision registers. There must be either
9225 for (i = 0; i < 32; i++)
9227 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9229 vfp_double_names[i]);
9233 if (!valid_p && i == 16)
9236 /* Also require FPSCR. */
9237 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9238 ARM_FPSCR_REGNUM, "fpscr");
9241 tdesc_data_cleanup (tdesc_data);
9245 if (tdesc_unnumbered_register (feature, "s0") == 0)
9246 have_vfp_pseudos = 1;
9248 vfp_register_count = i;
9250 /* If we have VFP, also check for NEON. The architecture allows
9251 NEON without VFP (integer vector operations only), but GDB
9252 does not support that. */
9253 feature = tdesc_find_feature (tdesc,
9254 "org.gnu.gdb.arm.neon");
9255 if (feature != NULL)
9257 /* NEON requires 32 double-precision registers. */
9260 tdesc_data_cleanup (tdesc_data);
9264 /* If there are quad registers defined by the stub, use
9265 their type; otherwise (normally) provide them with
9266 the default type. */
9267 if (tdesc_unnumbered_register (feature, "q0") == 0)
9268 have_neon_pseudos = 1;
9275 /* If there is already a candidate, use it. */
9276 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9278 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9280 if (arm_abi != ARM_ABI_AUTO
9281 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9284 if (fp_model != ARM_FLOAT_AUTO
9285 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9288 /* There are various other properties in tdep that we do not
9289 need to check here: those derived from a target description,
9290 since gdbarches with a different target description are
9291 automatically disqualified. */
9293 /* Do check is_m, though, since it might come from the binary. */
9294 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9297 /* Found a match. */
9301 if (best_arch != NULL)
9303 if (tdesc_data != NULL)
9304 tdesc_data_cleanup (tdesc_data);
9305 return best_arch->gdbarch;
9308 tdep = XCNEW (struct gdbarch_tdep);
9309 gdbarch = gdbarch_alloc (&info, tdep);
9311 /* Record additional information about the architecture we are defining.
9312 These are gdbarch discriminators, like the OSABI. */
9313 tdep->arm_abi = arm_abi;
9314 tdep->fp_model = fp_model;
9316 tdep->have_fpa_registers = have_fpa_registers;
9317 tdep->have_wmmx_registers = have_wmmx_registers;
9318 gdb_assert (vfp_register_count == 0
9319 || vfp_register_count == 16
9320 || vfp_register_count == 32);
9321 tdep->vfp_register_count = vfp_register_count;
9322 tdep->have_vfp_pseudos = have_vfp_pseudos;
9323 tdep->have_neon_pseudos = have_neon_pseudos;
9324 tdep->have_neon = have_neon;
9326 arm_register_g_packet_guesses (gdbarch);
9329 switch (info.byte_order_for_code)
9331 case BFD_ENDIAN_BIG:
9332 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9333 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9334 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9335 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9339 case BFD_ENDIAN_LITTLE:
9340 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9341 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9342 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9343 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9348 internal_error (__FILE__, __LINE__,
9349 _("arm_gdbarch_init: bad byte order for float format"));
9352 /* On ARM targets char defaults to unsigned. */
9353 set_gdbarch_char_signed (gdbarch, 0);
9355 /* wchar_t is unsigned under the AAPCS. */
9356 if (tdep->arm_abi == ARM_ABI_AAPCS)
9357 set_gdbarch_wchar_signed (gdbarch, 0);
9359 set_gdbarch_wchar_signed (gdbarch, 1);
9361 /* Note: for displaced stepping, this includes the breakpoint, and one word
9362 of additional scratch space. This setting isn't used for anything beside
9363 displaced stepping at present. */
9364 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9366 /* This should be low enough for everything. */
9367 tdep->lowest_pc = 0x20;
9368 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9370 /* The default, for both APCS and AAPCS, is to return small
9371 structures in registers. */
9372 tdep->struct_return = reg_struct_return;
9374 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9375 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9378 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9380 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9382 /* Frame handling. */
9383 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9384 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9385 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9387 frame_base_set_default (gdbarch, &arm_normal_base);
9389 /* Address manipulation. */
9390 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9392 /* Advance PC across function entry code. */
9393 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9395 /* Detect whether PC is at a point where the stack has been destroyed. */
9396 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9398 /* Skip trampolines. */
9399 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9401 /* The stack grows downward. */
9402 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9404 /* Breakpoint manipulation. */
9405 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9406 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9407 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9408 arm_breakpoint_kind_from_current_state);
9410 /* Information about registers, etc. */
9411 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9412 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9413 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9414 set_gdbarch_register_type (gdbarch, arm_register_type);
9415 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9417 /* This "info float" is FPA-specific. Use the generic version if we
9419 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9420 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9422 /* Internal <-> external register number maps. */
9423 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9424 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9426 set_gdbarch_register_name (gdbarch, arm_register_name);
9428 /* Returning results. */
9429 set_gdbarch_return_value (gdbarch, arm_return_value);
9432 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9434 /* Minsymbol frobbing. */
9435 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9436 set_gdbarch_coff_make_msymbol_special (gdbarch,
9437 arm_coff_make_msymbol_special);
9438 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9440 /* Thumb-2 IT block support. */
9441 set_gdbarch_adjust_breakpoint_address (gdbarch,
9442 arm_adjust_breakpoint_address);
9444 /* Virtual tables. */
9445 set_gdbarch_vbit_in_delta (gdbarch, 1);
9447 /* Hook in the ABI-specific overrides, if they have been registered. */
9448 gdbarch_init_osabi (info, gdbarch);
9450 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9452 /* Add some default predicates. */
9454 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9455 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9456 dwarf2_append_unwinders (gdbarch);
9457 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9458 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9459 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9461 /* Now we have tuned the configuration, set a few final things,
9462 based on what the OS ABI has told us. */
9464 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9465 binaries are always marked. */
9466 if (tdep->arm_abi == ARM_ABI_AUTO)
9467 tdep->arm_abi = ARM_ABI_APCS;
9469 /* Watchpoints are not steppable. */
9470 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9472 /* We used to default to FPA for generic ARM, but almost nobody
9473 uses that now, and we now provide a way for the user to force
9474 the model. So default to the most useful variant. */
9475 if (tdep->fp_model == ARM_FLOAT_AUTO)
9476 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9478 if (tdep->jb_pc >= 0)
9479 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9481 /* Floating point sizes and format. */
9482 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9483 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9485 set_gdbarch_double_format
9486 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9487 set_gdbarch_long_double_format
9488 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9492 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9493 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9496 if (have_vfp_pseudos)
9498 /* NOTE: These are the only pseudo registers used by
9499 the ARM target at the moment. If more are added, a
9500 little more care in numbering will be needed. */
9502 int num_pseudos = 32;
9503 if (have_neon_pseudos)
9505 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9506 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9507 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9512 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9514 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9516 /* Override tdesc_register_type to adjust the types of VFP
9517 registers for NEON. */
9518 set_gdbarch_register_type (gdbarch, arm_register_type);
9521 /* Add standard register aliases. We add aliases even for those
9522 nanes which are used by the current architecture - it's simpler,
9523 and does no harm, since nothing ever lists user registers. */
9524 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9525 user_reg_add (gdbarch, arm_register_aliases[i].name,
9526 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9528 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9529 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9535 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9537 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9542 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9543 (unsigned long) tdep->lowest_pc);
9549 static void arm_record_test (void);
9554 _initialize_arm_tdep (void)
9557 const char *setname;
9558 const char *setdesc;
9560 char regdesc[1024], *rdptr = regdesc;
9561 size_t rest = sizeof (regdesc);
9563 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9565 arm_objfile_data_key
9566 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9568 /* Add ourselves to objfile event chain. */
9569 observer_attach_new_objfile (arm_exidx_new_objfile);
9571 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9573 /* Register an ELF OS ABI sniffer for ARM binaries. */
9574 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9575 bfd_target_elf_flavour,
9576 arm_elf_osabi_sniffer);
9578 /* Initialize the standard target descriptions. */
9579 initialize_tdesc_arm_with_m ();
9580 initialize_tdesc_arm_with_m_fpa_layout ();
9581 initialize_tdesc_arm_with_m_vfp_d16 ();
9582 initialize_tdesc_arm_with_iwmmxt ();
9583 initialize_tdesc_arm_with_vfpv2 ();
9584 initialize_tdesc_arm_with_vfpv3 ();
9585 initialize_tdesc_arm_with_neon ();
9587 /* Add root prefix command for all "set arm"/"show arm" commands. */
9588 add_prefix_cmd ("arm", no_class, set_arm_command,
9589 _("Various ARM-specific commands."),
9590 &setarmcmdlist, "set arm ", 0, &setlist);
9592 add_prefix_cmd ("arm", no_class, show_arm_command,
9593 _("Various ARM-specific commands."),
9594 &showarmcmdlist, "show arm ", 0, &showlist);
9597 arm_disassembler_options = xstrdup ("reg-names-std");
9598 const disasm_options_t *disasm_options = disassembler_options_arm ();
9599 int num_disassembly_styles = 0;
9600 for (i = 0; disasm_options->name[i] != NULL; i++)
9601 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9602 num_disassembly_styles++;
9604 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9605 valid_disassembly_styles = XNEWVEC (const char *,
9606 num_disassembly_styles + 1);
9607 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9608 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9610 size_t offset = strlen ("reg-names-");
9611 const char *style = disasm_options->name[i];
9612 valid_disassembly_styles[j++] = &style[offset];
9613 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9614 disasm_options->description[i]);
9618 /* Mark the end of valid options. */
9619 valid_disassembly_styles[num_disassembly_styles] = NULL;
9621 /* Create the help text. */
9622 std::string helptext = string_printf ("%s%s%s",
9623 _("The valid values are:\n"),
9625 _("The default is \"std\"."));
9627 add_setshow_enum_cmd("disassembler", no_class,
9628 valid_disassembly_styles, &disassembly_style,
9629 _("Set the disassembly style."),
9630 _("Show the disassembly style."),
9632 set_disassembly_style_sfunc,
9633 show_disassembly_style_sfunc,
9634 &setarmcmdlist, &showarmcmdlist);
9636 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9637 _("Set usage of ARM 32-bit mode."),
9638 _("Show usage of ARM 32-bit mode."),
9639 _("When off, a 26-bit PC will be used."),
9641 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9643 &setarmcmdlist, &showarmcmdlist);
9645 /* Add a command to allow the user to force the FPU model. */
9646 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9647 _("Set the floating point type."),
9648 _("Show the floating point type."),
9649 _("auto - Determine the FP typefrom the OS-ABI.\n\
9650 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9651 fpa - FPA co-processor (GCC compiled).\n\
9652 softvfp - Software FP with pure-endian doubles.\n\
9653 vfp - VFP co-processor."),
9654 set_fp_model_sfunc, show_fp_model,
9655 &setarmcmdlist, &showarmcmdlist);
9657 /* Add a command to allow the user to force the ABI. */
9658 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9661 NULL, arm_set_abi, arm_show_abi,
9662 &setarmcmdlist, &showarmcmdlist);
9664 /* Add two commands to allow the user to force the assumed
9666 add_setshow_enum_cmd ("fallback-mode", class_support,
9667 arm_mode_strings, &arm_fallback_mode_string,
9668 _("Set the mode assumed when symbols are unavailable."),
9669 _("Show the mode assumed when symbols are unavailable."),
9670 NULL, NULL, arm_show_fallback_mode,
9671 &setarmcmdlist, &showarmcmdlist);
9672 add_setshow_enum_cmd ("force-mode", class_support,
9673 arm_mode_strings, &arm_force_mode_string,
9674 _("Set the mode assumed even when symbols are available."),
9675 _("Show the mode assumed even when symbols are available."),
9676 NULL, NULL, arm_show_force_mode,
9677 &setarmcmdlist, &showarmcmdlist);
9679 /* Debugging flag. */
9680 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9681 _("Set ARM debugging."),
9682 _("Show ARM debugging."),
9683 _("When on, arm-specific debugging is enabled."),
9685 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9686 &setdebuglist, &showdebuglist);
9689 selftests::register_test ("arm-record", selftests::arm_record_test);
9694 /* ARM-reversible process record data structures. */
9696 #define ARM_INSN_SIZE_BYTES 4
9697 #define THUMB_INSN_SIZE_BYTES 2
9698 #define THUMB2_INSN_SIZE_BYTES 4
9701 /* Position of the bit within a 32-bit ARM instruction
9702 that defines whether the instruction is a load or store. */
9703 #define INSN_S_L_BIT_NUM 20
9705 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9708 unsigned int reg_len = LENGTH; \
9711 REGS = XNEWVEC (uint32_t, reg_len); \
9712 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9717 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9720 unsigned int mem_len = LENGTH; \
9723 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9724 memcpy(&MEMS->len, &RECORD_BUF[0], \
9725 sizeof(struct arm_mem_r) * LENGTH); \
9730 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9731 #define INSN_RECORDED(ARM_RECORD) \
9732 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9734 /* ARM memory record structure. */
9737 uint32_t len; /* Record length. */
9738 uint32_t addr; /* Memory address. */
9741 /* ARM instruction record contains opcode of current insn
9742 and execution state (before entry to decode_insn()),
9743 contains list of to-be-modified registers and
9744 memory blocks (on return from decode_insn()). */
9746 typedef struct insn_decode_record_t
9748 struct gdbarch *gdbarch;
9749 struct regcache *regcache;
9750 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9751 uint32_t arm_insn; /* Should accommodate thumb. */
9752 uint32_t cond; /* Condition code. */
9753 uint32_t opcode; /* Insn opcode. */
9754 uint32_t decode; /* Insn decode bits. */
9755 uint32_t mem_rec_count; /* No of mem records. */
9756 uint32_t reg_rec_count; /* No of reg records. */
9757 uint32_t *arm_regs; /* Registers to be saved for this record. */
9758 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9759 } insn_decode_record;
9762 /* Checks ARM SBZ and SBO mandatory fields. */
9765 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9767 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9786 enum arm_record_result
9788 ARM_RECORD_SUCCESS = 0,
9789 ARM_RECORD_FAILURE = 1
9796 } arm_record_strx_t;
9807 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9808 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9811 struct regcache *reg_cache = arm_insn_r->regcache;
9812 ULONGEST u_regval[2]= {0};
9814 uint32_t reg_src1 = 0, reg_src2 = 0;
9815 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9817 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9818 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9820 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9822 /* 1) Handle misc store, immediate offset. */
9823 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9824 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9825 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9826 regcache_raw_read_unsigned (reg_cache, reg_src1,
9828 if (ARM_PC_REGNUM == reg_src1)
9830 /* If R15 was used as Rn, hence current PC+8. */
9831 u_regval[0] = u_regval[0] + 8;
9833 offset_8 = (immed_high << 4) | immed_low;
9834 /* Calculate target store address. */
9835 if (14 == arm_insn_r->opcode)
9837 tgt_mem_addr = u_regval[0] + offset_8;
9841 tgt_mem_addr = u_regval[0] - offset_8;
9843 if (ARM_RECORD_STRH == str_type)
9845 record_buf_mem[0] = 2;
9846 record_buf_mem[1] = tgt_mem_addr;
9847 arm_insn_r->mem_rec_count = 1;
9849 else if (ARM_RECORD_STRD == str_type)
9851 record_buf_mem[0] = 4;
9852 record_buf_mem[1] = tgt_mem_addr;
9853 record_buf_mem[2] = 4;
9854 record_buf_mem[3] = tgt_mem_addr + 4;
9855 arm_insn_r->mem_rec_count = 2;
9858 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9860 /* 2) Store, register offset. */
9862 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9864 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9865 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9866 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9869 /* If R15 was used as Rn, hence current PC+8. */
9870 u_regval[0] = u_regval[0] + 8;
9872 /* Calculate target store address, Rn +/- Rm, register offset. */
9873 if (12 == arm_insn_r->opcode)
9875 tgt_mem_addr = u_regval[0] + u_regval[1];
9879 tgt_mem_addr = u_regval[1] - u_regval[0];
9881 if (ARM_RECORD_STRH == str_type)
9883 record_buf_mem[0] = 2;
9884 record_buf_mem[1] = tgt_mem_addr;
9885 arm_insn_r->mem_rec_count = 1;
9887 else if (ARM_RECORD_STRD == str_type)
9889 record_buf_mem[0] = 4;
9890 record_buf_mem[1] = tgt_mem_addr;
9891 record_buf_mem[2] = 4;
9892 record_buf_mem[3] = tgt_mem_addr + 4;
9893 arm_insn_r->mem_rec_count = 2;
9896 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9897 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9899 /* 3) Store, immediate pre-indexed. */
9900 /* 5) Store, immediate post-indexed. */
9901 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9902 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9903 offset_8 = (immed_high << 4) | immed_low;
9904 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9905 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9906 /* Calculate target store address, Rn +/- Rm, register offset. */
9907 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9909 tgt_mem_addr = u_regval[0] + offset_8;
9913 tgt_mem_addr = u_regval[0] - offset_8;
9915 if (ARM_RECORD_STRH == str_type)
9917 record_buf_mem[0] = 2;
9918 record_buf_mem[1] = tgt_mem_addr;
9919 arm_insn_r->mem_rec_count = 1;
9921 else if (ARM_RECORD_STRD == str_type)
9923 record_buf_mem[0] = 4;
9924 record_buf_mem[1] = tgt_mem_addr;
9925 record_buf_mem[2] = 4;
9926 record_buf_mem[3] = tgt_mem_addr + 4;
9927 arm_insn_r->mem_rec_count = 2;
9929 /* Record Rn also as it changes. */
9930 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9931 arm_insn_r->reg_rec_count = 1;
9933 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9934 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9936 /* 4) Store, register pre-indexed. */
9937 /* 6) Store, register post -indexed. */
9938 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9939 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9940 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9941 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9942 /* Calculate target store address, Rn +/- Rm, register offset. */
9943 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9945 tgt_mem_addr = u_regval[0] + u_regval[1];
9949 tgt_mem_addr = u_regval[1] - u_regval[0];
9951 if (ARM_RECORD_STRH == str_type)
9953 record_buf_mem[0] = 2;
9954 record_buf_mem[1] = tgt_mem_addr;
9955 arm_insn_r->mem_rec_count = 1;
9957 else if (ARM_RECORD_STRD == str_type)
9959 record_buf_mem[0] = 4;
9960 record_buf_mem[1] = tgt_mem_addr;
9961 record_buf_mem[2] = 4;
9962 record_buf_mem[3] = tgt_mem_addr + 4;
9963 arm_insn_r->mem_rec_count = 2;
9965 /* Record Rn also as it changes. */
9966 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9967 arm_insn_r->reg_rec_count = 1;
9972 /* Handling ARM extension space insns. */
9975 arm_record_extension_space (insn_decode_record *arm_insn_r)
9977 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9978 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9979 uint32_t record_buf[8], record_buf_mem[8];
9980 uint32_t reg_src1 = 0;
9981 struct regcache *reg_cache = arm_insn_r->regcache;
9982 ULONGEST u_regval = 0;
9984 gdb_assert (!INSN_RECORDED(arm_insn_r));
9985 /* Handle unconditional insn extension space. */
9987 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9988 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9989 if (arm_insn_r->cond)
9991 /* PLD has no affect on architectural state, it just affects
9993 if (5 == ((opcode1 & 0xE0) >> 5))
9996 record_buf[0] = ARM_PS_REGNUM;
9997 record_buf[1] = ARM_LR_REGNUM;
9998 arm_insn_r->reg_rec_count = 2;
10000 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10004 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10005 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10008 /* Undefined instruction on ARM V5; need to handle if later
10009 versions define it. */
10012 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10013 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10014 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10016 /* Handle arithmetic insn extension space. */
10017 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10018 && !INSN_RECORDED(arm_insn_r))
10020 /* Handle MLA(S) and MUL(S). */
10021 if (0 <= insn_op1 && 3 >= insn_op1)
10023 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10024 record_buf[1] = ARM_PS_REGNUM;
10025 arm_insn_r->reg_rec_count = 2;
10027 else if (4 <= insn_op1 && 15 >= insn_op1)
10029 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10030 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10031 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10032 record_buf[2] = ARM_PS_REGNUM;
10033 arm_insn_r->reg_rec_count = 3;
10037 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10038 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10039 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10041 /* Handle control insn extension space. */
10043 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10044 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10046 if (!bit (arm_insn_r->arm_insn,25))
10048 if (!bits (arm_insn_r->arm_insn, 4, 7))
10050 if ((0 == insn_op1) || (2 == insn_op1))
10053 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10054 arm_insn_r->reg_rec_count = 1;
10056 else if (1 == insn_op1)
10058 /* CSPR is going to be changed. */
10059 record_buf[0] = ARM_PS_REGNUM;
10060 arm_insn_r->reg_rec_count = 1;
10062 else if (3 == insn_op1)
10064 /* SPSR is going to be changed. */
10065 /* We need to get SPSR value, which is yet to be done. */
10069 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10074 record_buf[0] = ARM_PS_REGNUM;
10075 arm_insn_r->reg_rec_count = 1;
10077 else if (3 == insn_op1)
10080 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10081 arm_insn_r->reg_rec_count = 1;
10084 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10087 record_buf[0] = ARM_PS_REGNUM;
10088 record_buf[1] = ARM_LR_REGNUM;
10089 arm_insn_r->reg_rec_count = 2;
10091 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10093 /* QADD, QSUB, QDADD, QDSUB */
10094 record_buf[0] = ARM_PS_REGNUM;
10095 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10096 arm_insn_r->reg_rec_count = 2;
10098 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10101 record_buf[0] = ARM_PS_REGNUM;
10102 record_buf[1] = ARM_LR_REGNUM;
10103 arm_insn_r->reg_rec_count = 2;
10105 /* Save SPSR also;how? */
10108 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10109 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10110 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10111 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10114 if (0 == insn_op1 || 1 == insn_op1)
10116 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10117 /* We dont do optimization for SMULW<y> where we
10119 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10120 record_buf[1] = ARM_PS_REGNUM;
10121 arm_insn_r->reg_rec_count = 2;
10123 else if (2 == insn_op1)
10126 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10127 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10128 arm_insn_r->reg_rec_count = 2;
10130 else if (3 == insn_op1)
10133 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10134 arm_insn_r->reg_rec_count = 1;
10140 /* MSR : immediate form. */
10143 /* CSPR is going to be changed. */
10144 record_buf[0] = ARM_PS_REGNUM;
10145 arm_insn_r->reg_rec_count = 1;
10147 else if (3 == insn_op1)
10149 /* SPSR is going to be changed. */
10150 /* we need to get SPSR value, which is yet to be done */
10156 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10157 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10158 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10160 /* Handle load/store insn extension space. */
10162 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10163 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10164 && !INSN_RECORDED(arm_insn_r))
10169 /* These insn, changes register and memory as well. */
10170 /* SWP or SWPB insn. */
10171 /* Get memory address given by Rn. */
10172 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10173 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10174 /* SWP insn ?, swaps word. */
10175 if (8 == arm_insn_r->opcode)
10177 record_buf_mem[0] = 4;
10181 /* SWPB insn, swaps only byte. */
10182 record_buf_mem[0] = 1;
10184 record_buf_mem[1] = u_regval;
10185 arm_insn_r->mem_rec_count = 1;
10186 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10187 arm_insn_r->reg_rec_count = 1;
10189 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10192 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10195 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10198 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10199 record_buf[1] = record_buf[0] + 1;
10200 arm_insn_r->reg_rec_count = 2;
10202 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10205 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10208 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10210 /* LDRH, LDRSB, LDRSH. */
10211 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10212 arm_insn_r->reg_rec_count = 1;
10217 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10218 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10219 && !INSN_RECORDED(arm_insn_r))
10222 /* Handle coprocessor insn extension space. */
10225 /* To be done for ARMv5 and later; as of now we return -1. */
10229 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10230 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10235 /* Handling opcode 000 insns. */
10238 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10240 struct regcache *reg_cache = arm_insn_r->regcache;
10241 uint32_t record_buf[8], record_buf_mem[8];
10242 ULONGEST u_regval[2] = {0};
10244 uint32_t reg_src1 = 0, reg_dest = 0;
10245 uint32_t opcode1 = 0;
10247 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10248 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10249 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10251 /* Data processing insn /multiply insn. */
10252 if (9 == arm_insn_r->decode
10253 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10254 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10256 /* Handle multiply instructions. */
10257 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10258 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10260 /* Handle MLA and MUL. */
10261 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10262 record_buf[1] = ARM_PS_REGNUM;
10263 arm_insn_r->reg_rec_count = 2;
10265 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10267 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10268 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10269 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10270 record_buf[2] = ARM_PS_REGNUM;
10271 arm_insn_r->reg_rec_count = 3;
10274 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10275 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10277 /* Handle misc load insns, as 20th bit (L = 1). */
10278 /* LDR insn has a capability to do branching, if
10279 MOV LR, PC is precceded by LDR insn having Rn as R15
10280 in that case, it emulates branch and link insn, and hence we
10281 need to save CSPR and PC as well. I am not sure this is right
10282 place; as opcode = 010 LDR insn make this happen, if R15 was
10284 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10285 if (15 != reg_dest)
10287 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10288 arm_insn_r->reg_rec_count = 1;
10292 record_buf[0] = reg_dest;
10293 record_buf[1] = ARM_PS_REGNUM;
10294 arm_insn_r->reg_rec_count = 2;
10297 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10298 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10299 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10300 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10302 /* Handle MSR insn. */
10303 if (9 == arm_insn_r->opcode)
10305 /* CSPR is going to be changed. */
10306 record_buf[0] = ARM_PS_REGNUM;
10307 arm_insn_r->reg_rec_count = 1;
10311 /* SPSR is going to be changed. */
10312 /* How to read SPSR value? */
10316 else if (9 == arm_insn_r->decode
10317 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10318 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10320 /* Handling SWP, SWPB. */
10321 /* These insn, changes register and memory as well. */
10322 /* SWP or SWPB insn. */
10324 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10325 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10326 /* SWP insn ?, swaps word. */
10327 if (8 == arm_insn_r->opcode)
10329 record_buf_mem[0] = 4;
10333 /* SWPB insn, swaps only byte. */
10334 record_buf_mem[0] = 1;
10336 record_buf_mem[1] = u_regval[0];
10337 arm_insn_r->mem_rec_count = 1;
10338 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10339 arm_insn_r->reg_rec_count = 1;
10341 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10342 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10344 /* Handle BLX, branch and link/exchange. */
10345 if (9 == arm_insn_r->opcode)
10347 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10348 and R14 stores the return address. */
10349 record_buf[0] = ARM_PS_REGNUM;
10350 record_buf[1] = ARM_LR_REGNUM;
10351 arm_insn_r->reg_rec_count = 2;
10354 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10356 /* Handle enhanced software breakpoint insn, BKPT. */
10357 /* CPSR is changed to be executed in ARM state, disabling normal
10358 interrupts, entering abort mode. */
10359 /* According to high vector configuration PC is set. */
10360 /* user hit breakpoint and type reverse, in
10361 that case, we need to go back with previous CPSR and
10362 Program Counter. */
10363 record_buf[0] = ARM_PS_REGNUM;
10364 record_buf[1] = ARM_LR_REGNUM;
10365 arm_insn_r->reg_rec_count = 2;
10367 /* Save SPSR also; how? */
10370 else if (11 == arm_insn_r->decode
10371 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10373 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10375 /* Handle str(x) insn */
10376 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10379 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10380 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10382 /* Handle BX, branch and link/exchange. */
10383 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10384 record_buf[0] = ARM_PS_REGNUM;
10385 arm_insn_r->reg_rec_count = 1;
10387 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10388 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10389 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10391 /* Count leading zeros: CLZ. */
10392 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10393 arm_insn_r->reg_rec_count = 1;
10395 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10396 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10397 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10398 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10401 /* Handle MRS insn. */
10402 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10403 arm_insn_r->reg_rec_count = 1;
10405 else if (arm_insn_r->opcode <= 15)
10407 /* Normal data processing insns. */
10408 /* Out of 11 shifter operands mode, all the insn modifies destination
10409 register, which is specified by 13-16 decode. */
10410 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10411 record_buf[1] = ARM_PS_REGNUM;
10412 arm_insn_r->reg_rec_count = 2;
10419 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10420 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10424 /* Handling opcode 001 insns. */
10427 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10429 uint32_t record_buf[8], record_buf_mem[8];
10431 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10432 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10434 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10435 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10436 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10439 /* Handle MSR insn. */
10440 if (9 == arm_insn_r->opcode)
10442 /* CSPR is going to be changed. */
10443 record_buf[0] = ARM_PS_REGNUM;
10444 arm_insn_r->reg_rec_count = 1;
10448 /* SPSR is going to be changed. */
10451 else if (arm_insn_r->opcode <= 15)
10453 /* Normal data processing insns. */
10454 /* Out of 11 shifter operands mode, all the insn modifies destination
10455 register, which is specified by 13-16 decode. */
10456 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10457 record_buf[1] = ARM_PS_REGNUM;
10458 arm_insn_r->reg_rec_count = 2;
10465 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10466 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10471 arm_record_media (insn_decode_record *arm_insn_r)
10473 uint32_t record_buf[8];
10475 switch (bits (arm_insn_r->arm_insn, 22, 24))
10478 /* Parallel addition and subtraction, signed */
10480 /* Parallel addition and subtraction, unsigned */
10483 /* Packing, unpacking, saturation and reversal */
10485 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10487 record_buf[arm_insn_r->reg_rec_count++] = rd;
10493 /* Signed multiplies */
10495 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10496 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10498 record_buf[arm_insn_r->reg_rec_count++] = rd;
10500 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10501 else if (op1 == 0x4)
10502 record_buf[arm_insn_r->reg_rec_count++]
10503 = bits (arm_insn_r->arm_insn, 12, 15);
10509 if (bit (arm_insn_r->arm_insn, 21)
10510 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10513 record_buf[arm_insn_r->reg_rec_count++]
10514 = bits (arm_insn_r->arm_insn, 12, 15);
10516 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10517 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10519 /* USAD8 and USADA8 */
10520 record_buf[arm_insn_r->reg_rec_count++]
10521 = bits (arm_insn_r->arm_insn, 16, 19);
10528 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10529 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10531 /* Permanently UNDEFINED */
10536 /* BFC, BFI and UBFX */
10537 record_buf[arm_insn_r->reg_rec_count++]
10538 = bits (arm_insn_r->arm_insn, 12, 15);
10547 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10552 /* Handle ARM mode instructions with opcode 010. */
10555 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10557 struct regcache *reg_cache = arm_insn_r->regcache;
10559 uint32_t reg_base , reg_dest;
10560 uint32_t offset_12, tgt_mem_addr;
10561 uint32_t record_buf[8], record_buf_mem[8];
10562 unsigned char wback;
10565 /* Calculate wback. */
10566 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10567 || (bit (arm_insn_r->arm_insn, 21) == 1);
10569 arm_insn_r->reg_rec_count = 0;
10570 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10572 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10574 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10577 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10578 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10580 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10581 preceeds a LDR instruction having R15 as reg_base, it
10582 emulates a branch and link instruction, and hence we need to save
10583 CPSR and PC as well. */
10584 if (ARM_PC_REGNUM == reg_dest)
10585 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10587 /* If wback is true, also save the base register, which is going to be
10590 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10594 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10596 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10597 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10599 /* Handle bit U. */
10600 if (bit (arm_insn_r->arm_insn, 23))
10602 /* U == 1: Add the offset. */
10603 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10607 /* U == 0: subtract the offset. */
10608 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10611 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10613 if (bit (arm_insn_r->arm_insn, 22))
10615 /* STRB and STRBT: 1 byte. */
10616 record_buf_mem[0] = 1;
10620 /* STR and STRT: 4 bytes. */
10621 record_buf_mem[0] = 4;
10624 /* Handle bit P. */
10625 if (bit (arm_insn_r->arm_insn, 24))
10626 record_buf_mem[1] = tgt_mem_addr;
10628 record_buf_mem[1] = (uint32_t) u_regval;
10630 arm_insn_r->mem_rec_count = 1;
10632 /* If wback is true, also save the base register, which is going to be
10635 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10638 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10639 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10643 /* Handling opcode 011 insns. */
10646 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10648 struct regcache *reg_cache = arm_insn_r->regcache;
10650 uint32_t shift_imm = 0;
10651 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10652 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10653 uint32_t record_buf[8], record_buf_mem[8];
10656 ULONGEST u_regval[2];
10658 if (bit (arm_insn_r->arm_insn, 4))
10659 return arm_record_media (arm_insn_r);
10661 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10662 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10664 /* Handle enhanced store insns and LDRD DSP insn,
10665 order begins according to addressing modes for store insns
10669 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10671 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10672 /* LDR insn has a capability to do branching, if
10673 MOV LR, PC is precedded by LDR insn having Rn as R15
10674 in that case, it emulates branch and link insn, and hence we
10675 need to save CSPR and PC as well. */
10676 if (15 != reg_dest)
10678 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10679 arm_insn_r->reg_rec_count = 1;
10683 record_buf[0] = reg_dest;
10684 record_buf[1] = ARM_PS_REGNUM;
10685 arm_insn_r->reg_rec_count = 2;
10690 if (! bits (arm_insn_r->arm_insn, 4, 11))
10692 /* Store insn, register offset and register pre-indexed,
10693 register post-indexed. */
10695 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10697 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10698 regcache_raw_read_unsigned (reg_cache, reg_src1
10700 regcache_raw_read_unsigned (reg_cache, reg_src2
10702 if (15 == reg_src2)
10704 /* If R15 was used as Rn, hence current PC+8. */
10705 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10706 u_regval[0] = u_regval[0] + 8;
10708 /* Calculate target store address, Rn +/- Rm, register offset. */
10710 if (bit (arm_insn_r->arm_insn, 23))
10712 tgt_mem_addr = u_regval[0] + u_regval[1];
10716 tgt_mem_addr = u_regval[1] - u_regval[0];
10719 switch (arm_insn_r->opcode)
10733 record_buf_mem[0] = 4;
10748 record_buf_mem[0] = 1;
10752 gdb_assert_not_reached ("no decoding pattern found");
10755 record_buf_mem[1] = tgt_mem_addr;
10756 arm_insn_r->mem_rec_count = 1;
10758 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10759 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10760 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10761 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10762 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10763 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10766 /* Rn is going to be changed in pre-indexed mode and
10767 post-indexed mode as well. */
10768 record_buf[0] = reg_src2;
10769 arm_insn_r->reg_rec_count = 1;
10774 /* Store insn, scaled register offset; scaled pre-indexed. */
10775 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10777 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10779 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10780 /* Get shift_imm. */
10781 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10782 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10783 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10784 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10785 /* Offset_12 used as shift. */
10789 /* Offset_12 used as index. */
10790 offset_12 = u_regval[0] << shift_imm;
10794 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10800 if (bit (u_regval[0], 31))
10802 offset_12 = 0xFFFFFFFF;
10811 /* This is arithmetic shift. */
10812 offset_12 = s_word >> shift_imm;
10819 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10821 /* Get C flag value and shift it by 31. */
10822 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10823 | (u_regval[0]) >> 1);
10827 offset_12 = (u_regval[0] >> shift_imm) \
10829 (sizeof(uint32_t) - shift_imm));
10834 gdb_assert_not_reached ("no decoding pattern found");
10838 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10840 if (bit (arm_insn_r->arm_insn, 23))
10842 tgt_mem_addr = u_regval[1] + offset_12;
10846 tgt_mem_addr = u_regval[1] - offset_12;
10849 switch (arm_insn_r->opcode)
10863 record_buf_mem[0] = 4;
10878 record_buf_mem[0] = 1;
10882 gdb_assert_not_reached ("no decoding pattern found");
10885 record_buf_mem[1] = tgt_mem_addr;
10886 arm_insn_r->mem_rec_count = 1;
10888 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10889 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10890 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10891 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10892 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10893 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10896 /* Rn is going to be changed in register scaled pre-indexed
10897 mode,and scaled post indexed mode. */
10898 record_buf[0] = reg_src2;
10899 arm_insn_r->reg_rec_count = 1;
10904 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10905 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10909 /* Handle ARM mode instructions with opcode 100. */
10912 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10914 struct regcache *reg_cache = arm_insn_r->regcache;
10915 uint32_t register_count = 0, register_bits;
10916 uint32_t reg_base, addr_mode;
10917 uint32_t record_buf[24], record_buf_mem[48];
10921 /* Fetch the list of registers. */
10922 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10923 arm_insn_r->reg_rec_count = 0;
10925 /* Fetch the base register that contains the address we are loading data
10927 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10929 /* Calculate wback. */
10930 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10932 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10934 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10936 /* Find out which registers are going to be loaded from memory. */
10937 while (register_bits)
10939 if (register_bits & 0x00000001)
10940 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10941 register_bits = register_bits >> 1;
10946 /* If wback is true, also save the base register, which is going to be
10949 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10951 /* Save the CPSR register. */
10952 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10956 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10958 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10960 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10962 /* Find out how many registers are going to be stored to memory. */
10963 while (register_bits)
10965 if (register_bits & 0x00000001)
10967 register_bits = register_bits >> 1;
10972 /* STMDA (STMED): Decrement after. */
10974 record_buf_mem[1] = (uint32_t) u_regval
10975 - register_count * INT_REGISTER_SIZE + 4;
10977 /* STM (STMIA, STMEA): Increment after. */
10979 record_buf_mem[1] = (uint32_t) u_regval;
10981 /* STMDB (STMFD): Decrement before. */
10983 record_buf_mem[1] = (uint32_t) u_regval
10984 - register_count * INT_REGISTER_SIZE;
10986 /* STMIB (STMFA): Increment before. */
10988 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10991 gdb_assert_not_reached ("no decoding pattern found");
10995 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10996 arm_insn_r->mem_rec_count = 1;
10998 /* If wback is true, also save the base register, which is going to be
11001 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11004 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11005 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11009 /* Handling opcode 101 insns. */
11012 arm_record_b_bl (insn_decode_record *arm_insn_r)
11014 uint32_t record_buf[8];
11016 /* Handle B, BL, BLX(1) insns. */
11017 /* B simply branches so we do nothing here. */
11018 /* Note: BLX(1) doesnt fall here but instead it falls into
11019 extension space. */
11020 if (bit (arm_insn_r->arm_insn, 24))
11022 record_buf[0] = ARM_LR_REGNUM;
11023 arm_insn_r->reg_rec_count = 1;
11026 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11032 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11034 printf_unfiltered (_("Process record does not support instruction "
11035 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11036 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11041 /* Record handler for vector data transfer instructions. */
11044 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11046 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11047 uint32_t record_buf[4];
11049 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11050 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11051 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11052 bit_l = bit (arm_insn_r->arm_insn, 20);
11053 bit_c = bit (arm_insn_r->arm_insn, 8);
11055 /* Handle VMOV instruction. */
11056 if (bit_l && bit_c)
11058 record_buf[0] = reg_t;
11059 arm_insn_r->reg_rec_count = 1;
11061 else if (bit_l && !bit_c)
11063 /* Handle VMOV instruction. */
11064 if (bits_a == 0x00)
11066 record_buf[0] = reg_t;
11067 arm_insn_r->reg_rec_count = 1;
11069 /* Handle VMRS instruction. */
11070 else if (bits_a == 0x07)
11073 reg_t = ARM_PS_REGNUM;
11075 record_buf[0] = reg_t;
11076 arm_insn_r->reg_rec_count = 1;
11079 else if (!bit_l && !bit_c)
11081 /* Handle VMOV instruction. */
11082 if (bits_a == 0x00)
11084 record_buf[0] = ARM_D0_REGNUM + reg_v;
11086 arm_insn_r->reg_rec_count = 1;
11088 /* Handle VMSR instruction. */
11089 else if (bits_a == 0x07)
11091 record_buf[0] = ARM_FPSCR_REGNUM;
11092 arm_insn_r->reg_rec_count = 1;
11095 else if (!bit_l && bit_c)
11097 /* Handle VMOV instruction. */
11098 if (!(bits_a & 0x04))
11100 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11102 arm_insn_r->reg_rec_count = 1;
11104 /* Handle VDUP instruction. */
11107 if (bit (arm_insn_r->arm_insn, 21))
11109 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11110 record_buf[0] = reg_v + ARM_D0_REGNUM;
11111 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11112 arm_insn_r->reg_rec_count = 2;
11116 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11117 record_buf[0] = reg_v + ARM_D0_REGNUM;
11118 arm_insn_r->reg_rec_count = 1;
11123 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11127 /* Record handler for extension register load/store instructions. */
11130 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11132 uint32_t opcode, single_reg;
11133 uint8_t op_vldm_vstm;
11134 uint32_t record_buf[8], record_buf_mem[128];
11135 ULONGEST u_regval = 0;
11137 struct regcache *reg_cache = arm_insn_r->regcache;
11139 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11140 single_reg = !bit (arm_insn_r->arm_insn, 8);
11141 op_vldm_vstm = opcode & 0x1b;
11143 /* Handle VMOV instructions. */
11144 if ((opcode & 0x1e) == 0x04)
11146 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11148 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11149 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11150 arm_insn_r->reg_rec_count = 2;
11154 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11155 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11159 /* The first S register number m is REG_M:M (M is bit 5),
11160 the corresponding D register number is REG_M:M / 2, which
11162 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11163 /* The second S register number is REG_M:M + 1, the
11164 corresponding D register number is (REG_M:M + 1) / 2.
11165 IOW, if bit M is 1, the first and second S registers
11166 are mapped to different D registers, otherwise, they are
11167 in the same D register. */
11170 record_buf[arm_insn_r->reg_rec_count++]
11171 = ARM_D0_REGNUM + reg_m + 1;
11176 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11177 arm_insn_r->reg_rec_count = 1;
11181 /* Handle VSTM and VPUSH instructions. */
11182 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11183 || op_vldm_vstm == 0x12)
11185 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11186 uint32_t memory_index = 0;
11188 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11189 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11190 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11191 imm_off32 = imm_off8 << 2;
11192 memory_count = imm_off8;
11194 if (bit (arm_insn_r->arm_insn, 23))
11195 start_address = u_regval;
11197 start_address = u_regval - imm_off32;
11199 if (bit (arm_insn_r->arm_insn, 21))
11201 record_buf[0] = reg_rn;
11202 arm_insn_r->reg_rec_count = 1;
11205 while (memory_count > 0)
11209 record_buf_mem[memory_index] = 4;
11210 record_buf_mem[memory_index + 1] = start_address;
11211 start_address = start_address + 4;
11212 memory_index = memory_index + 2;
11216 record_buf_mem[memory_index] = 4;
11217 record_buf_mem[memory_index + 1] = start_address;
11218 record_buf_mem[memory_index + 2] = 4;
11219 record_buf_mem[memory_index + 3] = start_address + 4;
11220 start_address = start_address + 8;
11221 memory_index = memory_index + 4;
11225 arm_insn_r->mem_rec_count = (memory_index >> 1);
11227 /* Handle VLDM instructions. */
11228 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11229 || op_vldm_vstm == 0x13)
11231 uint32_t reg_count, reg_vd;
11232 uint32_t reg_index = 0;
11233 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11235 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11236 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11238 /* REG_VD is the first D register number. If the instruction
11239 loads memory to S registers (SINGLE_REG is TRUE), the register
11240 number is (REG_VD << 1 | bit D), so the corresponding D
11241 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11243 reg_vd = reg_vd | (bit_d << 4);
11245 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11246 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11248 /* If the instruction loads memory to D register, REG_COUNT should
11249 be divided by 2, according to the ARM Architecture Reference
11250 Manual. If the instruction loads memory to S register, divide by
11251 2 as well because two S registers are mapped to D register. */
11252 reg_count = reg_count / 2;
11253 if (single_reg && bit_d)
11255 /* Increase the register count if S register list starts from
11256 an odd number (bit d is one). */
11260 while (reg_count > 0)
11262 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11265 arm_insn_r->reg_rec_count = reg_index;
11267 /* VSTR Vector store register. */
11268 else if ((opcode & 0x13) == 0x10)
11270 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11271 uint32_t memory_index = 0;
11273 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11274 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11275 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11276 imm_off32 = imm_off8 << 2;
11278 if (bit (arm_insn_r->arm_insn, 23))
11279 start_address = u_regval + imm_off32;
11281 start_address = u_regval - imm_off32;
11285 record_buf_mem[memory_index] = 4;
11286 record_buf_mem[memory_index + 1] = start_address;
11287 arm_insn_r->mem_rec_count = 1;
11291 record_buf_mem[memory_index] = 4;
11292 record_buf_mem[memory_index + 1] = start_address;
11293 record_buf_mem[memory_index + 2] = 4;
11294 record_buf_mem[memory_index + 3] = start_address + 4;
11295 arm_insn_r->mem_rec_count = 2;
11298 /* VLDR Vector load register. */
11299 else if ((opcode & 0x13) == 0x11)
11301 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11305 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11306 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11310 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11311 /* Record register D rather than pseudo register S. */
11312 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11314 arm_insn_r->reg_rec_count = 1;
11317 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11318 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11322 /* Record handler for arm/thumb mode VFP data processing instructions. */
11325 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11327 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11328 uint32_t record_buf[4];
11329 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11330 enum insn_types curr_insn_type = INSN_INV;
11332 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11333 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11334 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11335 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11336 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11337 bit_d = bit (arm_insn_r->arm_insn, 22);
11338 opc1 = opc1 & 0x04;
11340 /* Handle VMLA, VMLS. */
11343 if (bit (arm_insn_r->arm_insn, 10))
11345 if (bit (arm_insn_r->arm_insn, 6))
11346 curr_insn_type = INSN_T0;
11348 curr_insn_type = INSN_T1;
11353 curr_insn_type = INSN_T1;
11355 curr_insn_type = INSN_T2;
11358 /* Handle VNMLA, VNMLS, VNMUL. */
11359 else if (opc1 == 0x01)
11362 curr_insn_type = INSN_T1;
11364 curr_insn_type = INSN_T2;
11367 else if (opc1 == 0x02 && !(opc3 & 0x01))
11369 if (bit (arm_insn_r->arm_insn, 10))
11371 if (bit (arm_insn_r->arm_insn, 6))
11372 curr_insn_type = INSN_T0;
11374 curr_insn_type = INSN_T1;
11379 curr_insn_type = INSN_T1;
11381 curr_insn_type = INSN_T2;
11384 /* Handle VADD, VSUB. */
11385 else if (opc1 == 0x03)
11387 if (!bit (arm_insn_r->arm_insn, 9))
11389 if (bit (arm_insn_r->arm_insn, 6))
11390 curr_insn_type = INSN_T0;
11392 curr_insn_type = INSN_T1;
11397 curr_insn_type = INSN_T1;
11399 curr_insn_type = INSN_T2;
11403 else if (opc1 == 0x0b)
11406 curr_insn_type = INSN_T1;
11408 curr_insn_type = INSN_T2;
11410 /* Handle all other vfp data processing instructions. */
11411 else if (opc1 == 0x0b)
11414 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11416 if (bit (arm_insn_r->arm_insn, 4))
11418 if (bit (arm_insn_r->arm_insn, 6))
11419 curr_insn_type = INSN_T0;
11421 curr_insn_type = INSN_T1;
11426 curr_insn_type = INSN_T1;
11428 curr_insn_type = INSN_T2;
11431 /* Handle VNEG and VABS. */
11432 else if ((opc2 == 0x01 && opc3 == 0x01)
11433 || (opc2 == 0x00 && opc3 == 0x03))
11435 if (!bit (arm_insn_r->arm_insn, 11))
11437 if (bit (arm_insn_r->arm_insn, 6))
11438 curr_insn_type = INSN_T0;
11440 curr_insn_type = INSN_T1;
11445 curr_insn_type = INSN_T1;
11447 curr_insn_type = INSN_T2;
11450 /* Handle VSQRT. */
11451 else if (opc2 == 0x01 && opc3 == 0x03)
11454 curr_insn_type = INSN_T1;
11456 curr_insn_type = INSN_T2;
11459 else if (opc2 == 0x07 && opc3 == 0x03)
11462 curr_insn_type = INSN_T1;
11464 curr_insn_type = INSN_T2;
11466 else if (opc3 & 0x01)
11469 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11471 if (!bit (arm_insn_r->arm_insn, 18))
11472 curr_insn_type = INSN_T2;
11476 curr_insn_type = INSN_T1;
11478 curr_insn_type = INSN_T2;
11482 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11485 curr_insn_type = INSN_T1;
11487 curr_insn_type = INSN_T2;
11489 /* Handle VCVTB, VCVTT. */
11490 else if ((opc2 & 0x0e) == 0x02)
11491 curr_insn_type = INSN_T2;
11492 /* Handle VCMP, VCMPE. */
11493 else if ((opc2 & 0x0e) == 0x04)
11494 curr_insn_type = INSN_T3;
11498 switch (curr_insn_type)
11501 reg_vd = reg_vd | (bit_d << 4);
11502 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11503 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11504 arm_insn_r->reg_rec_count = 2;
11508 reg_vd = reg_vd | (bit_d << 4);
11509 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11510 arm_insn_r->reg_rec_count = 1;
11514 reg_vd = (reg_vd << 1) | bit_d;
11515 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11516 arm_insn_r->reg_rec_count = 1;
11520 record_buf[0] = ARM_FPSCR_REGNUM;
11521 arm_insn_r->reg_rec_count = 1;
11525 gdb_assert_not_reached ("no decoding pattern found");
11529 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11533 /* Handling opcode 110 insns. */
11536 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11538 uint32_t op1, op1_ebit, coproc;
11540 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11541 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11542 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11544 if ((coproc & 0x0e) == 0x0a)
11546 /* Handle extension register ld/st instructions. */
11548 return arm_record_exreg_ld_st_insn (arm_insn_r);
11550 /* 64-bit transfers between arm core and extension registers. */
11551 if ((op1 & 0x3e) == 0x04)
11552 return arm_record_exreg_ld_st_insn (arm_insn_r);
11556 /* Handle coprocessor ld/st instructions. */
11561 return arm_record_unsupported_insn (arm_insn_r);
11564 return arm_record_unsupported_insn (arm_insn_r);
11567 /* Move to coprocessor from two arm core registers. */
11569 return arm_record_unsupported_insn (arm_insn_r);
11571 /* Move to two arm core registers from coprocessor. */
11576 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11577 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11578 arm_insn_r->reg_rec_count = 2;
11580 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11584 return arm_record_unsupported_insn (arm_insn_r);
11587 /* Handling opcode 111 insns. */
11590 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11592 uint32_t op, op1_sbit, op1_ebit, coproc;
11593 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11594 struct regcache *reg_cache = arm_insn_r->regcache;
11596 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11597 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11598 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11599 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11600 op = bit (arm_insn_r->arm_insn, 4);
11602 /* Handle arm SWI/SVC system call instructions. */
11605 if (tdep->arm_syscall_record != NULL)
11607 ULONGEST svc_operand, svc_number;
11609 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11611 if (svc_operand) /* OABI. */
11612 svc_number = svc_operand - 0x900000;
11614 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11616 return tdep->arm_syscall_record (reg_cache, svc_number);
11620 printf_unfiltered (_("no syscall record support\n"));
11625 if ((coproc & 0x0e) == 0x0a)
11627 /* VFP data-processing instructions. */
11628 if (!op1_sbit && !op)
11629 return arm_record_vfp_data_proc_insn (arm_insn_r);
11631 /* Advanced SIMD, VFP instructions. */
11632 if (!op1_sbit && op)
11633 return arm_record_vdata_transfer_insn (arm_insn_r);
11637 /* Coprocessor data operations. */
11638 if (!op1_sbit && !op)
11639 return arm_record_unsupported_insn (arm_insn_r);
11641 /* Move to Coprocessor from ARM core register. */
11642 if (!op1_sbit && !op1_ebit && op)
11643 return arm_record_unsupported_insn (arm_insn_r);
11645 /* Move to arm core register from coprocessor. */
11646 if (!op1_sbit && op1_ebit && op)
11648 uint32_t record_buf[1];
11650 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11651 if (record_buf[0] == 15)
11652 record_buf[0] = ARM_PS_REGNUM;
11654 arm_insn_r->reg_rec_count = 1;
11655 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11661 return arm_record_unsupported_insn (arm_insn_r);
11664 /* Handling opcode 000 insns. */
11667 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11669 uint32_t record_buf[8];
11670 uint32_t reg_src1 = 0;
11672 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11674 record_buf[0] = ARM_PS_REGNUM;
11675 record_buf[1] = reg_src1;
11676 thumb_insn_r->reg_rec_count = 2;
11678 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11684 /* Handling opcode 001 insns. */
11687 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11689 uint32_t record_buf[8];
11690 uint32_t reg_src1 = 0;
11692 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11694 record_buf[0] = ARM_PS_REGNUM;
11695 record_buf[1] = reg_src1;
11696 thumb_insn_r->reg_rec_count = 2;
11698 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11703 /* Handling opcode 010 insns. */
11706 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11708 struct regcache *reg_cache = thumb_insn_r->regcache;
11709 uint32_t record_buf[8], record_buf_mem[8];
11711 uint32_t reg_src1 = 0, reg_src2 = 0;
11712 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11714 ULONGEST u_regval[2] = {0};
11716 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11718 if (bit (thumb_insn_r->arm_insn, 12))
11720 /* Handle load/store register offset. */
11721 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11723 if (opB >= 4 && opB <= 7)
11725 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11726 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11727 record_buf[0] = reg_src1;
11728 thumb_insn_r->reg_rec_count = 1;
11730 else if (opB >= 0 && opB <= 2)
11732 /* STR(2), STRB(2), STRH(2) . */
11733 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11734 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11735 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11736 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11738 record_buf_mem[0] = 4; /* STR (2). */
11740 record_buf_mem[0] = 1; /* STRB (2). */
11742 record_buf_mem[0] = 2; /* STRH (2). */
11743 record_buf_mem[1] = u_regval[0] + u_regval[1];
11744 thumb_insn_r->mem_rec_count = 1;
11747 else if (bit (thumb_insn_r->arm_insn, 11))
11749 /* Handle load from literal pool. */
11751 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11752 record_buf[0] = reg_src1;
11753 thumb_insn_r->reg_rec_count = 1;
11757 /* Special data instructions and branch and exchange */
11758 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11759 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11760 if ((3 == opcode2) && (!opcode3))
11762 /* Branch with exchange. */
11763 record_buf[0] = ARM_PS_REGNUM;
11764 thumb_insn_r->reg_rec_count = 1;
11768 /* Format 8; special data processing insns. */
11769 record_buf[0] = ARM_PS_REGNUM;
11770 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11771 | bits (thumb_insn_r->arm_insn, 0, 2));
11772 thumb_insn_r->reg_rec_count = 2;
11777 /* Format 5; data processing insns. */
11778 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11779 if (bit (thumb_insn_r->arm_insn, 7))
11781 reg_src1 = reg_src1 + 8;
11783 record_buf[0] = ARM_PS_REGNUM;
11784 record_buf[1] = reg_src1;
11785 thumb_insn_r->reg_rec_count = 2;
11788 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11789 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11795 /* Handling opcode 001 insns. */
11798 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11800 struct regcache *reg_cache = thumb_insn_r->regcache;
11801 uint32_t record_buf[8], record_buf_mem[8];
11803 uint32_t reg_src1 = 0;
11804 uint32_t opcode = 0, immed_5 = 0;
11806 ULONGEST u_regval = 0;
11808 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11813 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11814 record_buf[0] = reg_src1;
11815 thumb_insn_r->reg_rec_count = 1;
11820 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11821 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11822 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11823 record_buf_mem[0] = 4;
11824 record_buf_mem[1] = u_regval + (immed_5 * 4);
11825 thumb_insn_r->mem_rec_count = 1;
11828 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11829 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11835 /* Handling opcode 100 insns. */
11838 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11840 struct regcache *reg_cache = thumb_insn_r->regcache;
11841 uint32_t record_buf[8], record_buf_mem[8];
11843 uint32_t reg_src1 = 0;
11844 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11846 ULONGEST u_regval = 0;
11848 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11853 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11854 record_buf[0] = reg_src1;
11855 thumb_insn_r->reg_rec_count = 1;
11857 else if (1 == opcode)
11860 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11861 record_buf[0] = reg_src1;
11862 thumb_insn_r->reg_rec_count = 1;
11864 else if (2 == opcode)
11867 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11868 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11869 record_buf_mem[0] = 4;
11870 record_buf_mem[1] = u_regval + (immed_8 * 4);
11871 thumb_insn_r->mem_rec_count = 1;
11873 else if (0 == opcode)
11876 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11877 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11878 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11879 record_buf_mem[0] = 2;
11880 record_buf_mem[1] = u_regval + (immed_5 * 2);
11881 thumb_insn_r->mem_rec_count = 1;
11884 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11885 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11891 /* Handling opcode 101 insns. */
11894 thumb_record_misc (insn_decode_record *thumb_insn_r)
11896 struct regcache *reg_cache = thumb_insn_r->regcache;
11898 uint32_t opcode = 0;
11899 uint32_t register_bits = 0, register_count = 0;
11900 uint32_t index = 0, start_address = 0;
11901 uint32_t record_buf[24], record_buf_mem[48];
11904 ULONGEST u_regval = 0;
11906 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11908 if (opcode == 0 || opcode == 1)
11910 /* ADR and ADD (SP plus immediate) */
11912 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11913 record_buf[0] = reg_src1;
11914 thumb_insn_r->reg_rec_count = 1;
11918 /* Miscellaneous 16-bit instructions */
11919 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11924 /* SETEND and CPS */
11927 /* ADD/SUB (SP plus immediate) */
11928 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11929 record_buf[0] = ARM_SP_REGNUM;
11930 thumb_insn_r->reg_rec_count = 1;
11932 case 1: /* fall through */
11933 case 3: /* fall through */
11934 case 9: /* fall through */
11939 /* SXTH, SXTB, UXTH, UXTB */
11940 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11941 thumb_insn_r->reg_rec_count = 1;
11943 case 4: /* fall through */
11946 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11947 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11948 while (register_bits)
11950 if (register_bits & 0x00000001)
11952 register_bits = register_bits >> 1;
11954 start_address = u_regval - \
11955 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11956 thumb_insn_r->mem_rec_count = register_count;
11957 while (register_count)
11959 record_buf_mem[(register_count * 2) - 1] = start_address;
11960 record_buf_mem[(register_count * 2) - 2] = 4;
11961 start_address = start_address + 4;
11964 record_buf[0] = ARM_SP_REGNUM;
11965 thumb_insn_r->reg_rec_count = 1;
11968 /* REV, REV16, REVSH */
11969 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11970 thumb_insn_r->reg_rec_count = 1;
11972 case 12: /* fall through */
11975 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11976 while (register_bits)
11978 if (register_bits & 0x00000001)
11979 record_buf[index++] = register_count;
11980 register_bits = register_bits >> 1;
11983 record_buf[index++] = ARM_PS_REGNUM;
11984 record_buf[index++] = ARM_SP_REGNUM;
11985 thumb_insn_r->reg_rec_count = index;
11989 /* Handle enhanced software breakpoint insn, BKPT. */
11990 /* CPSR is changed to be executed in ARM state, disabling normal
11991 interrupts, entering abort mode. */
11992 /* According to high vector configuration PC is set. */
11993 /* User hits breakpoint and type reverse, in that case, we need to go back with
11994 previous CPSR and Program Counter. */
11995 record_buf[0] = ARM_PS_REGNUM;
11996 record_buf[1] = ARM_LR_REGNUM;
11997 thumb_insn_r->reg_rec_count = 2;
11998 /* We need to save SPSR value, which is not yet done. */
11999 printf_unfiltered (_("Process record does not support instruction "
12000 "0x%0x at address %s.\n"),
12001 thumb_insn_r->arm_insn,
12002 paddress (thumb_insn_r->gdbarch,
12003 thumb_insn_r->this_addr));
12007 /* If-Then, and hints */
12014 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12015 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12021 /* Handling opcode 110 insns. */
12024 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12026 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12027 struct regcache *reg_cache = thumb_insn_r->regcache;
12029 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12030 uint32_t reg_src1 = 0;
12031 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12032 uint32_t index = 0, start_address = 0;
12033 uint32_t record_buf[24], record_buf_mem[48];
12035 ULONGEST u_regval = 0;
12037 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12038 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12044 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12046 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12047 while (register_bits)
12049 if (register_bits & 0x00000001)
12050 record_buf[index++] = register_count;
12051 register_bits = register_bits >> 1;
12054 record_buf[index++] = reg_src1;
12055 thumb_insn_r->reg_rec_count = index;
12057 else if (0 == opcode2)
12059 /* It handles both STMIA. */
12060 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12062 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12063 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12064 while (register_bits)
12066 if (register_bits & 0x00000001)
12068 register_bits = register_bits >> 1;
12070 start_address = u_regval;
12071 thumb_insn_r->mem_rec_count = register_count;
12072 while (register_count)
12074 record_buf_mem[(register_count * 2) - 1] = start_address;
12075 record_buf_mem[(register_count * 2) - 2] = 4;
12076 start_address = start_address + 4;
12080 else if (0x1F == opcode1)
12082 /* Handle arm syscall insn. */
12083 if (tdep->arm_syscall_record != NULL)
12085 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12086 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12090 printf_unfiltered (_("no syscall record support\n"));
12095 /* B (1), conditional branch is automatically taken care in process_record,
12096 as PC is saved there. */
12098 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12099 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12105 /* Handling opcode 111 insns. */
12108 thumb_record_branch (insn_decode_record *thumb_insn_r)
12110 uint32_t record_buf[8];
12111 uint32_t bits_h = 0;
12113 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12115 if (2 == bits_h || 3 == bits_h)
12118 record_buf[0] = ARM_LR_REGNUM;
12119 thumb_insn_r->reg_rec_count = 1;
12121 else if (1 == bits_h)
12124 record_buf[0] = ARM_PS_REGNUM;
12125 record_buf[1] = ARM_LR_REGNUM;
12126 thumb_insn_r->reg_rec_count = 2;
12129 /* B(2) is automatically taken care in process_record, as PC is
12132 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12137 /* Handler for thumb2 load/store multiple instructions. */
12140 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12142 struct regcache *reg_cache = thumb2_insn_r->regcache;
12144 uint32_t reg_rn, op;
12145 uint32_t register_bits = 0, register_count = 0;
12146 uint32_t index = 0, start_address = 0;
12147 uint32_t record_buf[24], record_buf_mem[48];
12149 ULONGEST u_regval = 0;
12151 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12152 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12154 if (0 == op || 3 == op)
12156 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12158 /* Handle RFE instruction. */
12159 record_buf[0] = ARM_PS_REGNUM;
12160 thumb2_insn_r->reg_rec_count = 1;
12164 /* Handle SRS instruction after reading banked SP. */
12165 return arm_record_unsupported_insn (thumb2_insn_r);
12168 else if (1 == op || 2 == op)
12170 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12172 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12173 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12174 while (register_bits)
12176 if (register_bits & 0x00000001)
12177 record_buf[index++] = register_count;
12180 register_bits = register_bits >> 1;
12182 record_buf[index++] = reg_rn;
12183 record_buf[index++] = ARM_PS_REGNUM;
12184 thumb2_insn_r->reg_rec_count = index;
12188 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12189 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12190 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12191 while (register_bits)
12193 if (register_bits & 0x00000001)
12196 register_bits = register_bits >> 1;
12201 /* Start address calculation for LDMDB/LDMEA. */
12202 start_address = u_regval;
12206 /* Start address calculation for LDMDB/LDMEA. */
12207 start_address = u_regval - register_count * 4;
12210 thumb2_insn_r->mem_rec_count = register_count;
12211 while (register_count)
12213 record_buf_mem[register_count * 2 - 1] = start_address;
12214 record_buf_mem[register_count * 2 - 2] = 4;
12215 start_address = start_address + 4;
12218 record_buf[0] = reg_rn;
12219 record_buf[1] = ARM_PS_REGNUM;
12220 thumb2_insn_r->reg_rec_count = 2;
12224 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12226 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12228 return ARM_RECORD_SUCCESS;
12231 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12235 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12237 struct regcache *reg_cache = thumb2_insn_r->regcache;
12239 uint32_t reg_rd, reg_rn, offset_imm;
12240 uint32_t reg_dest1, reg_dest2;
12241 uint32_t address, offset_addr;
12242 uint32_t record_buf[8], record_buf_mem[8];
12243 uint32_t op1, op2, op3;
12245 ULONGEST u_regval[2];
12247 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12248 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12249 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12251 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12253 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12255 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12256 record_buf[0] = reg_dest1;
12257 record_buf[1] = ARM_PS_REGNUM;
12258 thumb2_insn_r->reg_rec_count = 2;
12261 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12263 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12264 record_buf[2] = reg_dest2;
12265 thumb2_insn_r->reg_rec_count = 3;
12270 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12271 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12273 if (0 == op1 && 0 == op2)
12275 /* Handle STREX. */
12276 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12277 address = u_regval[0] + (offset_imm * 4);
12278 record_buf_mem[0] = 4;
12279 record_buf_mem[1] = address;
12280 thumb2_insn_r->mem_rec_count = 1;
12281 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12282 record_buf[0] = reg_rd;
12283 thumb2_insn_r->reg_rec_count = 1;
12285 else if (1 == op1 && 0 == op2)
12287 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12288 record_buf[0] = reg_rd;
12289 thumb2_insn_r->reg_rec_count = 1;
12290 address = u_regval[0];
12291 record_buf_mem[1] = address;
12295 /* Handle STREXB. */
12296 record_buf_mem[0] = 1;
12297 thumb2_insn_r->mem_rec_count = 1;
12301 /* Handle STREXH. */
12302 record_buf_mem[0] = 2 ;
12303 thumb2_insn_r->mem_rec_count = 1;
12307 /* Handle STREXD. */
12308 address = u_regval[0];
12309 record_buf_mem[0] = 4;
12310 record_buf_mem[2] = 4;
12311 record_buf_mem[3] = address + 4;
12312 thumb2_insn_r->mem_rec_count = 2;
12317 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12319 if (bit (thumb2_insn_r->arm_insn, 24))
12321 if (bit (thumb2_insn_r->arm_insn, 23))
12322 offset_addr = u_regval[0] + (offset_imm * 4);
12324 offset_addr = u_regval[0] - (offset_imm * 4);
12326 address = offset_addr;
12329 address = u_regval[0];
12331 record_buf_mem[0] = 4;
12332 record_buf_mem[1] = address;
12333 record_buf_mem[2] = 4;
12334 record_buf_mem[3] = address + 4;
12335 thumb2_insn_r->mem_rec_count = 2;
12336 record_buf[0] = reg_rn;
12337 thumb2_insn_r->reg_rec_count = 1;
12341 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12343 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12345 return ARM_RECORD_SUCCESS;
12348 /* Handler for thumb2 data processing (shift register and modified immediate)
12352 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12354 uint32_t reg_rd, op;
12355 uint32_t record_buf[8];
12357 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12358 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12360 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12362 record_buf[0] = ARM_PS_REGNUM;
12363 thumb2_insn_r->reg_rec_count = 1;
12367 record_buf[0] = reg_rd;
12368 record_buf[1] = ARM_PS_REGNUM;
12369 thumb2_insn_r->reg_rec_count = 2;
12372 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12374 return ARM_RECORD_SUCCESS;
12377 /* Generic handler for thumb2 instructions which effect destination and PS
12381 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12384 uint32_t record_buf[8];
12386 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12388 record_buf[0] = reg_rd;
12389 record_buf[1] = ARM_PS_REGNUM;
12390 thumb2_insn_r->reg_rec_count = 2;
12392 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12394 return ARM_RECORD_SUCCESS;
12397 /* Handler for thumb2 branch and miscellaneous control instructions. */
12400 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12402 uint32_t op, op1, op2;
12403 uint32_t record_buf[8];
12405 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12406 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12407 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12409 /* Handle MSR insn. */
12410 if (!(op1 & 0x2) && 0x38 == op)
12414 /* CPSR is going to be changed. */
12415 record_buf[0] = ARM_PS_REGNUM;
12416 thumb2_insn_r->reg_rec_count = 1;
12420 arm_record_unsupported_insn(thumb2_insn_r);
12424 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12427 record_buf[0] = ARM_PS_REGNUM;
12428 record_buf[1] = ARM_LR_REGNUM;
12429 thumb2_insn_r->reg_rec_count = 2;
12432 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12434 return ARM_RECORD_SUCCESS;
12437 /* Handler for thumb2 store single data item instructions. */
12440 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12442 struct regcache *reg_cache = thumb2_insn_r->regcache;
12444 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12445 uint32_t address, offset_addr;
12446 uint32_t record_buf[8], record_buf_mem[8];
12449 ULONGEST u_regval[2];
12451 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12452 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12453 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12454 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12456 if (bit (thumb2_insn_r->arm_insn, 23))
12459 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12460 offset_addr = u_regval[0] + offset_imm;
12461 address = offset_addr;
12466 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12468 /* Handle STRB (register). */
12469 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12470 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12471 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12472 offset_addr = u_regval[1] << shift_imm;
12473 address = u_regval[0] + offset_addr;
12477 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12478 if (bit (thumb2_insn_r->arm_insn, 10))
12480 if (bit (thumb2_insn_r->arm_insn, 9))
12481 offset_addr = u_regval[0] + offset_imm;
12483 offset_addr = u_regval[0] - offset_imm;
12485 address = offset_addr;
12488 address = u_regval[0];
12494 /* Store byte instructions. */
12497 record_buf_mem[0] = 1;
12499 /* Store half word instructions. */
12502 record_buf_mem[0] = 2;
12504 /* Store word instructions. */
12507 record_buf_mem[0] = 4;
12511 gdb_assert_not_reached ("no decoding pattern found");
12515 record_buf_mem[1] = address;
12516 thumb2_insn_r->mem_rec_count = 1;
12517 record_buf[0] = reg_rn;
12518 thumb2_insn_r->reg_rec_count = 1;
12520 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12522 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12524 return ARM_RECORD_SUCCESS;
12527 /* Handler for thumb2 load memory hints instructions. */
12530 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12532 uint32_t record_buf[8];
12533 uint32_t reg_rt, reg_rn;
12535 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12536 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12538 if (ARM_PC_REGNUM != reg_rt)
12540 record_buf[0] = reg_rt;
12541 record_buf[1] = reg_rn;
12542 record_buf[2] = ARM_PS_REGNUM;
12543 thumb2_insn_r->reg_rec_count = 3;
12545 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12547 return ARM_RECORD_SUCCESS;
12550 return ARM_RECORD_FAILURE;
12553 /* Handler for thumb2 load word instructions. */
12556 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12558 uint32_t record_buf[8];
12560 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12561 record_buf[1] = ARM_PS_REGNUM;
12562 thumb2_insn_r->reg_rec_count = 2;
12564 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12566 return ARM_RECORD_SUCCESS;
12569 /* Handler for thumb2 long multiply, long multiply accumulate, and
12570 divide instructions. */
12573 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12575 uint32_t opcode1 = 0, opcode2 = 0;
12576 uint32_t record_buf[8];
12578 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12579 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12581 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12583 /* Handle SMULL, UMULL, SMULAL. */
12584 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12585 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12586 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12587 record_buf[2] = ARM_PS_REGNUM;
12588 thumb2_insn_r->reg_rec_count = 3;
12590 else if (1 == opcode1 || 3 == opcode2)
12592 /* Handle SDIV and UDIV. */
12593 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12594 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12595 record_buf[2] = ARM_PS_REGNUM;
12596 thumb2_insn_r->reg_rec_count = 3;
12599 return ARM_RECORD_FAILURE;
12601 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12603 return ARM_RECORD_SUCCESS;
12606 /* Record handler for thumb32 coprocessor instructions. */
12609 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12611 if (bit (thumb2_insn_r->arm_insn, 25))
12612 return arm_record_coproc_data_proc (thumb2_insn_r);
12614 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12617 /* Record handler for advance SIMD structure load/store instructions. */
12620 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12622 struct regcache *reg_cache = thumb2_insn_r->regcache;
12623 uint32_t l_bit, a_bit, b_bits;
12624 uint32_t record_buf[128], record_buf_mem[128];
12625 uint32_t reg_rn, reg_vd, address, f_elem;
12626 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12629 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12630 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12631 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12632 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12633 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12634 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12635 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12636 f_elem = 8 / f_ebytes;
12640 ULONGEST u_regval = 0;
12641 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12642 address = u_regval;
12647 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12649 if (b_bits == 0x07)
12651 else if (b_bits == 0x0a)
12653 else if (b_bits == 0x06)
12655 else if (b_bits == 0x02)
12660 for (index_r = 0; index_r < bf_regs; index_r++)
12662 for (index_e = 0; index_e < f_elem; index_e++)
12664 record_buf_mem[index_m++] = f_ebytes;
12665 record_buf_mem[index_m++] = address;
12666 address = address + f_ebytes;
12667 thumb2_insn_r->mem_rec_count += 1;
12672 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12674 if (b_bits == 0x09 || b_bits == 0x08)
12676 else if (b_bits == 0x03)
12681 for (index_r = 0; index_r < bf_regs; index_r++)
12682 for (index_e = 0; index_e < f_elem; index_e++)
12684 for (loop_t = 0; loop_t < 2; loop_t++)
12686 record_buf_mem[index_m++] = f_ebytes;
12687 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12688 thumb2_insn_r->mem_rec_count += 1;
12690 address = address + (2 * f_ebytes);
12694 else if ((b_bits & 0x0e) == 0x04)
12696 for (index_e = 0; index_e < f_elem; index_e++)
12698 for (loop_t = 0; loop_t < 3; loop_t++)
12700 record_buf_mem[index_m++] = f_ebytes;
12701 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12702 thumb2_insn_r->mem_rec_count += 1;
12704 address = address + (3 * f_ebytes);
12708 else if (!(b_bits & 0x0e))
12710 for (index_e = 0; index_e < f_elem; index_e++)
12712 for (loop_t = 0; loop_t < 4; loop_t++)
12714 record_buf_mem[index_m++] = f_ebytes;
12715 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12716 thumb2_insn_r->mem_rec_count += 1;
12718 address = address + (4 * f_ebytes);
12724 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12726 if (bft_size == 0x00)
12728 else if (bft_size == 0x01)
12730 else if (bft_size == 0x02)
12736 if (!(b_bits & 0x0b) || b_bits == 0x08)
12737 thumb2_insn_r->mem_rec_count = 1;
12739 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12740 thumb2_insn_r->mem_rec_count = 2;
12742 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12743 thumb2_insn_r->mem_rec_count = 3;
12745 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12746 thumb2_insn_r->mem_rec_count = 4;
12748 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12750 record_buf_mem[index_m] = f_ebytes;
12751 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12760 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12761 thumb2_insn_r->reg_rec_count = 1;
12763 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12764 thumb2_insn_r->reg_rec_count = 2;
12766 else if ((b_bits & 0x0e) == 0x04)
12767 thumb2_insn_r->reg_rec_count = 3;
12769 else if (!(b_bits & 0x0e))
12770 thumb2_insn_r->reg_rec_count = 4;
12775 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12776 thumb2_insn_r->reg_rec_count = 1;
12778 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12779 thumb2_insn_r->reg_rec_count = 2;
12781 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12782 thumb2_insn_r->reg_rec_count = 3;
12784 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12785 thumb2_insn_r->reg_rec_count = 4;
12787 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12788 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12792 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12794 record_buf[index_r] = reg_rn;
12795 thumb2_insn_r->reg_rec_count += 1;
12798 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12800 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12805 /* Decodes thumb2 instruction type and invokes its record handler. */
12807 static unsigned int
12808 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12810 uint32_t op, op1, op2;
12812 op = bit (thumb2_insn_r->arm_insn, 15);
12813 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12814 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12818 if (!(op2 & 0x64 ))
12820 /* Load/store multiple instruction. */
12821 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12823 else if ((op2 & 0x64) == 0x4)
12825 /* Load/store (dual/exclusive) and table branch instruction. */
12826 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12828 else if ((op2 & 0x60) == 0x20)
12830 /* Data-processing (shifted register). */
12831 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12833 else if (op2 & 0x40)
12835 /* Co-processor instructions. */
12836 return thumb2_record_coproc_insn (thumb2_insn_r);
12839 else if (op1 == 0x02)
12843 /* Branches and miscellaneous control instructions. */
12844 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12846 else if (op2 & 0x20)
12848 /* Data-processing (plain binary immediate) instruction. */
12849 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12853 /* Data-processing (modified immediate). */
12854 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12857 else if (op1 == 0x03)
12859 if (!(op2 & 0x71 ))
12861 /* Store single data item. */
12862 return thumb2_record_str_single_data (thumb2_insn_r);
12864 else if (!((op2 & 0x71) ^ 0x10))
12866 /* Advanced SIMD or structure load/store instructions. */
12867 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12869 else if (!((op2 & 0x67) ^ 0x01))
12871 /* Load byte, memory hints instruction. */
12872 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12874 else if (!((op2 & 0x67) ^ 0x03))
12876 /* Load halfword, memory hints instruction. */
12877 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12879 else if (!((op2 & 0x67) ^ 0x05))
12881 /* Load word instruction. */
12882 return thumb2_record_ld_word (thumb2_insn_r);
12884 else if (!((op2 & 0x70) ^ 0x20))
12886 /* Data-processing (register) instruction. */
12887 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12889 else if (!((op2 & 0x78) ^ 0x30))
12891 /* Multiply, multiply accumulate, abs diff instruction. */
12892 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12894 else if (!((op2 & 0x78) ^ 0x38))
12896 /* Long multiply, long multiply accumulate, and divide. */
12897 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12899 else if (op2 & 0x40)
12901 /* Co-processor instructions. */
12902 return thumb2_record_coproc_insn (thumb2_insn_r);
12910 /* Abstract memory reader. */
12912 class abstract_memory_reader
12915 /* Read LEN bytes of target memory at address MEMADDR, placing the
12916 results in GDB's memory at BUF. Return true on success. */
12918 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12921 /* Instruction reader from real target. */
12923 class instruction_reader : public abstract_memory_reader
12926 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12928 if (target_read_memory (memaddr, buf, len))
12937 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12938 and positive val on fauilure. */
12941 extract_arm_insn (abstract_memory_reader& reader,
12942 insn_decode_record *insn_record, uint32_t insn_size)
12944 gdb_byte buf[insn_size];
12946 memset (&buf[0], 0, insn_size);
12948 if (!reader.read (insn_record->this_addr, buf, insn_size))
12950 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12952 gdbarch_byte_order_for_code (insn_record->gdbarch));
12956 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12958 /* Decode arm/thumb insn depending on condition cods and opcodes; and
12962 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
12963 record_type_t record_type, uint32_t insn_size)
12966 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12968 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12970 arm_record_data_proc_misc_ld_str, /* 000. */
12971 arm_record_data_proc_imm, /* 001. */
12972 arm_record_ld_st_imm_offset, /* 010. */
12973 arm_record_ld_st_reg_offset, /* 011. */
12974 arm_record_ld_st_multiple, /* 100. */
12975 arm_record_b_bl, /* 101. */
12976 arm_record_asimd_vfp_coproc, /* 110. */
12977 arm_record_coproc_data_proc /* 111. */
12980 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12982 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12984 thumb_record_shift_add_sub, /* 000. */
12985 thumb_record_add_sub_cmp_mov, /* 001. */
12986 thumb_record_ld_st_reg_offset, /* 010. */
12987 thumb_record_ld_st_imm_offset, /* 011. */
12988 thumb_record_ld_st_stack, /* 100. */
12989 thumb_record_misc, /* 101. */
12990 thumb_record_ldm_stm_swi, /* 110. */
12991 thumb_record_branch /* 111. */
12994 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12995 uint32_t insn_id = 0;
12997 if (extract_arm_insn (reader, arm_record, insn_size))
13001 printf_unfiltered (_("Process record: error reading memory at "
13002 "addr %s len = %d.\n"),
13003 paddress (arm_record->gdbarch,
13004 arm_record->this_addr), insn_size);
13008 else if (ARM_RECORD == record_type)
13010 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13011 insn_id = bits (arm_record->arm_insn, 25, 27);
13013 if (arm_record->cond == 0xf)
13014 ret = arm_record_extension_space (arm_record);
13017 /* If this insn has fallen into extension space
13018 then we need not decode it anymore. */
13019 ret = arm_handle_insn[insn_id] (arm_record);
13021 if (ret != ARM_RECORD_SUCCESS)
13023 arm_record_unsupported_insn (arm_record);
13027 else if (THUMB_RECORD == record_type)
13029 /* As thumb does not have condition codes, we set negative. */
13030 arm_record->cond = -1;
13031 insn_id = bits (arm_record->arm_insn, 13, 15);
13032 ret = thumb_handle_insn[insn_id] (arm_record);
13033 if (ret != ARM_RECORD_SUCCESS)
13035 arm_record_unsupported_insn (arm_record);
13039 else if (THUMB2_RECORD == record_type)
13041 /* As thumb does not have condition codes, we set negative. */
13042 arm_record->cond = -1;
13044 /* Swap first half of 32bit thumb instruction with second half. */
13045 arm_record->arm_insn
13046 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13048 ret = thumb2_record_decode_insn_handler (arm_record);
13050 if (ret != ARM_RECORD_SUCCESS)
13052 arm_record_unsupported_insn (arm_record);
13058 /* Throw assertion. */
13059 gdb_assert_not_reached ("not a valid instruction, could not decode");
13066 namespace selftests {
13068 /* Provide both 16-bit and 32-bit thumb instructions. */
13070 class instruction_reader_thumb : public abstract_memory_reader
13073 template<size_t SIZE>
13074 instruction_reader_thumb (enum bfd_endian endian,
13075 const uint16_t (&insns)[SIZE])
13076 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13079 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
13081 SELF_CHECK (len == 4 || len == 2);
13082 SELF_CHECK (memaddr % 2 == 0);
13083 SELF_CHECK ((memaddr / 2) < m_insns_size);
13085 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13088 store_unsigned_integer (&buf[2], 2, m_endian,
13089 m_insns[memaddr / 2 + 1]);
13095 enum bfd_endian m_endian;
13096 const uint16_t *m_insns;
13097 size_t m_insns_size;
13101 arm_record_test (void)
13103 struct gdbarch_info info;
13104 gdbarch_info_init (&info);
13105 info.bfd_arch_info = bfd_scan_arch ("arm");
13107 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13109 SELF_CHECK (gdbarch != NULL);
13111 /* 16-bit Thumb instructions. */
13113 insn_decode_record arm_record;
13115 memset (&arm_record, 0, sizeof (insn_decode_record));
13116 arm_record.gdbarch = gdbarch;
13118 static const uint16_t insns[] = {
13119 /* db b2 uxtb r3, r3 */
13121 /* cd 58 ldr r5, [r1, r3] */
13125 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13126 instruction_reader_thumb reader (endian, insns);
13127 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13128 THUMB_INSN_SIZE_BYTES);
13130 SELF_CHECK (ret == 0);
13131 SELF_CHECK (arm_record.mem_rec_count == 0);
13132 SELF_CHECK (arm_record.reg_rec_count == 1);
13133 SELF_CHECK (arm_record.arm_regs[0] == 3);
13135 arm_record.this_addr += 2;
13136 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13137 THUMB_INSN_SIZE_BYTES);
13139 SELF_CHECK (ret == 0);
13140 SELF_CHECK (arm_record.mem_rec_count == 0);
13141 SELF_CHECK (arm_record.reg_rec_count == 1);
13142 SELF_CHECK (arm_record.arm_regs[0] == 5);
13145 /* 32-bit Thumb-2 instructions. */
13147 insn_decode_record arm_record;
13149 memset (&arm_record, 0, sizeof (insn_decode_record));
13150 arm_record.gdbarch = gdbarch;
13152 static const uint16_t insns[] = {
13153 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13157 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13158 instruction_reader_thumb reader (endian, insns);
13159 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13160 THUMB2_INSN_SIZE_BYTES);
13162 SELF_CHECK (ret == 0);
13163 SELF_CHECK (arm_record.mem_rec_count == 0);
13164 SELF_CHECK (arm_record.reg_rec_count == 1);
13165 SELF_CHECK (arm_record.arm_regs[0] == 7);
13168 } // namespace selftests
13169 #endif /* GDB_SELF_TEST */
13171 /* Cleans up local record registers and memory allocations. */
13174 deallocate_reg_mem (insn_decode_record *record)
13176 xfree (record->arm_regs);
13177 xfree (record->arm_mems);
13181 /* Parse the current instruction and record the values of the registers and
13182 memory that will be changed in current instruction to record_arch_list".
13183 Return -1 if something is wrong. */
13186 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13187 CORE_ADDR insn_addr)
13190 uint32_t no_of_rec = 0;
13191 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13192 ULONGEST t_bit = 0, insn_id = 0;
13194 ULONGEST u_regval = 0;
13196 insn_decode_record arm_record;
13198 memset (&arm_record, 0, sizeof (insn_decode_record));
13199 arm_record.regcache = regcache;
13200 arm_record.this_addr = insn_addr;
13201 arm_record.gdbarch = gdbarch;
13204 if (record_debug > 1)
13206 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13208 paddress (gdbarch, arm_record.this_addr));
13211 instruction_reader reader;
13212 if (extract_arm_insn (reader, &arm_record, 2))
13216 printf_unfiltered (_("Process record: error reading memory at "
13217 "addr %s len = %d.\n"),
13218 paddress (arm_record.gdbarch,
13219 arm_record.this_addr), 2);
13224 /* Check the insn, whether it is thumb or arm one. */
13226 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13227 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13230 if (!(u_regval & t_bit))
13232 /* We are decoding arm insn. */
13233 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13237 insn_id = bits (arm_record.arm_insn, 11, 15);
13238 /* is it thumb2 insn? */
13239 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13241 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13242 THUMB2_INSN_SIZE_BYTES);
13246 /* We are decoding thumb insn. */
13247 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13248 THUMB_INSN_SIZE_BYTES);
13254 /* Record registers. */
13255 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13256 if (arm_record.arm_regs)
13258 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13260 if (record_full_arch_list_add_reg
13261 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13265 /* Record memories. */
13266 if (arm_record.arm_mems)
13268 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13270 if (record_full_arch_list_add_mem
13271 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13272 arm_record.arm_mems[no_of_rec].len))
13277 if (record_full_arch_list_add_end ())
13282 deallocate_reg_mem (&arm_record);