1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
58 #include "gdbsupport/vec.h"
61 #include "record-full.h"
65 #include "gdbsupport/selftest.h"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as a Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 struct arm_mapping_symbol
88 bool operator< (const arm_mapping_symbol &other) const
89 { return this->value < other.value; }
92 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
94 struct arm_per_objfile
96 explicit arm_per_objfile (size_t num_sections)
97 : section_maps (new arm_mapping_symbol_vec[num_sections]),
98 section_maps_sorted (new bool[num_sections] ())
101 DISABLE_COPY_AND_ASSIGN (arm_per_objfile);
103 /* Information about mapping symbols ($a, $d, $t) in the objfile.
105 The format is an array of vectors of arm_mapping_symbols; there is one
106 vector for each section of the objfile (the array is indexed by BFD section
109 For each section, the vector of arm_mapping_symbol is sorted by
110 symbol value (address). */
111 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
113 /* For each corresponding element of section_maps above, is this vector
115 std::unique_ptr<bool[]> section_maps_sorted;
118 /* Per-objfile data used for mapping symbols. */
119 static objfile_key<arm_per_objfile> arm_objfile_data_key;
121 /* The list of available "set arm ..." and "show arm ..." commands. */
122 static struct cmd_list_element *setarmcmdlist = NULL;
123 static struct cmd_list_element *showarmcmdlist = NULL;
125 /* The type of floating-point to use. Keep this in sync with enum
126 arm_float_model, and the help string in _initialize_arm_tdep. */
127 static const char *const fp_model_strings[] =
137 /* A variable that can be configured by the user. */
138 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
139 static const char *current_fp_model = "auto";
141 /* The ABI to use. Keep this in sync with arm_abi_kind. */
142 static const char *const arm_abi_strings[] =
150 /* A variable that can be configured by the user. */
151 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
152 static const char *arm_abi_string = "auto";
154 /* The execution mode to assume. */
155 static const char *const arm_mode_strings[] =
163 static const char *arm_fallback_mode_string = "auto";
164 static const char *arm_force_mode_string = "auto";
166 /* The standard register names, and all the valid aliases for them. Note
167 that `fp', `sp' and `pc' are not added in this alias list, because they
168 have been added as builtin user registers in
169 std-regs.c:_initialize_frame_reg. */
174 } arm_register_aliases[] = {
175 /* Basic register numbers. */
192 /* Synonyms (argument and variable registers). */
205 /* Other platform-specific names for r9. */
211 /* Names used by GCC (not listed in the ARM EABI). */
213 /* A special name from the older ATPCS. */
217 static const char *const arm_register_names[] =
218 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
219 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
220 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
221 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
222 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
223 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
224 "fps", "cpsr" }; /* 24 25 */
226 /* Holds the current set of options to be passed to the disassembler. */
227 static char *arm_disassembler_options;
229 /* Valid register name styles. */
230 static const char **valid_disassembly_styles;
232 /* Disassembly style to use. Default to "std" register names. */
233 static const char *disassembly_style;
235 /* All possible arm target descriptors. */
236 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
237 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
239 /* This is used to keep the bfd arch_info in sync with the disassembly
241 static void set_disassembly_style_sfunc (const char *, int,
242 struct cmd_list_element *);
243 static void show_disassembly_style_sfunc (struct ui_file *, int,
244 struct cmd_list_element *,
247 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
248 readable_regcache *regcache,
249 int regnum, gdb_byte *buf);
250 static void arm_neon_quad_write (struct gdbarch *gdbarch,
251 struct regcache *regcache,
252 int regnum, const gdb_byte *buf);
255 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
258 /* get_next_pcs operations. */
259 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
260 arm_get_next_pcs_read_memory_unsigned_integer,
261 arm_get_next_pcs_syscall_next_pc,
262 arm_get_next_pcs_addr_bits_remove,
263 arm_get_next_pcs_is_thumb,
267 struct arm_prologue_cache
269 /* The stack pointer at the time this frame was created; i.e. the
270 caller's stack pointer when this function was called. It is used
271 to identify this frame. */
274 /* The frame base for this frame is just prev_sp - frame size.
275 FRAMESIZE is the distance from the frame pointer to the
276 initial stack pointer. */
280 /* The register used to hold the frame pointer for this frame. */
283 /* Saved register offsets. */
284 struct trad_frame_saved_reg *saved_regs;
287 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
288 CORE_ADDR prologue_start,
289 CORE_ADDR prologue_end,
290 struct arm_prologue_cache *cache);
292 /* Architecture version for displaced stepping. This affects the behaviour of
293 certain instructions, and really should not be hard-wired. */
295 #define DISPLACED_STEPPING_ARCH_VERSION 5
297 /* Set to true if the 32-bit mode is in use. */
301 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
304 arm_psr_thumb_bit (struct gdbarch *gdbarch)
306 if (gdbarch_tdep (gdbarch)->is_m)
312 /* Determine if the processor is currently executing in Thumb mode. */
315 arm_is_thumb (struct regcache *regcache)
318 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
320 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
322 return (cpsr & t_bit) != 0;
325 /* Determine if FRAME is executing in Thumb mode. */
328 arm_frame_is_thumb (struct frame_info *frame)
331 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
333 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
334 directly (from a signal frame or dummy frame) or by interpreting
335 the saved LR (from a prologue or DWARF frame). So consult it and
336 trust the unwinders. */
337 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
339 return (cpsr & t_bit) != 0;
342 /* Search for the mapping symbol covering MEMADDR. If one is found,
343 return its type. Otherwise, return 0. If START is non-NULL,
344 set *START to the location of the mapping symbol. */
347 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
349 struct obj_section *sec;
351 /* If there are mapping symbols, consult them. */
352 sec = find_pc_section (memaddr);
355 arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
358 unsigned int section_idx = sec->the_bfd_section->index;
359 arm_mapping_symbol_vec &map
360 = data->section_maps[section_idx];
362 /* Sort the vector on first use. */
363 if (!data->section_maps_sorted[section_idx])
365 std::sort (map.begin (), map.end ());
366 data->section_maps_sorted[section_idx] = true;
369 struct arm_mapping_symbol map_key
370 = { memaddr - obj_section_addr (sec), 0 };
371 arm_mapping_symbol_vec::const_iterator it
372 = std::lower_bound (map.begin (), map.end (), map_key);
374 /* std::lower_bound finds the earliest ordered insertion
375 point. If the symbol at this position starts at this exact
376 address, we use that; otherwise, the preceding
377 mapping symbol covers this address. */
380 if (it->value == map_key.value)
383 *start = it->value + obj_section_addr (sec);
388 if (it > map.begin ())
390 arm_mapping_symbol_vec::const_iterator prev_it
394 *start = prev_it->value + obj_section_addr (sec);
395 return prev_it->type;
403 /* Determine if the program counter specified in MEMADDR is in a Thumb
404 function. This function should be called for addresses unrelated to
405 any executing frame; otherwise, prefer arm_frame_is_thumb. */
408 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
410 struct bound_minimal_symbol sym;
412 arm_displaced_step_closure *dsc
413 = ((arm_displaced_step_closure *)
414 get_displaced_step_closure_by_addr (memaddr));
416 /* If checking the mode of a displaced instruction in the copy area, the mode
417 should be determined by the instruction at the original address.
421 fprintf_unfiltered (gdb_stdlog,
422 "displaced: check mode of %.8lx instead of %.8lx\n",
423 (unsigned long) dsc->insn_addr,
424 (unsigned long) memaddr);
425 memaddr = dsc->insn_addr;
428 /* If bit 0 of the address is set, assume this is a Thumb address. */
429 if (IS_THUMB_ADDR (memaddr))
432 /* If the user wants to override the symbol table, let them. */
433 if (strcmp (arm_force_mode_string, "arm") == 0)
435 if (strcmp (arm_force_mode_string, "thumb") == 0)
438 /* ARM v6-M and v7-M are always in Thumb mode. */
439 if (gdbarch_tdep (gdbarch)->is_m)
442 /* If there are mapping symbols, consult them. */
443 type = arm_find_mapping_symbol (memaddr, NULL);
447 /* Thumb functions have a "special" bit set in minimal symbols. */
448 sym = lookup_minimal_symbol_by_pc (memaddr);
450 return (MSYMBOL_IS_SPECIAL (sym.minsym));
452 /* If the user wants to override the fallback mode, let them. */
453 if (strcmp (arm_fallback_mode_string, "arm") == 0)
455 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
458 /* If we couldn't find any symbol, but we're talking to a running
459 target, then trust the current value of $cpsr. This lets
460 "display/i $pc" always show the correct mode (though if there is
461 a symbol table we will not reach here, so it still may not be
462 displayed in the mode in which it will be executed). */
463 if (target_has_registers)
464 return arm_frame_is_thumb (get_current_frame ());
466 /* Otherwise we're out of luck; we assume ARM. */
470 /* Determine if the address specified equals any of these magic return
471 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
474 From ARMv6-M Reference Manual B1.5.8
475 Table B1-5 Exception return behavior
477 EXC_RETURN Return To Return Stack
478 0xFFFFFFF1 Handler mode Main
479 0xFFFFFFF9 Thread mode Main
480 0xFFFFFFFD Thread mode Process
482 From ARMv7-M Reference Manual B1.5.8
483 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
485 EXC_RETURN Return To Return Stack
486 0xFFFFFFF1 Handler mode Main
487 0xFFFFFFF9 Thread mode Main
488 0xFFFFFFFD Thread mode Process
490 Table B1-9 EXC_RETURN definition of exception return behavior, with
493 EXC_RETURN Return To Return Stack Frame Type
494 0xFFFFFFE1 Handler mode Main Extended
495 0xFFFFFFE9 Thread mode Main Extended
496 0xFFFFFFED Thread mode Process Extended
497 0xFFFFFFF1 Handler mode Main Basic
498 0xFFFFFFF9 Thread mode Main Basic
499 0xFFFFFFFD Thread mode Process Basic
501 For more details see "B1.5.8 Exception return behavior"
502 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
505 arm_m_addr_is_magic (CORE_ADDR addr)
509 /* Values from the tables in B1.5.8, the EXC_RETURN definitions of
510 the exception return behavior. */
517 /* Address is magic. */
521 /* Address is not magic. */
526 /* Remove useless bits from addresses in a running program. */
528 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
530 /* On M-profile devices, do not strip the low bit from EXC_RETURN
531 (the magic exception return address). */
532 if (gdbarch_tdep (gdbarch)->is_m
533 && arm_m_addr_is_magic (val))
537 return UNMAKE_THUMB_ADDR (val);
539 return (val & 0x03fffffc);
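/* An added note (not in the original source): the 0x03fffffc mask
   reflects the old 26-bit address space, where R15 packed the PSR
   flags into bits 26-31 and the processor mode into bits 0-1, leaving
   only bits 2-25 as the program counter.  */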
542 /* Return 1 if PC is the start of a compiler helper function which
543 can be safely ignored during prologue skipping. IS_THUMB is true
544 if the function is known to be a Thumb function due to the way it
547 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
549 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
550 struct bound_minimal_symbol msym;
552 msym = lookup_minimal_symbol_by_pc (pc);
553 if (msym.minsym != NULL
554 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
555 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
557 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
559 /* The GNU linker's Thumb call stub to foo is named
561 if (strstr (name, "_from_thumb") != NULL)
564 /* On soft-float targets, __truncdfsf2 is called to convert promoted
565 arguments to their argument types in non-prototyped
567 if (startswith (name, "__truncdfsf2"))
569 if (startswith (name, "__aeabi_d2f"))
572 /* Internal functions related to thread-local storage. */
573 if (startswith (name, "__tls_get_addr"))
575 if (startswith (name, "__aeabi_read_tp"))
580 /* If we run against a stripped glibc, we may be unable to identify
581 special functions by name. Check for one important case,
582 __aeabi_read_tp, by comparing the *code* against the default
583 implementation (this is hand-written ARM assembler in glibc). */
586 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
587 == 0xe3e00a0f /* mov r0, #0xffff0fff */
588 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
589 == 0xe240f01f) /* sub pc, r0, #31 */
596 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
597 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
599 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
600 ((bits ((insn1), 0, 3) << 12) \
601 | (bits ((insn1), 10, 10) << 11) \
602 | (bits ((insn2), 12, 14) << 8) \
603 | bits ((insn2), 0, 7))
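/* Illustrative example (added, not from the original source): for the
   Thumb encoding of "movw Rd, #0x1234" the 16-bit immediate is split
   into imm4:i:imm3:imm8 across the two halfwords, and the macro above
   simply reassembles (imm4 << 12) | (i << 11) | (imm3 << 8) | imm8,
   giving back 0x1234.  */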
605 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
606 the 32-bit instruction. */
607 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
608 ((bits ((insn), 16, 19) << 12) \
609 | bits ((insn), 0, 11))
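/* Likewise (added example): in the A (ARM) encoding the immediate is
   imm4:imm12, so "movw r0, #0x1234" carries imm4 = 0x1 in bits 16-19
   and imm12 = 0x234 in bits 0-11.  */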
611 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
614 thumb_expand_immediate (unsigned int imm)
616 unsigned int count = imm >> 7;
624 return (imm & 0xff) | ((imm & 0xff) << 16);
626 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
628 return (imm & 0xff) | ((imm & 0xff) << 8)
629 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
632 return (0x80 | (imm & 0x7f)) << (32 - count);
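/* Worked examples (added for illustration): the 12-bit immediate 0x1ff
   has count == 3, so the switch above returns 0x00ff00ff (the
   "00XY00XY" pattern); 0x4ff has count == 9, so the final return gives
   (0x80 | 0x7f) << 23 == 0x7f800000, matching ROR (0xff, 9) from the
   ThumbExpandImm pseudocode in the ARM ARM.  */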
635 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
636 the epilogue, 0 otherwise. */
639 thumb_instruction_restores_sp (unsigned short insn)
641 return (insn == 0x46bd /* mov sp, r7 */
642 || (insn & 0xff80) == 0xb000 /* add sp, imm */
643 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
646 /* Analyze a Thumb prologue, looking for a recognizable stack frame
647 and frame pointer. Scan until we encounter a store that could
648 clobber the stack frame unexpectedly, or an unknown instruction.
649 Return the last address which is definitely safe to skip for an
650 initial breakpoint. */
653 thumb_analyze_prologue (struct gdbarch *gdbarch,
654 CORE_ADDR start, CORE_ADDR limit,
655 struct arm_prologue_cache *cache)
657 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
658 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
662 CORE_ADDR unrecognized_pc = 0;
664 for (i = 0; i < 16; i++)
665 regs[i] = pv_register (i, 0);
666 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
668 while (start < limit)
672 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
674 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
679 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
682 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
683 whether to save LR (R14). */
684 mask = (insn & 0xff) | ((insn & 0x100) << 6);
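/* Note (added): shifting bit 8 (the "M" bit of the encoding) left by
   six places it at bit 14, so MASK uses the same register numbering as
   the ARM_LR_REGNUM..0 loop below.  For instance, "push {r4, r7, lr}"
   encodes as 0xb590 and yields a mask of 0x4090.  */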
686 /* Calculate offsets of saved R0-R7 and LR. */
687 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
688 if (mask & (1 << regno))
690 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
692 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
695 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
697 offset = (insn & 0x7f) << 2; /* get scaled offset */
698 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
701 else if (thumb_instruction_restores_sp (insn))
703 /* Don't scan past the epilogue. */
706 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
707 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
709 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
710 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
711 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
713 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
714 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
715 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
717 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
718 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
719 && pv_is_constant (regs[bits (insn, 3, 5)]))
720 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
721 regs[bits (insn, 6, 8)]);
722 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
723 && pv_is_constant (regs[bits (insn, 3, 6)]))
725 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
726 int rm = bits (insn, 3, 6);
727 regs[rd] = pv_add (regs[rd], regs[rm]);
729 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
731 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
732 int src_reg = (insn & 0x78) >> 3;
733 regs[dst_reg] = regs[src_reg];
735 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
737 /* Handle stores to the stack. Normally pushes are used,
738 but with GCC -mtpcs-frame, there may be other stores
739 in the prologue to create the frame. */
740 int regno = (insn >> 8) & 0x7;
743 offset = (insn & 0xff) << 2;
744 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
746 if (stack.store_would_trash (addr))
749 stack.store (addr, 4, regs[regno]);
751 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
753 int rd = bits (insn, 0, 2);
754 int rn = bits (insn, 3, 5);
757 offset = bits (insn, 6, 10) << 2;
758 addr = pv_add_constant (regs[rn], offset);
760 if (stack.store_would_trash (addr))
763 stack.store (addr, 4, regs[rd]);
765 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
766 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
768 /* Ignore stores of argument registers to the stack. */
770 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
771 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
772 /* Ignore block loads from the stack, potentially copying
773 parameters from memory. */
775 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
776 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
777 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
778 /* Similarly ignore single loads from the stack. */
780 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
781 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
782 /* Skip register copies, i.e. saves to another register
783 instead of the stack. */
785 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
786 /* Recognize constant loads; even with small stacks these are necessary
788 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
789 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
791 /* Constant pool loads, for the same reason. */
792 unsigned int constant;
795 loc = start + 4 + bits (insn, 0, 7) * 4;
796 constant = read_memory_unsigned_integer (loc, 4, byte_order);
797 regs[bits (insn, 8, 10)] = pv_constant (constant);
799 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
801 unsigned short inst2;
803 inst2 = read_code_unsigned_integer (start + 2, 2,
804 byte_order_for_code);
806 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
808 /* BL, BLX. Allow some special function calls when
809 skipping the prologue; GCC generates these before
810 storing arguments to the stack. */
812 int j1, j2, imm1, imm2;
814 imm1 = sbits (insn, 0, 10);
815 imm2 = bits (inst2, 0, 10);
816 j1 = bit (inst2, 13);
817 j2 = bit (inst2, 11);
819 offset = ((imm1 << 12) + (imm2 << 1));
820 offset ^= ((!j2) << 22) | ((!j1) << 23);
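/* Added note, following the Thumb-2 BL/BLX encoding in the ARM ARM:
   the offset is SignExtend (S:I1:I2:imm10:imm11:'0') with
   I1 = NOT (J1 EOR S) and I2 = NOT (J2 EOR S).  Since sbits () has
   already replicated S into bits 22 and above, XORing bit 22 with !J2
   and bit 23 with !J1 yields I2 and I1 in place.  */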
822 nextpc = start + 4 + offset;
823 /* For BLX make sure to clear the low bits. */
824 if (bit (inst2, 12) == 0)
825 nextpc = nextpc & 0xfffffffc;
827 if (!skip_prologue_function (gdbarch, nextpc,
828 bit (inst2, 12) != 0))
832 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
834 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
836 pv_t addr = regs[bits (insn, 0, 3)];
839 if (stack.store_would_trash (addr))
842 /* Calculate offsets of saved registers. */
843 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
844 if (inst2 & (1 << regno))
846 addr = pv_add_constant (addr, -4);
847 stack.store (addr, 4, regs[regno]);
851 regs[bits (insn, 0, 3)] = addr;
854 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
856 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
858 int regno1 = bits (inst2, 12, 15);
859 int regno2 = bits (inst2, 8, 11);
860 pv_t addr = regs[bits (insn, 0, 3)];
862 offset = inst2 & 0xff;
864 addr = pv_add_constant (addr, offset);
866 addr = pv_add_constant (addr, -offset);
868 if (stack.store_would_trash (addr))
871 stack.store (addr, 4, regs[regno1]);
872 stack.store (pv_add_constant (addr, 4),
876 regs[bits (insn, 0, 3)] = addr;
879 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
880 && (inst2 & 0x0c00) == 0x0c00
881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 int regno = bits (inst2, 12, 15);
884 pv_t addr = regs[bits (insn, 0, 3)];
886 offset = inst2 & 0xff;
888 addr = pv_add_constant (addr, offset);
890 addr = pv_add_constant (addr, -offset);
892 if (stack.store_would_trash (addr))
895 stack.store (addr, 4, regs[regno]);
898 regs[bits (insn, 0, 3)] = addr;
901 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
902 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
904 int regno = bits (inst2, 12, 15);
907 offset = inst2 & 0xfff;
908 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
910 if (stack.store_would_trash (addr))
913 stack.store (addr, 4, regs[regno]);
916 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
917 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
918 /* Ignore stores of argument registers to the stack. */
921 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
922 && (inst2 & 0x0d00) == 0x0c00
923 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
924 /* Ignore stores of argument registers to the stack. */
927 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
929 && (inst2 & 0x8000) == 0x0000
930 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 /* Ignore block loads from the stack, potentially copying
932 parameters from memory. */
935 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
937 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 /* Similarly ignore dual loads from the stack. */
941 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
942 && (inst2 & 0x0d00) == 0x0c00
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Similarly ignore single loads from the stack. */
947 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
948 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 /* Similarly ignore single loads from the stack. */
952 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
953 && (inst2 & 0x8000) == 0x0000)
955 unsigned int imm = ((bits (insn, 10, 10) << 11)
956 | (bits (inst2, 12, 14) << 8)
957 | bits (inst2, 0, 7));
959 regs[bits (inst2, 8, 11)]
960 = pv_add_constant (regs[bits (insn, 0, 3)],
961 thumb_expand_immediate (imm));
964 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
965 && (inst2 & 0x8000) == 0x0000)
967 unsigned int imm = ((bits (insn, 10, 10) << 11)
968 | (bits (inst2, 12, 14) << 8)
969 | bits (inst2, 0, 7));
971 regs[bits (inst2, 8, 11)]
972 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
975 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
976 && (inst2 & 0x8000) == 0x0000)
978 unsigned int imm = ((bits (insn, 10, 10) << 11)
979 | (bits (inst2, 12, 14) << 8)
980 | bits (inst2, 0, 7));
982 regs[bits (inst2, 8, 11)]
983 = pv_add_constant (regs[bits (insn, 0, 3)],
984 - (CORE_ADDR) thumb_expand_immediate (imm));
987 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
988 && (inst2 & 0x8000) == 0x0000)
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
994 regs[bits (inst2, 8, 11)]
995 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
998 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1004 regs[bits (inst2, 8, 11)]
1005 = pv_constant (thumb_expand_immediate (imm));
1008 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1011 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1013 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1016 else if (insn == 0xea5f /* mov.w Rd,Rm */
1017 && (inst2 & 0xf0f0) == 0)
1019 int dst_reg = (inst2 & 0x0f00) >> 8;
1020 int src_reg = inst2 & 0xf;
1021 regs[dst_reg] = regs[src_reg];
1024 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1026 /* Constant pool loads. */
1027 unsigned int constant;
1030 offset = bits (inst2, 0, 11);
1032 loc = start + 4 + offset;
1034 loc = start + 4 - offset;
1036 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1037 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1040 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1042 /* Constant pool loads. */
1043 unsigned int constant;
1046 offset = bits (inst2, 0, 7) << 2;
1048 loc = start + 4 + offset;
1050 loc = start + 4 - offset;
1052 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1053 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1055 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1056 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1059 else if (thumb2_instruction_changes_pc (insn, inst2))
1061 /* Don't scan past anything that might change control flow. */
1066 /* The optimizer might shove anything into the prologue,
1067 so we just skip what we don't recognize. */
1068 unrecognized_pc = start;
1073 else if (thumb_instruction_changes_pc (insn))
1075 /* Don't scan past anything that might change control flow. */
1080 /* The optimizer might shove anything into the prologue,
1081 so we just skip what we don't recognize. */
1082 unrecognized_pc = start;
1089 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1090 paddress (gdbarch, start));
1092 if (unrecognized_pc == 0)
1093 unrecognized_pc = start;
1096 return unrecognized_pc;
1098 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1100 /* Frame pointer is fp. Frame size is constant. */
1101 cache->framereg = ARM_FP_REGNUM;
1102 cache->framesize = -regs[ARM_FP_REGNUM].k;
1104 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1106 /* Frame pointer is r7. Frame size is constant. */
1107 cache->framereg = THUMB_FP_REGNUM;
1108 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1112 /* Try the stack pointer... this is a bit desperate. */
1113 cache->framereg = ARM_SP_REGNUM;
1114 cache->framesize = -regs[ARM_SP_REGNUM].k;
1117 for (i = 0; i < 16; i++)
1118 if (stack.find_reg (gdbarch, i, &offset))
1119 cache->saved_regs[i].addr = offset;
1121 return unrecognized_pc;
1125 /* Try to analyze the instructions starting from PC, which load the symbol
1126 __stack_chk_guard. Return the address of the instruction after this load,
1127 set the destination register number in *DESTREG, and set the size of the
1128 loading instructions in *OFFSET. Return 0 if the instructions are
1132 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1133 unsigned int *destreg, int *offset)
1135 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1136 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1137 unsigned int low, high, address;
1142 unsigned short insn1
1143 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1145 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1147 *destreg = bits (insn1, 8, 10);
1149 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
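/* Added note: for a Thumb "ldr Rd, [pc, #imm]" the load address is
   the aligned PC (instruction address plus 4, rounded down to a word
   boundary) plus four times the 8-bit offset; for the 2-byte aligned
   addresses used here, (pc & 0xfffffffc) + 4 computes exactly that
   base.  */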
1150 address = read_memory_unsigned_integer (address, 4,
1151 byte_order_for_code);
1153 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1155 unsigned short insn2
1156 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1158 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1161 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1163 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1165 /* movt Rd, #const */
1166 if ((insn1 & 0xfbc0) == 0xf2c0)
1168 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1169 *destreg = bits (insn2, 8, 11);
1171 address = (high << 16 | low);
1178 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1180 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1182 address = bits (insn, 0, 11) + pc + 8;
1183 address = read_memory_unsigned_integer (address, 4,
1184 byte_order_for_code);
1186 *destreg = bits (insn, 12, 15);
1189 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1191 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1194 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1196 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1198 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1199 *destreg = bits (insn, 12, 15);
1201 address = (high << 16 | low);
1209 /* Try to skip a sequence of instructions used for stack protector. If PC
1210 points to the first instruction of this sequence, return the address of
1211 the first instruction after this sequence; otherwise, return the original PC.
1213 On ARM, this sequence of instructions is composed of three main steps:
1214 Step 1: load symbol __stack_chk_guard,
1215 Step 2: load from address of __stack_chk_guard,
1216 Step 3: store it to somewhere else.
1218 Usually, the instructions in step 2 and step 3 are the same across ARM
1219 architectures. In step 2, it is a single instruction 'ldr Rx, [Rn, #0]', and
1220 in step 3, it is likewise a single instruction 'str Rx, [r7, #immd]'. However,
1221 the instructions in step 1 vary across ARM architectures. On ARMv7,
1224 movw Rn, #:lower16:__stack_chk_guard
1225 movt Rn, #:upper16:__stack_chk_guard
1232 .word __stack_chk_guard
1234 Since ldr/str are very common instructions, we can't use them alone as the
1235 'fingerprint' or 'signature' of a stack protector sequence. Here we choose the
1236 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1237 stripped, as the 'fingerprint' of a stack protector code sequence. */
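/* An illustrative ARMv7 instance of the whole sequence (hypothetical,
   not taken from any particular compiler output):

     movw  r3, #:lower16:__stack_chk_guard
     movt  r3, #:upper16:__stack_chk_guard   @ step 1
     ldr   r3, [r3, #0]                      @ step 2
     str   r3, [r7, #12]                     @ step 3  */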
1240 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1242 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1243 unsigned int basereg;
1244 struct bound_minimal_symbol stack_chk_guard;
1246 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1249 /* Try to parse the instructions in Step 1. */
1250 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1255 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1256 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1257 Otherwise, this sequence cannot be for the stack protector. */
1258 if (stack_chk_guard.minsym == NULL
1259 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1264 unsigned int destreg;
1266 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1268 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1269 if ((insn & 0xf800) != 0x6800)
1271 if (bits (insn, 3, 5) != basereg)
1273 destreg = bits (insn, 0, 2);
1275 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1276 byte_order_for_code);
1277 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1278 if ((insn & 0xf800) != 0x6000)
1280 if (destreg != bits (insn, 0, 2))
1285 unsigned int destreg;
1287 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1289 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1290 if ((insn & 0x0e500000) != 0x04100000)
1292 if (bits (insn, 16, 19) != basereg)
1294 destreg = bits (insn, 12, 15);
1295 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1296 insn = read_code_unsigned_integer (pc + offset + 4,
1297 4, byte_order_for_code);
1298 if ((insn & 0x0e500000) != 0x04000000)
1300 if (bits (insn, 12, 15) != destreg)
1303 /* The total size of the two ldr/str instructions is 4 on Thumb-2, while 8
1306 return pc + offset + 4;
1308 return pc + offset + 8;
1311 /* Advance the PC across any function entry prologue instructions to
1312 reach some "real" code.
1314 The APCS (ARM Procedure Call Standard) defines the following
1318 [stmfd sp!, {a1,a2,a3,a4}]
1319 stmfd sp!, {...,fp,ip,lr,pc}
1320 [stfe f7, [sp, #-12]!]
1321 [stfe f6, [sp, #-12]!]
1322 [stfe f5, [sp, #-12]!]
1323 [stfe f4, [sp, #-12]!]
1324 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1327 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1329 CORE_ADDR func_addr, limit_pc;
1331 /* See if we can determine the end of the prologue via the symbol table.
1332 If so, then return either PC, or the PC after the prologue, whichever
1334 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1336 CORE_ADDR post_prologue_pc
1337 = skip_prologue_using_sal (gdbarch, func_addr);
1338 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1340 if (post_prologue_pc)
1342 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1345 /* GCC always emits a line note before the prologue and another
1346 one after, even if the two are at the same address or on the
1347 same line. Take advantage of this so that we do not need to
1348 know every instruction that might appear in the prologue. We
1349 will have producer information for most binaries; if it is
1350 missing (e.g. for -gstabs), assume the GNU tools. */
1351 if (post_prologue_pc
1353 || COMPUNIT_PRODUCER (cust) == NULL
1354 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1355 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1356 return post_prologue_pc;
1358 if (post_prologue_pc != 0)
1360 CORE_ADDR analyzed_limit;
1362 /* For non-GCC compilers, make sure the entire line is an
1363 acceptable prologue; GDB will round this function's
1364 return value up to the end of the following line so we
1365 cannot skip just part of a line (and we do not want to).
1367 RealView does not treat the prologue specially, but does
1368 associate prologue code with the opening brace; so this
1369 lets us skip the first line if we think it is the opening
1371 if (arm_pc_is_thumb (gdbarch, func_addr))
1372 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1373 post_prologue_pc, NULL);
1375 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1376 post_prologue_pc, NULL);
1378 if (analyzed_limit != post_prologue_pc)
1381 return post_prologue_pc;
1385 /* Can't determine prologue from the symbol table, need to examine
1388 /* Find an upper limit on the function prologue using the debug
1389 information. If the debug information could not be used to provide
1390 that bound, then use an arbitrarily large number as the upper bound. */
1391 /* Like arm_scan_prologue, stop no later than pc + 64. */
1392 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1394 limit_pc = pc + 64; /* Magic. */
1397 /* Check if this is Thumb code. */
1398 if (arm_pc_is_thumb (gdbarch, pc))
1399 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1401 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1405 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1406 This function decodes a Thumb function prologue to determine:
1407 1) the size of the stack frame
1408 2) which registers are saved on it
1409 3) the offsets of saved regs
1410 4) the offset from the stack pointer to the frame pointer
1412 A typical Thumb function prologue would create this stack frame
1413 (offsets relative to FP)
1414 old SP -> 24 stack parameters
1417 R7 -> 0 local variables (16 bytes)
1418 SP -> -12 additional stack space (12 bytes)
1419 The frame size would thus be 36 bytes, and the frame offset would be
1420 12 bytes. The frame register is R7.
1422 The comments for thumb_skip_prolog() describe the algorithm we use
1423 to detect the end of the prologue. */
1427 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1428 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1430 CORE_ADDR prologue_start;
1431 CORE_ADDR prologue_end;
1433 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1436 /* See comment in arm_scan_prologue for an explanation of
1438 if (prologue_end > prologue_start + 64)
1440 prologue_end = prologue_start + 64;
1444 /* We're in the boondocks: we have no idea where the start of the
1448 prologue_end = std::min (prologue_end, prev_pc);
1450 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1453 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1457 arm_instruction_restores_sp (unsigned int insn)
1459 if (bits (insn, 28, 31) != INST_NV)
1461 if ((insn & 0x0df0f000) == 0x0080d000
1462 /* ADD SP (register or immediate). */
1463 || (insn & 0x0df0f000) == 0x0040d000
1464 /* SUB SP (register or immediate). */
1465 || (insn & 0x0ffffff0) == 0x01a0d000
1467 || (insn & 0x0fff0000) == 0x08bd0000
1469 || (insn & 0x0fff0000) == 0x049d0000)
1470 /* POP of a single register. */
1477 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1478 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1479 fill it in. Return the first address not recognized as a prologue
1482 We recognize all the instructions typically found in ARM prologues,
1483 plus harmless instructions which can be skipped (either for analysis
1484 purposes, or a more restrictive set that can be skipped when finding
1485 the end of the prologue). */
1488 arm_analyze_prologue (struct gdbarch *gdbarch,
1489 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1490 struct arm_prologue_cache *cache)
1492 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1494 CORE_ADDR offset, current_pc;
1495 pv_t regs[ARM_FPS_REGNUM];
1496 CORE_ADDR unrecognized_pc = 0;
1498 /* Search the prologue looking for instructions that set up the
1499 frame pointer, adjust the stack pointer, and save registers.
1501 Be careful, however: if it doesn't look like a prologue,
1502 don't try to scan it. If, for instance, a frameless function
1503 begins with stmfd sp!, then we will tell ourselves there is
1504 a frame, which will confuse stack traceback, as well as "finish"
1505 and other operations that rely on a knowledge of the stack
1508 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1509 regs[regno] = pv_register (regno, 0);
1510 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1512 for (current_pc = prologue_start;
1513 current_pc < prologue_end;
1517 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1519 if (insn == 0xe1a0c00d) /* mov ip, sp */
1521 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1524 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
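/* Added note: the rotate above decodes an ARM "modified immediate",
   an 8-bit value rotated right by twice the 4-bit rotate field (ROT is
   already the doubled amount).  For example, insn & 0xfff == 0x902
   (rotate field 9, value 2) expands to 2 rotated right by 18, i.e.
   0x00008000.  The same decoding appears in the other immediate
   branches below.  */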
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1534 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1535 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1537 unsigned imm = insn & 0xff; /* immediate value */
1538 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1539 int rd = bits (insn, 12, 15);
1540 imm = (imm >> rot) | (imm << (32 - rot));
1541 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1544 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1547 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1549 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1550 stack.store (regs[ARM_SP_REGNUM], 4,
1551 regs[bits (insn, 12, 15)]);
1554 else if ((insn & 0xffff0000) == 0xe92d0000)
1555 /* stmfd sp!, {..., fp, ip, lr, pc}
1557 stmfd sp!, {a1, a2, a3, a4} */
1559 int mask = insn & 0xffff;
1561 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1564 /* Calculate offsets of saved registers. */
1565 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1566 if (mask & (1 << regno))
1569 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1570 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1573 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1574 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1575 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1577 /* No need to add this to saved_regs -- it's just an arg reg. */
1580 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1581 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1582 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1584 /* No need to add this to saved_regs -- it's just an arg reg. */
1587 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1589 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1591 /* No need to add this to saved_regs -- it's just arg regs. */
1594 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1596 unsigned imm = insn & 0xff; /* immediate value */
1597 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1598 imm = (imm >> rot) | (imm << (32 - rot));
1599 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1601 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1603 unsigned imm = insn & 0xff; /* immediate value */
1604 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1605 imm = (imm >> rot) | (imm << (32 - rot));
1606 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1608 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1610 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1612 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1615 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1616 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1617 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1619 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1621 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1623 int n_saved_fp_regs;
1624 unsigned int fp_start_reg, fp_bound_reg;
1626 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1629 if ((insn & 0x800) == 0x800) /* N0 is set */
1631 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1632 n_saved_fp_regs = 3;
1634 n_saved_fp_regs = 1;
1638 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1639 n_saved_fp_regs = 2;
1641 n_saved_fp_regs = 4;
1644 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1645 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1646 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1648 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1649 stack.store (regs[ARM_SP_REGNUM], 12,
1650 regs[fp_start_reg++]);
1653 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1655 /* Allow some special function calls when skipping the
1656 prologue; GCC generates these before storing arguments to
1658 CORE_ADDR dest = BranchDest (current_pc, insn);
1660 if (skip_prologue_function (gdbarch, dest, 0))
1665 else if ((insn & 0xf0000000) != 0xe0000000)
1666 break; /* Condition not true, exit early. */
1667 else if (arm_instruction_changes_pc (insn))
1668 /* Don't scan past anything that might change control flow. */
1670 else if (arm_instruction_restores_sp (insn))
1672 /* Don't scan past the epilogue. */
1675 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1676 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1677 /* Ignore block loads from the stack, potentially copying
1678 parameters from memory. */
1680 else if ((insn & 0xfc500000) == 0xe4100000
1681 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1682 /* Similarly ignore single loads from the stack. */
1684 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1685 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1686 register instead of the stack. */
1690 /* The optimizer might shove anything into the prologue. If we are
1691 building up the cache (cache != NULL) from scanning the prologue, we
1692 just skip what we don't recognize and scan further to make the cache
1693 as complete as possible. However, if we are skipping the prologue,
1694 we'll stop immediately on an unrecognized
1696 unrecognized_pc = current_pc;
1704 if (unrecognized_pc == 0)
1705 unrecognized_pc = current_pc;
1709 int framereg, framesize;
1711 /* The frame size is just the distance from the frame register
1712 to the original stack pointer. */
1713 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1715 /* Frame pointer is fp. */
1716 framereg = ARM_FP_REGNUM;
1717 framesize = -regs[ARM_FP_REGNUM].k;
1721 /* Try the stack pointer... this is a bit desperate. */
1722 framereg = ARM_SP_REGNUM;
1723 framesize = -regs[ARM_SP_REGNUM].k;
1726 cache->framereg = framereg;
1727 cache->framesize = framesize;
1729 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1730 if (stack.find_reg (gdbarch, regno, &offset))
1731 cache->saved_regs[regno].addr = offset;
1735 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1736 paddress (gdbarch, unrecognized_pc));
1738 return unrecognized_pc;
1742 arm_scan_prologue (struct frame_info *this_frame,
1743 struct arm_prologue_cache *cache)
1745 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1746 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1747 CORE_ADDR prologue_start, prologue_end;
1748 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1749 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1751 /* Assume there is no frame until proven otherwise. */
1752 cache->framereg = ARM_SP_REGNUM;
1753 cache->framesize = 0;
1755 /* Check for Thumb prologue. */
1756 if (arm_frame_is_thumb (this_frame))
1758 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1762 /* Find the function prologue. If we can't find the function in
1763 the symbol table, peek in the stack frame to find the PC. */
1764 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1767 /* One way to find the end of the prologue (which works well
1768 for unoptimized code) is to do the following:
1770 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1773 prologue_end = prev_pc;
1774 else if (sal.end < prologue_end)
1775 prologue_end = sal.end;
1777 This mechanism is very accurate so long as the optimizer
1778 doesn't move any instructions from the function body into the
1779 prologue. If this happens, sal.end will be the last
1780 instruction in the first hunk of prologue code just before
1781 the first instruction that the scheduler has moved from
1782 the body to the prologue.
1784 In order to make sure that we scan all of the prologue
1785 instructions, we use a slightly less accurate mechanism which
1786 may scan more than necessary. To help compensate for this
1787 lack of accuracy, the prologue scanning loop below contains
1788 several clauses which will cause the loop to terminate early if
1789 an implausible prologue instruction is encountered.
1795 is a suitable endpoint since it accounts for the largest
1796 possible prologue plus up to five instructions inserted by
1799 if (prologue_end > prologue_start + 64)
1801 prologue_end = prologue_start + 64; /* See above. */
1806 /* We have no symbol information. Our only option is to assume this
1807 function has a standard stack frame and the normal frame register.
1808 Then, we can find the value of our frame pointer on entrance to
1809 the callee (or at the present moment if this is the innermost frame).
1810 The value stored there should be the address of the stmfd + 8. */
1811 CORE_ADDR frame_loc;
1812 ULONGEST return_value;
1814 /* AAPCS does not use a frame register, so we can abort here. */
1815 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1818 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1819 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1824 prologue_start = gdbarch_addr_bits_remove
1825 (gdbarch, return_value) - 8;
1826 prologue_end = prologue_start + 64; /* See above. */
1830 if (prev_pc < prologue_end)
1831 prologue_end = prev_pc;
1833 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1836 static struct arm_prologue_cache *
1837 arm_make_prologue_cache (struct frame_info *this_frame)
1840 struct arm_prologue_cache *cache;
1841 CORE_ADDR unwound_fp;
1843 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1844 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1846 arm_scan_prologue (this_frame, cache);
1848 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1849 if (unwound_fp == 0)
1852 cache->prev_sp = unwound_fp + cache->framesize;
1854 /* Calculate actual addresses of saved registers using offsets
1855 determined by arm_scan_prologue. */
1856 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1857 if (trad_frame_addr_p (cache->saved_regs, reg))
1858 cache->saved_regs[reg].addr += cache->prev_sp;
1863 /* Implementation of the stop_reason hook for arm_prologue frames. */
1865 static enum unwind_stop_reason
1866 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1869 struct arm_prologue_cache *cache;
1872 if (*this_cache == NULL)
1873 *this_cache = arm_make_prologue_cache (this_frame);
1874 cache = (struct arm_prologue_cache *) *this_cache;
1876 /* This is meant to halt the backtrace at "_start". */
1877 pc = get_frame_pc (this_frame);
1878 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1879 return UNWIND_OUTERMOST;
1881 /* If we've hit a wall, stop. */
1882 if (cache->prev_sp == 0)
1883 return UNWIND_OUTERMOST;
1885 return UNWIND_NO_REASON;
1888 /* Our frame ID for a normal frame is the current function's starting PC
1889 and the caller's SP when we were called. */
1892 arm_prologue_this_id (struct frame_info *this_frame,
1894 struct frame_id *this_id)
1896 struct arm_prologue_cache *cache;
1900 if (*this_cache == NULL)
1901 *this_cache = arm_make_prologue_cache (this_frame);
1902 cache = (struct arm_prologue_cache *) *this_cache;
1904 /* Use function start address as part of the frame ID. If we cannot
1905 identify the start address (due to missing symbol information),
1906 fall back to just using the current PC. */
1907 pc = get_frame_pc (this_frame);
1908 func = get_frame_func (this_frame);
1912 id = frame_id_build (cache->prev_sp, func);
1916 static struct value *
1917 arm_prologue_prev_register (struct frame_info *this_frame,
1921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1922 struct arm_prologue_cache *cache;
1924 if (*this_cache == NULL)
1925 *this_cache = arm_make_prologue_cache (this_frame);
1926 cache = (struct arm_prologue_cache *) *this_cache;
1928 /* If we are asked to unwind the PC, then we need to return the LR
1929 instead. The prologue may save PC, but it will point into this
1930 frame's prologue, not the next frame's resume location. Also
1931 strip the saved T bit. A valid LR may have the low bit set, but
1932 a valid PC never does. */
1933 if (prev_regnum == ARM_PC_REGNUM)
1937 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1938 return frame_unwind_got_constant (this_frame, prev_regnum,
1939 arm_addr_bits_remove (gdbarch, lr));
1942 /* SP is generally not saved to the stack, but this frame is
1943 identified by the next frame's stack pointer at the time of the call.
1944 The value was already reconstructed into PREV_SP. */
1945 if (prev_regnum == ARM_SP_REGNUM)
1946 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1948 /* The CPSR may have been changed by the call instruction and by the
1949 called function. The only bit we can reconstruct is the T bit,
1950 by checking the low bit of LR as of the call. This is a reliable
1951 indicator of Thumb-ness except for some ARM v4T pre-interworking
1952 Thumb code, which could get away with a clear low bit as long as
1953 the called function did not use bx. Guess that all other
1954 bits are unchanged; the condition flags are presumably lost,
1955 but the processor status is likely valid. */
1956 if (prev_regnum == ARM_PS_REGNUM)
1959 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1961 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1962 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1963 if (IS_THUMB_ADDR (lr))
1967 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1970 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1974 struct frame_unwind arm_prologue_unwind = {
1976 arm_prologue_unwind_stop_reason,
1977 arm_prologue_this_id,
1978 arm_prologue_prev_register,
1980 default_frame_sniffer
1983 /* Maintain a list of ARM exception table entries per objfile, similar to the
1984 list of mapping symbols. We only cache entries for standard ARM-defined
1985 personality routines; the cache will contain only the frame unwinding
1986 instructions associated with the entry (not the descriptors). */
1988 struct arm_exidx_entry
1993 bool operator< (const arm_exidx_entry &other) const
1995 return addr < other.addr;
1999 struct arm_exidx_data
2001 std::vector<std::vector<arm_exidx_entry>> section_maps;
2004 static const struct objfile_key<arm_exidx_data> arm_exidx_data_key;
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2009 struct obj_section *osect;
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_get_section_flags (objfile->obfd,
2013 osect->the_bfd_section) & SEC_ALLOC)
2015 bfd_vma start, size;
2016 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2017 size = bfd_get_section_size (osect->the_bfd_section);
2019 if (start <= vma && vma < start + size)
2026 /* Parse contents of exception table and exception index sections
2027 of OBJFILE, and fill in the exception table entry cache.
2029 For each entry that refers to a standard ARM-defined personality
2030 routine, extract the frame unwinding instructions (from either
2031 the index or the table section). The unwinding instructions
2033 - extracting them from the rest of the table data
2034 - converting to host endianness
2035 - appending the implicit 0xb0 ("Finish") code
2037 The extracted and normalized instructions are stored for later
2038 retrieval by the arm_find_exidx_entry routine. */
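/* Background (added note; see the ARM EHABI documentation): each
   .ARM.exidx entry is a pair of 32-bit words -- a prel31 offset to the
   function start, followed by either EXIDX_CANTUNWIND (the value 1), a
   short-form entry with bit 31 set holding the unwind opcodes inline,
   or a prel31 offset into .ARM.extab.  The parsing loop below follows
   that layout.  */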
2041 arm_exidx_new_objfile (struct objfile *objfile)
2043 struct arm_exidx_data *data;
2044 asection *exidx, *extab;
2045 bfd_vma exidx_vma = 0, extab_vma = 0;
2048 /* If we've already touched this file, do nothing. */
2049 if (!objfile || arm_exidx_data_key.get (objfile) != NULL)
2052 /* Read contents of exception table and index. */
2053 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2054 gdb::byte_vector exidx_data;
2057 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2058 exidx_data.resize (bfd_get_section_size (exidx));
2060 if (!bfd_get_section_contents (objfile->obfd, exidx,
2061 exidx_data.data (), 0,
2062 exidx_data.size ()))
2066 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2067 gdb::byte_vector extab_data;
2070 extab_vma = bfd_section_vma (objfile->obfd, extab);
2071 extab_data.resize (bfd_get_section_size (extab));
2073 if (!bfd_get_section_contents (objfile->obfd, extab,
2074 extab_data.data (), 0,
2075 extab_data.size ()))
2079 /* Allocate exception table data structure. */
2080 data = arm_exidx_data_key.emplace (objfile);
2081 data->section_maps.resize (objfile->obfd->section_count);
2083 /* Fill in exception table. */
2084 for (i = 0; i < exidx_data.size () / 8; i++)
2086 struct arm_exidx_entry new_exidx_entry;
2087 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2088 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2089 exidx_data.data () + i * 8 + 4);
2090 bfd_vma addr = 0, word = 0;
2091 int n_bytes = 0, n_words = 0;
2092 struct obj_section *sec;
2093 gdb_byte *entry = NULL;
2095 /* Extract address of start of function. */
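/* The index word is a prel31 value: its low 31 bits are a place-relative
   signed offset, so the XOR/subtract below sign-extends bit 30 (for
   example, 0x7ffffffc decodes to -4) before the entry's own address is
   added back in.  */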
2096 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2097 idx += exidx_vma + i * 8;
2099 /* Find section containing function and compute section offset. */
2100 sec = arm_obj_section_from_vma (objfile, idx);
2103 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2105 /* Determine address of exception table entry. */
2108 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2110 else if ((val & 0xff000000) == 0x80000000)
2112 /* Exception table entry embedded in .ARM.exidx
2113 -- must be short form. */
2117 else if (!(val & 0x80000000))
2119 /* Exception table entry in .ARM.extab. */
2120 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2121 addr += exidx_vma + i * 8 + 4;
2123 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2125 word = bfd_h_get_32 (objfile->obfd,
2126 extab_data.data () + addr - extab_vma);
2129 if ((word & 0xff000000) == 0x80000000)
2134 else if ((word & 0xff000000) == 0x81000000
2135 || (word & 0xff000000) == 0x82000000)
2139 n_words = ((word >> 16) & 0xff);
2141 else if (!(word & 0x80000000))
2144 struct obj_section *pers_sec;
2145 int gnu_personality = 0;
2147 /* Custom personality routine. */
2148 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2149 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2151 /* Check whether we've got one of the variants of the
2152 GNU personality routines. */
2153 pers_sec = arm_obj_section_from_vma (objfile, pers);
2156 static const char *personality[] =
2158 "__gcc_personality_v0",
2159 "__gxx_personality_v0",
2160 "__gcj_personality_v0",
2161 "__gnu_objc_personality_v0",
2165 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2168 for (k = 0; personality[k]; k++)
2169 if (lookup_minimal_symbol_by_pc_name
2170 (pc, personality[k], objfile))
2172 gnu_personality = 1;
2177 /* If so, the next word contains a word count in the high
2178 byte, followed by the same unwind instructions as the
2179 pre-defined forms. */
2181 && addr + 4 <= extab_vma + extab_data.size ())
2183 word = bfd_h_get_32 (objfile->obfd,
2185 + addr - extab_vma));
2188 n_words = ((word >> 24) & 0xff);
2194 /* Sanity check address. */
2196 if (addr < extab_vma
2197 || addr + 4 * n_words > extab_vma + extab_data.size ())
2198 n_words = n_bytes = 0;
2200 /* The unwind instructions reside in WORD (only the N_BYTES least
2201 significant bytes are valid), followed by N_WORDS words in the
2202 extab section starting at ADDR. */
2203 if (n_bytes || n_words)
2206 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2207 n_bytes + n_words * 4 + 1);
2210 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2214 word = bfd_h_get_32 (objfile->obfd,
2215 extab_data.data () + addr - extab_vma);
2218 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2219 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2220 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2221 *p++ = (gdb_byte) (word & 0xff);
2224 /* Implied "Finish" to terminate the list. */
2228 /* Push entry onto vector. They are guaranteed to always
2229 appear in order of increasing addresses. */
2230 new_exidx_entry.addr = idx;
2231 new_exidx_entry.entry = entry;
2232 data->section_maps[sec->the_bfd_section->index].push_back
2237 /* Search for the exception table entry covering MEMADDR. If one is found,
2238 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2239 set *START to the start of the region covered by this entry. */
2242 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2244 struct obj_section *sec;
2246 sec = find_pc_section (memaddr);
2249 struct arm_exidx_data *data;
2250 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2252 data = arm_exidx_data_key.get (sec->objfile);
2255 std::vector<arm_exidx_entry> &map
2256 = data->section_maps[sec->the_bfd_section->index];
2259 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2261 /* std::lower_bound finds the earliest ordered insertion
2262 point. If the following symbol starts at this exact
2263 address, we use that; otherwise, the preceding
2264 exception table entry covers this address. */
2265 if (idx < map.end ())
2267 if (idx->addr == map_key.addr)
2270 *start = idx->addr + obj_section_addr (sec);
2275 if (idx > map.begin ())
2279 *start = idx->addr + obj_section_addr (sec);
2289 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2290 instruction list from the ARM exception table entry ENTRY, allocate and
2291 return a prologue cache structure describing how to unwind this frame.
2293 Return NULL if the unwinding instruction list contains a "spare",
2294 "reserved" or "refuse to unwind" instruction as defined in section
2295 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2296 for the ARM Architecture" document. */
2298 static struct arm_prologue_cache *
2299 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2304 struct arm_prologue_cache *cache;
2305 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2306 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
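/* The decoding loop below tracks a "virtual stack pointer" (vsp): each
   pop-style opcode records the current vsp as the save address of a
   register and then advances vsp, and vsp is re-read from the frame
   whenever SP itself has just been popped.  */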
2312 /* Whenever we reload SP, we actually have to retrieve its
2313 actual value in the current frame. */
2316 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2318 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2319 vsp = get_frame_register_unsigned (this_frame, reg);
2323 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2324 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2330 /* Decode next unwind instruction. */
2333 if ((insn & 0xc0) == 0)
2335 int offset = insn & 0x3f;
2336 vsp += (offset << 2) + 4;
2338 else if ((insn & 0xc0) == 0x40)
2340 int offset = insn & 0x3f;
2341 vsp -= (offset << 2) + 4;
2343 else if ((insn & 0xf0) == 0x80)
2345 int mask = ((insn & 0xf) << 8) | *entry++;
2348 /* The special case of an all-zero mask identifies
2349 "Refuse to unwind". We return NULL to fall back
2350 to the prologue analyzer. */
2354 /* Pop registers r4..r15 under mask. */
2355 for (i = 0; i < 12; i++)
2356 if (mask & (1 << i))
2358 cache->saved_regs[4 + i].addr = vsp;
2362 /* Special-case popping SP -- we need to reload vsp. */
2363 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2366 else if ((insn & 0xf0) == 0x90)
2368 int reg = insn & 0xf;
2370 /* Reserved cases. */
2371 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2374 /* Set SP from another register and mark VSP for reload. */
2375 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2378 else if ((insn & 0xf0) == 0xa0)
2380 int count = insn & 0x7;
2381 int pop_lr = (insn & 0x8) != 0;
2384 /* Pop r4..r[4+count]. */
2385 for (i = 0; i <= count; i++)
2387 cache->saved_regs[4 + i].addr = vsp;
2391 /* If indicated by flag, pop LR as well. */
2394 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2398 else if (insn == 0xb0)
2400 /* We could only have updated PC by popping into it; if so, it
2401 will show up as address. Otherwise, copy LR into PC. */
2402 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2403 cache->saved_regs[ARM_PC_REGNUM]
2404 = cache->saved_regs[ARM_LR_REGNUM];
2409 else if (insn == 0xb1)
2411 int mask = *entry++;
2414 /* All-zero mask and mask >= 16 is "spare". */
2415 if (mask == 0 || mask >= 16)
2418 /* Pop r0..r3 under mask. */
2419 for (i = 0; i < 4; i++)
2420 if (mask & (1 << i))
2422 cache->saved_regs[i].addr = vsp;
2426 else if (insn == 0xb2)
2428 ULONGEST offset = 0;
2433 offset |= (*entry & 0x7f) << shift;
2436 while (*entry++ & 0x80);
2438 vsp += 0x204 + (offset << 2);
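/* For reference: opcode 0xb2 is "vsp = vsp + 0x204 + (uleb128 << 2)";
   the loop above decodes the ULEB128 operand, so an operand byte of
   0x00 simply adds 0x204 to vsp.  */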
2440 else if (insn == 0xb3)
2442 int start = *entry >> 4;
2443 int count = (*entry++) & 0xf;
2446 /* Only registers D0..D15 are valid here. */
2447 if (start + count >= 16)
2450 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2451 for (i = 0; i <= count; i++)
2453 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2457 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2460 else if ((insn & 0xf8) == 0xb8)
2462 int count = insn & 0x7;
2465 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2466 for (i = 0; i <= count; i++)
2468 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2472 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2475 else if (insn == 0xc6)
2477 int start = *entry >> 4;
2478 int count = (*entry++) & 0xf;
2481 /* Only registers WR0..WR15 are valid. */
2482 if (start + count >= 16)
2485 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2486 for (i = 0; i <= count; i++)
2488 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2492 else if (insn == 0xc7)
2494 int mask = *entry++;
2497 /* All-zero mask and mask >= 16 is "spare". */
2498 if (mask == 0 || mask >= 16)
2501 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2502 for (i = 0; i < 4; i++)
2503 if (mask & (1 << i))
2505 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2509 else if ((insn & 0xf8) == 0xc0)
2511 int count = insn & 0x7;
2514 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2515 for (i = 0; i <= count; i++)
2517 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2521 else if (insn == 0xc8)
2523 int start = *entry >> 4;
2524 int count = (*entry++) & 0xf;
2527 /* Only registers D0..D31 are valid. */
2528 if (start + count >= 16)
2531 /* Pop VFP double-precision registers
2532 D[16+start]..D[16+start+count]. */
2533 for (i = 0; i <= count; i++)
2535 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2539 else if (insn == 0xc9)
2541 int start = *entry >> 4;
2542 int count = (*entry++) & 0xf;
2545 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2546 for (i = 0; i <= count; i++)
2548 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2552 else if ((insn & 0xf8) == 0xd0)
2554 int count = insn & 0x7;
2557 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2558 for (i = 0; i <= count; i++)
2560 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2566 /* Everything else is "spare". */
2571 /* If we restore SP from a register, assume this was the frame register.
2572 Otherwise just fall back to SP as frame register. */
2573 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2574 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2576 cache->framereg = ARM_SP_REGNUM;
2578 /* Determine offset to previous frame. */
2580 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2582 /* We already got the previous SP. */
2583 cache->prev_sp = vsp;
2588 /* Unwinding via ARM exception table entries. Note that the sniffer
2589 already computes a filled-in prologue cache, which is then used
2590 with the same arm_prologue_this_id and arm_prologue_prev_register
2591 routines also used for prologue-parsing based unwinding. */
2594 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2595 struct frame_info *this_frame,
2596 void **this_prologue_cache)
2598 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2599 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2600 CORE_ADDR addr_in_block, exidx_region, func_start;
2601 struct arm_prologue_cache *cache;
2604 /* See if we have an ARM exception table entry covering this address. */
2605 addr_in_block = get_frame_address_in_block (this_frame);
2606 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2610 /* The ARM exception table does not describe unwind information
2611 for arbitrary PC values, but is guaranteed to be correct only
2612 at call sites. We have to decide here whether we want to use
2613 ARM exception table information for this frame, or fall back
2614 to using prologue parsing. (Note that if we have DWARF CFI,
2615 this sniffer isn't even called -- CFI is always preferred.)
2617 Before we make this decision, however, we check whether we
2618 actually have *symbol* information for the current frame.
2619 If not, prologue parsing would not work anyway, so we might
2620 as well use the exception table and hope for the best. */
2621 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2625 /* If the next frame is "normal", we are at a call site in this
2626 frame, so exception information is guaranteed to be valid. */
2627 if (get_next_frame (this_frame)
2628 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2631 /* We also assume exception information is valid if we're currently
2632 blocked in a system call. The system library is supposed to
2633 ensure this, so that e.g. pthread cancellation works. */
2634 if (arm_frame_is_thumb (this_frame))
2638 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2639 2, byte_order_for_code, &insn)
2640 && (insn & 0xff00) == 0xdf00 /* svc */)
2647 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2648 4, byte_order_for_code, &insn)
2649 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2653 /* Bail out if we don't know that exception information is valid. */
2657 /* The ARM exception index does not mark the *end* of the region
2658 covered by the entry, and some functions will not have any entry.
2659 To correctly recognize the end of the covered region, the linker
2660 should have inserted dummy records with a CANTUNWIND marker.
2662 Unfortunately, current versions of GNU ld do not reliably do
2663 this, and thus we may have found an incorrect entry above.
2664 As a (temporary) sanity check, we only use the entry if it
2665 lies *within* the bounds of the function. Note that this check
2666 might reject perfectly valid entries that just happen to cover
2667 multiple functions; therefore this check ought to be removed
2668 once the linker is fixed. */
2669 if (func_start > exidx_region)
2673 /* Decode the list of unwinding instructions into a prologue cache.
2674 Note that this may fail due to e.g. a "refuse to unwind" code. */
2675 cache = arm_exidx_fill_cache (this_frame, entry);
2679 *this_prologue_cache = cache;
2683 struct frame_unwind arm_exidx_unwind = {
2685 default_frame_unwind_stop_reason,
2686 arm_prologue_this_id,
2687 arm_prologue_prev_register,
2689 arm_exidx_unwind_sniffer
2692 static struct arm_prologue_cache *
2693 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2695 struct arm_prologue_cache *cache;
2698 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2699 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2701 /* Still rely on the offset calculated from prologue. */
2702 arm_scan_prologue (this_frame, cache);
2704 /* Since we are in epilogue, the SP has been restored. */
2705 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2707 /* Calculate actual addresses of saved registers using offsets
2708 determined by arm_scan_prologue. */
2709 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2710 if (trad_frame_addr_p (cache->saved_regs, reg))
2711 cache->saved_regs[reg].addr += cache->prev_sp;
2716 /* Implementation of function hook 'this_id' in
2717 'struct frame_unwind' for epilogue unwinder. */
2720 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2722 struct frame_id *this_id)
2724 struct arm_prologue_cache *cache;
2727 if (*this_cache == NULL)
2728 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2729 cache = (struct arm_prologue_cache *) *this_cache;
2731 /* Use function start address as part of the frame ID. If we cannot
2732 identify the start address (due to missing symbol information),
2733 fall back to just using the current PC. */
2734 pc = get_frame_pc (this_frame);
2735 func = get_frame_func (this_frame);
2739 (*this_id) = frame_id_build (cache->prev_sp, pc);
2742 /* Implementation of function hook 'prev_register' in
2743 'struct frame_unwind' for epilogue unwinder. */
2745 static struct value *
2746 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2747 void **this_cache, int regnum)
2749 if (*this_cache == NULL)
2750 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2752 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2755 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2757 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2760 /* Implementation of function hook 'sniffer' in
2761 'struct frame_unwind' for epilogue unwinder. */
2764 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2765 struct frame_info *this_frame,
2766 void **this_prologue_cache)
2768 if (frame_relative_level (this_frame) == 0)
2770 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2771 CORE_ADDR pc = get_frame_pc (this_frame);
2773 if (arm_frame_is_thumb (this_frame))
2774 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2776 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2782 /* Frame unwinder from epilogue. */
2784 static const struct frame_unwind arm_epilogue_frame_unwind =
2787 default_frame_unwind_stop_reason,
2788 arm_epilogue_frame_this_id,
2789 arm_epilogue_frame_prev_register,
2791 arm_epilogue_frame_sniffer,
2794 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2795 trampoline, return the target PC. Otherwise return 0.
2797 void call0a (char c, short s, int i, long l) {}
2801 (*pointer_to_call0a) (c, s, i, l);
2804 Instead of calling a stub library function _call_via_xx (xx is
2805 the register name), GCC may inline the trampoline in the object
2806 file as below (register r2 has the address of call0a).
2809 .type main, %function
2818 The trampoline 'bx r2' doesn't belong to main. */
2821 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2823 /* The heuristic for recognizing such a trampoline is that FRAME is
2824 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2825 if (arm_frame_is_thumb (frame))
2829 if (target_read_memory (pc, buf, 2) == 0)
2831 struct gdbarch *gdbarch = get_frame_arch (frame);
2832 enum bfd_endian byte_order_for_code
2833 = gdbarch_byte_order_for_code (gdbarch);
2835 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2837 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2840 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2842 /* Clear the LSB so that gdb core sets step-resume
2843 breakpoint at the right address. */
2844 return UNMAKE_THUMB_ADDR (dest);
2852 static struct arm_prologue_cache *
2853 arm_make_stub_cache (struct frame_info *this_frame)
2855 struct arm_prologue_cache *cache;
2857 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2858 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2860 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2865 /* Our frame ID for a stub frame is the current SP and LR. */
2868 arm_stub_this_id (struct frame_info *this_frame,
2870 struct frame_id *this_id)
2872 struct arm_prologue_cache *cache;
2874 if (*this_cache == NULL)
2875 *this_cache = arm_make_stub_cache (this_frame);
2876 cache = (struct arm_prologue_cache *) *this_cache;
2878 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2882 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2883 struct frame_info *this_frame,
2884 void **this_prologue_cache)
2886 CORE_ADDR addr_in_block;
2888 CORE_ADDR pc, start_addr;
2891 addr_in_block = get_frame_address_in_block (this_frame);
2892 pc = get_frame_pc (this_frame);
2893 if (in_plt_section (addr_in_block)
2894 /* We also use the stub unwinder if the target memory is unreadable
2895 to avoid having the prologue unwinder trying to read it. */
2896 || target_read_memory (pc, dummy, 4) != 0)
2899 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2900 && arm_skip_bx_reg (this_frame, pc) != 0)
2906 struct frame_unwind arm_stub_unwind = {
2908 default_frame_unwind_stop_reason,
2910 arm_prologue_prev_register,
2912 arm_stub_unwind_sniffer
2915 /* Put here the code to store, into CACHE->saved_regs, the addresses
2916 of the saved registers of frame described by THIS_FRAME. CACHE is
2917 returned. */
2919 static struct arm_prologue_cache *
2920 arm_m_exception_cache (struct frame_info *this_frame)
2922 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2923 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2924 struct arm_prologue_cache *cache;
2925 CORE_ADDR unwound_sp;
2928 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2929 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2931 unwound_sp = get_frame_register_unsigned (this_frame,
2934 /* The hardware saves eight 32-bit words, comprising xPSR,
2935 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2936 "B1.5.6 Exception entry behavior" in
2937 "ARMv7-M Architecture Reference Manual". */
2938 cache->saved_regs[0].addr = unwound_sp;
2939 cache->saved_regs[1].addr = unwound_sp + 4;
2940 cache->saved_regs[2].addr = unwound_sp + 8;
2941 cache->saved_regs[3].addr = unwound_sp + 12;
2942 cache->saved_regs[12].addr = unwound_sp + 16;
2943 cache->saved_regs[14].addr = unwound_sp + 20;
2944 cache->saved_regs[15].addr = unwound_sp + 24;
2945 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2947 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2948 aligner between the top of the 32-byte stack frame and the
2949 previous context's stack pointer. */
2950 cache->prev_sp = unwound_sp + 32;
2951 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2952 && (xpsr & (1 << 9)) != 0)
2953 cache->prev_sp += 4;
2958 /* Implementation of function hook 'this_id' in
2959 'struct frame_unwind'. */
2962 arm_m_exception_this_id (struct frame_info *this_frame,
2964 struct frame_id *this_id)
2966 struct arm_prologue_cache *cache;
2968 if (*this_cache == NULL)
2969 *this_cache = arm_m_exception_cache (this_frame);
2970 cache = (struct arm_prologue_cache *) *this_cache;
2972 /* Our frame ID for a stub frame is the current SP and LR. */
2973 *this_id = frame_id_build (cache->prev_sp,
2974 get_frame_pc (this_frame));
2977 /* Implementation of function hook 'prev_register' in
2978 'struct frame_unwind'. */
2980 static struct value *
2981 arm_m_exception_prev_register (struct frame_info *this_frame,
2985 struct arm_prologue_cache *cache;
2987 if (*this_cache == NULL)
2988 *this_cache = arm_m_exception_cache (this_frame);
2989 cache = (struct arm_prologue_cache *) *this_cache;
2991 /* The value was already reconstructed into PREV_SP. */
2992 if (prev_regnum == ARM_SP_REGNUM)
2993 return frame_unwind_got_constant (this_frame, prev_regnum,
2996 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3000 /* Implementation of function hook 'sniffer' in
3001 'struct frame_unwind'. */
3004 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3005 struct frame_info *this_frame,
3006 void **this_prologue_cache)
3008 CORE_ADDR this_pc = get_frame_pc (this_frame);
3010 /* No need to check is_m; this sniffer is only registered for
3011 M-profile architectures. */
3013 /* Check if exception frame returns to a magic PC value. */
3014 return arm_m_addr_is_magic (this_pc);
3017 /* Frame unwinder for M-profile exceptions. */
3019 struct frame_unwind arm_m_exception_unwind =
3022 default_frame_unwind_stop_reason,
3023 arm_m_exception_this_id,
3024 arm_m_exception_prev_register,
3026 arm_m_exception_unwind_sniffer
3030 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3032 struct arm_prologue_cache *cache;
3034 if (*this_cache == NULL)
3035 *this_cache = arm_make_prologue_cache (this_frame);
3036 cache = (struct arm_prologue_cache *) *this_cache;
3038 return cache->prev_sp - cache->framesize;
3041 struct frame_base arm_normal_base = {
3042 &arm_prologue_unwind,
3043 arm_normal_frame_base,
3044 arm_normal_frame_base,
3045 arm_normal_frame_base
3048 static struct value *
3049 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3052 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3054 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3059 /* The PC is normally copied from the return column, which
3060 describes saves of LR. However, that version may have an
3061 extra bit set to indicate Thumb state. The bit is not
3062 part of the PC. */
3063 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3064 return frame_unwind_got_constant (this_frame, regnum,
3065 arm_addr_bits_remove (gdbarch, lr));
3068 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3069 cpsr = get_frame_register_unsigned (this_frame, regnum);
3070 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3071 if (IS_THUMB_ADDR (lr))
3075 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3078 internal_error (__FILE__, __LINE__,
3079 _("Unexpected register %d"), regnum);
3084 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3085 struct dwarf2_frame_state_reg *reg,
3086 struct frame_info *this_frame)
3092 reg->how = DWARF2_FRAME_REG_FN;
3093 reg->loc.fn = arm_dwarf2_prev_register;
3096 reg->how = DWARF2_FRAME_REG_CFA;
3101 /* Implement the stack_frame_destroyed_p gdbarch method. */
3104 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3106 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3107 unsigned int insn, insn2;
3108 int found_return = 0, found_stack_adjust = 0;
3109 CORE_ADDR func_start, func_end;
3113 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3116 /* The epilogue is a sequence of instructions along the following lines:
3118 - add stack frame size to SP or FP
3119 - [if frame pointer used] restore SP from FP
3120 - restore registers from SP [may include PC]
3121 - a return-type instruction [if PC wasn't already restored]
3123 In a first pass, we scan forward from the current PC and verify the
3124 instructions we find as compatible with this sequence, ending in a
3125 return instruction.
3127 However, this is not sufficient to distinguish indirect function calls
3128 within a function from indirect tail calls in the epilogue in some cases.
3129 Therefore, if we didn't already find any SP-changing instruction during
3130 forward scan, we add a backward scanning heuristic to ensure we actually
3131 are in the epilogue. */
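/* For example, a typical Thumb epilogue accepted by the forward scan is
       add  sp, #16          ; releases locals, restores SP
       pop  {r4, r5, pc}     ; 0xbd30: restores registers and returns  */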
3134 while (scan_pc < func_end && !found_return)
3136 if (target_read_memory (scan_pc, buf, 2))
3140 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3142 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3144 else if (insn == 0x46f7) /* mov pc, lr */
3146 else if (thumb_instruction_restores_sp (insn))
3148 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3151 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3153 if (target_read_memory (scan_pc, buf, 2))
3157 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3159 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3161 if (insn2 & 0x8000) /* <registers> include PC. */
3164 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3165 && (insn2 & 0x0fff) == 0x0b04)
3167 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3170 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3171 && (insn2 & 0x0e00) == 0x0a00)
3183 /* Since any instruction in the epilogue sequence, with the possible
3184 exception of return itself, updates the stack pointer, we need to
3185 scan backwards for at most one instruction. Try either a 16-bit or
3186 a 32-bit instruction. This is just a heuristic, so we do not worry
3187 too much about false positives. */
3189 if (pc - 4 < func_start)
3191 if (target_read_memory (pc - 4, buf, 4))
3194 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3195 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3197 if (thumb_instruction_restores_sp (insn2))
3198 found_stack_adjust = 1;
3199 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3200 found_stack_adjust = 1;
3201 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3202 && (insn2 & 0x0fff) == 0x0b04)
3203 found_stack_adjust = 1;
3204 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3205 && (insn2 & 0x0e00) == 0x0a00)
3206 found_stack_adjust = 1;
3208 return found_stack_adjust;
3212 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3214 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3217 CORE_ADDR func_start, func_end;
3219 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3222 /* We are in the epilogue if the previous instruction was a stack
3223 adjustment and the next instruction is a possible return (bx, mov
3224 pc, or pop). We could have to scan backwards to find the stack
3225 adjustment, or forwards to find the return, but this is a decent
3226 approximation. First scan forwards. */
3229 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3230 if (bits (insn, 28, 31) != INST_NV)
3232 if ((insn & 0x0ffffff0) == 0x012fff10)
3235 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3238 else if ((insn & 0x0fff0000) == 0x08bd0000
3239 && (insn & 0x0000c000) != 0)
3240 /* POP (LDMIA), including PC or LR. */
3247 /* Scan backwards. This is just a heuristic, so do not worry about
3248 false positives from mode changes. */
3250 if (pc < func_start + 4)
3253 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3254 if (arm_instruction_restores_sp (insn))
3260 /* Implement the stack_frame_destroyed_p gdbarch method. */
3263 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3265 if (arm_pc_is_thumb (gdbarch, pc))
3266 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3268 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3271 /* When arguments must be pushed onto the stack, they go on in reverse
3272 order. The code below implements a FILO (stack) to do this. */
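/* Items are pushed here in left-to-right argument order and later written
   back to memory at decreasing addresses by arm_push_dummy_call, which
   leaves the first stack argument at the lowest address, i.e. at the
   final SP.  */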
3277 struct stack_item *prev;
3281 static struct stack_item *
3282 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3284 struct stack_item *si;
3285 si = XNEW (struct stack_item);
3286 si->data = (gdb_byte *) xmalloc (len);
3289 memcpy (si->data, contents, len);
3293 static struct stack_item *
3294 pop_stack_item (struct stack_item *si)
3296 struct stack_item *dead = si;
3303 /* Implement the gdbarch type alignment method, overriding the generic
3304 alignment algorithm for anything that is ARM-specific. */
3307 arm_type_align (gdbarch *gdbarch, struct type *t)
3309 t = check_typedef (t);
3310 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3312 /* Use the natural alignment for vector types (the same as for
3313 scalar types), but the maximum alignment is 64 bits. */
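/* For example, an 8-byte or a 16-byte NEON vector both end up with an
   alignment of 8 here, while a 4-byte vector keeps its natural 4-byte
   alignment.  */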
3314 if (TYPE_LENGTH (t) > 8)
3317 return TYPE_LENGTH (t);
3320 /* Allow the common code to calculate the alignment. */
3324 /* Possible base types for a candidate for passing and returning in
3327 enum arm_vfp_cprc_base_type
3336 /* The length of one element of base type B. */
3339 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3343 case VFP_CPRC_SINGLE:
3345 case VFP_CPRC_DOUBLE:
3347 case VFP_CPRC_VEC64:
3349 case VFP_CPRC_VEC128:
3352 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3357 /* The character ('s', 'd' or 'q') for the type of VFP register used
3358 for passing base type B. */
3361 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3365 case VFP_CPRC_SINGLE:
3367 case VFP_CPRC_DOUBLE:
3369 case VFP_CPRC_VEC64:
3371 case VFP_CPRC_VEC128:
3374 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3379 /* Determine whether T may be part of a candidate for passing and
3380 returning in VFP registers, ignoring the limit on the total number
3381 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3382 classification of the first valid component found; if it is not
3383 VFP_CPRC_UNKNOWN, all components must have the same classification
3384 as *BASE_TYPE. If it is found that T contains a type not permitted
3385 for passing and returning in VFP registers, a type differently
3386 classified from *BASE_TYPE, or two types differently classified
3387 from each other, return -1, otherwise return the total number of
3388 base-type elements found (possibly 0 in an empty structure or
3389 array). Vector types are not currently supported, matching the
3390 generic AAPCS support. */
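/* For example, "struct { double x, y; }" classifies as VFP_CPRC_DOUBLE
   with two elements, while "struct { float f; double d; }" returns -1
   because its members classify differently.  */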
3393 arm_vfp_cprc_sub_candidate (struct type *t,
3394 enum arm_vfp_cprc_base_type *base_type)
3396 t = check_typedef (t);
3397 switch (TYPE_CODE (t))
3400 switch (TYPE_LENGTH (t))
3403 if (*base_type == VFP_CPRC_UNKNOWN)
3404 *base_type = VFP_CPRC_SINGLE;
3405 else if (*base_type != VFP_CPRC_SINGLE)
3410 if (*base_type == VFP_CPRC_UNKNOWN)
3411 *base_type = VFP_CPRC_DOUBLE;
3412 else if (*base_type != VFP_CPRC_DOUBLE)
3421 case TYPE_CODE_COMPLEX:
3422 /* Arguments of complex T where T is one of the types float or
3423 double get treated as if they are implemented as:
3425 struct complexT
3426 {
3427 T real;
3428 T imag;
3429 };
3431 */
3432 switch (TYPE_LENGTH (t))
3435 if (*base_type == VFP_CPRC_UNKNOWN)
3436 *base_type = VFP_CPRC_SINGLE;
3437 else if (*base_type != VFP_CPRC_SINGLE)
3442 if (*base_type == VFP_CPRC_UNKNOWN)
3443 *base_type = VFP_CPRC_DOUBLE;
3444 else if (*base_type != VFP_CPRC_DOUBLE)
3453 case TYPE_CODE_ARRAY:
3455 if (TYPE_VECTOR (t))
3457 /* 64-bit and 128-bit containerized vector types are VFP
3458 CPRCs. */
3459 switch (TYPE_LENGTH (t))
3462 if (*base_type == VFP_CPRC_UNKNOWN)
3463 *base_type = VFP_CPRC_VEC64;
3466 if (*base_type == VFP_CPRC_UNKNOWN)
3467 *base_type = VFP_CPRC_VEC128;
3478 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3482 if (TYPE_LENGTH (t) == 0)
3484 gdb_assert (count == 0);
3487 else if (count == 0)
3489 unitlen = arm_vfp_cprc_unit_length (*base_type);
3490 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3491 return TYPE_LENGTH (t) / unitlen;
3496 case TYPE_CODE_STRUCT:
3501 for (i = 0; i < TYPE_NFIELDS (t); i++)
3505 if (!field_is_static (&TYPE_FIELD (t, i)))
3506 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3508 if (sub_count == -1)
3512 if (TYPE_LENGTH (t) == 0)
3514 gdb_assert (count == 0);
3517 else if (count == 0)
3519 unitlen = arm_vfp_cprc_unit_length (*base_type);
3520 if (TYPE_LENGTH (t) != unitlen * count)
3525 case TYPE_CODE_UNION:
3530 for (i = 0; i < TYPE_NFIELDS (t); i++)
3532 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3534 if (sub_count == -1)
3536 count = (count > sub_count ? count : sub_count);
3538 if (TYPE_LENGTH (t) == 0)
3540 gdb_assert (count == 0);
3543 else if (count == 0)
3545 unitlen = arm_vfp_cprc_unit_length (*base_type);
3546 if (TYPE_LENGTH (t) != unitlen * count)
3558 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3559 if passed to or returned from a non-variadic function with the VFP
3560 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3561 *BASE_TYPE to the base type for T and *COUNT to the number of
3562 elements of that base type before returning. */
3565 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3568 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3569 int c = arm_vfp_cprc_sub_candidate (t, &b);
3570 if (c <= 0 || c > 4)
3577 /* Return 1 if the VFP ABI should be used for passing arguments to and
3578 returning values from a function of type FUNC_TYPE, 0
3582 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3584 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3585 /* Variadic functions always use the base ABI. Assume that functions
3586 without debug info are not variadic. */
3587 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3589 /* The VFP ABI is only supported as a variant of AAPCS. */
3590 if (tdep->arm_abi != ARM_ABI_AAPCS)
3592 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3595 /* We currently only support passing parameters in integer registers, which
3596 conforms with GCC's default model, and VFP argument passing following
3597 the VFP variant of AAPCS. Several other variants exist and
3598 we should probably support some of them based on the selected ABI. */
3601 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3602 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3603 struct value **args, CORE_ADDR sp,
3604 function_call_return_method return_method,
3605 CORE_ADDR struct_addr)
3607 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3611 struct stack_item *si = NULL;
3614 unsigned vfp_regs_free = (1 << 16) - 1;
3616 /* Determine the type of this function and whether the VFP ABI
3618 ftype = check_typedef (value_type (function));
3619 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3620 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3621 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3623 /* Set the return address. For the ARM, the return breakpoint is
3624 always at BP_ADDR. */
3625 if (arm_pc_is_thumb (gdbarch, bp_addr))
3627 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3629 /* Walk through the list of args and determine how large a temporary
3630 stack is required. Need to take care here as structs may be
3631 passed on the stack, and we have to push them. */
3634 argreg = ARM_A1_REGNUM;
3637 /* The struct_return pointer occupies the first parameter
3638 passing register. */
3639 if (return_method == return_method_struct)
3642 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3643 gdbarch_register_name (gdbarch, argreg),
3644 paddress (gdbarch, struct_addr));
3645 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3649 for (argnum = 0; argnum < nargs; argnum++)
3652 struct type *arg_type;
3653 struct type *target_type;
3654 enum type_code typecode;
3655 const bfd_byte *val;
3657 enum arm_vfp_cprc_base_type vfp_base_type;
3659 int may_use_core_reg = 1;
3661 arg_type = check_typedef (value_type (args[argnum]));
3662 len = TYPE_LENGTH (arg_type);
3663 target_type = TYPE_TARGET_TYPE (arg_type);
3664 typecode = TYPE_CODE (arg_type);
3665 val = value_contents (args[argnum]);
3667 align = type_align (arg_type);
3668 /* Round alignment up to a whole number of words. */
3669 align = (align + ARM_INT_REGISTER_SIZE - 1)
3670 & ~(ARM_INT_REGISTER_SIZE - 1);
3671 /* Different ABIs have different maximum alignments. */
3672 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3674 /* The APCS ABI only requires word alignment. */
3675 align = ARM_INT_REGISTER_SIZE;
3679 /* The AAPCS requires at most doubleword alignment. */
3680 if (align > ARM_INT_REGISTER_SIZE * 2)
3681 align = ARM_INT_REGISTER_SIZE * 2;
3685 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3693 /* Because this is a CPRC it cannot go in a core register or
3694 cause a core register to be skipped for alignment.
3695 Either it goes in VFP registers and the rest of this loop
3696 iteration is skipped for this argument, or it goes on the
3697 stack (and the stack alignment code is correct for this
3698 case). */
3699 may_use_core_reg = 0;
3701 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3702 shift = unit_length / 4;
3703 mask = (1 << (shift * vfp_base_count)) - 1;
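/* Example: for a homogeneous aggregate of two doubles, unit_length is 8,
   so shift is 2 and mask is 0xf; the search below therefore looks for
   four consecutive free single-precision slots starting on an even
   boundary, i.e. a free d-register pair.  */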
3704 for (regno = 0; regno < 16; regno += shift)
3705 if (((vfp_regs_free >> regno) & mask) == mask)
3714 vfp_regs_free &= ~(mask << regno);
3715 reg_scaled = regno / shift;
3716 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3717 for (i = 0; i < vfp_base_count; i++)
3721 if (reg_char == 'q')
3722 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3723 val + i * unit_length);
3726 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3727 reg_char, reg_scaled + i);
3728 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3730 regcache->cooked_write (regnum, val + i * unit_length);
3737 /* This CPRC could not go in VFP registers, so all VFP
3738 registers are now marked as used. */
3743 /* Push stack padding for doubleword alignment. */
3744 if (nstack & (align - 1))
3746 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3747 nstack += ARM_INT_REGISTER_SIZE;
3750 /* Doubleword aligned quantities must go in even register pairs. */
3751 if (may_use_core_reg
3752 && argreg <= ARM_LAST_ARG_REGNUM
3753 && align > ARM_INT_REGISTER_SIZE
3757 /* If the argument is a pointer to a function, and it is a
3758 Thumb function, create a LOCAL copy of the value and set
3759 the THUMB bit in it. */
3760 if (TYPE_CODE_PTR == typecode
3761 && target_type != NULL
3762 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3764 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3765 if (arm_pc_is_thumb (gdbarch, regval))
3767 bfd_byte *copy = (bfd_byte *) alloca (len);
3768 store_unsigned_integer (copy, len, byte_order,
3769 MAKE_THUMB_ADDR (regval));
3774 /* Copy the argument to general registers or the stack in
3775 register-sized pieces. Large arguments are split between
3776 registers and stack. */
3779 int partial_len = len < ARM_INT_REGISTER_SIZE
3780 ? len : ARM_INT_REGISTER_SIZE;
3782 = extract_unsigned_integer (val, partial_len, byte_order);
3784 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3786 /* The argument is being passed in a general purpose
3787 register. */
3788 if (byte_order == BFD_ENDIAN_BIG)
3789 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3791 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3793 gdbarch_register_name
3795 phex (regval, ARM_INT_REGISTER_SIZE));
3796 regcache_cooked_write_unsigned (regcache, argreg, regval);
3801 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3803 memset (buf, 0, sizeof (buf));
3804 store_unsigned_integer (buf, partial_len, byte_order, regval);
3806 /* Push the arguments onto the stack. */
3808 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3810 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3811 nstack += ARM_INT_REGISTER_SIZE;
3818 /* If we have an odd number of words to push, then decrement the stack
3819 by one word now, so the first stack argument will be dword aligned. */
3826 write_memory (sp, si->data, si->len);
3827 si = pop_stack_item (si);
3830 /* Finally, update the SP register. */
3831 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3837 /* Always align the frame to an 8-byte boundary. This is required on
3838 some platforms and harmless on the rest. */
3841 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3843 /* Align the stack to eight bytes. */
3844 return sp & ~ (CORE_ADDR) 7;
3848 print_fpu_flags (struct ui_file *file, int flags)
3850 if (flags & (1 << 0))
3851 fputs_filtered ("IVO ", file);
3852 if (flags & (1 << 1))
3853 fputs_filtered ("DVZ ", file);
3854 if (flags & (1 << 2))
3855 fputs_filtered ("OFL ", file);
3856 if (flags & (1 << 3))
3857 fputs_filtered ("UFL ", file);
3858 if (flags & (1 << 4))
3859 fputs_filtered ("INX ", file);
3860 fputc_filtered ('\n', file);
3863 /* Print interesting information about the floating point processor
3864 (if present) or emulator. */
3866 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3867 struct frame_info *frame, const char *args)
3869 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3872 type = (status >> 24) & 127;
3873 if (status & (1 << 31))
3874 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3876 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3877 /* i18n: [floating point unit] mask */
3878 fputs_filtered (_("mask: "), file);
3879 print_fpu_flags (file, status >> 16);
3880 /* i18n: [floating point unit] flags */
3881 fputs_filtered (_("flags: "), file);
3882 print_fpu_flags (file, status);
3885 /* Construct the ARM extended floating point type. */
3886 static struct type *
3887 arm_ext_type (struct gdbarch *gdbarch)
3889 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3891 if (!tdep->arm_ext_type)
3893 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3894 floatformats_arm_ext);
3896 return tdep->arm_ext_type;
3899 static struct type *
3900 arm_neon_double_type (struct gdbarch *gdbarch)
3902 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3904 if (tdep->neon_double_type == NULL)
3906 struct type *t, *elem;
3908 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3910 elem = builtin_type (gdbarch)->builtin_uint8;
3911 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3912 elem = builtin_type (gdbarch)->builtin_uint16;
3913 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3914 elem = builtin_type (gdbarch)->builtin_uint32;
3915 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3916 elem = builtin_type (gdbarch)->builtin_uint64;
3917 append_composite_type_field (t, "u64", elem);
3918 elem = builtin_type (gdbarch)->builtin_float;
3919 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3920 elem = builtin_type (gdbarch)->builtin_double;
3921 append_composite_type_field (t, "f64", elem);
3923 TYPE_VECTOR (t) = 1;
3924 TYPE_NAME (t) = "neon_d";
3925 tdep->neon_double_type = t;
3928 return tdep->neon_double_type;
3931 /* FIXME: The vector types are not correctly ordered on big-endian
3932 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3933 bits of d0 - regardless of what unit size is being held in d0. So
3934 the offset of the first uint8 in d0 is 7, but the offset of the
3935 first float is 4. This code works as-is for little-endian
3936 targets. */
3938 static struct type *
3939 arm_neon_quad_type (struct gdbarch *gdbarch)
3941 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3943 if (tdep->neon_quad_type == NULL)
3945 struct type *t, *elem;
3947 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3949 elem = builtin_type (gdbarch)->builtin_uint8;
3950 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3951 elem = builtin_type (gdbarch)->builtin_uint16;
3952 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3953 elem = builtin_type (gdbarch)->builtin_uint32;
3954 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3955 elem = builtin_type (gdbarch)->builtin_uint64;
3956 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3957 elem = builtin_type (gdbarch)->builtin_float;
3958 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3959 elem = builtin_type (gdbarch)->builtin_double;
3960 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3962 TYPE_VECTOR (t) = 1;
3963 TYPE_NAME (t) = "neon_q";
3964 tdep->neon_quad_type = t;
3967 return tdep->neon_quad_type;
3970 /* Return the GDB type object for the "standard" data type of data in
3971 register N. */
3973 static struct type *
3974 arm_register_type (struct gdbarch *gdbarch, int regnum)
3976 int num_regs = gdbarch_num_regs (gdbarch);
3978 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3979 && regnum >= num_regs && regnum < num_regs + 32)
3980 return builtin_type (gdbarch)->builtin_float;
3982 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3983 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3984 return arm_neon_quad_type (gdbarch);
3986 /* If the target description has register information, we are only
3987 in this function so that we can override the types of
3988 double-precision registers for NEON. */
3989 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3991 struct type *t = tdesc_register_type (gdbarch, regnum);
3993 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3994 && TYPE_CODE (t) == TYPE_CODE_FLT
3995 && gdbarch_tdep (gdbarch)->have_neon)
3996 return arm_neon_double_type (gdbarch);
4001 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4003 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4004 return builtin_type (gdbarch)->builtin_void;
4006 return arm_ext_type (gdbarch);
4008 else if (regnum == ARM_SP_REGNUM)
4009 return builtin_type (gdbarch)->builtin_data_ptr;
4010 else if (regnum == ARM_PC_REGNUM)
4011 return builtin_type (gdbarch)->builtin_func_ptr;
4012 else if (regnum >= ARRAY_SIZE (arm_register_names))
4013 /* These registers are only supported on targets which supply
4014 an XML description. */
4015 return builtin_type (gdbarch)->builtin_int0;
4017 return builtin_type (gdbarch)->builtin_uint32;
4020 /* Map a DWARF register REGNUM onto the appropriate GDB register
4021 number. */
4024 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4026 /* Core integer regs. */
4027 if (reg >= 0 && reg <= 15)
4030 /* Legacy FPA encoding. These were once used in a way which
4031 overlapped with VFP register numbering, so their use is
4032 discouraged, but GDB doesn't support the ARM toolchain
4033 which used them for VFP. */
4034 if (reg >= 16 && reg <= 23)
4035 return ARM_F0_REGNUM + reg - 16;
4037 /* New assignments for the FPA registers. */
4038 if (reg >= 96 && reg <= 103)
4039 return ARM_F0_REGNUM + reg - 96;
4041 /* WMMX register assignments. */
4042 if (reg >= 104 && reg <= 111)
4043 return ARM_WCGR0_REGNUM + reg - 104;
4045 if (reg >= 112 && reg <= 127)
4046 return ARM_WR0_REGNUM + reg - 112;
4048 if (reg >= 192 && reg <= 199)
4049 return ARM_WC0_REGNUM + reg - 192;
4051 /* VFP v2 registers. A double precision value is actually
4052 in d1 rather than s2, but the ABI only defines numbering
4053 for the single precision registers. This will "just work"
4054 in GDB for little endian targets (we'll read eight bytes,
4055 starting in s0 and then progressing to s1), but will be
4056 reversed on big endian targets with VFP. This won't
4057 be a problem for the new Neon quad registers; you're supposed
4058 to use DW_OP_piece for those. */
4059 if (reg >= 64 && reg <= 95)
4063 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4064 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4068 /* VFP v3 / Neon registers. This range is also used for VFP v2
4069 registers, except that it now describes d0 instead of s0. */
4070 if (reg >= 256 && reg <= 287)
4074 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4075 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4082 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4084 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4087 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4089 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4090 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4092 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4093 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4095 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4096 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4098 if (reg < NUM_GREGS)
4099 return SIM_ARM_R0_REGNUM + reg;
4102 if (reg < NUM_FREGS)
4103 return SIM_ARM_FP0_REGNUM + reg;
4106 if (reg < NUM_SREGS)
4107 return SIM_ARM_FPS_REGNUM + reg;
4110 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4113 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4114 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4115 NULL if an error occurs. BUF is freed. */
4118 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4119 int old_len, int new_len)
4122 int bytes_to_read = new_len - old_len;
4124 new_buf = (gdb_byte *) xmalloc (new_len);
4125 memcpy (new_buf + bytes_to_read, buf, old_len);
4127 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4135 /* An IT block is at most the 2-byte IT instruction followed by
4136 four 4-byte instructions. The furthest back we must search to
4137 find an IT block that affects the current instruction is thus
4138 2 + 3 * 4 == 14 bytes. */
4139 #define MAX_IT_BLOCK_PREFIX 14
4141 /* Use a quick scan if there are more than this many bytes of
4142 instructions. */
4143 #define IT_SCAN_THRESHOLD 32
4145 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4146 A breakpoint in an IT block may not be hit, depending on the
4147 condition flags. */
4149 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4153 CORE_ADDR boundary, func_start;
4155 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4156 int i, any, last_it, last_it_count;
4158 /* If we are using BKPT breakpoints, none of this is necessary. */
4159 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4162 /* ARM mode does not have this problem. */
4163 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4166 /* We are setting a breakpoint in Thumb code that could potentially
4167 contain an IT block. The first step is to find how much Thumb
4168 code there is; we do not need to read outside of known Thumb
4169 code. */
4170 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4172 /* Thumb-2 code must have mapping symbols to have a chance. */
4175 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4177 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4178 && func_start > boundary)
4179 boundary = func_start;
4181 /* Search for a candidate IT instruction. We have to do some fancy
4182 footwork to distinguish a real IT instruction from the second
4183 half of a 32-bit instruction, but there is no need for that if
4184 there's no candidate. */
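/* A candidate is any halfword 0xbfXY with a non-zero low nibble: IT
   encodes its condition in bits 7:4 and the block mask in bits 3:0,
   while a zero mask instead encodes hint instructions such as NOP or
   YIELD.  */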
4185 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4187 /* No room for an IT instruction. */
4190 buf = (gdb_byte *) xmalloc (buf_len);
4191 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4194 for (i = 0; i < buf_len; i += 2)
4196 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4197 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4210 /* OK, the code bytes before this instruction contain at least one
4211 halfword which resembles an IT instruction. We know that it's
4212 Thumb code, but there are still two possibilities. Either the
4213 halfword really is an IT instruction, or it is the second half of
4214 a 32-bit Thumb instruction. The only way we can tell is to
4215 scan forwards from a known instruction boundary. */
4216 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4220 /* There's a lot of code before this instruction. Start with an
4221 optimistic search; it's easy to recognize halfwords that can
4222 not be the start of a 32-bit instruction, and use that to
4223 lock on to the instruction boundaries. */
4224 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4227 buf_len = IT_SCAN_THRESHOLD;
4230 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4232 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4233 if (thumb_insn_size (inst1) == 2)
4240 /* At this point, if DEFINITE, BUF[I] is the first place we
4241 are sure that we know the instruction boundaries, and it is far
4242 enough from BPADDR that we could not miss an IT instruction
4243 affecting BPADDR. If ! DEFINITE, give up - start from a
4244 known boundary. */
4247 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4251 buf_len = bpaddr - boundary;
4257 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4260 buf_len = bpaddr - boundary;
4264 /* Scan forwards. Find the last IT instruction before BPADDR. */
4269 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4271 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4276 else if (inst1 & 0x0002)
4278 else if (inst1 & 0x0004)
4283 i += thumb_insn_size (inst1);
4289 /* There wasn't really an IT instruction after all. */
4292 if (last_it_count < 1)
4293 /* It was too far away. */
4296 /* This really is a trouble spot. Move the breakpoint to the IT
4297 instruction. */
4298 return bpaddr - buf_len + last_it;
4301 /* ARM displaced stepping support.
4303 Generally ARM displaced stepping works as follows:
4305 1. When an instruction is to be single-stepped, it is first decoded by
4306 arm_process_displaced_insn. Depending on the type of instruction, it is
4307 then copied to a scratch location, possibly in a modified form. The
4308 copy_* set of functions performs such modification, as necessary. A
4309 breakpoint is placed after the modified instruction in the scratch space
4310 to return control to GDB. Note in particular that instructions which
4311 modify the PC will no longer do so after modification.
4313 2. The instruction is single-stepped, by setting the PC to the scratch
4314 location address, and resuming. Control returns to GDB when the
4315 breakpoint is hit.
4317 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4318 function used for the current instruction. This function's job is to
4319 put the CPU/memory state back to what it would have been if the
4320 instruction had been executed unmodified in its original location. */
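/* As an illustrative sketch of that flow (assuming an instruction handled by
   the load/store support below): to step "ldr pc, [r4]", the copy routine
   rewrites it to load into a scratch register rather than the PC and runs
   the copy in the scratch area; the matching cleanup routine then writes the
   loaded value into the inferior's PC, as if the unmodified instruction had
   executed at its original address. */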
4322 /* NOP instruction (mov r0, r0). */
4323 #define ARM_NOP 0xe1a00000
4324 #define THUMB_NOP 0x4600
4326 /* Helper for register reads for displaced stepping. In particular, this
4327 returns the PC as it would be seen by the instruction at its original
4331 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4335 CORE_ADDR from = dsc->insn_addr;
4337 if (regno == ARM_PC_REGNUM)
4339 /* Compute pipeline offset:
4340 - When executing an ARM instruction, PC reads as the address of the
4341 current instruction plus 8.
4342 - When executing a Thumb instruction, PC reads as the address of the
4343 current instruction plus 4. */
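/* For example, an ARM-mode instruction at 0x8000 reads r15 as 0x8008, while
   a Thumb-mode instruction at the same address reads it as 0x8004. */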
4350 if (debug_displaced)
4351 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4352 (unsigned long) from);
4353 return (ULONGEST) from;
4357 regcache_cooked_read_unsigned (regs, regno, &ret);
4358 if (debug_displaced)
4359 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4360 regno, (unsigned long) ret);
4366 displaced_in_arm_mode (struct regcache *regs)
4369 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4371 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4373 return (ps & t_bit) == 0;
4376 /* Write to the PC as from a branch instruction. */
4379 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4383 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4384 architecture versions < 6. */
4385 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4386 val & ~(ULONGEST) 0x3);
4388 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4389 val & ~(ULONGEST) 0x1);
4392 /* Write to the PC as from a branch-exchange instruction. */
4395 bx_write_pc (struct regcache *regs, ULONGEST val)
4398 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4400 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4404 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4405 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4407 else if ((val & 2) == 0)
4409 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4410 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4414 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4415 mode, align dest to 4 bytes). */
4416 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4417 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4418 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
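/* For illustration of the architectural BX behaviour handled above: a
   destination value of 0x8001 enters Thumb state at 0x8000, 0x8004 enters
   ARM state at 0x8004, and 0x8002 is the unpredictable case that falls
   through to the warning, forcing ARM state at 0x8000. */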
4422 /* Write to the PC as if from a load instruction. */
4425 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4428 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4429 bx_write_pc (regs, val);
4431 branch_write_pc (regs, dsc, val);
4434 /* Write to the PC as if from an ALU instruction. */
4437 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4440 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4441 bx_write_pc (regs, val);
4443 branch_write_pc (regs, dsc, val);
4446 /* Helper for writing to registers for displaced stepping. Writing to the PC
4447 has varying effects depending on the instruction which does the write:
4448 this is controlled by the WRITE_PC argument. */
4451 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4452 int regno, ULONGEST val, enum pc_write_style write_pc)
4454 if (regno == ARM_PC_REGNUM)
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4458 (unsigned long) val);
4461 case BRANCH_WRITE_PC:
4462 branch_write_pc (regs, dsc, val);
4466 bx_write_pc (regs, val);
4470 load_write_pc (regs, dsc, val);
4474 alu_write_pc (regs, dsc, val);
4477 case CANNOT_WRITE_PC:
4478 warning (_("Instruction wrote to PC in an unexpected way when "
4479 "single-stepping"));
4483 internal_error (__FILE__, __LINE__,
4484 _("Invalid argument to displaced_write_reg"));
4487 dsc->wrote_to_pc = 1;
4491 if (debug_displaced)
4492 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4493 regno, (unsigned long) val);
4494 regcache_cooked_write_unsigned (regs, regno, val);
4498 /* This function is used to concisely determine if an instruction INSN
4499 references PC. Register fields of interest in INSN should have the
4500 corresponding fields of BITMASK set to 0b1111. The function
4501 returns 1 if any of these fields in INSN reference the PC
4502 (also 0b1111, r15), else it returns 0. */
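/* Hypothetical usage example: for the ARM instruction "mov pc, r0"
   (0xe1a0f000), insn_references_pc (insn, 0x000ff000ul) returns 1 because
   the Rd field (bits 12-15) is 0b1111, whereas "mov r1, r0" (0xe1a01000)
   yields 0. */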
4505 insn_references_pc (uint32_t insn, uint32_t bitmask)
4507 uint32_t lowbit = 1;
4509 while (bitmask != 0)
4513 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4519 mask = lowbit * 0xf;
4521 if ((insn & mask) == mask)
4530 /* The simplest copy function. Many instructions have the same effect no
4531 matter what address they are executed at: in those cases, use this. */
4534 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4535 const char *iname, arm_displaced_step_closure *dsc)
4537 if (debug_displaced)
4538 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4539 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4542 dsc->modinsn[0] = insn;
4548 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4549 uint16_t insn2, const char *iname,
4550 arm_displaced_step_closure *dsc)
4552 if (debug_displaced)
4553 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4554 "opcode/class '%s' unmodified\n", insn1, insn2,
4557 dsc->modinsn[0] = insn1;
4558 dsc->modinsn[1] = insn2;
4564 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4567 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4569 arm_displaced_step_closure *dsc)
4571 if (debug_displaced)
4572 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4573 "opcode/class '%s' unmodified\n", insn,
4576 dsc->modinsn[0] = insn;
4581 /* Preload instructions with immediate offset. */
4584 cleanup_preload (struct gdbarch *gdbarch,
4585 struct regcache *regs, arm_displaced_step_closure *dsc)
4587 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4588 if (!dsc->u.preload.immed)
4589 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4593 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4594 arm_displaced_step_closure *dsc, unsigned int rn)
4597 /* Preload instructions:
4599 {pli/pld} [rn, #+/-imm]
4601 {pli/pld} [r0, #+/-imm]. */
4603 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4604 rn_val = displaced_read_reg (regs, dsc, rn);
4605 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4606 dsc->u.preload.immed = 1;
4608 dsc->cleanup = &cleanup_preload;
4612 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4613 arm_displaced_step_closure *dsc)
4615 unsigned int rn = bits (insn, 16, 19);
4617 if (!insn_references_pc (insn, 0x000f0000ul))
4618 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4620 if (debug_displaced)
4621 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4622 (unsigned long) insn);
4624 dsc->modinsn[0] = insn & 0xfff0ffff;
4626 install_preload (gdbarch, regs, dsc, rn);
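/* Concrete sketch of the rewrite above: "pld [pc, #16]" has rn == 15, so it
   is copied as "pld [r0, #16]" (clearing the Rn field selects r0), with r0
   temporarily holding the value the PC-relative original would have seen;
   cleanup_preload later restores r0. */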
4632 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4633 struct regcache *regs, arm_displaced_step_closure *dsc)
4635 unsigned int rn = bits (insn1, 0, 3);
4636 unsigned int u_bit = bit (insn1, 7);
4637 int imm12 = bits (insn2, 0, 11);
4640 if (rn != ARM_PC_REGNUM)
4641 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4643 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
4644 PLD (literal) Encoding T1. */
4645 if (debug_displaced)
4646 fprintf_unfiltered (gdb_stdlog,
4647 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4648 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4654 /* Rewrite instruction {pli/pld} PC imm12 into:
4655 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4659 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4661 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4662 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4664 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4666 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4667 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4668 dsc->u.preload.immed = 0;
4670 /* {pli/pld} [r0, r1] */
4671 dsc->modinsn[0] = insn1 & 0xfff0;
4672 dsc->modinsn[1] = 0xf001;
4675 dsc->cleanup = &cleanup_preload;
4679 /* Preload instructions with register offset. */
4682 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4683 arm_displaced_step_closure *dsc, unsigned int rn,
4686 ULONGEST rn_val, rm_val;
4688 /* Preload register-offset instructions:
4690 {pli/pld} [rn, rm {, shift}]
4692 {pli/pld} [r0, r1 {, shift}]. */
4694 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4695 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4696 rn_val = displaced_read_reg (regs, dsc, rn);
4697 rm_val = displaced_read_reg (regs, dsc, rm);
4698 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4699 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4700 dsc->u.preload.immed = 0;
4702 dsc->cleanup = &cleanup_preload;
4706 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4707 struct regcache *regs,
4708 arm_displaced_step_closure *dsc)
4710 unsigned int rn = bits (insn, 16, 19);
4711 unsigned int rm = bits (insn, 0, 3);
4714 if (!insn_references_pc (insn, 0x000f000ful))
4715 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4717 if (debug_displaced)
4718 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4719 (unsigned long) insn);
4721 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4723 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4727 /* Copy/cleanup coprocessor load and store instructions. */
4730 cleanup_copro_load_store (struct gdbarch *gdbarch,
4731 struct regcache *regs,
4732 arm_displaced_step_closure *dsc)
4734 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4736 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4738 if (dsc->u.ldst.writeback)
4739 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4743 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4744 arm_displaced_step_closure *dsc,
4745 int writeback, unsigned int rn)
4749 /* Coprocessor load/store instructions:
4751 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4753 {stc/stc2} [r0, #+/-imm].
4755 ldc/ldc2 are handled identically. */
4757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4758 rn_val = displaced_read_reg (regs, dsc, rn);
4759 /* PC should be 4-byte aligned. */
4760 rn_val = rn_val & 0xfffffffc;
4761 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4763 dsc->u.ldst.writeback = writeback;
4764 dsc->u.ldst.rn = rn;
4766 dsc->cleanup = &cleanup_copro_load_store;
4770 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4771 struct regcache *regs,
4772 arm_displaced_step_closure *dsc)
4774 unsigned int rn = bits (insn, 16, 19);
4776 if (!insn_references_pc (insn, 0x000f0000ul))
4777 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4779 if (debug_displaced)
4780 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4781 "load/store insn %.8lx\n", (unsigned long) insn);
4783 dsc->modinsn[0] = insn & 0xfff0ffff;
4785 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4791 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4792 uint16_t insn2, struct regcache *regs,
4793 arm_displaced_step_closure *dsc)
4795 unsigned int rn = bits (insn1, 0, 3);
4797 if (rn != ARM_PC_REGNUM)
4798 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4799 "copro load/store", dsc);
4801 if (debug_displaced)
4802 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4803 "load/store insn %.4x%.4x\n", insn1, insn2);
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = insn2;
4809 /* This function is called to copy the instructions LDC/LDC2/VLDR, which
4810 do not support writeback, so pass 0 for it. */
4811 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4816 /* Clean up branch instructions (actually perform the branch, by setting
4820 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4821 arm_displaced_step_closure *dsc)
4823 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4824 int branch_taken = condition_true (dsc->u.branch.cond, status);
4825 enum pc_write_style write_pc = dsc->u.branch.exchange
4826 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4831 if (dsc->u.branch.link)
4833 /* The value of LR should be the address of the insn following the current
4834 one. In order not to confuse the logic handling a later `bx lr', if the
4835 current insn is in Thumb mode, bit 0 of the LR value should be set to 1.
4836 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4839 next_insn_addr |= 0x1;
4841 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4845 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4848 /* Copy B/BL/BLX instructions with immediate destinations. */
4851 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4852 arm_displaced_step_closure *dsc,
4853 unsigned int cond, int exchange, int link, long offset)
4855 /* Implement "BL<cond> <label>" as:
4857 Preparation: cond <- instruction condition
4858 Insn: mov r0, r0 (nop)
4859 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4861 B<cond> similar, but don't set r14 in cleanup. */
4863 dsc->u.branch.cond = cond;
4864 dsc->u.branch.link = link;
4865 dsc->u.branch.exchange = exchange;
4867 dsc->u.branch.dest = dsc->insn_addr;
4868 if (link && exchange)
4869 /* For BLX, the offset is computed from Align (PC, 4). */
4870 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4873 dsc->u.branch.dest += 4 + offset;
4875 dsc->u.branch.dest += 8 + offset;
4877 dsc->cleanup = &cleanup_branch;
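/* Worked example of the destination computation above: an ARM-mode "bl" at
   0x8000 whose sign-extended, already-shifted offset is 0x100 ends up with
   dest = 0x8000 + 8 + 0x100 = 0x8108, matching the architectural
   PC-relative interpretation of the branch. */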
4880 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs, arm_displaced_step_closure *dsc)
4883 unsigned int cond = bits (insn, 28, 31);
4884 int exchange = (cond == 0xf);
4885 int link = exchange || bit (insn, 24);
4888 if (debug_displaced)
4889 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4890 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4891 (unsigned long) insn);
4893 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4894 then arrange the switch into Thumb mode. */
4895 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4897 offset = bits (insn, 0, 23) << 2;
4899 if (bit (offset, 25))
4900 offset = offset | ~0x3ffffff;
4902 dsc->modinsn[0] = ARM_NOP;
4904 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4909 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4910 uint16_t insn2, struct regcache *regs,
4911 arm_displaced_step_closure *dsc)
4913 int link = bit (insn2, 14);
4914 int exchange = link && !bit (insn2, 12);
4917 int j1 = bit (insn2, 13);
4918 int j2 = bit (insn2, 11);
4919 int s = sbits (insn1, 10, 10);
4920 int i1 = !(j1 ^ bit (insn1, 10));
4921 int i2 = !(j2 ^ bit (insn1, 10));
4923 if (!link && !exchange) /* B */
4925 offset = (bits (insn2, 0, 10) << 1);
4926 if (bit (insn2, 12)) /* Encoding T4 */
4928 offset |= (bits (insn1, 0, 9) << 12)
4934 else /* Encoding T3 */
4936 offset |= (bits (insn1, 0, 5) << 12)
4940 cond = bits (insn1, 6, 9);
4945 offset = (bits (insn1, 0, 9) << 12);
4946 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4947 offset |= exchange ?
4948 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4951 if (debug_displaced)
4952 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4953 "%.4x %.4x with offset %.8lx\n",
4954 link ? (exchange) ? "blx" : "bl" : "b",
4955 insn1, insn2, offset);
4957 dsc->modinsn[0] = THUMB_NOP;
4959 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4963 /* Copy B Thumb instructions. */
4965 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4966 arm_displaced_step_closure *dsc)
4968 unsigned int cond = 0;
4970 unsigned short bit_12_15 = bits (insn, 12, 15);
4971 CORE_ADDR from = dsc->insn_addr;
4973 if (bit_12_15 == 0xd)
4975 /* offset = SignExtend (imm8:0, 32) */
4976 offset = sbits ((insn << 1), 0, 8);
4977 cond = bits (insn, 8, 11);
4979 else if (bit_12_15 == 0xe) /* Encoding T2 */
4981 offset = sbits ((insn << 1), 0, 11);
4985 if (debug_displaced)
4986 fprintf_unfiltered (gdb_stdlog,
4987 "displaced: copying b immediate insn %.4x "
4988 "with offset %d\n", insn, offset);
4990 dsc->u.branch.cond = cond;
4991 dsc->u.branch.link = 0;
4992 dsc->u.branch.exchange = 0;
4993 dsc->u.branch.dest = from + 4 + offset;
4995 dsc->modinsn[0] = THUMB_NOP;
4997 dsc->cleanup = &cleanup_branch;
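/* Example for the encodings above: a conditional branch (Encoding T1) with
   an imm8 field of 0xfe gives offset = SignExtend (imm8:'0') = -4, so the
   destination is from + 4 - 4, i.e. the instruction branches to itself. */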
5002 /* Copy BX/BLX with register-specified destinations. */
5005 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5006 arm_displaced_step_closure *dsc, int link,
5007 unsigned int cond, unsigned int rm)
5009 /* Implement "{BX,BLX}<cond> <reg>" as:
5011 Preparation: cond <- instruction condition
5012 Insn: mov r0, r0 (nop)
5013 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5015 Don't set r14 in cleanup for BX. */
5017 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5019 dsc->u.branch.cond = cond;
5020 dsc->u.branch.link = link;
5022 dsc->u.branch.exchange = 1;
5024 dsc->cleanup = &cleanup_branch;
5028 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5029 struct regcache *regs, arm_displaced_step_closure *dsc)
5031 unsigned int cond = bits (insn, 28, 31);
5034 int link = bit (insn, 5);
5035 unsigned int rm = bits (insn, 0, 3);
5037 if (debug_displaced)
5038 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5039 (unsigned long) insn);
5041 dsc->modinsn[0] = ARM_NOP;
5043 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5048 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5049 struct regcache *regs,
5050 arm_displaced_step_closure *dsc)
5052 int link = bit (insn, 7);
5053 unsigned int rm = bits (insn, 3, 6);
5055 if (debug_displaced)
5056 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5057 (unsigned short) insn);
5059 dsc->modinsn[0] = THUMB_NOP;
5061 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5067 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5070 cleanup_alu_imm (struct gdbarch *gdbarch,
5071 struct regcache *regs, arm_displaced_step_closure *dsc)
5073 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5074 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5076 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5080 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5081 arm_displaced_step_closure *dsc)
5083 unsigned int rn = bits (insn, 16, 19);
5084 unsigned int rd = bits (insn, 12, 15);
5085 unsigned int op = bits (insn, 21, 24);
5086 int is_mov = (op == 0xd);
5087 ULONGEST rd_val, rn_val;
5089 if (!insn_references_pc (insn, 0x000ff000ul))
5090 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5092 if (debug_displaced)
5093 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5094 "%.8lx\n", is_mov ? "move" : "ALU",
5095 (unsigned long) insn);
5097 /* Instruction is of form:
5099 <op><cond> rd, [rn,] #imm
5103 Preparation: tmp1, tmp2 <- r0, r1;
5105 Insn: <op><cond> r0, r1, #imm
5106 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5109 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5110 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5111 rn_val = displaced_read_reg (regs, dsc, rn);
5112 rd_val = displaced_read_reg (regs, dsc, rd);
5113 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5114 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5118 dsc->modinsn[0] = insn & 0xfff00fff;
5120 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5122 dsc->cleanup = &cleanup_alu_imm;
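/* Sketch of the rewrite above: "add pc, pc, #4" is copied as
   "add r0, r1, #4", with r1 preloaded with the value the PC read would have
   produced (from + 8); cleanup_alu_imm then writes the r0 result back to
   the PC with ALU_WRITE_PC semantics. */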
5128 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5129 uint16_t insn2, struct regcache *regs,
5130 arm_displaced_step_closure *dsc)
5132 unsigned int op = bits (insn1, 5, 8);
5133 unsigned int rn, rm, rd;
5134 ULONGEST rd_val, rn_val;
5136 rn = bits (insn1, 0, 3); /* Rn */
5137 rm = bits (insn2, 0, 3); /* Rm */
5138 rd = bits (insn2, 8, 11); /* Rd */
5140 /* This routine is only called for instruction MOV. */
5141 gdb_assert (op == 0x2 && rn == 0xf);
5143 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5144 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5146 if (debug_displaced)
5147 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5148 "ALU", insn1, insn2);
5150 /* Instruction is of form:
5152 <op><cond> rd, [rn,] #imm
5156 Preparation: tmp1, tmp2 <- r0, r1;
5158 Insn: <op><cond> r0, r1, #imm
5159 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5162 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5163 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5164 rn_val = displaced_read_reg (regs, dsc, rn);
5165 rd_val = displaced_read_reg (regs, dsc, rd);
5166 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5167 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5170 dsc->modinsn[0] = insn1;
5171 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5174 dsc->cleanup = &cleanup_alu_imm;
5179 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5182 cleanup_alu_reg (struct gdbarch *gdbarch,
5183 struct regcache *regs, arm_displaced_step_closure *dsc)
5188 rd_val = displaced_read_reg (regs, dsc, 0);
5190 for (i = 0; i < 3; i++)
5191 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5193 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5197 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5198 arm_displaced_step_closure *dsc,
5199 unsigned int rd, unsigned int rn, unsigned int rm)
5201 ULONGEST rd_val, rn_val, rm_val;
5203 /* Instruction is of form:
5205 <op><cond> rd, [rn,] rm [, <shift>]
5209 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5210 r0, r1, r2 <- rd, rn, rm
5211 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5212 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
5219 rn_val = displaced_read_reg (regs, dsc, rn);
5220 rm_val = displaced_read_reg (regs, dsc, rm);
5221 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5223 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5226 dsc->cleanup = &cleanup_alu_reg;
5230 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5231 arm_displaced_step_closure *dsc)
5233 unsigned int op = bits (insn, 21, 24);
5234 int is_mov = (op == 0xd);
5236 if (!insn_references_pc (insn, 0x000ff00ful))
5237 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5239 if (debug_displaced)
5240 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5241 is_mov ? "move" : "ALU", (unsigned long) insn);
5244 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5246 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5248 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5254 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5255 struct regcache *regs,
5256 arm_displaced_step_closure *dsc)
5260 rm = bits (insn, 3, 6);
5261 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5263 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5264 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5266 if (debug_displaced)
5267 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5268 (unsigned short) insn);
5270 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5272 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5277 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5280 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5281 struct regcache *regs,
5282 arm_displaced_step_closure *dsc)
5284 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5287 for (i = 0; i < 4; i++)
5288 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5290 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5294 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5295 arm_displaced_step_closure *dsc,
5296 unsigned int rd, unsigned int rn, unsigned int rm,
5300 ULONGEST rd_val, rn_val, rm_val, rs_val;
5302 /* Instruction is of form:
5304 <op><cond> rd, [rn,] rm, <shift> rs
5308 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5309 r0, r1, r2, r3 <- rd, rn, rm, rs
5310 Insn: <op><cond> r0, r1, r2, <shift> r3
5312 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5316 for (i = 0; i < 4; i++)
5317 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5319 rd_val = displaced_read_reg (regs, dsc, rd);
5320 rn_val = displaced_read_reg (regs, dsc, rn);
5321 rm_val = displaced_read_reg (regs, dsc, rm);
5322 rs_val = displaced_read_reg (regs, dsc, rs);
5323 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5326 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5328 dsc->cleanup = &cleanup_alu_shifted_reg;
5332 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5333 struct regcache *regs,
5334 arm_displaced_step_closure *dsc)
5336 unsigned int op = bits (insn, 21, 24);
5337 int is_mov = (op == 0xd);
5338 unsigned int rd, rn, rm, rs;
5340 if (!insn_references_pc (insn, 0x000fff0ful))
5341 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5343 if (debug_displaced)
5344 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5345 "%.8lx\n", is_mov ? "move" : "ALU",
5346 (unsigned long) insn);
5348 rn = bits (insn, 16, 19);
5349 rm = bits (insn, 0, 3);
5350 rs = bits (insn, 8, 11);
5351 rd = bits (insn, 12, 15);
5354 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5356 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5358 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5363 /* Clean up load instructions. */
5366 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5367 arm_displaced_step_closure *dsc)
5369 ULONGEST rt_val, rt_val2 = 0, rn_val;
5371 rt_val = displaced_read_reg (regs, dsc, 0);
5372 if (dsc->u.ldst.xfersize == 8)
5373 rt_val2 = displaced_read_reg (regs, dsc, 1);
5374 rn_val = displaced_read_reg (regs, dsc, 2);
5376 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5377 if (dsc->u.ldst.xfersize > 4)
5378 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5379 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5380 if (!dsc->u.ldst.immed)
5381 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5383 /* Handle register writeback. */
5384 if (dsc->u.ldst.writeback)
5385 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5386 /* Put result in right place. */
5387 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5388 if (dsc->u.ldst.xfersize == 8)
5389 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5392 /* Clean up store instructions. */
5395 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5396 arm_displaced_step_closure *dsc)
5398 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5400 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5401 if (dsc->u.ldst.xfersize > 4)
5402 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5403 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5404 if (!dsc->u.ldst.immed)
5405 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5406 if (!dsc->u.ldst.restore_r4)
5407 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5410 if (dsc->u.ldst.writeback)
5411 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5414 /* Copy "extra" load/store instructions. These are halfword/doubleword
5415 transfers, which have a different encoding to byte/word transfers. */
5418 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5419 struct regcache *regs, arm_displaced_step_closure *dsc)
5421 unsigned int op1 = bits (insn, 20, 24);
5422 unsigned int op2 = bits (insn, 5, 6);
5423 unsigned int rt = bits (insn, 12, 15);
5424 unsigned int rn = bits (insn, 16, 19);
5425 unsigned int rm = bits (insn, 0, 3);
5426 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5427 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5428 int immed = (op1 & 0x4) != 0;
5430 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5432 if (!insn_references_pc (insn, 0x000ff00ful))
5433 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5435 if (debug_displaced)
5436 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5437 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5438 (unsigned long) insn);
5440 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5443 internal_error (__FILE__, __LINE__,
5444 _("copy_extra_ld_st: instruction decode error"));
5446 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5447 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5448 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5450 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5452 rt_val = displaced_read_reg (regs, dsc, rt);
5453 if (bytesize[opcode] == 8)
5454 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5455 rn_val = displaced_read_reg (regs, dsc, rn);
5457 rm_val = displaced_read_reg (regs, dsc, rm);
5459 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5460 if (bytesize[opcode] == 8)
5461 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5462 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5464 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5467 dsc->u.ldst.xfersize = bytesize[opcode];
5468 dsc->u.ldst.rn = rn;
5469 dsc->u.ldst.immed = immed;
5470 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5471 dsc->u.ldst.restore_r4 = 0;
5474 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5476 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5477 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5479 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5481 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5482 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5484 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5489 /* Copy byte/half word/word loads and stores. */
5492 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5493 arm_displaced_step_closure *dsc, int load,
5494 int immed, int writeback, int size, int usermode,
5495 int rt, int rm, int rn)
5497 ULONGEST rt_val, rn_val, rm_val = 0;
5499 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5500 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5502 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5504 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5506 rt_val = displaced_read_reg (regs, dsc, rt);
5507 rn_val = displaced_read_reg (regs, dsc, rn);
5509 rm_val = displaced_read_reg (regs, dsc, rm);
5511 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5512 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5514 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5516 dsc->u.ldst.xfersize = size;
5517 dsc->u.ldst.rn = rn;
5518 dsc->u.ldst.immed = immed;
5519 dsc->u.ldst.writeback = writeback;
5521 /* To write PC we can do:
5523 Before this sequence of instructions:
5524 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5525 r2 is the Rn value obtained from displaced_read_reg.
5527 Insn1: push {pc} Write address of STR instruction + offset on stack
5528 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5529 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5530 = addr(Insn1) + offset - addr(Insn3) - 8
5532 Insn4: add r4, r4, #8 r4 = offset - 8
5533 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5535 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5537 Otherwise we don't know what value to write for PC, since the offset is
5538 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5539 of this can be found in Section "Saving from r15" in
5540 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
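/* Worked through for a core that stores PC + 8: after Insn2 r4 holds
   addr(Insn1) + 8, after Insn3 it holds -8, after Insn4 it holds 0, so
   Insn5 leaves r0 == from + 8, exactly the value the original "str pc"
   would have stored. */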
5542 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5547 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5548 uint16_t insn2, struct regcache *regs,
5549 arm_displaced_step_closure *dsc, int size)
5551 unsigned int u_bit = bit (insn1, 7);
5552 unsigned int rt = bits (insn2, 12, 15);
5553 int imm12 = bits (insn2, 0, 11);
5556 if (debug_displaced)
5557 fprintf_unfiltered (gdb_stdlog,
5558 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5559 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5565 /* Rewrite instruction LDR Rt imm12 into:
5567 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5571 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5574 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5575 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5576 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5578 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5580 pc_val = pc_val & 0xfffffffc;
5582 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5583 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5587 dsc->u.ldst.xfersize = size;
5588 dsc->u.ldst.immed = 0;
5589 dsc->u.ldst.writeback = 0;
5590 dsc->u.ldst.restore_r4 = 0;
5592 /* LDR R0, [R2, R3] */
5593 dsc->modinsn[0] = 0xf852;
5594 dsc->modinsn[1] = 0x3;
5597 dsc->cleanup = &cleanup_load;
5603 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5604 uint16_t insn2, struct regcache *regs,
5605 arm_displaced_step_closure *dsc,
5606 int writeback, int immed)
5608 unsigned int rt = bits (insn2, 12, 15);
5609 unsigned int rn = bits (insn1, 0, 3);
5610 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5611 /* In LDR (register), there is also a register Rm, which is not allowed to
5612 be PC, so we don't have to check it. */
5614 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5615 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5618 if (debug_displaced)
5619 fprintf_unfiltered (gdb_stdlog,
5620 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5621 rt, rn, insn1, insn2);
5623 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5626 dsc->u.ldst.restore_r4 = 0;
5629 /* ldr[b]<cond> rt, [rn, #imm], etc.
5631 ldr[b]<cond> r0, [r2, #imm]. */
5633 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5634 dsc->modinsn[1] = insn2 & 0x0fff;
5637 /* ldr[b]<cond> rt, [rn, rm], etc.
5639 ldr[b]<cond> r0, [r2, r3]. */
5641 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5642 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5652 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5653 struct regcache *regs,
5654 arm_displaced_step_closure *dsc,
5655 int load, int size, int usermode)
5657 int immed = !bit (insn, 25);
5658 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5659 unsigned int rt = bits (insn, 12, 15);
5660 unsigned int rn = bits (insn, 16, 19);
5661 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5663 if (!insn_references_pc (insn, 0x000ff00ful))
5664 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5669 load ? (size == 1 ? "ldrb" : "ldr")
5670 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5672 (unsigned long) insn);
5674 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5675 usermode, rt, rm, rn);
5677 if (load || rt != ARM_PC_REGNUM)
5679 dsc->u.ldst.restore_r4 = 0;
5682 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5684 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5685 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5687 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5689 {ldr,str}[b]<cond> r0, [r2, r3]. */
5690 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5694 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5695 dsc->u.ldst.restore_r4 = 1;
5696 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5697 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5698 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5699 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5700 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5704 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5706 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5711 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5716 /* Cleanup LDM instructions with fully-populated register list. This is an
5717 unfortunate corner case: it's impossible to implement correctly by modifying
5718 the instruction. The issue is as follows: we have an instruction,
5722 which we must rewrite to avoid loading PC. A possible solution would be to
5723 do the load in two halves, something like (with suitable cleanup
5727 ldm[id][ab] r8!, {r0-r7}
5729 ldm[id][ab] r8, {r7-r14}
5732 but at present there's no suitable place for <temp>, since the scratch space
5733 is overwritten before the cleanup routine is called. For now, we simply
5734 emulate the instruction. */
5737 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5738 arm_displaced_step_closure *dsc)
5740 int inc = dsc->u.block.increment;
5741 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5742 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5743 uint32_t regmask = dsc->u.block.regmask;
5744 int regno = inc ? 0 : 15;
5745 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5746 int exception_return = dsc->u.block.load && dsc->u.block.user
5747 && (regmask & 0x8000) != 0;
5748 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5749 int do_transfer = condition_true (dsc->u.block.cond, status);
5750 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5755 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5756 sensible we can do here. Complain loudly. */
5757 if (exception_return)
5758 error (_("Cannot single-step exception return"));
5760 /* We don't handle any stores here for now. */
5761 gdb_assert (dsc->u.block.load != 0);
5763 if (debug_displaced)
5764 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5765 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5766 dsc->u.block.increment ? "inc" : "dec",
5767 dsc->u.block.before ? "before" : "after");
5774 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5777 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5780 xfer_addr += bump_before;
5782 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5783 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5785 xfer_addr += bump_after;
5787 regmask &= ~(1 << regno);
5790 if (dsc->u.block.writeback)
5791 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5795 /* Clean up an STM which included the PC in the register list. */
5798 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5799 arm_displaced_step_closure *dsc)
5801 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5802 int store_executed = condition_true (dsc->u.block.cond, status);
5803 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5804 CORE_ADDR stm_insn_addr;
5807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5809 /* If condition code fails, there's nothing else to do. */
5810 if (!store_executed)
5813 if (dsc->u.block.increment)
5815 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5817 if (dsc->u.block.before)
5822 pc_stored_at = dsc->u.block.xfer_addr;
5824 if (dsc->u.block.before)
5828 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5829 stm_insn_addr = dsc->scratch_base;
5830 offset = pc_val - stm_insn_addr;
5832 if (debug_displaced)
5833 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5834 "STM instruction\n", offset);
5836 /* Rewrite the stored PC to the proper value for the non-displaced original
5838 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5839 dsc->insn_addr + offset);
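/* For example: if the copied STM ran at a scratch_base of 0x1000
   (hypothetical value) and the word stored for the PC reads back as 0x1008,
   the detected offset is 8 and the stored word is patched to
   insn_addr + 8, the value the original STM would have written. */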
5842 /* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
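/* Sketch: for "ldm r0, {r1, r5, pc}" the copied instruction loads into
   r0-r2 instead; this cleanup then moves r2 into the PC, r1 into r5 and r0
   into r1, and finally restores from dsc->tmp[] any of r0-r2 that did not
   receive a final value. */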
5848 cleanup_block_load_pc (struct gdbarch *gdbarch,
5849 struct regcache *regs,
5850 arm_displaced_step_closure *dsc)
5852 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5853 int load_executed = condition_true (dsc->u.block.cond, status);
5854 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5855 unsigned int regs_loaded = bitcount (mask);
5856 unsigned int num_to_shuffle = regs_loaded, clobbered;
5858 /* The method employed here will fail if the register list is fully populated
5859 (we need to avoid loading PC directly). */
5860 gdb_assert (num_to_shuffle < 16);
5865 clobbered = (1 << num_to_shuffle) - 1;
5867 while (num_to_shuffle > 0)
5869 if ((mask & (1 << write_reg)) != 0)
5871 unsigned int read_reg = num_to_shuffle - 1;
5873 if (read_reg != write_reg)
5875 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5876 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5877 if (debug_displaced)
5878 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5879 "loaded register r%d to r%d\n"), read_reg,
5882 else if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5884 "r%d already in the right place\n"),
5887 clobbered &= ~(1 << write_reg);
5895 /* Restore any registers we scribbled over. */
5896 for (write_reg = 0; clobbered != 0; write_reg++)
5898 if ((clobbered & (1 << write_reg)) != 0)
5900 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5904 "clobbered register r%d\n"), write_reg);
5905 clobbered &= ~(1 << write_reg);
5909 /* Perform register writeback manually. */
5910 if (dsc->u.block.writeback)
5912 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5914 if (dsc->u.block.increment)
5915 new_rn_val += regs_loaded * 4;
5917 new_rn_val -= regs_loaded * 4;
5919 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5924 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5928 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5929 struct regcache *regs,
5930 arm_displaced_step_closure *dsc)
5932 int load = bit (insn, 20);
5933 int user = bit (insn, 22);
5934 int increment = bit (insn, 23);
5935 int before = bit (insn, 24);
5936 int writeback = bit (insn, 21);
5937 int rn = bits (insn, 16, 19);
5939 /* Block transfers which don't mention PC can be run directly
5941 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5942 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5944 if (rn == ARM_PC_REGNUM)
5946 warning (_("displaced: Unpredictable LDM or STM with "
5947 "base register r15"));
5948 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5951 if (debug_displaced)
5952 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5953 "%.8lx\n", (unsigned long) insn);
5955 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5956 dsc->u.block.rn = rn;
5958 dsc->u.block.load = load;
5959 dsc->u.block.user = user;
5960 dsc->u.block.increment = increment;
5961 dsc->u.block.before = before;
5962 dsc->u.block.writeback = writeback;
5963 dsc->u.block.cond = bits (insn, 28, 31);
5965 dsc->u.block.regmask = insn & 0xffff;
5969 if ((insn & 0xffff) == 0xffff)
5971 /* LDM with a fully-populated register list. This case is
5972 particularly tricky. Implement for now by fully emulating the
5973 instruction (which might not behave perfectly in all cases, but
5974 these instructions should be rare enough for that not to matter
5976 dsc->modinsn[0] = ARM_NOP;
5978 dsc->cleanup = &cleanup_block_load_all;
5982 /* LDM of a list of registers which includes PC. Implement by
5983 rewriting the list of registers to be transferred into a
5984 contiguous chunk r0...rX before doing the transfer, then shuffling
5985 registers into the correct places in the cleanup routine. */
5986 unsigned int regmask = insn & 0xffff;
5987 unsigned int num_in_list = bitcount (regmask), new_regmask;
5990 for (i = 0; i < num_in_list; i++)
5991 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5993 /* Writeback makes things complicated. We need to avoid clobbering
5994 the base register with one of the registers in our modified
5995 register list, but just using a different register can't work in
5998 ldm r14!, {r0-r13,pc}
6000 which would need to be rewritten as:
6004 but that can't work, because there's no free register for N.
6006 Solve this by turning off the writeback bit, and emulating
6007 writeback manually in the cleanup routine. */
6012 new_regmask = (1 << num_in_list) - 1;
6014 if (debug_displaced)
6015 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6016 "{..., pc}: original reg list %.4x, modified "
6017 "list %.4x\n"), rn, writeback ? "!" : "",
6018 (int) insn & 0xffff, new_regmask);
6020 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6022 dsc->cleanup = &cleanup_block_load_pc;
6027 /* STM of a list of registers which includes PC. Run the instruction
6028 as-is, but out of line: this will store the wrong value for the PC,
6029 so we must manually fix up the memory in the cleanup routine.
6030 Doing things this way has the advantage that we can auto-detect
6031 the offset of the PC write (which is architecture-dependent) in
6032 the cleanup routine. */
6033 dsc->modinsn[0] = insn;
6035 dsc->cleanup = &cleanup_block_store_pc;
6042 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6043 struct regcache *regs,
6044 arm_displaced_step_closure *dsc)
6046 int rn = bits (insn1, 0, 3);
6047 int load = bit (insn1, 4);
6048 int writeback = bit (insn1, 5);
6050 /* Block transfers which don't mention PC can be run directly
6052 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6053 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6055 if (rn == ARM_PC_REGNUM)
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6060 "unpredictable ldm/stm", dsc);
6063 if (debug_displaced)
6064 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6065 "%.4x%.4x\n", insn1, insn2);
6067 /* Clear bit 13, since it should always be zero. */
6068 dsc->u.block.regmask = (insn2 & 0xdfff);
6069 dsc->u.block.rn = rn;
6071 dsc->u.block.load = load;
6072 dsc->u.block.user = 0;
6073 dsc->u.block.increment = bit (insn1, 7);
6074 dsc->u.block.before = bit (insn1, 8);
6075 dsc->u.block.writeback = writeback;
6076 dsc->u.block.cond = INST_AL;
6077 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6081 if (dsc->u.block.regmask == 0xffff)
6083 /* This case cannot happen: bit 13 was cleared above, so the mask is never 0xffff. */
6088 unsigned int regmask = dsc->u.block.regmask;
6089 unsigned int num_in_list = bitcount (regmask), new_regmask;
6092 for (i = 0; i < num_in_list; i++)
6093 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6098 new_regmask = (1 << num_in_list) - 1;
6100 if (debug_displaced)
6101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6102 "{..., pc}: original reg list %.4x, modified "
6103 "list %.4x\n"), rn, writeback ? "!" : "",
6104 (int) dsc->u.block.regmask, new_regmask);
6106 dsc->modinsn[0] = insn1;
6107 dsc->modinsn[1] = (new_regmask & 0xffff);
6110 dsc->cleanup = &cleanup_block_load_pc;
6115 dsc->modinsn[0] = insn1;
6116 dsc->modinsn[1] = insn2;
6118 dsc->cleanup = &cleanup_block_store_pc;
6123 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6127 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6130 return read_memory_unsigned_integer (memaddr, len,
6131 (enum bfd_endian) byte_order);
6134 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6137 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6140 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6143 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6146 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6151 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6154 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6156 return arm_is_thumb (self->regcache);
6159 /* single_step() is called just before we want to resume the inferior,
6160 if we want to single-step it but there is no hardware or kernel
6161 single-step support. We find the targets of the upcoming instructions
6162 and set breakpoints on them.
6164 std::vector<CORE_ADDR>
6165 arm_software_single_step (struct regcache *regcache)
6167 struct gdbarch *gdbarch = regcache->arch ();
6168 struct arm_get_next_pcs next_pcs_ctx;
6170 arm_get_next_pcs_ctor (&next_pcs_ctx,
6171 &arm_get_next_pcs_ops,
6172 gdbarch_byte_order (gdbarch),
6173 gdbarch_byte_order_for_code (gdbarch),
6177 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6179 for (CORE_ADDR &pc_ref : next_pcs)
6180 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6185 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6186 for Linux, where some SVC instructions must be treated specially. */
6189 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6190 arm_displaced_step_closure *dsc)
6192 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6194 if (debug_displaced)
6195 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6196 "%.8lx\n", (unsigned long) resume_addr);
6198 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6202 /* Common copy routine for the svc instruction. */
6205 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6206 arm_displaced_step_closure *dsc)
6208 /* Preparation: none.
6209 Insn: unmodified svc.
6210 Cleanup: pc <- insn_addr + insn_size. */
6212 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6214 dsc->wrote_to_pc = 1;
6216 /* Allow OS-specific code to override SVC handling. */
6217 if (dsc->u.svc.copy_svc_os)
6218 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6221 dsc->cleanup = &cleanup_svc;
6227 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6228 struct regcache *regs, arm_displaced_step_closure *dsc)
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6233 (unsigned long) insn);
6235 dsc->modinsn[0] = insn;
6237 return install_svc (gdbarch, regs, dsc);
6241 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6242 struct regcache *regs, arm_displaced_step_closure *dsc)
6245 if (debug_displaced)
6246 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6249 dsc->modinsn[0] = insn;
6251 return install_svc (gdbarch, regs, dsc);
6254 /* Copy undefined instructions. */
6257 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6258 arm_displaced_step_closure *dsc)
6260 if (debug_displaced)
6261 fprintf_unfiltered (gdb_stdlog,
6262 "displaced: copying undefined insn %.8lx\n",
6263 (unsigned long) insn);
6265 dsc->modinsn[0] = insn;
6271 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6272 arm_displaced_step_closure *dsc)
6275 if (debug_displaced)
6276 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6277 "%.4x %.4x\n", (unsigned short) insn1,
6278 (unsigned short) insn2);
6280 dsc->modinsn[0] = insn1;
6281 dsc->modinsn[1] = insn2;
6287 /* Copy unpredictable instructions. */
6290 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6291 arm_displaced_step_closure *dsc)
6293 if (debug_displaced)
6294 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6295 "%.8lx\n", (unsigned long) insn);
6297 dsc->modinsn[0] = insn;
6302 /* The decode_* functions are instruction decoding helpers. They mostly follow
6303 the presentation in the ARM ARM. */
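/* For instance, the barrier instruction "dsb sy" (ARM encoding 0xf57ff04f)
   reaches arm_decode_misc_memhint_neon below with op1 == 0x57 and
   op2 == 0x4, so it is copied unmodified. */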
6306 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6307 struct regcache *regs,
6308 arm_displaced_step_closure *dsc)
6310 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6311 unsigned int rn = bits (insn, 16, 19);
6313 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6314 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6315 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6316 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6317 else if ((op1 & 0x60) == 0x20)
6318 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6319 else if ((op1 & 0x71) == 0x40)
6320 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6322 else if ((op1 & 0x77) == 0x41)
6323 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6324 else if ((op1 & 0x77) == 0x45)
6325 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6326 else if ((op1 & 0x77) == 0x51)
6329 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6331 return arm_copy_unpred (gdbarch, insn, dsc);
6333 else if ((op1 & 0x77) == 0x55)
6334 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6335 else if (op1 == 0x57)
6338 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6339 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6340 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6341 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6342 default: return arm_copy_unpred (gdbarch, insn, dsc);
6344 else if ((op1 & 0x63) == 0x43)
6345 return arm_copy_unpred (gdbarch, insn, dsc);
6346 else if ((op2 & 0x1) == 0x0)
6347 switch (op1 & ~0x80)
6350 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6352 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6353 case 0x71: case 0x75:
6355 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6356 case 0x63: case 0x67: case 0x73: case 0x77:
6357 return arm_copy_unpred (gdbarch, insn, dsc);
6359 return arm_copy_undef (gdbarch, insn, dsc);
6362 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6366 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6367 struct regcache *regs,
6368 arm_displaced_step_closure *dsc)
6370 if (bit (insn, 27) == 0)
6371 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6372 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6373 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6376 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6379 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6381 case 0x4: case 0x5: case 0x6: case 0x7:
6382 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6385 switch ((insn & 0xe00000) >> 21)
6387 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6389 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6392 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6395 return arm_copy_undef (gdbarch, insn, dsc);
6400 int rn_f = (bits (insn, 16, 19) == 0xf);
6401 switch ((insn & 0xe00000) >> 21)
6404 /* ldc/ldc2 imm (undefined for rn == pc). */
6405 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6406 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6409 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6411 case 0x4: case 0x5: case 0x6: case 0x7:
6412 /* ldc/ldc2 lit (undefined for rn != pc). */
6413 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6414 : arm_copy_undef (gdbarch, insn, dsc);
6417 return arm_copy_undef (gdbarch, insn, dsc);
6422 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6425 if (bits (insn, 16, 19) == 0xf)
6427 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6429 return arm_copy_undef (gdbarch, insn, dsc);
6433 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6435 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6439 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6441 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6444 return arm_copy_undef (gdbarch, insn, dsc);
6448 /* Decode miscellaneous instructions in dp/misc encoding space. */
6451 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6452 struct regcache *regs,
6453 arm_displaced_step_closure *dsc)
6455 unsigned int op2 = bits (insn, 4, 6);
6456 unsigned int op = bits (insn, 21, 22);
6461 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6464 if (op == 0x1) /* bx. */
6465 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6467 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6469 return arm_copy_undef (gdbarch, insn, dsc);
6473 /* Not really supported. */
6474 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6476 return arm_copy_undef (gdbarch, insn, dsc);
6480 return arm_copy_bx_blx_reg (gdbarch, insn,
6481 regs, dsc); /* blx register. */
6483 return arm_copy_undef (gdbarch, insn, dsc);
6486 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6490 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6492 /* Not really supported. */
6493 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6497 return arm_copy_undef (gdbarch, insn, dsc);
6502 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6503 struct regcache *regs,
6504 arm_displaced_step_closure *dsc)
6506 if (bit (insn, 25))
6507 switch (bits (insn, 20, 24))
6508 {
6509 case 0x10:
6510 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6512 case 0x14:
6513 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6515 case 0x12: case 0x16:
6516 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6518 default:
6519 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6520 }
6521 else
6522 {
6523 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6525 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6526 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6527 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6528 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6529 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6530 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6531 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6532 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6533 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6534 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6535 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6536 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6537 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6538 /* 2nd arg means "unprivileged". */
6539 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6540 dsc);
6541 }
6543 /* Should be unreachable. */
6544 return 1;
6548 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6549 struct regcache *regs,
6550 arm_displaced_step_closure *dsc)
6552 int a = bit (insn, 25), b = bit (insn, 4);
6553 uint32_t op1 = bits (insn, 20, 24);
6555 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6556 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6557 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6558 else if ((!a && (op1 & 0x17) == 0x02)
6559 || (a && (op1 & 0x17) == 0x02 && !b))
6560 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6561 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6562 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6563 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6564 else if ((!a && (op1 & 0x17) == 0x03)
6565 || (a && (op1 & 0x17) == 0x03 && !b))
6566 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6567 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6568 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6569 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6570 else if ((!a && (op1 & 0x17) == 0x06)
6571 || (a && (op1 & 0x17) == 0x06 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6573 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6574 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6576 else if ((!a && (op1 & 0x17) == 0x07)
6577 || (a && (op1 & 0x17) == 0x07 && !b))
6578 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6580 /* Should be unreachable. */
6581 return 1;
6585 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6586 arm_displaced_step_closure *dsc)
6588 switch (bits (insn, 20, 24))
6590 case 0x00: case 0x01: case 0x02: case 0x03:
6591 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6593 case 0x04: case 0x05: case 0x06: case 0x07:
6594 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6596 case 0x08: case 0x09: case 0x0a: case 0x0b:
6597 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6598 return arm_copy_unmodified (gdbarch, insn,
6599 "decode/pack/unpack/saturate/reverse", dsc);
6602 if (bits (insn, 5, 7) == 0) /* op2. */
6604 if (bits (insn, 12, 15) == 0xf)
6605 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6607 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6610 return arm_copy_undef (gdbarch, insn, dsc);
6612 case 0x1a: case 0x1b:
6613 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6614 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6616 return arm_copy_undef (gdbarch, insn, dsc);
6618 case 0x1c: case 0x1d:
6619 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6621 if (bits (insn, 0, 3) == 0xf)
6622 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6624 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6627 return arm_copy_undef (gdbarch, insn, dsc);
6629 case 0x1e: case 0x1f:
6630 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6631 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6633 return arm_copy_undef (gdbarch, insn, dsc);
6636 /* Should be unreachable. */
6637 return 1;
6641 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6642 struct regcache *regs,
6643 arm_displaced_step_closure *dsc)
6645 if (bit (insn, 25))
6646 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6647 else
6648 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6652 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6653 struct regcache *regs,
6654 arm_displaced_step_closure *dsc)
6656 unsigned int opcode = bits (insn, 20, 24);
6660 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6661 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6663 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6664 case 0x12: case 0x16:
6665 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6667 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6668 case 0x13: case 0x17:
6669 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6671 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6672 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6673 /* Note: no writeback for these instructions. Bit 25 will always be
6674 zero though (via caller), so the following works OK. */
6675 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6678 /* Should be unreachable. */
6679 return 1;
6682 /* Decode shifted register instructions. */
6685 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6686 uint16_t insn2, struct regcache *regs,
6687 arm_displaced_step_closure *dsc)
6689 /* PC is only allowed to be used in instruction MOV. */
6691 unsigned int op = bits (insn1, 5, 8);
6692 unsigned int rn = bits (insn1, 0, 3);
6694 if (op == 0x2 && rn == 0xf) /* MOV */
6695 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6697 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6698 "dp (shift reg)", dsc);
6702 /* Decode extension register load/store. Exactly the same as
6703 arm_decode_ext_reg_ld_st. */
6706 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6707 uint16_t insn2, struct regcache *regs,
6708 arm_displaced_step_closure *dsc)
6710 unsigned int opcode = bits (insn1, 4, 8);
6714 case 0x04: case 0x05:
6715 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6716 "vfp/neon vmov", dsc);
6718 case 0x08: case 0x0c: /* 01x00 */
6719 case 0x0a: case 0x0e: /* 01x10 */
6720 case 0x12: case 0x16: /* 10x10 */
6721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6722 "vfp/neon vstm/vpush", dsc);
6724 case 0x09: case 0x0d: /* 01x01 */
6725 case 0x0b: case 0x0f: /* 01x11 */
6726 case 0x13: case 0x17: /* 10x11 */
6727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6728 "vfp/neon vldm/vpop", dsc);
6730 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6731 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6733 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6734 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6737 /* Should be unreachable. */
6738 return 1;
6742 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6743 struct regcache *regs, arm_displaced_step_closure *dsc)
6745 unsigned int op1 = bits (insn, 20, 25);
6746 int op = bit (insn, 4);
6747 unsigned int coproc = bits (insn, 8, 11);
6749 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6750 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6751 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6752 && (coproc & 0xe) != 0xa)
6754 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6755 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6756 && (coproc & 0xe) != 0xa)
6757 /* ldc/ldc2 imm/lit. */
6758 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6759 else if ((op1 & 0x3e) == 0x00)
6760 return arm_copy_undef (gdbarch, insn, dsc);
6761 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6762 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6763 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6764 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6765 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6766 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6767 else if ((op1 & 0x30) == 0x20 && !op)
6769 if ((coproc & 0xe) == 0xa)
6770 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6772 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6774 else if ((op1 & 0x30) == 0x20 && op)
6775 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6776 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6777 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6778 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6779 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6780 else if ((op1 & 0x30) == 0x30)
6781 return arm_copy_svc (gdbarch, insn, regs, dsc);
6783 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6787 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6788 uint16_t insn2, struct regcache *regs,
6789 arm_displaced_step_closure *dsc)
6791 unsigned int coproc = bits (insn2, 8, 11);
6792 unsigned int bit_5_8 = bits (insn1, 5, 8);
6793 unsigned int bit_9 = bit (insn1, 9);
6794 unsigned int bit_4 = bit (insn1, 4);
6796 if (bit_9 == 0)
6797 {
6798 if (bit_5_8 == 2)
6799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6800 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6801 dsc);
6802 else if (bit_5_8 == 0) /* UNDEFINED. */
6803 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6806 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6807 if ((coproc & 0xe) == 0xa)
6808 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6809 dsc);
6810 else /* coproc is not 101x. */
6812 if (bit_4 == 0) /* STC/STC2. */
6813 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6814 "stc/stc2", dsc);
6815 else /* LDC/LDC2 {literal, immediate}. */
6816 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6817 regs, dsc);
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6828 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6829 arm_displaced_step_closure *dsc, int rd)
6835 Preparation: Rd <- PC
6841 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6842 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6846 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6847 arm_displaced_step_closure *dsc,
6848 int rd, unsigned int imm)
6851 /* Encoding T2: ADDS Rd, #imm */
6852 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
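/* Illustrative (not from the original source): with rd == 3 and imm == 24
   the line above produces 0x3318, i.e. "adds r3, #24", which is executed
   against the PC value that install_pc_relative places in r3.  */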
6854 install_pc_relative (gdbarch, regs, dsc, rd);
6860 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6861 struct regcache *regs,
6862 arm_displaced_step_closure *dsc)
6864 unsigned int rd = bits (insn, 8, 10);
6865 unsigned int imm8 = bits (insn, 0, 7);
6867 if (debug_displaced)
6868 fprintf_unfiltered (gdb_stdlog,
6869 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6870 rd, imm8, insn);
6872 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6876 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6877 uint16_t insn2, struct regcache *regs,
6878 arm_displaced_step_closure *dsc)
6880 unsigned int rd = bits (insn2, 8, 11);
6881 /* Since the immediate has the same encoding in ADR, ADD and SUB, simply
6882 extract the raw immediate encoding rather than computing the immediate
6883 value. When generating the ADD or SUB instruction we can then OR the
6884 raw encoding straight into the new instruction. */
6885 unsigned int imm_3_8 = insn2 & 0x70ff;
6886 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog,
6890 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6891 rd, imm_i, imm_3_8, insn1, insn2);
6893 if (bit (insn1, 7)) /* Encoding T2 */
6895 /* Encoding T3: SUB Rd, Rd, #imm */
6896 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6897 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6899 else /* Encoding T3 */
6901 /* Encoding T3: ADD Rd, Rd, #imm */
6902 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6903 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6907 install_pc_relative (gdbarch, regs, dsc, rd);
6913 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6914 struct regcache *regs,
6915 arm_displaced_step_closure *dsc)
6917 unsigned int rt = bits (insn1, 8, 10);
6919 int imm8 = (bits (insn1, 0, 7) << 2);
6925 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6927 Insn: LDR R0, [R2, R3];
6928 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
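/* Worked example (illustrative, not from the original source): for
   "ldr r5, [pc, #8]" the code below loads R2 with the Align(PC,4) value
   of the original instruction and R3 with 8, executes "ldr r0, [r2, r3]"
   in the scratch space, and the cleanup then moves the loaded word into
   r5 while restoring r0, r2 and r3 from the saved temporaries.  */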
6930 if (debug_displaced)
6931 fprintf_unfiltered (gdb_stdlog,
6932 "displaced: copying thumb ldr r%d [pc #%d]\n"
6933 , rt, imm8);
6935 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6936 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6937 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6938 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6939 /* The assembler calculates the required value of the offset from the
6940 Align(PC,4) value of this instruction to the label. */
6941 pc = pc & 0xfffffffc;
6943 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6944 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6947 dsc->u.ldst.xfersize = 4;
6949 dsc->u.ldst.immed = 0;
6950 dsc->u.ldst.writeback = 0;
6951 dsc->u.ldst.restore_r4 = 0;
6953 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6955 dsc->cleanup = &cleanup_load;
6960 /* Copy Thumb cbnz/cbz instruction. */
6963 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6964 struct regcache *regs,
6965 arm_displaced_step_closure *dsc)
6967 int non_zero = bit (insn1, 11);
6968 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6969 CORE_ADDR from = dsc->insn_addr;
6970 int rn = bits (insn1, 0, 2);
6971 int rn_val = displaced_read_reg (regs, dsc, rn);
6973 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6974 /* CBNZ and CBZ do not affect the condition flags. If the condition is
6975 true, set it to INST_AL so cleanup_branch knows the branch is taken;
6976 otherwise leave it false and cleanup_branch will do nothing. */
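/* Worked example (illustrative, not from the original source): for a CBZ
   whose offset field decodes to 20, a zero rn gives a taken branch with
   dest = from + 4 + 20, while a non-zero rn leaves the condition false
   and dest = from + 2, i.e. the following instruction.  */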
6977 if (dsc->u.branch.cond)
6979 dsc->u.branch.cond = INST_AL;
6980 dsc->u.branch.dest = from + 4 + imm5;
6983 dsc->u.branch.dest = from + 2;
6985 dsc->u.branch.link = 0;
6986 dsc->u.branch.exchange = 0;
6988 if (debug_displaced)
6989 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6990 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6991 rn, rn_val, insn1, dsc->u.branch.dest);
6993 dsc->modinsn[0] = THUMB_NOP;
6995 dsc->cleanup = &cleanup_branch;
6999 /* Copy Table Branch Byte/Halfword */
7001 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7002 uint16_t insn2, struct regcache *regs,
7003 arm_displaced_step_closure *dsc)
7005 ULONGEST rn_val, rm_val;
7006 int is_tbh = bit (insn2, 4);
7007 CORE_ADDR halfwords = 0;
7008 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7010 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7011 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7012 if (is_tbh)
7013 {
7014 gdb_byte buf[2];
7017 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7018 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7019 }
7020 else
7021 {
7022 gdb_byte buf[1];
7024 target_read_memory (rn_val + rm_val, buf, 1);
7025 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7026 }
7028 if (debug_displaced)
7029 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7030 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7031 (unsigned int) rn_val, (unsigned int) rm_val,
7032 (unsigned int) halfwords);
7034 dsc->u.branch.cond = INST_AL;
7035 dsc->u.branch.link = 0;
7036 dsc->u.branch.exchange = 0;
7037 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
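/* Illustrative example (not from the original source): a TBB table entry
   whose byte value is 5 yields dest = insn_addr + 4 + 10, i.e. a forward
   branch of twice the table entry from the instruction's PC value.  */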
7039 dsc->cleanup = &cleanup_branch;
7045 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7046 arm_displaced_step_closure *dsc)
7049 int val = displaced_read_reg (regs, dsc, 7);
7050 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7053 val = displaced_read_reg (regs, dsc, 8);
7054 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7057 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7062 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7063 struct regcache *regs,
7064 arm_displaced_step_closure *dsc)
7066 dsc->u.block.regmask = insn1 & 0x00ff;
7068 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7071 (1) register list is full, that is, r0-r7 are used.
7072 Prepare: tmp[0] <- r8
7074 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7075 MOV r8, r7; Move value of r7 to r8;
7076 POP {r7}; Store PC value into r7.
7078 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7080 (2) register list is not full, supposing there are N registers in
7081 register list (except PC, 0 <= N <= 7).
7082 Prepare: for each i, 0 - N, tmp[i] <- ri.
7084 POP {r0, r1, ...., rN};
7086 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7087 from tmp[] properly. */
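/* Worked example for case (2) above (illustrative, not from the original
   source): POP {r1, r3, pc} has two registers plus the PC, so the scratch
   copy executes POP {r0, r1, r2}; the cleanup then distributes the three
   loaded words to r1, r3 and the PC and restores r0-r2 from tmp[].  */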
7089 if (debug_displaced)
7090 fprintf_unfiltered (gdb_stdlog,
7091 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7092 dsc->u.block.regmask, insn1);
7094 if (dsc->u.block.regmask == 0xff)
7096 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7098 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7099 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7100 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7103 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7107 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7109 unsigned int new_regmask;
7111 for (i = 0; i < num_in_list + 1; i++)
7112 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7114 new_regmask = (1 << (num_in_list + 1)) - 1;
7116 if (debug_displaced)
7117 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7118 "{..., pc}: original reg list %.4x,"
7119 " modified list %.4x\n"),
7120 (int) dsc->u.block.regmask, new_regmask);
7122 dsc->u.block.regmask |= 0x8000;
7123 dsc->u.block.writeback = 0;
7124 dsc->u.block.cond = INST_AL;
7126 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7128 dsc->cleanup = &cleanup_block_load_pc;
7135 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7136 struct regcache *regs,
7137 arm_displaced_step_closure *dsc)
7139 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7140 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7143 /* 16-bit thumb instructions. */
7144 switch (op_bit_12_15)
7146 /* Shift (immediate), add, subtract, move and compare. */
7147 case 0: case 1: case 2: case 3:
7148 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7149 "shift/add/sub/mov/cmp",
7150 dsc);
7151 break;
7152 case 4:
7153 switch (op_bit_10_11)
7155 case 0: /* Data-processing */
7156 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7160 case 1: /* Special data instructions and branch and exchange. */
7162 unsigned short op = bits (insn1, 7, 9);
7163 if (op == 6 || op == 7) /* BX or BLX */
7164 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7165 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7166 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7168 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7172 default: /* LDR (literal) */
7173 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7176 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7177 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7178 break;
7179 case 10:
7180 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7181 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7182 else /* Generate SP-relative address */
7183 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7185 case 11: /* Misc 16-bit instructions */
7187 switch (bits (insn1, 8, 11))
7189 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7190 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7192 case 12: case 13: /* POP */
7193 if (bit (insn1, 8)) /* PC is in register list. */
7194 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7196 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7198 case 15: /* If-Then, and hints */
7199 if (bits (insn1, 0, 3))
7200 /* If-Then makes up to four following instructions conditional.
7201 IT instruction itself is not conditional, so handle it as a
7202 common unmodified instruction. */
7203 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7206 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7209 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7214 if (op_bit_10_11 < 2) /* Store multiple registers */
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7216 else /* Load multiple registers */
7217 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7219 case 13: /* Conditional branch and supervisor call */
7220 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7221 err = thumb_copy_b (gdbarch, insn1, dsc);
7223 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7225 case 14: /* Unconditional branch */
7226 err = thumb_copy_b (gdbarch, insn1, dsc);
7227 break;
7228 default:
7229 err = 1;
7230 }
7232 if (err)
7233 internal_error (__FILE__, __LINE__,
7234 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7238 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7239 uint16_t insn1, uint16_t insn2,
7240 struct regcache *regs,
7241 arm_displaced_step_closure *dsc)
7243 int rt = bits (insn2, 12, 15);
7244 int rn = bits (insn1, 0, 3);
7245 int op1 = bits (insn1, 7, 8);
7247 switch (bits (insn1, 5, 6))
7249 case 0: /* Load byte and memory hints */
7250 if (rt == 0xf) /* PLD/PLI */
7253 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7254 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7256 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7261 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7262 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7265 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7266 "ldrb{reg, immediate}/ldrbt",
7271 case 1: /* Load halfword and memory hints. */
7272 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7273 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7274 "pld/unalloc memhint", dsc);
7278 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7281 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7285 case 2: /* Load word */
7287 int insn2_bit_8_11 = bits (insn2, 8, 11);
7289 if (rn == 0xf)
7290 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7291 else if (op1 == 0x1) /* Encoding T3 */
7292 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7294 else /* op1 == 0x0 */
7296 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7297 /* LDR (immediate) */
7298 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7299 dsc, bit (insn2, 8), 1);
7300 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7301 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7304 /* LDR (register) */
7305 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7311 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7318 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7319 uint16_t insn2, struct regcache *regs,
7320 arm_displaced_step_closure *dsc)
7323 unsigned short op = bit (insn2, 15);
7324 unsigned int op1 = bits (insn1, 11, 12);
7330 switch (bits (insn1, 9, 10))
7335 /* Load/store {dual, exclusive}, table branch. */
7336 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7337 && bits (insn2, 5, 7) == 0)
7338 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7339 dsc);
7340 else
7341 /* PC is not allowed to be used in load/store {dual, exclusive}
7342 instructions. */
7343 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7344 "load/store dual/ex", dsc);
7346 else /* load/store multiple */
7348 switch (bits (insn1, 7, 8))
7350 case 0: case 3: /* SRS, RFE */
7351 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7354 case 1: case 2: /* LDM/STM/PUSH/POP */
7355 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7362 /* Data-processing (shift register). */
7363 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7366 default: /* Coprocessor instructions. */
7367 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7372 case 2: /* op1 = 2 */
7373 if (op) /* Branch and misc control. */
7375 if (bit (insn2, 14) /* BLX/BL */
7376 || bit (insn2, 12) /* Unconditional branch */
7377 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7378 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7380 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7385 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7387 int dp_op = bits (insn1, 4, 8);
7388 int rn = bits (insn1, 0, 3);
7389 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7390 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7393 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7396 else /* Data processing (modified immediate) */
7397 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7401 case 3: /* op1 = 3 */
7402 switch (bits (insn1, 9, 10))
7403 {
7404 case 0:
7405 if (bit (insn1, 4))
7406 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7407 regs, dsc);
7408 else /* NEON Load/Store and Store single data item */
7409 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7410 "neon elt/struct load/store",
7413 case 1: /* op1 = 3, bits (9, 10) == 1 */
7414 switch (bits (insn1, 7, 8))
7416 case 0: case 1: /* Data processing (register) */
7417 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7420 case 2: /* Multiply and absolute difference */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "mul/mua/diff", dsc);
7424 case 3: /* Long multiply and divide */
7425 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7430 default: /* Coprocessor instructions */
7431 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7432 break;
7433 }
7434 break;
7435 default:
7436 err = 1;
7437 }
7439 if (err)
7440 internal_error (__FILE__, __LINE__,
7441 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7446 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7447 struct regcache *regs,
7448 arm_displaced_step_closure *dsc)
7450 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7452 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7454 if (debug_displaced)
7455 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7456 "at %.8lx\n", insn1, (unsigned long) from);
7459 dsc->insn_size = thumb_insn_size (insn1);
7460 if (thumb_insn_size (insn1) == 4)
7463 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7464 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7467 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7471 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7472 CORE_ADDR to, struct regcache *regs,
7473 arm_displaced_step_closure *dsc)
7476 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7479 /* Most displaced instructions use a 1-instruction scratch space, so set this
7480 here and override below if/when necessary. */
7481 dsc->numinsns = 1;
7482 dsc->insn_addr = from;
7483 dsc->scratch_base = to;
7484 dsc->cleanup = NULL;
7485 dsc->wrote_to_pc = 0;
7487 if (!displaced_in_arm_mode (regs))
7488 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7492 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7493 if (debug_displaced)
7494 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7495 "at %.8lx\n", (unsigned long) insn,
7496 (unsigned long) from);
7498 if ((insn & 0xf0000000) == 0xf0000000)
7499 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7500 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7501 {
7502 case 0x0: case 0x1: case 0x2: case 0x3:
7503 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7504 break;
7506 case 0x4: case 0x5: case 0x6:
7507 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7508 break;
7510 case 0x7:
7511 err = arm_decode_media (gdbarch, insn, dsc);
7512 break;
7514 case 0x8: case 0x9: case 0xa: case 0xb:
7515 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7516 break;
7518 case 0xc: case 0xd: case 0xe: case 0xf:
7519 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7520 break;
7521 }
7523 if (err)
7524 internal_error (__FILE__, __LINE__,
7525 _("arm_process_displaced_insn: Instruction decode error"));
7525 _("arm_process_displaced_insn: Instruction decode error"));
7528 /* Actually set up the scratch space for a displaced instruction. */
7531 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7532 CORE_ADDR to, arm_displaced_step_closure *dsc)
7534 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7535 unsigned int i, len, offset;
7536 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7537 int size = dsc->is_thumb? 2 : 4;
7538 const gdb_byte *bkp_insn;
7540 offset = 0;
7541 /* Poke modified instruction(s). */
7542 for (i = 0; i < dsc->numinsns; i++)
7544 if (debug_displaced)
7546 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7548 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7551 fprintf_unfiltered (gdb_stdlog, "%.4x",
7552 (unsigned short)dsc->modinsn[i]);
7554 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7555 (unsigned long) to + offset);
7558 write_memory_unsigned_integer (to + offset, size,
7559 byte_order_for_code,
7560 dsc->modinsn[i]);
7561 offset += size;
7562 }
7564 /* Choose the correct breakpoint instruction. */
7565 if (dsc->is_thumb)
7566 {
7567 bkp_insn = tdep->thumb_breakpoint;
7568 len = tdep->thumb_breakpoint_size;
7569 }
7570 else
7571 {
7572 bkp_insn = tdep->arm_breakpoint;
7573 len = tdep->arm_breakpoint_size;
7574 }
7576 /* Put breakpoint afterwards. */
7577 write_memory (to + offset, bkp_insn, len);
7579 if (debug_displaced)
7580 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7581 paddress (gdbarch, from), paddress (gdbarch, to));
7584 /* Entry point for cleaning things up after a displaced instruction has been
7588 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7589 struct displaced_step_closure *dsc_,
7590 CORE_ADDR from, CORE_ADDR to,
7591 struct regcache *regs)
7593 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7596 dsc->cleanup (gdbarch, regs, dsc);
7598 if (!dsc->wrote_to_pc)
7599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7600 dsc->insn_addr + dsc->insn_size);
7604 #include "bfd-in2.h"
7605 #include "libcoff.h"
7608 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7610 gdb_disassembler *di
7611 = static_cast<gdb_disassembler *>(info->application_data);
7612 struct gdbarch *gdbarch = di->arch ();
7614 if (arm_pc_is_thumb (gdbarch, memaddr))
7616 static asymbol *asym;
7617 static combined_entry_type ce;
7618 static struct coff_symbol_struct csym;
7619 static struct bfd fake_bfd;
7620 static bfd_target fake_target;
7622 if (csym.native == NULL)
7624 /* Create a fake symbol vector containing a Thumb symbol.
7625 This is solely so that the code in print_insn_little_arm()
7626 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7627 the presence of a Thumb symbol and switch to decoding
7628 Thumb instructions. */
7630 fake_target.flavour = bfd_target_coff_flavour;
7631 fake_bfd.xvec = &fake_target;
7632 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7634 csym.symbol.the_bfd = &fake_bfd;
7635 csym.symbol.name = "fake";
7636 asym = (asymbol *) & csym;
7639 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7640 info->symbols = &asym;
7643 info->symbols = NULL;
7645 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7646 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7647 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7648 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7649 in default_print_insn. */
7650 if (exec_bfd != NULL)
7651 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7653 return default_print_insn (memaddr, info);
7656 /* The following define instruction sequences that will cause ARM
7657 cpu's to take an undefined instruction trap. These are used to
7658 signal a breakpoint to GDB.
7660 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7661 modes. A different instruction is required for each mode. The ARM
7662 cpu's can also be big or little endian. Thus four different
7663 instructions are needed to support all cases.
7665 Note: ARMv4 defines several new instructions that will take the
7666 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7667 not in fact add the new instructions. The new undefined
7668 instructions in ARMv4 are all instructions that had no defined
7669 behaviour in earlier chips. There is no guarantee that they will
7670 raise an exception, but may be treated as NOPs. In practice, it
7671 may only be safe to rely on instructions matching:
7673 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7674 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7675 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7677 Even this may only be true if the condition predicate is true. The
7678 following use a condition predicate of ALWAYS so it is always TRUE.
7680 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7681 and NetBSD all use a software interrupt rather than an undefined
7682 instruction to force a trap. This can be handled by the
7683 abi-specific code during establishment of the gdbarch vector. */
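/* As an illustration (not part of the original comment): the little-endian
   ARM breakpoint defined below assembles to the word 0xe7ffdefe, which has
   the ALWAYS condition, bits 27-25 equal to 011 and bit 4 set, so it falls
   inside the pattern described above.  */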
7685 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7686 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7687 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7688 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7690 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7691 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7692 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7693 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7695 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7698 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7701 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7703 if (arm_pc_is_thumb (gdbarch, *pcptr))
7705 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7707 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7708 check whether we are replacing a 32-bit instruction. */
7709 if (tdep->thumb2_breakpoint != NULL)
7713 if (target_read_memory (*pcptr, buf, 2) == 0)
7715 unsigned short inst1;
7717 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7718 if (thumb_insn_size (inst1) == 4)
7719 return ARM_BP_KIND_THUMB2;
7723 return ARM_BP_KIND_THUMB;
7726 return ARM_BP_KIND_ARM;
7730 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7732 static const gdb_byte *
7733 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7735 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7739 case ARM_BP_KIND_ARM:
7740 *size = tdep->arm_breakpoint_size;
7741 return tdep->arm_breakpoint;
7742 case ARM_BP_KIND_THUMB:
7743 *size = tdep->thumb_breakpoint_size;
7744 return tdep->thumb_breakpoint;
7745 case ARM_BP_KIND_THUMB2:
7746 *size = tdep->thumb2_breakpoint_size;
7747 return tdep->thumb2_breakpoint;
7749 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7753 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7756 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7757 struct regcache *regcache,
7762 /* Check the memory pointed by PC is readable. */
7763 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7765 struct arm_get_next_pcs next_pcs_ctx;
7767 arm_get_next_pcs_ctor (&next_pcs_ctx,
7768 &arm_get_next_pcs_ops,
7769 gdbarch_byte_order (gdbarch),
7770 gdbarch_byte_order_for_code (gdbarch),
7774 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7776 /* If MEMADDR is the next instruction of current pc, do the
7777 software single step computation, and get the thumb mode by
7778 the destination address. */
7779 for (CORE_ADDR pc : next_pcs)
7781 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7783 if (IS_THUMB_ADDR (pc))
7785 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7786 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7789 return ARM_BP_KIND_ARM;
7794 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7797 /* Extract from an array REGBUF containing the (raw) register state a
7798 function return value of type TYPE, and copy that, in virtual
7799 format, into VALBUF. */
7802 arm_extract_return_value (struct type *type, struct regcache *regs,
7805 struct gdbarch *gdbarch = regs->arch ();
7806 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7808 if (TYPE_CODE_FLT == TYPE_CODE (type))
7810 switch (gdbarch_tdep (gdbarch)->fp_model)
7811 {
7812 case ARM_FLOAT_FPA:
7813 {
7814 /* The value is in register F0 in internal format. We need to
7815 extract the raw value and then convert it to the desired
7816 format. */
7817 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7819 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7820 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7821 valbuf, type);
7822 }
7823 break;
7825 case ARM_FLOAT_SOFT_FPA:
7826 case ARM_FLOAT_SOFT_VFP:
7827 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7828 not using the VFP ABI code. */
7830 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7831 if (TYPE_LENGTH (type) > 4)
7832 regs->cooked_read (ARM_A1_REGNUM + 1,
7833 valbuf + ARM_INT_REGISTER_SIZE);
7834 break;
7836 default:
7837 internal_error (__FILE__, __LINE__,
7838 _("arm_extract_return_value: "
7839 "Floating point model not supported"));
7843 else if (TYPE_CODE (type) == TYPE_CODE_INT
7844 || TYPE_CODE (type) == TYPE_CODE_CHAR
7845 || TYPE_CODE (type) == TYPE_CODE_BOOL
7846 || TYPE_CODE (type) == TYPE_CODE_PTR
7847 || TYPE_IS_REFERENCE (type)
7848 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7850 /* If the type is a plain integer, then the access is
7851 straightforward. Otherwise we have to play around a bit
7852 more. */
7853 int len = TYPE_LENGTH (type);
7854 int regno = ARM_A1_REGNUM;
7859 /* By using store_unsigned_integer we avoid having to do
7860 anything special for small big-endian values. */
7861 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7862 store_unsigned_integer (valbuf,
7863 (len > ARM_INT_REGISTER_SIZE
7864 ? ARM_INT_REGISTER_SIZE : len),
7866 len -= ARM_INT_REGISTER_SIZE;
7867 valbuf += ARM_INT_REGISTER_SIZE;
7872 /* For a structure or union the behaviour is as if the value had
7873 been stored to word-aligned memory and then loaded into
7874 registers with 32-bit load instruction(s). */
7875 int len = TYPE_LENGTH (type);
7876 int regno = ARM_A1_REGNUM;
7877 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7881 regs->cooked_read (regno++, tmpbuf);
7882 memcpy (valbuf, tmpbuf,
7883 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7884 len -= ARM_INT_REGISTER_SIZE;
7885 valbuf += ARM_INT_REGISTER_SIZE;
7891 /* Will a function return an aggregate type in memory or in a
7892 register? Return 0 if an aggregate type can be returned in a
7893 register, 1 if it must be returned in memory. */
7896 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7898 enum type_code code;
7900 type = check_typedef (type);
7902 /* Simple, non-aggregate types (ie not including vectors and
7903 complex) are always returned in a register (or registers). */
7904 code = TYPE_CODE (type);
7905 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7906 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7909 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7911 /* Vector values should be returned using ARM registers if they
7912 are not over 16 bytes. */
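/* For example (illustrative, not from the original source): a 16-byte
   uint32x4_t vector is returned in registers by the check below, while a
   32-byte vector exceeds the limit and is returned in memory.  */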
7913 return (TYPE_LENGTH (type) > 16);
7916 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7918 /* The AAPCS says all aggregates not larger than a word are returned
7919 in a register. */
7920 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7921 return 0;
7923 return 1;
7924 }
7925 else
7926 {
7929 /* All aggregate types that won't fit in a register must be returned
7930 in memory. */
7931 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7932 return 1;
7934 /* In the ARM ABI, "integer" like aggregate types are returned in
7935 registers. For an aggregate type to be integer like, its size
7936 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7937 offset of each addressable subfield must be zero. Note that bit
7938 fields are not addressable, and all addressable subfields of
7939 unions always start at offset zero.
7941 This function is based on the behaviour of GCC 2.95.1.
7942 See: gcc/arm.c: arm_return_in_memory() for details.
7944 Note: All versions of GCC before GCC 2.95.2 do not set up the
7945 parameters correctly for a function returning the following
7946 structure: struct { float f;}; This should be returned in memory,
7947 not a register. Richard Earnshaw sent me a patch, but I do not
7948 know of any way to detect if a function like the above has been
7949 compiled with the correct calling convention. */
7951 /* Assume all other aggregate types can be returned in a register.
7952 Run a check for structures, unions and arrays. */
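/* For example (illustrative, not from the original source): under these
   checks "union { char c; int i; }" has every addressable field at offset
   zero and is returned in r0, while "struct { char a; char b; }" (second
   field at a non-zero offset) and "struct { float f; }" (floating point
   field) are both flagged as returned in memory.  */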
7954 nRc = 0;
7955 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7958 /* Need to check if this struct/union is "integer" like. For
7959 this to be true, its size must be less than or equal to
7960 ARM_INT_REGISTER_SIZE and the offset of each addressable
7961 subfield must be zero. Note that bit fields are not
7962 addressable, and unions always start at offset zero. If any
7963 of the subfields is a floating point type, the struct/union
7964 cannot be an integer type. */
7966 /* For each field in the object, check:
7967 1) Is it FP? --> yes, nRc = 1;
7968 2) Is it addressable (bitpos != 0) and
7969 not packed (bitsize == 0)?
7970 --> yes, nRc = 1
7971 */
7973 for (i = 0; i < TYPE_NFIELDS (type); i++)
7975 enum type_code field_type_code;
7977 field_type_code
7978 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7979 i)));
7981 /* Is it a floating point type field? */
7982 if (field_type_code == TYPE_CODE_FLT)
7983 {
7984 nRc = 1;
7985 break;
7986 }
7988 /* If bitpos != 0, then we have to care about it. */
7989 if (TYPE_FIELD_BITPOS (type, i) != 0)
7991 /* Bitfields are not addressable. If the field bitsize is
7992 zero, then the field is not packed. Hence it cannot be
7993 a bitfield or any other packed type. */
7994 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7995 {
7996 nRc = 1;
7997 break;
7998 }
8003 return nRc;
8007 /* Write into appropriate registers a function return value of type
8008 TYPE, given in virtual format. */
8011 arm_store_return_value (struct type *type, struct regcache *regs,
8012 const gdb_byte *valbuf)
8014 struct gdbarch *gdbarch = regs->arch ();
8015 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8017 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8019 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8021 switch (gdbarch_tdep (gdbarch)->fp_model)
8022 {
8023 case ARM_FLOAT_FPA:
8025 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8026 regs->cooked_write (ARM_F0_REGNUM, buf);
8027 break;
8029 case ARM_FLOAT_SOFT_FPA:
8030 case ARM_FLOAT_SOFT_VFP:
8031 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8032 not using the VFP ABI code. */
8034 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8035 if (TYPE_LENGTH (type) > 4)
8036 regs->cooked_write (ARM_A1_REGNUM + 1,
8037 valbuf + ARM_INT_REGISTER_SIZE);
8038 break;
8040 default:
8041 internal_error (__FILE__, __LINE__,
8042 _("arm_store_return_value: Floating "
8043 "point model not supported"));
8047 else if (TYPE_CODE (type) == TYPE_CODE_INT
8048 || TYPE_CODE (type) == TYPE_CODE_CHAR
8049 || TYPE_CODE (type) == TYPE_CODE_BOOL
8050 || TYPE_CODE (type) == TYPE_CODE_PTR
8051 || TYPE_IS_REFERENCE (type)
8052 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8054 if (TYPE_LENGTH (type) <= 4)
8056 /* Values of one word or less are zero/sign-extended and
8058 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8059 LONGEST val = unpack_long (type, valbuf);
8061 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8062 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8066 /* Integral values greater than one word are stored in consecutive
8067 registers starting with r0. This will always be a multiple of
8068 the register size. */
8069 int len = TYPE_LENGTH (type);
8070 int regno = ARM_A1_REGNUM;
8074 regs->cooked_write (regno++, valbuf);
8075 len -= ARM_INT_REGISTER_SIZE;
8076 valbuf += ARM_INT_REGISTER_SIZE;
8082 /* For a structure or union the behaviour is as if the value had
8083 been stored to word-aligned memory and then loaded into
8084 registers with 32-bit load instruction(s). */
8085 int len = TYPE_LENGTH (type);
8086 int regno = ARM_A1_REGNUM;
8087 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8091 memcpy (tmpbuf, valbuf,
8092 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8093 regs->cooked_write (regno++, tmpbuf);
8094 len -= ARM_INT_REGISTER_SIZE;
8095 valbuf += ARM_INT_REGISTER_SIZE;
8101 /* Handle function return values. */
8103 static enum return_value_convention
8104 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8105 struct type *valtype, struct regcache *regcache,
8106 gdb_byte *readbuf, const gdb_byte *writebuf)
8108 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8109 struct type *func_type = function ? value_type (function) : NULL;
8110 enum arm_vfp_cprc_base_type vfp_base_type;
8113 if (arm_vfp_abi_for_function (gdbarch, func_type)
8114 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8116 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8117 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8119 for (i = 0; i < vfp_base_count; i++)
8121 if (reg_char == 'q')
8124 arm_neon_quad_write (gdbarch, regcache, i,
8125 writebuf + i * unit_length);
8128 arm_neon_quad_read (gdbarch, regcache, i,
8129 readbuf + i * unit_length);
8136 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8137 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8140 regcache->cooked_write (regnum, writebuf + i * unit_length);
8142 regcache->cooked_read (regnum, readbuf + i * unit_length);
8145 return RETURN_VALUE_REGISTER_CONVENTION;
8148 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8149 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8150 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8152 if (tdep->struct_return == pcc_struct_return
8153 || arm_return_in_memory (gdbarch, valtype))
8154 return RETURN_VALUE_STRUCT_CONVENTION;
8156 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8158 if (arm_return_in_memory (gdbarch, valtype))
8159 return RETURN_VALUE_STRUCT_CONVENTION;
8163 arm_store_return_value (valtype, regcache, writebuf);
8166 arm_extract_return_value (valtype, regcache, readbuf);
8168 return RETURN_VALUE_REGISTER_CONVENTION;
8173 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8175 struct gdbarch *gdbarch = get_frame_arch (frame);
8176 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8177 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8179 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8181 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8183 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8184 ARM_INT_REGISTER_SIZE))
8187 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8190 /* A call to cmse secure entry function "foo" at "a" is modified by
8191 GNU ld as "b".
8193 a) bl xxxx <foo>
8195 <foo>
8196 xxxx:
8197 b) bl yyyy <__acle_se_foo>
8199 section .gnu.sgstubs:
8201 yyyy: sg // secure gateway
8202 b.w xxxx <__acle_se_foo> // original_branch_dest
8207 When control is at "b", the pc contains "yyyy" (the sg address), which is a
8208 trampoline and does not exist in the source code. This function returns the
8209 target pc "xxxx". For more details please refer to section 5.4
8210 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8211 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8212 document on www.developer.arm.com. */
8215 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8217 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8218 char *target_name = (char *) alloca (target_len);
8219 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
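/* Illustrative example (not from the original source): for a secure entry
   function named "foo" this builds "__acle_se_foo", i.e. the "b)" form in
   the comment above, and the lookup below yields the original "xxxx"
   destination.  */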
8221 struct bound_minimal_symbol minsym
8222 = lookup_minimal_symbol (target_name, NULL, objfile);
8224 if (minsym.minsym != nullptr)
8225 return BMSYMBOL_VALUE_ADDRESS (minsym);
8230 /* Return true when SEC points to ".gnu.sgstubs" section. */
8233 arm_is_sgstubs_section (struct obj_section *sec)
8235 return (sec != nullptr
8236 && sec->the_bfd_section != nullptr
8237 && sec->the_bfd_section->name != nullptr
8238 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8241 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8242 return the target PC. Otherwise return 0. */
8245 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8249 CORE_ADDR start_addr;
8251 /* Find the starting address and name of the function containing the PC. */
8252 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8254 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8255 check here. */
8256 start_addr = arm_skip_bx_reg (frame, pc);
8257 if (start_addr != 0)
8263 /* If PC is in a Thumb call or return stub, return the address of the
8264 target PC, which is in a register. The thunk functions are called
8265 _call_via_xx, where x is the register name. The possible names
8266 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8267 functions, named __ARM_call_via_r[0-7]. */
8268 if (startswith (name, "_call_via_")
8269 || startswith (name, "__ARM_call_via_"))
8271 /* Use the name suffix to determine which register contains the
8272 target PC. */
8273 static const char *table[15] =
8274 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8275 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8278 int offset = strlen (name) - 2;
8280 for (regno = 0; regno <= 14; regno++)
8281 if (strcmp (&name[offset], table[regno]) == 0)
8282 return get_frame_register_unsigned (frame, regno);
8285 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8286 non-interworking calls to foo. We could decode the stubs
8287 to find the target but it's easier to use the symbol table. */
8288 namelen = strlen (name);
8289 if (name[0] == '_' && name[1] == '_'
8290 && ((namelen > 2 + strlen ("_from_thumb")
8291 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8292 || (namelen > 2 + strlen ("_from_arm")
8293 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8296 int target_len = namelen - 2;
8297 struct bound_minimal_symbol minsym;
8298 struct objfile *objfile;
8299 struct obj_section *sec;
8301 if (name[namelen - 1] == 'b')
8302 target_len -= strlen ("_from_thumb");
8304 target_len -= strlen ("_from_arm");
8306 target_name = (char *) alloca (target_len + 1);
8307 memcpy (target_name, name + 2, target_len);
8308 target_name[target_len] = '\0';
8310 sec = find_pc_section (pc);
8311 objfile = (sec == NULL) ? NULL : sec->objfile;
8312 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8313 if (minsym.minsym != NULL)
8314 return BMSYMBOL_VALUE_ADDRESS (minsym);
8319 struct obj_section *section = find_pc_section (pc);
8321 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8322 if (arm_is_sgstubs_section (section))
8323 return arm_skip_cmse_entry (pc, name, section->objfile);
8325 return 0; /* not a stub */
8329 set_arm_command (const char *args, int from_tty)
8331 printf_unfiltered (_("\
8332 \"set arm\" must be followed by an appropriate subcommand.\n"));
8333 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8337 show_arm_command (const char *args, int from_tty)
8339 cmd_show_list (showarmcmdlist, from_tty, "");
8343 arm_update_current_architecture (void)
8345 struct gdbarch_info info;
8347 /* If the current architecture is not ARM, we have nothing to do. */
8348 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8351 /* Update the architecture. */
8352 gdbarch_info_init (&info);
8354 if (!gdbarch_update_p (info))
8355 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8359 set_fp_model_sfunc (const char *args, int from_tty,
8360 struct cmd_list_element *c)
8364 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8365 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8367 arm_fp_model = (enum arm_float_model) fp_model;
8371 if (fp_model == ARM_FLOAT_LAST)
8372 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8373 current_fp_model);
8375 arm_update_current_architecture ();
8379 show_fp_model (struct ui_file *file, int from_tty,
8380 struct cmd_list_element *c, const char *value)
8382 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8384 if (arm_fp_model == ARM_FLOAT_AUTO
8385 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8386 fprintf_filtered (file, _("\
8387 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8388 fp_model_strings[tdep->fp_model]);
8389 else
8390 fprintf_filtered (file, _("\
8391 The current ARM floating point model is \"%s\".\n"),
8392 fp_model_strings[arm_fp_model]);
8396 arm_set_abi (const char *args, int from_tty,
8397 struct cmd_list_element *c)
8401 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8402 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8404 arm_abi_global = (enum arm_abi_kind) arm_abi;
8408 if (arm_abi == ARM_ABI_LAST)
8409 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8410 arm_abi_string);
8412 arm_update_current_architecture ();
8416 arm_show_abi (struct ui_file *file, int from_tty,
8417 struct cmd_list_element *c, const char *value)
8419 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8421 if (arm_abi_global == ARM_ABI_AUTO
8422 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8423 fprintf_filtered (file, _("\
8424 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8425 arm_abi_strings[tdep->arm_abi]);
8426 else
8427 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8428 arm_abi_string);
8432 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8433 struct cmd_list_element *c, const char *value)
8435 fprintf_filtered (file,
8436 _("The current execution mode assumed "
8437 "(when symbols are unavailable) is \"%s\".\n"),
8438 arm_fallback_mode_string);
8442 arm_show_force_mode (struct ui_file *file, int from_tty,
8443 struct cmd_list_element *c, const char *value)
8445 fprintf_filtered (file,
8446 _("The current execution mode assumed "
8447 "(even when symbols are available) is \"%s\".\n"),
8448 arm_force_mode_string);
8451 /* If the user changes the register disassembly style used for info
8452 register and other commands, we have to also switch the style used
8453 in opcodes for disassembly output. This function is run in the "set
8454 arm disassembly" command, and does that. */
8457 set_disassembly_style_sfunc (const char *args, int from_tty,
8458 struct cmd_list_element *c)
8460 /* Convert the short style name into the long style name (eg, reg-names-*)
8461 before calling the generic set_disassembler_options() function. */
8462 std::string long_name = std::string ("reg-names-") + disassembly_style;
8463 set_disassembler_options (&long_name[0]);
8467 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8468 struct cmd_list_element *c, const char *value)
8470 struct gdbarch *gdbarch = get_current_arch ();
8471 char *options = get_disassembler_options (gdbarch);
8472 const char *style = "";
8476 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8477 if (CONST_STRNEQ (opt, "reg-names-"))
8479 style = &opt[strlen ("reg-names-")];
8480 len = strcspn (style, ",");
8483 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8486 /* Return the ARM register name corresponding to register I. */
8488 arm_register_name (struct gdbarch *gdbarch, int i)
8490 const int num_regs = gdbarch_num_regs (gdbarch);
8492 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8493 && i >= num_regs && i < num_regs + 32)
8495 static const char *const vfp_pseudo_names[] = {
8496 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8497 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8498 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8499 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8502 return vfp_pseudo_names[i - num_regs];
8505 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8506 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8508 static const char *const neon_pseudo_names[] = {
8509 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8510 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8513 return neon_pseudo_names[i - num_regs - 32];
8516 if (i >= ARRAY_SIZE (arm_register_names))
8517 /* These registers are only supported on targets which supply
8518 an XML description. */
8521 return arm_register_names[i];
8524 /* Test whether the coff symbol specific value corresponds to a Thumb
8528 coff_sym_is_thumb (int val)
8530 return (val == C_THUMBEXT
8531 || val == C_THUMBSTAT
8532 || val == C_THUMBEXTFUNC
8533 || val == C_THUMBSTATFUNC
8534 || val == C_THUMBLABEL);
8537 /* arm_coff_make_msymbol_special()
8538 arm_elf_make_msymbol_special()
8540 These functions test whether the COFF or ELF symbol corresponds to
8541 an address in thumb code, and set a "special" bit in a minimal
8542 symbol to indicate that it does. */
8545 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8547 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8549 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8550 == ST_BRANCH_TO_THUMB)
8551 MSYMBOL_SET_SPECIAL (msym);
8555 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8557 if (coff_sym_is_thumb (val))
8558 MSYMBOL_SET_SPECIAL (msym);
8562 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8565 const char *name = bfd_asymbol_name (sym);
8566 struct arm_per_objfile *data;
8567 struct arm_mapping_symbol new_map_sym;
8569 gdb_assert (name[0] == '$');
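/* In ARM ELF, $a marks the start of ARM code, $t Thumb code and $d data;
any other symbol beginning with '$' is not a mapping symbol we track.  */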
8570 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8573 data = arm_objfile_data_key.get (objfile);
8575 data = arm_objfile_data_key.emplace (objfile,
8576 objfile->obfd->section_count);
8577 arm_mapping_symbol_vec &map
8578 = data->section_maps[bfd_get_section (sym)->index];
8580 new_map_sym.value = sym->value;
8581 new_map_sym.type = name[1];
8583 /* Insert at the end; the vector will be sorted on first use. */
8584 map.push_back (new_map_sym);
8588 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8590 struct gdbarch *gdbarch = regcache->arch ();
8591 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8593 /* If necessary, set the T bit. */
8596 ULONGEST val, t_bit;
8597 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8598 t_bit = arm_psr_thumb_bit (gdbarch);
8599 if (arm_pc_is_thumb (gdbarch, pc))
8600 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8603 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8608 /* Read the contents of a NEON quad register, by reading from two
8609 double registers. This is used to implement the quad pseudo
8610 registers, and for argument passing in case the quad registers are
8611 missing; vectors are passed in quad registers when using the VFP
8612 ABI, even if a NEON unit is not present. REGNUM is the index of
8613 the quad register, in [0, 15]. */
8615 static enum register_status
8616 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8617 int regnum, gdb_byte *buf)
8620 gdb_byte reg_buf[8];
8621 int offset, double_regnum;
8622 enum register_status status;
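/* The quad register q<N> overlays the double registers d<2N> and d<2N + 1>,
so look up d<2N> by name and read both halves.  */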
8624 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8625 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8628 /* d0 is always the least significant half of q0. */
8629 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8634 status = regcache->raw_read (double_regnum, reg_buf);
8635 if (status != REG_VALID)
8637 memcpy (buf + offset, reg_buf, 8);
8639 offset = 8 - offset;
8640 status = regcache->raw_read (double_regnum + 1, reg_buf);
8641 if (status != REG_VALID)
8643 memcpy (buf + offset, reg_buf, 8);
8648 static enum register_status
8649 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8650 int regnum, gdb_byte *buf)
8652 const int num_regs = gdbarch_num_regs (gdbarch);
8654 gdb_byte reg_buf[8];
8655 int offset, double_regnum;
8657 gdb_assert (regnum >= num_regs);
8660 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8661 /* Quad-precision register. */
8662 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8665 enum register_status status;
8667 /* Single-precision register. */
8668 gdb_assert (regnum < 32);
8670 /* s0 is always the least significant half of d0. */
8671 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8672 offset = (regnum & 1) ? 0 : 4;
8674 offset = (regnum & 1) ? 4 : 0;
8676 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8677 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8680 status = regcache->raw_read (double_regnum, reg_buf);
8681 if (status == REG_VALID)
8682 memcpy (buf, reg_buf + offset, 4);
8687 /* Store the contents of BUF to a NEON quad register, by writing to
8688 two double registers. This is used to implement the quad pseudo
8689 registers, and for argument passing in case the quad registers are
8690 missing; vectors are passed in quad registers when using the VFP
8691 ABI, even if a NEON unit is not present. REGNUM is the index
8692 of the quad register, in [0, 15]. */
8695 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8696 int regnum, const gdb_byte *buf)
8699 int offset, double_regnum;
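/* As in arm_neon_quad_read, q<N> is backed by d<2N> and d<2N + 1>.  */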
8701 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8702 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8705 /* d0 is always the least significant half of q0. */
8706 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8711 regcache->raw_write (double_regnum, buf + offset);
8712 offset = 8 - offset;
8713 regcache->raw_write (double_regnum + 1, buf + offset);
8717 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8718 int regnum, const gdb_byte *buf)
8720 const int num_regs = gdbarch_num_regs (gdbarch);
8722 gdb_byte reg_buf[8];
8723 int offset, double_regnum;
8725 gdb_assert (regnum >= num_regs);
8728 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8729 /* Quad-precision register. */
8730 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8733 /* Single-precision register. */
8734 gdb_assert (regnum < 32);
8736 /* s0 is always the least significant half of d0. */
8737 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8738 offset = (regnum & 1) ? 0 : 4;
8740 offset = (regnum & 1) ? 4 : 0;
8742 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8743 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8746 regcache->raw_read (double_regnum, reg_buf);
8747 memcpy (reg_buf + offset, buf, 4);
8748 regcache->raw_write (double_regnum, reg_buf);
8752 static struct value *
8753 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8755 const int *reg_p = (const int *) baton;
8756 return value_of_register (*reg_p, frame);
8759 static enum gdb_osabi
8760 arm_elf_osabi_sniffer (bfd *abfd)
8762 unsigned int elfosabi;
8763 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8765 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8767 if (elfosabi == ELFOSABI_ARM)
8768 /* GNU tools use this value. Check note sections in this case,
8770 bfd_map_over_sections (abfd,
8771 generic_elf_osabi_sniff_abi_tag_sections,
8774 /* Anything else will be handled by the generic ELF sniffer. */
8779 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8780 struct reggroup *group)
8782 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8783 this, FPS register belongs to save_regroup, restore_reggroup, and
8784 all_reggroup, of course. */
8785 if (regnum == ARM_FPS_REGNUM)
8786 return (group == float_reggroup
8787 || group == save_reggroup
8788 || group == restore_reggroup
8789 || group == all_reggroup);
8791 return default_register_reggroup_p (gdbarch, regnum, group);
8794 /* For backward-compatibility we allow two 'g' packet lengths with
8795 the remote protocol depending on whether FPA registers are
8796 supplied. M-profile targets do not have FPA registers, but some
8797 stubs already exist in the wild which use a 'g' packet which
8798 supplies them albeit with dummy values. The packet format which
8799 includes FPA registers should be considered deprecated for
8800 M-profile targets. */
8803 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8805 if (gdbarch_tdep (gdbarch)->is_m)
8807 const target_desc *tdesc;
8809 /* If we know from the executable this is an M-profile target,
8810 cater for remote targets whose register set layout is the
8811 same as the FPA layout. */
8812 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8813 register_remote_g_packet_guess (gdbarch,
8814 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8817 /* The regular M-profile layout. */
8818 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8819 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8822 /* M-profile plus M4F VFP. */
8823 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8824 register_remote_g_packet_guess (gdbarch,
8825 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8829 /* Otherwise we don't have a useful guess. */
8832 /* Implement the code_of_frame_writable gdbarch method. */
8835 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8837 if (gdbarch_tdep (gdbarch)->is_m
8838 && get_frame_type (frame) == SIGTRAMP_FRAME)
8840 /* M-profile exception frames return to some magic PCs, which
8841 aren't writable at all. */
8848 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8849 to be postfixed by a version (eg armv7hl). */
8852 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8854 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8855 return "arm(v[^- ]*)?";
8856 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8859 /* Initialize the current architecture based on INFO. If possible,
8860 re-use an architecture from ARCHES, which is a list of
8861 architectures already created during this debugging session.
8863 Called e.g. at program startup, when reading a core file, and when
8864 reading a binary file. */
8866 static struct gdbarch *
8867 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8869 struct gdbarch_tdep *tdep;
8870 struct gdbarch *gdbarch;
8871 struct gdbarch_list *best_arch;
8872 enum arm_abi_kind arm_abi = arm_abi_global;
8873 enum arm_float_model fp_model = arm_fp_model;
8874 struct tdesc_arch_data *tdesc_data = NULL;
8876 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8877 int have_wmmx_registers = 0;
8879 int have_fpa_registers = 1;
8880 const struct target_desc *tdesc = info.target_desc;
8882 /* If we have an object to base this architecture on, try to determine
8885 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8887 int ei_osabi, e_flags;
8889 switch (bfd_get_flavour (info.abfd))
8891 case bfd_target_coff_flavour:
8892 /* Assume it's an old APCS-style ABI. */
8894 arm_abi = ARM_ABI_APCS;
8897 case bfd_target_elf_flavour:
8898 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8899 e_flags = elf_elfheader (info.abfd)->e_flags;
8901 if (ei_osabi == ELFOSABI_ARM)
8903 /* GNU tools used to use this value, but do not for EABI
8904 objects. There's nowhere to tag an EABI version
8905 anyway, so assume APCS. */
8906 arm_abi = ARM_ABI_APCS;
8908 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8910 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8914 case EF_ARM_EABI_UNKNOWN:
8915 /* Assume GNU tools. */
8916 arm_abi = ARM_ABI_APCS;
8919 case EF_ARM_EABI_VER4:
8920 case EF_ARM_EABI_VER5:
8921 arm_abi = ARM_ABI_AAPCS;
8922 /* EABI binaries default to VFP float ordering.
8923 They may also contain build attributes that can
8924 be used to identify if the VFP argument-passing
8926 if (fp_model == ARM_FLOAT_AUTO)
8929 switch (bfd_elf_get_obj_attr_int (info.abfd,
8933 case AEABI_VFP_args_base:
8934 /* "The user intended FP parameter/result
8935 passing to conform to AAPCS, base
8937 fp_model = ARM_FLOAT_SOFT_VFP;
8939 case AEABI_VFP_args_vfp:
8940 /* "The user intended FP parameter/result
8941 passing to conform to AAPCS, VFP
8943 fp_model = ARM_FLOAT_VFP;
8945 case AEABI_VFP_args_toolchain:
8946 /* "The user intended FP parameter/result
8947 passing to conform to tool chain-specific
8948 conventions" - we don't know any such
8949 conventions, so leave it as "auto". */
8951 case AEABI_VFP_args_compatible:
8952 /* "Code is compatible with both the base
8953 and VFP variants; the user did not permit
8954 non-variadic functions to pass FP
8955 parameters/results" - leave it as
8959 /* Attribute value not mentioned in the
8960 November 2012 ABI, so leave it as
8965 fp_model = ARM_FLOAT_SOFT_VFP;
8971 /* Leave it as "auto". */
8972 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8977 /* Detect M-profile programs. This only works if the
8978 executable file includes build attributes; GCC does
8979 copy them to the executable, but e.g. RealView does
8982 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8985 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8986 Tag_CPU_arch_profile);
8988 /* GCC specifies the profile for v6-M; RealView only
8989 specifies the profile for architectures starting with
8990 V7 (as opposed to architectures with a tag
8991 numerically greater than TAG_CPU_ARCH_V7). */
8992 if (!tdesc_has_registers (tdesc)
8993 && (attr_arch == TAG_CPU_ARCH_V6_M
8994 || attr_arch == TAG_CPU_ARCH_V6S_M
8995 || attr_profile == 'M'))
9000 if (fp_model == ARM_FLOAT_AUTO)
9002 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9005 /* Leave it as "auto". Strictly speaking this case
9006 means FPA, but almost nobody uses that now, and
9007 many toolchains fail to set the appropriate bits
9008 for the floating-point model they use. */
9010 case EF_ARM_SOFT_FLOAT:
9011 fp_model = ARM_FLOAT_SOFT_FPA;
9013 case EF_ARM_VFP_FLOAT:
9014 fp_model = ARM_FLOAT_VFP;
9016 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9017 fp_model = ARM_FLOAT_SOFT_VFP;
9022 if (e_flags & EF_ARM_BE8)
9023 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9028 /* Leave it as "auto". */
9033 /* Check any target description for validity. */
9034 if (tdesc_has_registers (tdesc))
9036 /* For most registers we require GDB's default names; but also allow
9037 the numeric names for sp / lr / pc, as a convenience. */
9038 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9039 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9040 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9042 const struct tdesc_feature *feature;
9045 feature = tdesc_find_feature (tdesc,
9046 "org.gnu.gdb.arm.core");
9047 if (feature == NULL)
9049 feature = tdesc_find_feature (tdesc,
9050 "org.gnu.gdb.arm.m-profile");
9051 if (feature == NULL)
9057 tdesc_data = tdesc_data_alloc ();
9060 for (i = 0; i < ARM_SP_REGNUM; i++)
9061 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9062 arm_register_names[i]);
9063 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9066 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9069 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9073 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9074 ARM_PS_REGNUM, "xpsr");
9076 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9077 ARM_PS_REGNUM, "cpsr");
9081 tdesc_data_cleanup (tdesc_data);
9085 feature = tdesc_find_feature (tdesc,
9086 "org.gnu.gdb.arm.fpa");
9087 if (feature != NULL)
9090 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9091 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9092 arm_register_names[i]);
9095 tdesc_data_cleanup (tdesc_data);
9100 have_fpa_registers = 0;
9102 feature = tdesc_find_feature (tdesc,
9103 "org.gnu.gdb.xscale.iwmmxt");
9104 if (feature != NULL)
9106 static const char *const iwmmxt_names[] = {
9107 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9108 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9109 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9110 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9114 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9116 &= tdesc_numbered_register (feature, tdesc_data, i,
9117 iwmmxt_names[i - ARM_WR0_REGNUM]);
9119 /* Check for the control registers, but do not fail if they
9121 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9122 tdesc_numbered_register (feature, tdesc_data, i,
9123 iwmmxt_names[i - ARM_WR0_REGNUM]);
9125 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9127 &= tdesc_numbered_register (feature, tdesc_data, i,
9128 iwmmxt_names[i - ARM_WR0_REGNUM]);
9132 tdesc_data_cleanup (tdesc_data);
9136 have_wmmx_registers = 1;
9139 /* If we have a VFP unit, check whether the single precision registers
9140 are present. If not, then we will synthesize them as pseudo
9142 feature = tdesc_find_feature (tdesc,
9143 "org.gnu.gdb.arm.vfp");
9144 if (feature != NULL)
9146 static const char *const vfp_double_names[] = {
9147 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9148 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9149 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9150 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9153 /* Require the double precision registers. There must be either
9156 for (i = 0; i < 32; i++)
9158 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9160 vfp_double_names[i]);
9164 if (!valid_p && i == 16)
9167 /* Also require FPSCR. */
9168 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9169 ARM_FPSCR_REGNUM, "fpscr");
9172 tdesc_data_cleanup (tdesc_data);
9176 if (tdesc_unnumbered_register (feature, "s0") == 0)
9177 have_vfp_pseudos = 1;
9179 vfp_register_count = i;
9181 /* If we have VFP, also check for NEON. The architecture allows
9182 NEON without VFP (integer vector operations only), but GDB
9183 does not support that. */
9184 feature = tdesc_find_feature (tdesc,
9185 "org.gnu.gdb.arm.neon");
9186 if (feature != NULL)
9188 /* NEON requires 32 double-precision registers. */
9191 tdesc_data_cleanup (tdesc_data);
9195 /* If there are quad registers defined by the stub, use
9196 their type; otherwise (normally) provide them with
9197 the default type. */
9198 if (tdesc_unnumbered_register (feature, "q0") == 0)
9199 have_neon_pseudos = 1;
9206 /* If there is already a candidate, use it. */
9207 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9209 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9211 if (arm_abi != ARM_ABI_AUTO
9212 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9215 if (fp_model != ARM_FLOAT_AUTO
9216 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9219 /* There are various other properties in tdep that we do not
9220 need to check here: those derived from a target description,
9221 since gdbarches with a different target description are
9222 automatically disqualified. */
9224 /* Do check is_m, though, since it might come from the binary. */
9225 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9228 /* Found a match. */
9232 if (best_arch != NULL)
9234 if (tdesc_data != NULL)
9235 tdesc_data_cleanup (tdesc_data);
9236 return best_arch->gdbarch;
9239 tdep = XCNEW (struct gdbarch_tdep);
9240 gdbarch = gdbarch_alloc (&info, tdep);
9242 /* Record additional information about the architecture we are defining.
9243 These are gdbarch discriminators, like the OSABI. */
9244 tdep->arm_abi = arm_abi;
9245 tdep->fp_model = fp_model;
9247 tdep->have_fpa_registers = have_fpa_registers;
9248 tdep->have_wmmx_registers = have_wmmx_registers;
9249 gdb_assert (vfp_register_count == 0
9250 || vfp_register_count == 16
9251 || vfp_register_count == 32);
9252 tdep->vfp_register_count = vfp_register_count;
9253 tdep->have_vfp_pseudos = have_vfp_pseudos;
9254 tdep->have_neon_pseudos = have_neon_pseudos;
9255 tdep->have_neon = have_neon;
9257 arm_register_g_packet_guesses (gdbarch);
9260 switch (info.byte_order_for_code)
9262 case BFD_ENDIAN_BIG:
9263 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9264 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9265 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9266 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9270 case BFD_ENDIAN_LITTLE:
9271 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9272 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9273 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9274 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9279 internal_error (__FILE__, __LINE__,
9280 _("arm_gdbarch_init: bad byte order for float format"));
9283 /* On ARM targets char defaults to unsigned. */
9284 set_gdbarch_char_signed (gdbarch, 0);
9286 /* wchar_t is unsigned under the AAPCS. */
9287 if (tdep->arm_abi == ARM_ABI_AAPCS)
9288 set_gdbarch_wchar_signed (gdbarch, 0);
9290 set_gdbarch_wchar_signed (gdbarch, 1);
9292 /* Compute type alignment. */
9293 set_gdbarch_type_align (gdbarch, arm_type_align);
9295 /* Note: for displaced stepping, this includes the breakpoint, and one word
9296 of additional scratch space. This setting isn't used for anything beside
9297 displaced stepping at present. */
9298 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9300 /* This should be low enough for everything. */
9301 tdep->lowest_pc = 0x20;
9302 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9304 /* The default, for both APCS and AAPCS, is to return small
9305 structures in registers. */
9306 tdep->struct_return = reg_struct_return;
9308 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9309 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9312 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9314 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9316 frame_base_set_default (gdbarch, &arm_normal_base);
9318 /* Address manipulation. */
9319 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9321 /* Advance PC across function entry code. */
9322 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9324 /* Detect whether PC is at a point where the stack has been destroyed. */
9325 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9327 /* Skip trampolines. */
9328 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9330 /* The stack grows downward. */
9331 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9333 /* Breakpoint manipulation. */
9334 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9335 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9336 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9337 arm_breakpoint_kind_from_current_state);
9339 /* Information about registers, etc. */
9340 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9341 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9342 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9343 set_gdbarch_register_type (gdbarch, arm_register_type);
9344 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9346 /* This "info float" is FPA-specific. Use the generic version if we
9348 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9349 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9351 /* Internal <-> external register number maps. */
9352 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9353 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9355 set_gdbarch_register_name (gdbarch, arm_register_name);
9357 /* Returning results. */
9358 set_gdbarch_return_value (gdbarch, arm_return_value);
9361 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9363 /* Minsymbol frobbing. */
9364 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9365 set_gdbarch_coff_make_msymbol_special (gdbarch,
9366 arm_coff_make_msymbol_special);
9367 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9369 /* Thumb-2 IT block support. */
9370 set_gdbarch_adjust_breakpoint_address (gdbarch,
9371 arm_adjust_breakpoint_address);
9373 /* Virtual tables. */
9374 set_gdbarch_vbit_in_delta (gdbarch, 1);
9376 /* Hook in the ABI-specific overrides, if they have been registered. */
9377 gdbarch_init_osabi (info, gdbarch);
9379 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9381 /* Add some default predicates. */
9383 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9384 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9385 dwarf2_append_unwinders (gdbarch);
9386 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9387 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9388 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9390 /* Now we have tuned the configuration, set a few final things,
9391 based on what the OS ABI has told us. */
9393 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9394 binaries are always marked. */
9395 if (tdep->arm_abi == ARM_ABI_AUTO)
9396 tdep->arm_abi = ARM_ABI_APCS;
9398 /* Watchpoints are not steppable. */
9399 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9401 /* We used to default to FPA for generic ARM, but almost nobody
9402 uses that now, and we now provide a way for the user to force
9403 the model. So default to the most useful variant. */
9404 if (tdep->fp_model == ARM_FLOAT_AUTO)
9405 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9407 if (tdep->jb_pc >= 0)
9408 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9410 /* Floating point sizes and format. */
9411 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9412 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9414 set_gdbarch_double_format
9415 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9416 set_gdbarch_long_double_format
9417 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9421 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9422 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9425 if (have_vfp_pseudos)
9427 /* NOTE: These are the only pseudo registers used by
9428 the ARM target at the moment. If more are added, a
9429 little more care in numbering will be needed. */
9431 int num_pseudos = 32;
9432 if (have_neon_pseudos)
9434 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9435 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9436 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9441 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9443 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9445 /* Override tdesc_register_type to adjust the types of VFP
9446 registers for NEON. */
9447 set_gdbarch_register_type (gdbarch, arm_register_type);
9450 /* Add standard register aliases. We add aliases even for those
9451 names which are used by the current architecture - it's simpler,
9452 and does no harm, since nothing ever lists user registers. */
9453 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9454 user_reg_add (gdbarch, arm_register_aliases[i].name,
9455 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9457 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9458 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9460 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9466 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9468 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9473 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9474 (unsigned long) tdep->lowest_pc);
9480 static void arm_record_test (void);
9485 _initialize_arm_tdep (void)
9489 char regdesc[1024], *rdptr = regdesc;
9490 size_t rest = sizeof (regdesc);
9492 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9494 /* Add ourselves to objfile event chain. */
9495 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9497 /* Register an ELF OS ABI sniffer for ARM binaries. */
9498 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9499 bfd_target_elf_flavour,
9500 arm_elf_osabi_sniffer);
9502 /* Add root prefix command for all "set arm"/"show arm" commands. */
9503 add_prefix_cmd ("arm", no_class, set_arm_command,
9504 _("Various ARM-specific commands."),
9505 &setarmcmdlist, "set arm ", 0, &setlist);
9507 add_prefix_cmd ("arm", no_class, show_arm_command,
9508 _("Various ARM-specific commands."),
9509 &showarmcmdlist, "show arm ", 0, &showlist);
9512 arm_disassembler_options = xstrdup ("reg-names-std");
9513 const disasm_options_t *disasm_options
9514 = &disassembler_options_arm ()->options;
9515 int num_disassembly_styles = 0;
9516 for (i = 0; disasm_options->name[i] != NULL; i++)
9517 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9518 num_disassembly_styles++;
9520 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9521 valid_disassembly_styles = XNEWVEC (const char *,
9522 num_disassembly_styles + 1);
9523 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9524 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9526 size_t offset = strlen ("reg-names-");
9527 const char *style = disasm_options->name[i];
9528 valid_disassembly_styles[j++] = &style[offset];
9529 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9530 disasm_options->description[i]);
9534 /* Mark the end of valid options. */
9535 valid_disassembly_styles[num_disassembly_styles] = NULL;
9537 /* Create the help text. */
9538 std::string helptext = string_printf ("%s%s%s",
9539 _("The valid values are:\n"),
9541 _("The default is \"std\"."));
9543 add_setshow_enum_cmd("disassembler", no_class,
9544 valid_disassembly_styles, &disassembly_style,
9545 _("Set the disassembly style."),
9546 _("Show the disassembly style."),
9548 set_disassembly_style_sfunc,
9549 show_disassembly_style_sfunc,
9550 &setarmcmdlist, &showarmcmdlist);
9552 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9553 _("Set usage of ARM 32-bit mode."),
9554 _("Show usage of ARM 32-bit mode."),
9555 _("When off, a 26-bit PC will be used."),
9557 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9559 &setarmcmdlist, &showarmcmdlist);
9561 /* Add a command to allow the user to force the FPU model. */
9562 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9563 _("Set the floating point type."),
9564 _("Show the floating point type."),
9565 _("auto - Determine the FP typefrom the OS-ABI.\n\
9566 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9567 fpa - FPA co-processor (GCC compiled).\n\
9568 softvfp - Software FP with pure-endian doubles.\n\
9569 vfp - VFP co-processor."),
9570 set_fp_model_sfunc, show_fp_model,
9571 &setarmcmdlist, &showarmcmdlist);
9573 /* Add a command to allow the user to force the ABI. */
9574 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9577 NULL, arm_set_abi, arm_show_abi,
9578 &setarmcmdlist, &showarmcmdlist);
9580 /* Add two commands to allow the user to force the assumed
9582 add_setshow_enum_cmd ("fallback-mode", class_support,
9583 arm_mode_strings, &arm_fallback_mode_string,
9584 _("Set the mode assumed when symbols are unavailable."),
9585 _("Show the mode assumed when symbols are unavailable."),
9586 NULL, NULL, arm_show_fallback_mode,
9587 &setarmcmdlist, &showarmcmdlist);
9588 add_setshow_enum_cmd ("force-mode", class_support,
9589 arm_mode_strings, &arm_force_mode_string,
9590 _("Set the mode assumed even when symbols are available."),
9591 _("Show the mode assumed even when symbols are available."),
9592 NULL, NULL, arm_show_force_mode,
9593 &setarmcmdlist, &showarmcmdlist);
9595 /* Debugging flag. */
9596 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9597 _("Set ARM debugging."),
9598 _("Show ARM debugging."),
9599 _("When on, arm-specific debugging is enabled."),
9601 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9602 &setdebuglist, &showdebuglist);
9605 selftests::register_test ("arm-record", selftests::arm_record_test);
9610 /* ARM-reversible process record data structures. */
9612 #define ARM_INSN_SIZE_BYTES 4
9613 #define THUMB_INSN_SIZE_BYTES 2
9614 #define THUMB2_INSN_SIZE_BYTES 4
9617 /* Position of the bit within a 32-bit ARM instruction
9618 that defines whether the instruction is a load or store. */
9619 #define INSN_S_L_BIT_NUM 20
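/* REG_ALLOC copies LENGTH register numbers from RECORD_BUF into a freshly
allocated REGS array; MEM_ALLOC does the same for arm_mem_r memory records.  */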
9621 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9624 unsigned int reg_len = LENGTH; \
9627 REGS = XNEWVEC (uint32_t, reg_len); \
9628 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9633 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9636 unsigned int mem_len = LENGTH; \
9639 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9640 memcpy(&MEMS->len, &RECORD_BUF[0], \
9641 sizeof(struct arm_mem_r) * LENGTH); \
9646 /* Checks whether the insn has already been recorded or is yet to be decoded (boolean expression). */
9647 #define INSN_RECORDED(ARM_RECORD) \
9648 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9650 /* ARM memory record structure. */
9653 uint32_t len; /* Record length. */
9654 uint32_t addr; /* Memory address. */
9657 /* An ARM instruction record contains the opcode and execution state
9658 of the current insn (before entry to decode_insn()), and the
9659 lists of to-be-modified registers and
9660 memory blocks (on return from decode_insn()). */
9662 typedef struct insn_decode_record_t
9664 struct gdbarch *gdbarch;
9665 struct regcache *regcache;
9666 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9667 uint32_t arm_insn; /* Should accommodate thumb. */
9668 uint32_t cond; /* Condition code. */
9669 uint32_t opcode; /* Insn opcode. */
9670 uint32_t decode; /* Insn decode bits. */
9671 uint32_t mem_rec_count; /* No of mem records. */
9672 uint32_t reg_rec_count; /* No of reg records. */
9673 uint32_t *arm_regs; /* Registers to be saved for this record. */
9674 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9675 } insn_decode_record;
9678 /* Checks ARM SBZ and SBO mandatory fields. */
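/* SBO stands for "should be one" and SBZ for "should be zero"; BIT_NUM is
the 1-based position of the field's lowest bit and LEN its length in bits.  */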
9681 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9683 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9702 enum arm_record_result
9704 ARM_RECORD_SUCCESS = 0,
9705 ARM_RECORD_FAILURE = 1
9712 } arm_record_strx_t;
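/* Record the registers and memory locations touched by the misc store insns
(STRH / STRD forms); STR_TYPE selects how many bytes are recorded.  */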
9723 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9724 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9727 struct regcache *reg_cache = arm_insn_r->regcache;
9728 ULONGEST u_regval[2]= {0};
9730 uint32_t reg_src1 = 0, reg_src2 = 0;
9731 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9733 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9734 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9736 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9738 /* 1) Handle misc store, immediate offset. */
9739 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9740 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9741 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9742 regcache_raw_read_unsigned (reg_cache, reg_src1,
9744 if (ARM_PC_REGNUM == reg_src1)
9746 /* If R15 was used as Rn, the value is the current PC+8. */
9747 u_regval[0] = u_regval[0] + 8;
9749 offset_8 = (immed_high << 4) | immed_low;
9750 /* Calculate target store address. */
9751 if (14 == arm_insn_r->opcode)
9753 tgt_mem_addr = u_regval[0] + offset_8;
9757 tgt_mem_addr = u_regval[0] - offset_8;
9759 if (ARM_RECORD_STRH == str_type)
9761 record_buf_mem[0] = 2;
9762 record_buf_mem[1] = tgt_mem_addr;
9763 arm_insn_r->mem_rec_count = 1;
9765 else if (ARM_RECORD_STRD == str_type)
9767 record_buf_mem[0] = 4;
9768 record_buf_mem[1] = tgt_mem_addr;
9769 record_buf_mem[2] = 4;
9770 record_buf_mem[3] = tgt_mem_addr + 4;
9771 arm_insn_r->mem_rec_count = 2;
9774 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9776 /* 2) Store, register offset. */
9778 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9780 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9781 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9782 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9785 /* If R15 was used as Rn, the value is the current PC+8. */
9786 u_regval[0] = u_regval[0] + 8;
9788 /* Calculate target store address, Rn +/- Rm, register offset. */
9789 if (12 == arm_insn_r->opcode)
9791 tgt_mem_addr = u_regval[0] + u_regval[1];
9795 tgt_mem_addr = u_regval[1] - u_regval[0];
9797 if (ARM_RECORD_STRH == str_type)
9799 record_buf_mem[0] = 2;
9800 record_buf_mem[1] = tgt_mem_addr;
9801 arm_insn_r->mem_rec_count = 1;
9803 else if (ARM_RECORD_STRD == str_type)
9805 record_buf_mem[0] = 4;
9806 record_buf_mem[1] = tgt_mem_addr;
9807 record_buf_mem[2] = 4;
9808 record_buf_mem[3] = tgt_mem_addr + 4;
9809 arm_insn_r->mem_rec_count = 2;
9812 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9813 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9815 /* 3) Store, immediate pre-indexed. */
9816 /* 5) Store, immediate post-indexed. */
9817 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9818 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9819 offset_8 = (immed_high << 4) | immed_low;
9820 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9821 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9822 /* Calculate target store address, Rn +/- Rm, register offset. */
9823 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9825 tgt_mem_addr = u_regval[0] + offset_8;
9829 tgt_mem_addr = u_regval[0] - offset_8;
9831 if (ARM_RECORD_STRH == str_type)
9833 record_buf_mem[0] = 2;
9834 record_buf_mem[1] = tgt_mem_addr;
9835 arm_insn_r->mem_rec_count = 1;
9837 else if (ARM_RECORD_STRD == str_type)
9839 record_buf_mem[0] = 4;
9840 record_buf_mem[1] = tgt_mem_addr;
9841 record_buf_mem[2] = 4;
9842 record_buf_mem[3] = tgt_mem_addr + 4;
9843 arm_insn_r->mem_rec_count = 2;
9845 /* Record Rn also as it changes. */
9846 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9847 arm_insn_r->reg_rec_count = 1;
9849 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9850 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9852 /* 4) Store, register pre-indexed. */
9853 /* 6) Store, register post -indexed. */
9854 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9855 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9856 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9857 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9858 /* Calculate target store address, Rn +/- Rm, register offset. */
9859 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9861 tgt_mem_addr = u_regval[0] + u_regval[1];
9865 tgt_mem_addr = u_regval[1] - u_regval[0];
9867 if (ARM_RECORD_STRH == str_type)
9869 record_buf_mem[0] = 2;
9870 record_buf_mem[1] = tgt_mem_addr;
9871 arm_insn_r->mem_rec_count = 1;
9873 else if (ARM_RECORD_STRD == str_type)
9875 record_buf_mem[0] = 4;
9876 record_buf_mem[1] = tgt_mem_addr;
9877 record_buf_mem[2] = 4;
9878 record_buf_mem[3] = tgt_mem_addr + 4;
9879 arm_insn_r->mem_rec_count = 2;
9881 /* Record Rn also as it changes. */
9882 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9883 arm_insn_r->reg_rec_count = 1;
9888 /* Handling ARM extension space insns. */
9891 arm_record_extension_space (insn_decode_record *arm_insn_r)
9893 int ret = 0; /* Return value: -1 = record failure; 0 = success. */
9894 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9895 uint32_t record_buf[8], record_buf_mem[8];
9896 uint32_t reg_src1 = 0;
9897 struct regcache *reg_cache = arm_insn_r->regcache;
9898 ULONGEST u_regval = 0;
9900 gdb_assert (!INSN_RECORDED(arm_insn_r));
9901 /* Handle unconditional insn extension space. */
9903 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9904 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9905 if (arm_insn_r->cond)
9907 /* PLD has no effect on architectural state; it just affects
9909 if (5 == ((opcode1 & 0xE0) >> 5))
9912 record_buf[0] = ARM_PS_REGNUM;
9913 record_buf[1] = ARM_LR_REGNUM;
9914 arm_insn_r->reg_rec_count = 2;
9916 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9920 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9921 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9924 /* Undefined instruction on ARM V5; need to handle if later
9925 versions define it. */
9928 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9929 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9930 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9932 /* Handle arithmetic insn extension space. */
9933 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9934 && !INSN_RECORDED(arm_insn_r))
9936 /* Handle MLA(S) and MUL(S). */
9937 if (in_inclusive_range (insn_op1, 0U, 3U))
9939 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9940 record_buf[1] = ARM_PS_REGNUM;
9941 arm_insn_r->reg_rec_count = 2;
9943 else if (in_inclusive_range (insn_op1, 4U, 15U))
9945 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9946 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9947 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9948 record_buf[2] = ARM_PS_REGNUM;
9949 arm_insn_r->reg_rec_count = 3;
9953 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9954 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9955 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9957 /* Handle control insn extension space. */
9959 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9960 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9962 if (!bit (arm_insn_r->arm_insn,25))
9964 if (!bits (arm_insn_r->arm_insn, 4, 7))
9966 if ((0 == insn_op1) || (2 == insn_op1))
9969 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9970 arm_insn_r->reg_rec_count = 1;
9972 else if (1 == insn_op1)
9974 /* CPSR is going to be changed. */
9975 record_buf[0] = ARM_PS_REGNUM;
9976 arm_insn_r->reg_rec_count = 1;
9978 else if (3 == insn_op1)
9980 /* SPSR is going to be changed. */
9981 /* We need to get SPSR value, which is yet to be done. */
9985 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9990 record_buf[0] = ARM_PS_REGNUM;
9991 arm_insn_r->reg_rec_count = 1;
9993 else if (3 == insn_op1)
9996 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9997 arm_insn_r->reg_rec_count = 1;
10000 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10003 record_buf[0] = ARM_PS_REGNUM;
10004 record_buf[1] = ARM_LR_REGNUM;
10005 arm_insn_r->reg_rec_count = 2;
10007 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10009 /* QADD, QSUB, QDADD, QDSUB */
10010 record_buf[0] = ARM_PS_REGNUM;
10011 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10012 arm_insn_r->reg_rec_count = 2;
10014 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10017 record_buf[0] = ARM_PS_REGNUM;
10018 record_buf[1] = ARM_LR_REGNUM;
10019 arm_insn_r->reg_rec_count = 2;
10021 /* Save SPSR also; how? */
10024 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10025 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10026 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10027 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10030 if (0 == insn_op1 || 1 == insn_op1)
10032 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10033 /* We don't do optimization for SMULW<y> where we
10035 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10036 record_buf[1] = ARM_PS_REGNUM;
10037 arm_insn_r->reg_rec_count = 2;
10039 else if (2 == insn_op1)
10042 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10043 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10044 arm_insn_r->reg_rec_count = 2;
10046 else if (3 == insn_op1)
10049 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10050 arm_insn_r->reg_rec_count = 1;
10056 /* MSR : immediate form. */
10059 /* CPSR is going to be changed. */
10060 record_buf[0] = ARM_PS_REGNUM;
10061 arm_insn_r->reg_rec_count = 1;
10063 else if (3 == insn_op1)
10065 /* SPSR is going to be changed. */
10066 /* We need to get SPSR value, which is yet to be done. */
10072 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10073 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10074 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10076 /* Handle load/store insn extension space. */
10078 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10079 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10080 && !INSN_RECORDED(arm_insn_r))
10085 /* These insns change both registers and memory. */
10086 /* SWP or SWPB insn. */
10087 /* Get memory address given by Rn. */
10088 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10089 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10090 /* SWP insn: swaps a word. */
10091 if (8 == arm_insn_r->opcode)
10093 record_buf_mem[0] = 4;
10097 /* SWPB insn: swaps only a byte. */
10098 record_buf_mem[0] = 1;
10100 record_buf_mem[1] = u_regval;
10101 arm_insn_r->mem_rec_count = 1;
10102 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10103 arm_insn_r->reg_rec_count = 1;
10105 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10108 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10111 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10115 record_buf[1] = record_buf[0] + 1;
10116 arm_insn_r->reg_rec_count = 2;
10118 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10121 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10124 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10126 /* LDRH, LDRSB, LDRSH. */
10127 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10128 arm_insn_r->reg_rec_count = 1;
10133 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10134 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10135 && !INSN_RECORDED(arm_insn_r))
10138 /* Handle coprocessor insn extension space. */
10141 /* To be done for ARMv5 and later; as of now we return -1. */
10145 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10146 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10151 /* Handling opcode 000 insns. */
10154 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10156 struct regcache *reg_cache = arm_insn_r->regcache;
10157 uint32_t record_buf[8], record_buf_mem[8];
10158 ULONGEST u_regval[2] = {0};
10160 uint32_t reg_src1 = 0;
10161 uint32_t opcode1 = 0;
10163 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10164 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10165 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10167 if (!((opcode1 & 0x19) == 0x10))
10169 /* Data-processing (register) and Data-processing (register-shifted
10171 /* In all of the 11 shifter operand modes, the insn modifies the destination
10172 register, which is specified by bits 12-15. */
10173 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10174 record_buf[1] = ARM_PS_REGNUM;
10175 arm_insn_r->reg_rec_count = 2;
10177 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10179 /* Miscellaneous instructions */
10181 if (3 == arm_insn_r->decode && 0x12 == opcode1
10182 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10184 /* Handle BLX, branch and link/exchange. */
10185 if (9 == arm_insn_r->opcode)
10187 /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm,
10188 and R14 stores the return address. */
10189 record_buf[0] = ARM_PS_REGNUM;
10190 record_buf[1] = ARM_LR_REGNUM;
10191 arm_insn_r->reg_rec_count = 2;
10194 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10196 /* Handle enhanced software breakpoint insn, BKPT. */
10197 /* CPSR is changed so that execution continues in ARM state, with
10198 normal interrupts disabled, entering abort mode. */
10199 /* The PC is set according to the high-vector configuration. */
10200 /* If the user hits a breakpoint and then replays in reverse,
10201 we need to go back with the previous CPSR and
10202 Program Counter. */
10203 record_buf[0] = ARM_PS_REGNUM;
10204 record_buf[1] = ARM_LR_REGNUM;
10205 arm_insn_r->reg_rec_count = 2;
10207 /* Save SPSR also; how? */
10210 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10211 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10213 /* Handle BX, branch and link/exchange. */
10214 /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10215 record_buf[0] = ARM_PS_REGNUM;
10216 arm_insn_r->reg_rec_count = 1;
10218 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10219 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10220 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10222 /* Count leading zeros: CLZ. */
10223 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10224 arm_insn_r->reg_rec_count = 1;
10226 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10227 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10228 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10229 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10231 /* Handle MRS insn. */
10232 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10233 arm_insn_r->reg_rec_count = 1;
10236 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10238 /* Multiply and multiply-accumulate */
10240 /* Handle multiply instructions. */
10241 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10242 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10244 /* Handle MLA and MUL. */
10245 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10246 record_buf[1] = ARM_PS_REGNUM;
10247 arm_insn_r->reg_rec_count = 2;
10249 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10251 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10252 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10253 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10254 record_buf[2] = ARM_PS_REGNUM;
10255 arm_insn_r->reg_rec_count = 3;
10258 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10260 /* Synchronization primitives */
10262 /* Handling SWP, SWPB. */
10263 /* These insns change both registers and memory. */
10264 /* SWP or SWPB insn. */
10266 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10267 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10268 /* SWP insn: swaps a word. */
10269 if (8 == arm_insn_r->opcode)
10271 record_buf_mem[0] = 4;
10276 /* SWPB insn: swaps only a byte. */
10276 record_buf_mem[0] = 1;
10278 record_buf_mem[1] = u_regval[0];
10279 arm_insn_r->mem_rec_count = 1;
10280 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10281 arm_insn_r->reg_rec_count = 1;
10283 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10284 || 15 == arm_insn_r->decode)
10286 if ((opcode1 & 0x12) == 2)
10288 /* Extra load/store (unprivileged) */
10293 /* Extra load/store */
10294 switch (bits (arm_insn_r->arm_insn, 5, 6))
10297 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10299 /* STRH (register), STRH (immediate) */
10300 arm_record_strx (arm_insn_r, &record_buf[0],
10301 &record_buf_mem[0], ARM_RECORD_STRH);
10303 else if ((opcode1 & 0x05) == 0x1)
10305 /* LDRH (register) */
10306 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10307 arm_insn_r->reg_rec_count = 1;
10309 if (bit (arm_insn_r->arm_insn, 21))
10311 /* Write back to Rn. */
10312 record_buf[arm_insn_r->reg_rec_count++]
10313 = bits (arm_insn_r->arm_insn, 16, 19);
10316 else if ((opcode1 & 0x05) == 0x5)
10318 /* LDRH (immediate), LDRH (literal) */
10319 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10321 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10322 arm_insn_r->reg_rec_count = 1;
10326 /* LDRH (immediate) */
10327 if (bit (arm_insn_r->arm_insn, 21))
10329 /* Write back to Rn. */
10330 record_buf[arm_insn_r->reg_rec_count++] = rn;
10338 if ((opcode1 & 0x05) == 0x0)
10340 /* LDRD (register) */
10341 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10342 record_buf[1] = record_buf[0] + 1;
10343 arm_insn_r->reg_rec_count = 2;
10345 if (bit (arm_insn_r->arm_insn, 21))
10347 /* Write back to Rn. */
10348 record_buf[arm_insn_r->reg_rec_count++]
10349 = bits (arm_insn_r->arm_insn, 16, 19);
10352 else if ((opcode1 & 0x05) == 0x1)
10354 /* LDRSB (register) */
10355 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10356 arm_insn_r->reg_rec_count = 1;
10358 if (bit (arm_insn_r->arm_insn, 21))
10360 /* Write back to Rn. */
10361 record_buf[arm_insn_r->reg_rec_count++]
10362 = bits (arm_insn_r->arm_insn, 16, 19);
10365 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10367 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10369 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10371 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10372 arm_insn_r->reg_rec_count = 1;
10376 /* LDRD (immediate), LDRSB (immediate) */
10377 if (bit (arm_insn_r->arm_insn, 21))
10379 /* Write back to Rn. */
10380 record_buf[arm_insn_r->reg_rec_count++] = rn;
10388 if ((opcode1 & 0x05) == 0x0)
10390 /* STRD (register) */
10391 arm_record_strx (arm_insn_r, &record_buf[0],
10392 &record_buf_mem[0], ARM_RECORD_STRD);
10394 else if ((opcode1 & 0x05) == 0x1)
10396 /* LDRSH (register) */
10397 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10398 arm_insn_r->reg_rec_count = 1;
10400 if (bit (arm_insn_r->arm_insn, 21))
10402 /* Write back to Rn. */
10403 record_buf[arm_insn_r->reg_rec_count++]
10404 = bits (arm_insn_r->arm_insn, 16, 19);
10407 else if ((opcode1 & 0x05) == 0x4)
10409 /* STRD (immediate) */
10410 arm_record_strx (arm_insn_r, &record_buf[0],
10411 &record_buf_mem[0], ARM_RECORD_STRD);
10413 else if ((opcode1 & 0x05) == 0x5)
10415 /* LDRSH (immediate), LDRSH (literal) */
10416 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10417 arm_insn_r->reg_rec_count = 1;
10419 if (bit (arm_insn_r->arm_insn, 21))
10421 /* Write back to Rn. */
10422 record_buf[arm_insn_r->reg_rec_count++]
10423 = bits (arm_insn_r->arm_insn, 16, 19);
10439 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10440 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10444 /* Handling opcode 001 insns. */
10447 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10449 uint32_t record_buf[8], record_buf_mem[8];
10451 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10452 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10454 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10455 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10456 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10459 /* Handle MSR insn. */
10460 if (9 == arm_insn_r->opcode)
10462 /* CPSR is going to be changed. */
10463 record_buf[0] = ARM_PS_REGNUM;
10464 arm_insn_r->reg_rec_count = 1;
10468 /* SPSR is going to be changed. */
10471 else if (arm_insn_r->opcode <= 15)
10473 /* Normal data processing insns. */
10474 /* In all of the 11 shifter operand modes, the insn modifies the destination
10475 register, which is specified by bits 12-15. */
10476 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10477 record_buf[1] = ARM_PS_REGNUM;
10478 arm_insn_r->reg_rec_count = 2;
10485 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10486 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10491 arm_record_media (insn_decode_record *arm_insn_r)
10493 uint32_t record_buf[8];
10495 switch (bits (arm_insn_r->arm_insn, 22, 24))
10498 /* Parallel addition and subtraction, signed */
10500 /* Parallel addition and subtraction, unsigned */
10503 /* Packing, unpacking, saturation and reversal */
10505 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10507 record_buf[arm_insn_r->reg_rec_count++] = rd;
10513 /* Signed multiplies */
10515 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10516 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10518 record_buf[arm_insn_r->reg_rec_count++] = rd;
10520 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10521 else if (op1 == 0x4)
10522 record_buf[arm_insn_r->reg_rec_count++]
10523 = bits (arm_insn_r->arm_insn, 12, 15);
10529 if (bit (arm_insn_r->arm_insn, 21)
10530 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10533 record_buf[arm_insn_r->reg_rec_count++]
10534 = bits (arm_insn_r->arm_insn, 12, 15);
10536 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10537 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10539 /* USAD8 and USADA8 */
10540 record_buf[arm_insn_r->reg_rec_count++]
10541 = bits (arm_insn_r->arm_insn, 16, 19);
10548 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10549 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10551 /* Permanently UNDEFINED */
10556 /* BFC, BFI and UBFX */
10557 record_buf[arm_insn_r->reg_rec_count++]
10558 = bits (arm_insn_r->arm_insn, 12, 15);
10567 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10572 /* Handle ARM mode instructions with opcode 010. */
10575 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10577 struct regcache *reg_cache = arm_insn_r->regcache;
10579 uint32_t reg_base, reg_dest;
10580 uint32_t offset_12, tgt_mem_addr;
10581 uint32_t record_buf[8], record_buf_mem[8];
10582 unsigned char wback;
10585 /* Calculate wback. */
10586 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10587 || (bit (arm_insn_r->arm_insn, 21) == 1);
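/* Bit 24 is the P (pre-index) bit and bit 21 is the W (write-back) bit of
the ARM single data transfer encoding; either post-indexing (P == 0) or an
explicit write-back request (W == 1) updates the base register.  */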
10589 arm_insn_r->reg_rec_count = 0;
10590 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10592 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10594 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10597 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10598 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10600 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10601 precedes an LDR instruction that loads R15 (the PC), the pair
10602 emulates a branch and link instruction, and hence we need to save
10603 CPSR and PC as well. */
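/* An illustrative (not exhaustive) sequence of this kind would be
"mov lr, pc" immediately followed by "ldr pc, [rN]", which together behave
like a call.  */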
10604 if (ARM_PC_REGNUM == reg_dest)
10605 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10607 /* If wback is true, also save the base register, which is going to be
10610 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10614 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10616 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10617 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10619 /* Handle bit U. */
10620 if (bit (arm_insn_r->arm_insn, 23))
10622 /* U == 1: Add the offset. */
10623 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10627 /* U == 0: subtract the offset. */
10628 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10631 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10633 if (bit (arm_insn_r->arm_insn, 22))
10635 /* STRB and STRBT: 1 byte. */
10636 record_buf_mem[0] = 1;
10640 /* STR and STRT: 4 bytes. */
10641 record_buf_mem[0] = 4;
10644 /* Handle bit P. */
10645 if (bit (arm_insn_r->arm_insn, 24))
10646 record_buf_mem[1] = tgt_mem_addr;
10648 record_buf_mem[1] = (uint32_t) u_regval;
10650 arm_insn_r->mem_rec_count = 1;
10652 /* If wback is true, also save the base register, which is going to be
10655 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10658 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10659 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10663 /* Handling opcode 011 insns. */
10666 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10668 struct regcache *reg_cache = arm_insn_r->regcache;
10670 uint32_t shift_imm = 0;
10671 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10672 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10673 uint32_t record_buf[8], record_buf_mem[8];
10676 ULONGEST u_regval[2];
10678 if (bit (arm_insn_r->arm_insn, 4))
10679 return arm_record_media (arm_insn_r);
10681 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10682 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10684 /* Handle enhanced store insns and the LDRD DSP insn; the cases are
10685 ordered according to the addressing modes of the store insns,
10689 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10691 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10692 /* The LDR insn is capable of doing branching. If MOV LR, PC
10693 precedes an LDR insn that loads R15 (the PC), the pair
10694 emulates a branch and link insn, and hence we need to save
10695 CPSR and PC as well. */
10696 if (15 != reg_dest)
10698 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10699 arm_insn_r->reg_rec_count = 1;
10703 record_buf[0] = reg_dest;
10704 record_buf[1] = ARM_PS_REGNUM;
10705 arm_insn_r->reg_rec_count = 2;
10710 if (! bits (arm_insn_r->arm_insn, 4, 11))
10712 /* Store insn, register offset and register pre-indexed,
10713 register post-indexed. */
10715 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10717 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10718 regcache_raw_read_unsigned (reg_cache, reg_src1
10720 regcache_raw_read_unsigned (reg_cache, reg_src2
10722 if (15 == reg_src2)
10724 /* If R15 was used as Rn, the value read is the current PC + 8. */
10725 /* Pre-indexed mode doesn't reach here; illegal insn. */
10726 u_regval[0] = u_regval[0] + 8;
10728 /* Calculate target store address, Rn +/- Rm, register offset. */
10730 if (bit (arm_insn_r->arm_insn, 23))
10732 tgt_mem_addr = u_regval[0] + u_regval[1];
10736 tgt_mem_addr = u_regval[1] - u_regval[0];
10739 switch (arm_insn_r->opcode)
10753 record_buf_mem[0] = 4;
10768 record_buf_mem[0] = 1;
10772 gdb_assert_not_reached ("no decoding pattern found");
10775 record_buf_mem[1] = tgt_mem_addr;
10776 arm_insn_r->mem_rec_count = 1;
10778 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10779 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10780 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10781 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10782 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10783 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10786 /* Rn is going to be changed in pre-indexed mode and
10787 post-indexed mode as well. */
10788 record_buf[0] = reg_src2;
10789 arm_insn_r->reg_rec_count = 1;
10794 /* Store insn, scaled register offset; scaled pre-indexed. */
10795 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10797 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10799 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10800 /* Get shift_imm. */
10801 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10802 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10803 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10804 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10805 /* Offset_12 here holds the shift type from bits 5-6. */
10809 /* Offset_12 is reused below to hold the computed index. */
10810 offset_12 = u_regval[0] << shift_imm;
10814 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10820 if (bit (u_regval[0], 31))
10822 offset_12 = 0xFFFFFFFF;
10831 /* This is arithmetic shift. */
10832 offset_12 = s_word >> shift_imm;
10839 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10841 /* Get C flag value and shift it by 31. */
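/* A shift amount of zero in the ROR encoding denotes RRX: rotate right by
one with the carry flag (CPSR bit 29) moved into bit 31.  */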
10842 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10843 | (u_regval[0]) >> 1);
10847 offset_12 = (u_regval[0] >> shift_imm)
10849 | (u_regval[0] << (32 - shift_imm));
10854 gdb_assert_not_reached ("no decoding pattern found");
10858 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10860 if (bit (arm_insn_r->arm_insn, 23))
10862 tgt_mem_addr = u_regval[1] + offset_12;
10866 tgt_mem_addr = u_regval[1] - offset_12;
10869 switch (arm_insn_r->opcode)
10883 record_buf_mem[0] = 4;
10898 record_buf_mem[0] = 1;
10902 gdb_assert_not_reached ("no decoding pattern found");
10905 record_buf_mem[1] = tgt_mem_addr;
10906 arm_insn_r->mem_rec_count = 1;
10908 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10909 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10910 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10911 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10912 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10913 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10916 /* Rn is going to be changed in register scaled pre-indexed
10917 mode, and scaled post-indexed mode. */
10918 record_buf[0] = reg_src2;
10919 arm_insn_r->reg_rec_count = 1;
10924 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10925 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10929 /* Handle ARM mode instructions with opcode 100. */
10932 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10934 struct regcache *reg_cache = arm_insn_r->regcache;
10935 uint32_t register_count = 0, register_bits;
10936 uint32_t reg_base, addr_mode;
10937 uint32_t record_buf[24], record_buf_mem[48];
10941 /* Fetch the list of registers. */
10942 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10943 arm_insn_r->reg_rec_count = 0;
10945 /* Fetch the base register that contains the address we are loading data
10947 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10949 /* Calculate wback. */
10950 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10952 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10954 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10956 /* Find out which registers are going to be loaded from memory. */
10957 while (register_bits)
10959 if (register_bits & 0x00000001)
10960 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10961 register_bits = register_bits >> 1;
10966 /* If wback is true, also save the base register, which is going to be
10969 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10971 /* Save the CPSR register. */
10972 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10976 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10978 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10980 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10982 /* Find out how many registers are going to be stored to memory. */
10983 while (register_bits)
10985 if (register_bits & 0x00000001)
10987 register_bits = register_bits >> 1;
10992 /* STMDA (STMED): Decrement after. */
10994 record_buf_mem[1] = (uint32_t) u_regval
10995 - register_count * ARM_INT_REGISTER_SIZE + 4;
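/* For decrement-after the block ends at Rn, so the lowest address written
is Rn - 4 * register_count + 4.  */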
10997 /* STM (STMIA, STMEA): Increment after. */
10999 record_buf_mem[1] = (uint32_t) u_regval;
11001 /* STMDB (STMFD): Decrement before. */
11003 record_buf_mem[1] = (uint32_t) u_regval
11004 - register_count * ARM_INT_REGISTER_SIZE;
11006 /* STMIB (STMFA): Increment before. */
11008 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11011 gdb_assert_not_reached ("no decoding pattern found");
11015 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11016 arm_insn_r->mem_rec_count = 1;
11018 /* If wback is true, also save the base register, which is going to be
11021 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11024 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11025 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11029 /* Handling opcode 101 insns. */
11032 arm_record_b_bl (insn_decode_record *arm_insn_r)
11034 uint32_t record_buf[8];
11036 /* Handle B, BL, BLX(1) insns. */
11037 /* B simply branches so we do nothing here. */
11038 /* Note: BLX(1) doesn't fall here; instead it falls into the
11039 extension space. */
11040 if (bit (arm_insn_r->arm_insn, 24))
11042 record_buf[0] = ARM_LR_REGNUM;
11043 arm_insn_r->reg_rec_count = 1;
11046 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11052 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11054 printf_unfiltered (_("Process record does not support instruction "
11055 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11056 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11061 /* Record handler for vector data transfer instructions. */
11064 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11066 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11067 uint32_t record_buf[4];
11069 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11070 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11071 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11072 bit_l = bit (arm_insn_r->arm_insn, 20);
11073 bit_c = bit (arm_insn_r->arm_insn, 8);
11075 /* Handle VMOV instruction. */
11076 if (bit_l && bit_c)
11078 record_buf[0] = reg_t;
11079 arm_insn_r->reg_rec_count = 1;
11081 else if (bit_l && !bit_c)
11083 /* Handle VMOV instruction. */
11084 if (bits_a == 0x00)
11086 record_buf[0] = reg_t;
11087 arm_insn_r->reg_rec_count = 1;
11089 /* Handle VMRS instruction. */
11090 else if (bits_a == 0x07)
11093 reg_t = ARM_PS_REGNUM;
11095 record_buf[0] = reg_t;
11096 arm_insn_r->reg_rec_count = 1;
11099 else if (!bit_l && !bit_c)
11101 /* Handle VMOV instruction. */
11102 if (bits_a == 0x00)
11104 record_buf[0] = ARM_D0_REGNUM + reg_v;
11106 arm_insn_r->reg_rec_count = 1;
11108 /* Handle VMSR instruction. */
11109 else if (bits_a == 0x07)
11111 record_buf[0] = ARM_FPSCR_REGNUM;
11112 arm_insn_r->reg_rec_count = 1;
11115 else if (!bit_l && bit_c)
11117 /* Handle VMOV instruction. */
11118 if (!(bits_a & 0x04))
11120 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11122 arm_insn_r->reg_rec_count = 1;
11124 /* Handle VDUP instruction. */
11127 if (bit (arm_insn_r->arm_insn, 21))
11129 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11130 record_buf[0] = reg_v + ARM_D0_REGNUM;
11131 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11132 arm_insn_r->reg_rec_count = 2;
11136 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11137 record_buf[0] = reg_v + ARM_D0_REGNUM;
11138 arm_insn_r->reg_rec_count = 1;
11143 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11147 /* Record handler for extension register load/store instructions. */
11150 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11152 uint32_t opcode, single_reg;
11153 uint8_t op_vldm_vstm;
11154 uint32_t record_buf[8], record_buf_mem[128];
11155 ULONGEST u_regval = 0;
11157 struct regcache *reg_cache = arm_insn_r->regcache;
11159 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11160 single_reg = !bit (arm_insn_r->arm_insn, 8);
11161 op_vldm_vstm = opcode & 0x1b;
11163 /* Handle VMOV instructions. */
11164 if ((opcode & 0x1e) == 0x04)
11166 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11169 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11170 arm_insn_r->reg_rec_count = 2;
11174 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11175 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11179 /* The first S register number m is REG_M:M (M is bit 5),
11180 the corresponding D register number is REG_M:M / 2, which
11182 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11183 /* The second S register number is REG_M:M + 1, the
11184 corresponding D register number is (REG_M:M + 1) / 2.
11185 IOW, if bit M is 1, the first and second S registers
11186 are mapped to different D registers, otherwise, they are
11187 in the same D register. */
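/* For example, with REG_M = 3 and M = 1 the source registers are S7 and
S8, which live in D3 and D4, so two D registers are recorded.  */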
11190 record_buf[arm_insn_r->reg_rec_count++]
11191 = ARM_D0_REGNUM + reg_m + 1;
11196 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11197 arm_insn_r->reg_rec_count = 1;
11201 /* Handle VSTM and VPUSH instructions. */
11202 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11203 || op_vldm_vstm == 0x12)
11205 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11206 uint32_t memory_index = 0;
11208 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11209 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11210 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11211 imm_off32 = imm_off8 << 2;
11212 memory_count = imm_off8;
11214 if (bit (arm_insn_r->arm_insn, 23))
11215 start_address = u_regval;
11217 start_address = u_regval - imm_off32;
11219 if (bit (arm_insn_r->arm_insn, 21))
11221 record_buf[0] = reg_rn;
11222 arm_insn_r->reg_rec_count = 1;
11225 while (memory_count > 0)
11229 record_buf_mem[memory_index] = 4;
11230 record_buf_mem[memory_index + 1] = start_address;
11231 start_address = start_address + 4;
11232 memory_index = memory_index + 2;
11236 record_buf_mem[memory_index] = 4;
11237 record_buf_mem[memory_index + 1] = start_address;
11238 record_buf_mem[memory_index + 2] = 4;
11239 record_buf_mem[memory_index + 3] = start_address + 4;
11240 start_address = start_address + 8;
11241 memory_index = memory_index + 4;
11245 arm_insn_r->mem_rec_count = (memory_index >> 1);
11247 /* Handle VLDM instructions. */
11248 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11249 || op_vldm_vstm == 0x13)
11251 uint32_t reg_count, reg_vd;
11252 uint32_t reg_index = 0;
11253 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11255 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11256 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11258 /* REG_VD is the first D register number. If the instruction
11259 loads memory to S registers (SINGLE_REG is TRUE), the register
11260 number is (REG_VD << 1 | bit D), so the corresponding D
11261 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11263 reg_vd = reg_vd | (bit_d << 4);
11265 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11266 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11268 /* If the instruction loads memory to D register, REG_COUNT should
11269 be divided by 2, according to the ARM Architecture Reference
11270 Manual. If the instruction loads memory to S registers, divide by
11271 2 as well, because two S registers map to one D register. */
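/* E.g. a VLDM naming four S registers starting at S1 (bit D set) touches
D0-D2, so reg_count becomes 4 / 2 + 1 = 3 below.  */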
11272 reg_count = reg_count / 2;
11273 if (single_reg && bit_d)
11275 /* Increase the register count if S register list starts from
11276 an odd number (bit d is one). */
11280 while (reg_count > 0)
11282 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11285 arm_insn_r->reg_rec_count = reg_index;
11287 /* VSTR Vector store register. */
11288 else if ((opcode & 0x13) == 0x10)
11290 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11291 uint32_t memory_index = 0;
11293 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11294 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11295 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11296 imm_off32 = imm_off8 << 2;
11298 if (bit (arm_insn_r->arm_insn, 23))
11299 start_address = u_regval + imm_off32;
11301 start_address = u_regval - imm_off32;
11305 record_buf_mem[memory_index] = 4;
11306 record_buf_mem[memory_index + 1] = start_address;
11307 arm_insn_r->mem_rec_count = 1;
11311 record_buf_mem[memory_index] = 4;
11312 record_buf_mem[memory_index + 1] = start_address;
11313 record_buf_mem[memory_index + 2] = 4;
11314 record_buf_mem[memory_index + 3] = start_address + 4;
11315 arm_insn_r->mem_rec_count = 2;
11318 /* VLDR Vector load register. */
11319 else if ((opcode & 0x13) == 0x11)
11321 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11325 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11326 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11330 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11331 /* Record register D rather than pseudo register S. */
11332 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
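/* E.g. a VLDR targeting S5 records D2, the D register that holds S4 and
S5.  */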
11334 arm_insn_r->reg_rec_count = 1;
11337 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11338 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11342 /* Record handler for arm/thumb mode VFP data processing instructions. */
11345 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11347 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11348 uint32_t record_buf[4];
11349 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11350 enum insn_types curr_insn_type = INSN_INV;
11352 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11353 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11354 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11355 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11356 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11357 bit_d = bit (arm_insn_r->arm_insn, 22);
11358 /* Mask off the "D" bit. */
11359 opc1 = opc1 & ~0x04;
11361 /* Handle VMLA, VMLS. */
11364 if (bit (arm_insn_r->arm_insn, 10))
11366 if (bit (arm_insn_r->arm_insn, 6))
11367 curr_insn_type = INSN_T0;
11369 curr_insn_type = INSN_T1;
11374 curr_insn_type = INSN_T1;
11376 curr_insn_type = INSN_T2;
11379 /* Handle VNMLA, VNMLS, VNMUL. */
11380 else if (opc1 == 0x01)
11383 curr_insn_type = INSN_T1;
11385 curr_insn_type = INSN_T2;
11388 else if (opc1 == 0x02 && !(opc3 & 0x01))
11390 if (bit (arm_insn_r->arm_insn, 10))
11392 if (bit (arm_insn_r->arm_insn, 6))
11393 curr_insn_type = INSN_T0;
11395 curr_insn_type = INSN_T1;
11400 curr_insn_type = INSN_T1;
11402 curr_insn_type = INSN_T2;
11405 /* Handle VADD, VSUB. */
11406 else if (opc1 == 0x03)
11408 if (!bit (arm_insn_r->arm_insn, 9))
11410 if (bit (arm_insn_r->arm_insn, 6))
11411 curr_insn_type = INSN_T0;
11413 curr_insn_type = INSN_T1;
11418 curr_insn_type = INSN_T1;
11420 curr_insn_type = INSN_T2;
11424 else if (opc1 == 0x08)
11427 curr_insn_type = INSN_T1;
11429 curr_insn_type = INSN_T2;
11431 /* Handle all other vfp data processing instructions. */
11432 else if (opc1 == 0x0b)
11435 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11437 if (bit (arm_insn_r->arm_insn, 4))
11439 if (bit (arm_insn_r->arm_insn, 6))
11440 curr_insn_type = INSN_T0;
11442 curr_insn_type = INSN_T1;
11447 curr_insn_type = INSN_T1;
11449 curr_insn_type = INSN_T2;
11452 /* Handle VNEG and VABS. */
11453 else if ((opc2 == 0x01 && opc3 == 0x01)
11454 || (opc2 == 0x00 && opc3 == 0x03))
11456 if (!bit (arm_insn_r->arm_insn, 11))
11458 if (bit (arm_insn_r->arm_insn, 6))
11459 curr_insn_type = INSN_T0;
11461 curr_insn_type = INSN_T1;
11466 curr_insn_type = INSN_T1;
11468 curr_insn_type = INSN_T2;
11471 /* Handle VSQRT. */
11472 else if (opc2 == 0x01 && opc3 == 0x03)
11475 curr_insn_type = INSN_T1;
11477 curr_insn_type = INSN_T2;
11480 else if (opc2 == 0x07 && opc3 == 0x03)
11483 curr_insn_type = INSN_T1;
11485 curr_insn_type = INSN_T2;
11487 else if (opc3 & 0x01)
11490 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11492 if (!bit (arm_insn_r->arm_insn, 18))
11493 curr_insn_type = INSN_T2;
11497 curr_insn_type = INSN_T1;
11499 curr_insn_type = INSN_T2;
11503 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11506 curr_insn_type = INSN_T1;
11508 curr_insn_type = INSN_T2;
11510 /* Handle VCVTB, VCVTT. */
11511 else if ((opc2 & 0x0e) == 0x02)
11512 curr_insn_type = INSN_T2;
11513 /* Handle VCMP, VCMPE. */
11514 else if ((opc2 & 0x0e) == 0x04)
11515 curr_insn_type = INSN_T3;
11519 switch (curr_insn_type)
11522 reg_vd = reg_vd | (bit_d << 4);
11523 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11524 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11525 arm_insn_r->reg_rec_count = 2;
11529 reg_vd = reg_vd | (bit_d << 4);
11530 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11531 arm_insn_r->reg_rec_count = 1;
11535 reg_vd = (reg_vd << 1) | bit_d;
11536 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11537 arm_insn_r->reg_rec_count = 1;
11541 record_buf[0] = ARM_FPSCR_REGNUM;
11542 arm_insn_r->reg_rec_count = 1;
11546 gdb_assert_not_reached ("no decoding pattern found");
11550 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11554 /* Handling opcode 110 insns. */
11557 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11559 uint32_t op1, op1_ebit, coproc;
11561 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11562 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11563 op1_ebit = bit (arm_insn_r->arm_insn, 20);
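/* Coprocessor numbers 10 and 11, i.e. (coproc & 0x0e) == 0x0a, denote the
VFP/Advanced SIMD register file; anything else is treated as a generic
coprocessor below.  */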
11565 if ((coproc & 0x0e) == 0x0a)
11567 /* Handle extension register ld/st instructions. */
11569 return arm_record_exreg_ld_st_insn (arm_insn_r);
11571 /* 64-bit transfers between arm core and extension registers. */
11572 if ((op1 & 0x3e) == 0x04)
11573 return arm_record_exreg_ld_st_insn (arm_insn_r);
11577 /* Handle coprocessor ld/st instructions. */
11582 return arm_record_unsupported_insn (arm_insn_r);
11585 return arm_record_unsupported_insn (arm_insn_r);
11588 /* Move to coprocessor from two arm core registers. */
11590 return arm_record_unsupported_insn (arm_insn_r);
11592 /* Move to two arm core registers from coprocessor. */
11597 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11598 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11599 arm_insn_r->reg_rec_count = 2;
11601 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11605 return arm_record_unsupported_insn (arm_insn_r);
11608 /* Handling opcode 111 insns. */
11611 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11613 uint32_t op, op1_ebit, coproc, bits_24_25;
11614 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11615 struct regcache *reg_cache = arm_insn_r->regcache;
11617 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11618 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11619 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11620 op = bit (arm_insn_r->arm_insn, 4);
11621 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11623 /* Handle arm SWI/SVC system call instructions. */
11624 if (bits_24_25 == 0x3)
11626 if (tdep->arm_syscall_record != NULL)
11628 ULONGEST svc_operand, svc_number;
11630 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11632 if (svc_operand) /* OABI. */
11633 svc_number = svc_operand - 0x900000;
11635 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
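/* In the old OABI the syscall number is encoded in the SVC immediate,
biased by 0x900000; with the EABI the immediate is zero and the number is
passed in r7, which is why it is read from the register cache here.  */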
11637 return tdep->arm_syscall_record (reg_cache, svc_number);
11641 printf_unfiltered (_("no syscall record support\n"));
11645 else if (bits_24_25 == 0x02)
11649 if ((coproc & 0x0e) == 0x0a)
11651 /* 8, 16, and 32-bit transfer */
11652 return arm_record_vdata_transfer_insn (arm_insn_r);
11659 uint32_t record_buf[1];
11661 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11662 if (record_buf[0] == 15)
11663 record_buf[0] = ARM_PS_REGNUM;
11665 arm_insn_r->reg_rec_count = 1;
11666 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11679 if ((coproc & 0x0e) == 0x0a)
11681 /* VFP data-processing instructions. */
11682 return arm_record_vfp_data_proc_insn (arm_insn_r);
11693 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11697 if ((coproc & 0x0e) != 0x0a)
11703 else if (op1 == 4 || op1 == 5)
11705 if ((coproc & 0x0e) == 0x0a)
11707 /* 64-bit transfers between ARM core and extension */
11716 else if (op1 == 0 || op1 == 1)
11723 if ((coproc & 0x0e) == 0x0a)
11725 /* Extension register load/store */
11729 /* STC, STC2, LDC, LDC2 */
11738 /* Handling opcode 000 insns. */
11741 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11743 uint32_t record_buf[8];
11744 uint32_t reg_src1 = 0;
11746 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11748 record_buf[0] = ARM_PS_REGNUM;
11749 record_buf[1] = reg_src1;
11750 thumb_insn_r->reg_rec_count = 2;
11752 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11758 /* Handling opcode 001 insns. */
11761 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11763 uint32_t record_buf[8];
11764 uint32_t reg_src1 = 0;
11766 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11768 record_buf[0] = ARM_PS_REGNUM;
11769 record_buf[1] = reg_src1;
11770 thumb_insn_r->reg_rec_count = 2;
11772 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11777 /* Handling opcode 010 insns. */
11780 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11782 struct regcache *reg_cache = thumb_insn_r->regcache;
11783 uint32_t record_buf[8], record_buf_mem[8];
11785 uint32_t reg_src1 = 0, reg_src2 = 0;
11786 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11788 ULONGEST u_regval[2] = {0};
11790 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11792 if (bit (thumb_insn_r->arm_insn, 12))
11794 /* Handle load/store register offset. */
11795 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11797 if (in_inclusive_range (opB, 4U, 7U))
11799 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11800 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11801 record_buf[0] = reg_src1;
11802 thumb_insn_r->reg_rec_count = 1;
11804 else if (in_inclusive_range (opB, 0U, 2U))
11806 /* STR(2), STRB(2), STRH(2). */
11807 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11808 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11809 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11810 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11812 record_buf_mem[0] = 4; /* STR (2). */
11814 record_buf_mem[0] = 1; /* STRB (2). */
11816 record_buf_mem[0] = 2; /* STRH (2). */
11817 record_buf_mem[1] = u_regval[0] + u_regval[1];
11818 thumb_insn_r->mem_rec_count = 1;
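/* RECORD_BUF_MEM holds (length, address) pairs; the store target here is
Rn + Rm as read from the register cache above.  */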
11821 else if (bit (thumb_insn_r->arm_insn, 11))
11823 /* Handle load from literal pool. */
11825 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11826 record_buf[0] = reg_src1;
11827 thumb_insn_r->reg_rec_count = 1;
11831 /* Special data instructions and branch and exchange */
11832 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11833 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11834 if ((3 == opcode2) && (!opcode3))
11836 /* Branch with exchange. */
11837 record_buf[0] = ARM_PS_REGNUM;
11838 thumb_insn_r->reg_rec_count = 1;
11842 /* Format 8; special data processing insns. */
11843 record_buf[0] = ARM_PS_REGNUM;
11844 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11845 | bits (thumb_insn_r->arm_insn, 0, 2));
11846 thumb_insn_r->reg_rec_count = 2;
11851 /* Format 5; data processing insns. */
11852 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11853 if (bit (thumb_insn_r->arm_insn, 7))
11855 reg_src1 = reg_src1 + 8;
11857 record_buf[0] = ARM_PS_REGNUM;
11858 record_buf[1] = reg_src1;
11859 thumb_insn_r->reg_rec_count = 2;
11862 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11863 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11869 /* Handling opcode 011 insns. */
11872 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11874 struct regcache *reg_cache = thumb_insn_r->regcache;
11875 uint32_t record_buf[8], record_buf_mem[8];
11877 uint32_t reg_src1 = 0;
11878 uint32_t opcode = 0, immed_5 = 0;
11880 ULONGEST u_regval = 0;
11882 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11887 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11888 record_buf[0] = reg_src1;
11889 thumb_insn_r->reg_rec_count = 1;
11894 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11895 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11896 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11897 record_buf_mem[0] = 4;
11898 record_buf_mem[1] = u_regval + (immed_5 * 4);
11899 thumb_insn_r->mem_rec_count = 1;
11902 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11903 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11909 /* Handling opcode 100 insns. */
11912 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11914 struct regcache *reg_cache = thumb_insn_r->regcache;
11915 uint32_t record_buf[8], record_buf_mem[8];
11917 uint32_t reg_src1 = 0;
11918 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11920 ULONGEST u_regval = 0;
11922 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11927 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11928 record_buf[0] = reg_src1;
11929 thumb_insn_r->reg_rec_count = 1;
11931 else if (1 == opcode)
11934 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11935 record_buf[0] = reg_src1;
11936 thumb_insn_r->reg_rec_count = 1;
11938 else if (2 == opcode)
11941 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11942 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11943 record_buf_mem[0] = 4;
11944 record_buf_mem[1] = u_regval + (immed_8 * 4);
11945 thumb_insn_r->mem_rec_count = 1;
11947 else if (0 == opcode)
11950 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11951 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11952 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11953 record_buf_mem[0] = 2;
11954 record_buf_mem[1] = u_regval + (immed_5 * 2);
11955 thumb_insn_r->mem_rec_count = 1;
11958 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11959 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11965 /* Handling opcode 101 insns. */
11968 thumb_record_misc (insn_decode_record *thumb_insn_r)
11970 struct regcache *reg_cache = thumb_insn_r->regcache;
11972 uint32_t opcode = 0;
11973 uint32_t register_bits = 0, register_count = 0;
11974 uint32_t index = 0, start_address = 0;
11975 uint32_t record_buf[24], record_buf_mem[48];
11978 ULONGEST u_regval = 0;
11980 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11982 if (opcode == 0 || opcode == 1)
11984 /* ADR and ADD (SP plus immediate) */
11986 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11987 record_buf[0] = reg_src1;
11988 thumb_insn_r->reg_rec_count = 1;
11992 /* Miscellaneous 16-bit instructions */
11993 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11998 /* SETEND and CPS */
12001 /* ADD/SUB (SP plus immediate) */
12002 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12003 record_buf[0] = ARM_SP_REGNUM;
12004 thumb_insn_r->reg_rec_count = 1;
12006 case 1: /* fall through */
12007 case 3: /* fall through */
12008 case 9: /* fall through */
12013 /* SXTH, SXTB, UXTH, UXTB */
12014 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12015 thumb_insn_r->reg_rec_count = 1;
12017 case 4: /* fall through */
12020 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12021 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12022 while (register_bits)
12024 if (register_bits & 0x00000001)
12026 register_bits = register_bits >> 1;
12028 start_address = u_regval - \
12029 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
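/* Bit 8 is the M bit of PUSH: when it is set LR is pushed in addition to
the listed registers, hence the extra word in the address calculation.  */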
12030 thumb_insn_r->mem_rec_count = register_count;
12031 while (register_count)
12033 record_buf_mem[(register_count * 2) - 1] = start_address;
12034 record_buf_mem[(register_count * 2) - 2] = 4;
12035 start_address = start_address + 4;
12038 record_buf[0] = ARM_SP_REGNUM;
12039 thumb_insn_r->reg_rec_count = 1;
12042 /* REV, REV16, REVSH */
12043 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12044 thumb_insn_r->reg_rec_count = 1;
12046 case 12: /* fall through */
12049 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12050 while (register_bits)
12052 if (register_bits & 0x00000001)
12053 record_buf[index++] = register_count;
12054 register_bits = register_bits >> 1;
12057 record_buf[index++] = ARM_PS_REGNUM;
12058 record_buf[index++] = ARM_SP_REGNUM;
12059 thumb_insn_r->reg_rec_count = index;
12063 /* Handle enhanced software breakpoint insn, BKPT. */
12064 /* CPSR is changed so that execution continues in ARM state, normal
12065 interrupts are disabled, and abort mode is entered. */
12066 /* The PC is set according to the high vector configuration. */
12067 /* If the user hits the breakpoint and then reverses execution, we need
12068 to go back with the previous CPSR and program counter. */
12069 record_buf[0] = ARM_PS_REGNUM;
12070 record_buf[1] = ARM_LR_REGNUM;
12071 thumb_insn_r->reg_rec_count = 2;
12072 /* We need to save SPSR value, which is not yet done. */
12073 printf_unfiltered (_("Process record does not support instruction "
12074 "0x%0x at address %s.\n"),
12075 thumb_insn_r->arm_insn,
12076 paddress (thumb_insn_r->gdbarch,
12077 thumb_insn_r->this_addr));
12081 /* If-Then, and hints */
12088 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12089 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12095 /* Handling opcode 110 insns. */
12098 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12100 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12101 struct regcache *reg_cache = thumb_insn_r->regcache;
12103 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12104 uint32_t reg_src1 = 0;
12105 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12106 uint32_t index = 0, start_address = 0;
12107 uint32_t record_buf[24], record_buf_mem[48];
12109 ULONGEST u_regval = 0;
12111 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12112 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12118 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12120 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12121 while (register_bits)
12123 if (register_bits & 0x00000001)
12124 record_buf[index++] = register_count;
12125 register_bits = register_bits >> 1;
12128 record_buf[index++] = reg_src1;
12129 thumb_insn_r->reg_rec_count = index;
12131 else if (0 == opcode2)
12133 /* Handle STMIA. */
12134 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12136 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12137 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12138 while (register_bits)
12140 if (register_bits & 0x00000001)
12142 register_bits = register_bits >> 1;
12144 start_address = u_regval;
12145 thumb_insn_r->mem_rec_count = register_count;
12146 while (register_count)
12148 record_buf_mem[(register_count * 2) - 1] = start_address;
12149 record_buf_mem[(register_count * 2) - 2] = 4;
12150 start_address = start_address + 4;
12154 else if (0x1F == opcode1)
12156 /* Handle arm syscall insn. */
12157 if (tdep->arm_syscall_record != NULL)
12159 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12160 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12164 printf_unfiltered (_("no syscall record support\n"));
12169 /* B (1), the conditional branch, is automatically taken care of in
12170 process_record, as the PC is saved there. */
12172 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12173 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12179 /* Handling opcode 111 insns. */
12182 thumb_record_branch (insn_decode_record *thumb_insn_r)
12184 uint32_t record_buf[8];
12185 uint32_t bits_h = 0;
12187 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12189 if (2 == bits_h || 3 == bits_h)
12192 record_buf[0] = ARM_LR_REGNUM;
12193 thumb_insn_r->reg_rec_count = 1;
12195 else if (1 == bits_h)
12198 record_buf[0] = ARM_PS_REGNUM;
12199 record_buf[1] = ARM_LR_REGNUM;
12200 thumb_insn_r->reg_rec_count = 2;
12203 /* B(2) is automatically taken care of in process_record, as PC is
12206 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12211 /* Handler for thumb2 load/store multiple instructions. */
12214 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12216 struct regcache *reg_cache = thumb2_insn_r->regcache;
12218 uint32_t reg_rn, op;
12219 uint32_t register_bits = 0, register_count = 0;
12220 uint32_t index = 0, start_address = 0;
12221 uint32_t record_buf[24], record_buf_mem[48];
12223 ULONGEST u_regval = 0;
12225 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12226 op = bits (thumb2_insn_r->arm_insn, 23, 24);
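/* OP (bits 23-24): 0b00 and 0b11 are SRS/RFE forms, 0b01 is
increment-after (LDMIA/STMIA), and 0b10 is decrement-before
(LDMDB/STMDB).  */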
12228 if (0 == op || 3 == op)
12230 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12232 /* Handle RFE instruction. */
12233 record_buf[0] = ARM_PS_REGNUM;
12234 thumb2_insn_r->reg_rec_count = 1;
12238 /* Handle SRS instruction after reading banked SP. */
12239 return arm_record_unsupported_insn (thumb2_insn_r);
12242 else if (1 == op || 2 == op)
12244 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12246 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12247 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12248 while (register_bits)
12250 if (register_bits & 0x00000001)
12251 record_buf[index++] = register_count;
12254 register_bits = register_bits >> 1;
12256 record_buf[index++] = reg_rn;
12257 record_buf[index++] = ARM_PS_REGNUM;
12258 thumb2_insn_r->reg_rec_count = index;
12262 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12263 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12264 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12265 while (register_bits)
12267 if (register_bits & 0x00000001)
12270 register_bits = register_bits >> 1;
12275 /* Start address calculation for STMIA/STMEA. */
12276 start_address = u_regval;
12280 /* Start address calculation for STMDB/STMFD. */
12281 start_address = u_regval - register_count * 4;
12284 thumb2_insn_r->mem_rec_count = register_count;
12285 while (register_count)
12287 record_buf_mem[register_count * 2 - 1] = start_address;
12288 record_buf_mem[register_count * 2 - 2] = 4;
12289 start_address = start_address + 4;
12292 record_buf[0] = reg_rn;
12293 record_buf[1] = ARM_PS_REGNUM;
12294 thumb2_insn_r->reg_rec_count = 2;
12298 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12300 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12302 return ARM_RECORD_SUCCESS;
12305 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12309 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12311 struct regcache *reg_cache = thumb2_insn_r->regcache;
12313 uint32_t reg_rd, reg_rn, offset_imm;
12314 uint32_t reg_dest1, reg_dest2;
12315 uint32_t address, offset_addr;
12316 uint32_t record_buf[8], record_buf_mem[8];
12317 uint32_t op1, op2, op3;
12319 ULONGEST u_regval[2];
12321 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12322 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12323 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12325 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12327 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12329 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12330 record_buf[0] = reg_dest1;
12331 record_buf[1] = ARM_PS_REGNUM;
12332 thumb2_insn_r->reg_rec_count = 2;
12335 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12337 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12338 record_buf[2] = reg_dest2;
12339 thumb2_insn_r->reg_rec_count = 3;
12344 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12345 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12347 if (0 == op1 && 0 == op2)
12349 /* Handle STREX. */
12350 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12351 address = u_regval[0] + (offset_imm * 4);
12352 record_buf_mem[0] = 4;
12353 record_buf_mem[1] = address;
12354 thumb2_insn_r->mem_rec_count = 1;
12355 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12356 record_buf[0] = reg_rd;
12357 thumb2_insn_r->reg_rec_count = 1;
12359 else if (1 == op1 && 0 == op2)
12361 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12362 record_buf[0] = reg_rd;
12363 thumb2_insn_r->reg_rec_count = 1;
12364 address = u_regval[0];
12365 record_buf_mem[1] = address;
12369 /* Handle STREXB. */
12370 record_buf_mem[0] = 1;
12371 thumb2_insn_r->mem_rec_count = 1;
12375 /* Handle STREXH. */
12376 record_buf_mem[0] = 2;
12377 thumb2_insn_r->mem_rec_count = 1;
12381 /* Handle STREXD. */
12382 address = u_regval[0];
12383 record_buf_mem[0] = 4;
12384 record_buf_mem[2] = 4;
12385 record_buf_mem[3] = address + 4;
12386 thumb2_insn_r->mem_rec_count = 2;
12391 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12393 if (bit (thumb2_insn_r->arm_insn, 24))
12395 if (bit (thumb2_insn_r->arm_insn, 23))
12396 offset_addr = u_regval[0] + (offset_imm * 4);
12398 offset_addr = u_regval[0] - (offset_imm * 4);
12400 address = offset_addr;
12403 address = u_regval[0];
12405 record_buf_mem[0] = 4;
12406 record_buf_mem[1] = address;
12407 record_buf_mem[2] = 4;
12408 record_buf_mem[3] = address + 4;
12409 thumb2_insn_r->mem_rec_count = 2;
12410 record_buf[0] = reg_rn;
12411 thumb2_insn_r->reg_rec_count = 1;
12415 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12417 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12419 return ARM_RECORD_SUCCESS;
12422 /* Handler for thumb2 data processing (shifted register and modified immediate)
12426 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12428 uint32_t reg_rd, op;
12429 uint32_t record_buf[8];
12431 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12432 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12434 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12436 record_buf[0] = ARM_PS_REGNUM;
12437 thumb2_insn_r->reg_rec_count = 1;
12441 record_buf[0] = reg_rd;
12442 record_buf[1] = ARM_PS_REGNUM;
12443 thumb2_insn_r->reg_rec_count = 2;
12446 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12448 return ARM_RECORD_SUCCESS;
12451 /* Generic handler for thumb2 instructions which affect the destination and PS
12455 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12458 uint32_t record_buf[8];
12460 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12462 record_buf[0] = reg_rd;
12463 record_buf[1] = ARM_PS_REGNUM;
12464 thumb2_insn_r->reg_rec_count = 2;
12466 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12468 return ARM_RECORD_SUCCESS;
12471 /* Handler for thumb2 branch and miscellaneous control instructions. */
12474 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12476 uint32_t op, op1, op2;
12477 uint32_t record_buf[8];
12479 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12480 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12481 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12483 /* Handle MSR insn. */
12484 if (!(op1 & 0x2) && 0x38 == op)
12488 /* CPSR is going to be changed. */
12489 record_buf[0] = ARM_PS_REGNUM;
12490 thumb2_insn_r->reg_rec_count = 1;
12494 arm_record_unsupported_insn (thumb2_insn_r);
12498 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12501 record_buf[0] = ARM_PS_REGNUM;
12502 record_buf[1] = ARM_LR_REGNUM;
12503 thumb2_insn_r->reg_rec_count = 2;
12506 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12508 return ARM_RECORD_SUCCESS;
12511 /* Handler for thumb2 store single data item instructions. */
12514 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12516 struct regcache *reg_cache = thumb2_insn_r->regcache;
12518 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12519 uint32_t address, offset_addr;
12520 uint32_t record_buf[8], record_buf_mem[8];
12523 ULONGEST u_regval[2];
12525 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12526 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12527 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12528 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12530 if (bit (thumb2_insn_r->arm_insn, 23))
12533 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12534 offset_addr = u_regval[0] + offset_imm;
12535 address = offset_addr;
12540 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12542 /* Handle STRB (register). */
12543 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12544 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12545 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12546 offset_addr = u_regval[1] << shift_imm;
12547 address = u_regval[0] + offset_addr;
12551 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12552 if (bit (thumb2_insn_r->arm_insn, 10))
12554 if (bit (thumb2_insn_r->arm_insn, 9))
12555 offset_addr = u_regval[0] + offset_imm;
12557 offset_addr = u_regval[0] - offset_imm;
12559 address = offset_addr;
12562 address = u_regval[0];
12568 /* Store byte instructions. */
12571 record_buf_mem[0] = 1;
12573 /* Store half word instructions. */
12576 record_buf_mem[0] = 2;
12578 /* Store word instructions. */
12581 record_buf_mem[0] = 4;
12585 gdb_assert_not_reached ("no decoding pattern found");
12589 record_buf_mem[1] = address;
12590 thumb2_insn_r->mem_rec_count = 1;
12591 record_buf[0] = reg_rn;
12592 thumb2_insn_r->reg_rec_count = 1;
12594 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12596 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12598 return ARM_RECORD_SUCCESS;
12601 /* Handler for thumb2 load memory hints instructions. */
12604 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12606 uint32_t record_buf[8];
12607 uint32_t reg_rt, reg_rn;
12609 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12610 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12612 if (ARM_PC_REGNUM != reg_rt)
12614 record_buf[0] = reg_rt;
12615 record_buf[1] = reg_rn;
12616 record_buf[2] = ARM_PS_REGNUM;
12617 thumb2_insn_r->reg_rec_count = 3;
12619 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12621 return ARM_RECORD_SUCCESS;
12624 return ARM_RECORD_FAILURE;
12627 /* Handler for thumb2 load word instructions. */
12630 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12632 uint32_t record_buf[8];
12634 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12635 record_buf[1] = ARM_PS_REGNUM;
12636 thumb2_insn_r->reg_rec_count = 2;
12638 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12640 return ARM_RECORD_SUCCESS;
12643 /* Handler for thumb2 long multiply, long multiply accumulate, and
12644 divide instructions. */
12647 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12649 uint32_t opcode1 = 0, opcode2 = 0;
12650 uint32_t record_buf[8];
12652 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12653 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12655 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12657 /* Handle SMULL, UMULL, SMLAL. */
12658 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12659 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12660 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12661 record_buf[2] = ARM_PS_REGNUM;
12662 thumb2_insn_r->reg_rec_count = 3;
12664 else if (1 == opcode1 || 3 == opcode1)
12666 /* Handle SDIV and UDIV. */
12667 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12668 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12669 record_buf[2] = ARM_PS_REGNUM;
12670 thumb2_insn_r->reg_rec_count = 3;
12673 return ARM_RECORD_FAILURE;
12675 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12677 return ARM_RECORD_SUCCESS;
12680 /* Record handler for thumb32 coprocessor instructions. */
12683 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12685 if (bit (thumb2_insn_r->arm_insn, 25))
12686 return arm_record_coproc_data_proc (thumb2_insn_r);
12688 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12691 /* Record handler for Advanced SIMD structure load/store instructions. */
12694 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12696 struct regcache *reg_cache = thumb2_insn_r->regcache;
12697 uint32_t l_bit, a_bit, b_bits;
12698 uint32_t record_buf[128], record_buf_mem[128];
12699 uint32_t reg_rn, reg_vd, address, f_elem;
12700 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12703 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12704 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12705 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12706 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12707 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12708 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12709 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12710 f_elem = 8 / f_ebytes;
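/* F_EBYTES is the element size in bytes taken from the size field (bits
6-7) and F_ELEM is the number of such elements that fit in one 64-bit D
register.  */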
12714 ULONGEST u_regval = 0;
12715 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12716 address = u_regval;
12721 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12723 if (b_bits == 0x07)
12725 else if (b_bits == 0x0a)
12727 else if (b_bits == 0x06)
12729 else if (b_bits == 0x02)
12734 for (index_r = 0; index_r < bf_regs; index_r++)
12736 for (index_e = 0; index_e < f_elem; index_e++)
12738 record_buf_mem[index_m++] = f_ebytes;
12739 record_buf_mem[index_m++] = address;
12740 address = address + f_ebytes;
12741 thumb2_insn_r->mem_rec_count += 1;
12746 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12748 if (b_bits == 0x09 || b_bits == 0x08)
12750 else if (b_bits == 0x03)
12755 for (index_r = 0; index_r < bf_regs; index_r++)
12756 for (index_e = 0; index_e < f_elem; index_e++)
12758 for (loop_t = 0; loop_t < 2; loop_t++)
12760 record_buf_mem[index_m++] = f_ebytes;
12761 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12762 thumb2_insn_r->mem_rec_count += 1;
12764 address = address + (2 * f_ebytes);
12768 else if ((b_bits & 0x0e) == 0x04)
12770 for (index_e = 0; index_e < f_elem; index_e++)
12772 for (loop_t = 0; loop_t < 3; loop_t++)
12774 record_buf_mem[index_m++] = f_ebytes;
12775 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12776 thumb2_insn_r->mem_rec_count += 1;
12778 address = address + (3 * f_ebytes);
12782 else if (!(b_bits & 0x0e))
12784 for (index_e = 0; index_e < f_elem; index_e++)
12786 for (loop_t = 0; loop_t < 4; loop_t++)
12788 record_buf_mem[index_m++] = f_ebytes;
12789 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12790 thumb2_insn_r->mem_rec_count += 1;
12792 address = address + (4 * f_ebytes);
12798 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12800 if (bft_size == 0x00)
12802 else if (bft_size == 0x01)
12804 else if (bft_size == 0x02)
12810 if (!(b_bits & 0x0b) || b_bits == 0x08)
12811 thumb2_insn_r->mem_rec_count = 1;
12813 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12814 thumb2_insn_r->mem_rec_count = 2;
12816 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12817 thumb2_insn_r->mem_rec_count = 3;
12819 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12820 thumb2_insn_r->mem_rec_count = 4;
12822 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12824 record_buf_mem[index_m * 2] = f_ebytes;
12825 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12834 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12835 thumb2_insn_r->reg_rec_count = 1;
12837 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12838 thumb2_insn_r->reg_rec_count = 2;
12840 else if ((b_bits & 0x0e) == 0x04)
12841 thumb2_insn_r->reg_rec_count = 3;
12843 else if (!(b_bits & 0x0e))
12844 thumb2_insn_r->reg_rec_count = 4;
12849 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12850 thumb2_insn_r->reg_rec_count = 1;
12852 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12853 thumb2_insn_r->reg_rec_count = 2;
12855 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12856 thumb2_insn_r->reg_rec_count = 3;
12858 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12859 thumb2_insn_r->reg_rec_count = 4;
12861 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12862 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12866 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12868 record_buf[index_r] = reg_rn;
12869 thumb2_insn_r->reg_rec_count += 1;
12872 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12874 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12879 /* Decodes thumb2 instruction type and invokes its record handler. */
12881 static unsigned int
12882 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12884 uint32_t op, op1, op2;
12886 op = bit (thumb2_insn_r->arm_insn, 15);
12887 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12888 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12892 if (!(op2 & 0x64))
12894 /* Load/store multiple instruction. */
12895 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12897 else if ((op2 & 0x64) == 0x4)
12899 /* Load/store (dual/exclusive) and table branch instruction. */
12900 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12902 else if ((op2 & 0x60) == 0x20)
12904 /* Data-processing (shifted register). */
12905 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12907 else if (op2 & 0x40)
12909 /* Co-processor instructions. */
12910 return thumb2_record_coproc_insn (thumb2_insn_r);
12913 else if (op1 == 0x02)
12917 /* Branches and miscellaneous control instructions. */
12918 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12920 else if (op2 & 0x20)
12922 /* Data-processing (plain binary immediate) instruction. */
12923 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12927 /* Data-processing (modified immediate). */
12928 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12931 else if (op1 == 0x03)
12933 if (!(op2 & 0x71))
12935 /* Store single data item. */
12936 return thumb2_record_str_single_data (thumb2_insn_r);
12938 else if (!((op2 & 0x71) ^ 0x10))
12940 /* Advanced SIMD or structure load/store instructions. */
12941 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12943 else if (!((op2 & 0x67) ^ 0x01))
12945 /* Load byte, memory hints instruction. */
12946 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12948 else if (!((op2 & 0x67) ^ 0x03))
12950 /* Load halfword, memory hints instruction. */
12951 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12953 else if (!((op2 & 0x67) ^ 0x05))
12955 /* Load word instruction. */
12956 return thumb2_record_ld_word (thumb2_insn_r);
12958 else if (!((op2 & 0x70) ^ 0x20))
12960 /* Data-processing (register) instruction. */
12961 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12963 else if (!((op2 & 0x78) ^ 0x30))
12965 /* Multiply, multiply accumulate, abs diff instruction. */
12966 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12968 else if (!((op2 & 0x78) ^ 0x38))
12970 /* Long multiply, long multiply accumulate, and divide. */
12971 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12973 else if (op2 & 0x40)
12975 /* Co-processor instructions. */
12976 return thumb2_record_coproc_insn (thumb2_insn_r);
12984 /* Abstract memory reader. */
12986 class abstract_memory_reader
12989 /* Read LEN bytes of target memory at address MEMADDR, placing the
12990 results in GDB's memory at BUF. Return true on success. */
12992 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
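/* Keeping the reader abstract lets the recorder be exercised without a live
   target: instruction_reader below forwards to target_read_memory (which
   returns zero on success, hence the inverted test), while the selftests
   further down substitute instruction_reader_thumb to serve canned opcodes
   from a local buffer.  */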
12995 /* Instruction reader from real target. */
12997 class instruction_reader : public abstract_memory_reader
13000 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13002 if (target_read_memory (memaddr, buf, len))
13011 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13012 and a positive value on failure. */
13015 extract_arm_insn (abstract_memory_reader& reader,
13016 insn_decode_record *insn_record, uint32_t insn_size)
13018 gdb_byte buf[insn_size];
13020 memset (&buf[0], 0, insn_size);
13022 if (!reader.read (insn_record->this_addr, buf, insn_size))
13024 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13026 gdbarch_byte_order_for_code (insn_record->gdbarch));
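/* A minimal usage sketch (illustrative only; decode_insn below wraps this
   exact pattern):

     insn_decode_record rec;
     memset (&rec, 0, sizeof (insn_decode_record));
     rec.gdbarch = gdbarch;
     rec.this_addr = pc;
     if (extract_arm_insn (reader, &rec, ARM_INSN_SIZE_BYTES) != 0)
       ... report the failed memory read ...

   On success rec.arm_insn holds the instruction bytes interpreted in the
   architecture's code byte order.  */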
13030 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13032 /* Decode arm/thumb insn depending on condition codes and opcodes, and dispatch it accordingly. */
13036 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13037 record_type_t record_type, uint32_t insn_size)
13040 /* (Counting from bit 0); bits 25, 26, 27 decode the type of arm instruction. */
13042 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13044 arm_record_data_proc_misc_ld_str, /* 000. */
13045 arm_record_data_proc_imm, /* 001. */
13046 arm_record_ld_st_imm_offset, /* 010. */
13047 arm_record_ld_st_reg_offset, /* 011. */
13048 arm_record_ld_st_multiple, /* 100. */
13049 arm_record_b_bl, /* 101. */
13050 arm_record_asimd_vfp_coproc, /* 110. */
13051 arm_record_coproc_data_proc /* 111. */
13054 /* (Counting from bit 0); bits 13, 14, 15 decode the type of thumb instruction. */
13056 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13058 thumb_record_shift_add_sub, /* 000. */
13059 thumb_record_add_sub_cmp_mov, /* 001. */
13060 thumb_record_ld_st_reg_offset, /* 010. */
13061 thumb_record_ld_st_imm_offset, /* 011. */
13062 thumb_record_ld_st_stack, /* 100. */
13063 thumb_record_misc, /* 101. */
13064 thumb_record_ldm_stm_swi, /* 110. */
13065 thumb_record_branch /* 111. */
13068 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13069 uint32_t insn_id = 0;
13071 if (extract_arm_insn (reader, arm_record, insn_size))
13075 printf_unfiltered (_("Process record: error reading memory at "
13076 "addr %s len = %d.\n"),
13077 paddress (arm_record->gdbarch,
13078 arm_record->this_addr), insn_size);
13082 else if (ARM_RECORD == record_type)
13084 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13085 insn_id = bits (arm_record->arm_insn, 25, 27);
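/* Example: for 0xe5903000 (ldr r3, [r0]) bits 27:25 are 0b010, so the insn
   is dispatched to arm_record_ld_st_imm_offset.  */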
13087 if (arm_record->cond == 0xf)
13088 ret = arm_record_extension_space (arm_record);
13091 /* This insn has not fallen into the extension space, so decode it
13092 normally via the dispatch table. */
13093 ret = arm_handle_insn[insn_id] (arm_record);
13095 if (ret != ARM_RECORD_SUCCESS)
13097 arm_record_unsupported_insn (arm_record);
13101 else if (THUMB_RECORD == record_type)
13103 /* As thumb does not have condition codes, set cond to -1 (none). */
13104 arm_record->cond = -1;
13105 insn_id = bits (arm_record->arm_insn, 13, 15);
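/* Example: for 0xb2db (uxtb r3, r3, the first instruction in the selftest
   below) bits 15:13 are 0b101, so the insn is dispatched to
   thumb_record_misc.  */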
13106 ret = thumb_handle_insn[insn_id] (arm_record);
13107 if (ret != ARM_RECORD_SUCCESS)
13109 arm_record_unsupported_insn (arm_record);
13113 else if (THUMB2_RECORD == record_type)
13116 /* As thumb does not have condition codes, set cond to -1 (none). */
13116 arm_record->cond = -1;
13118 /* Swap the first halfword of the 32-bit thumb instruction with the second halfword. */
13119 arm_record->arm_insn
13120 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
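/* With the halfwords swapped, the first halfword (which carries the Thumb-2
   major opcode) occupies bits 31:16, so thumb2_record_decode_insn_handler can
   test the op1/op2 fields at fixed bit positions.  */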
13122 ret = thumb2_record_decode_insn_handler (arm_record);
13124 if (ret != ARM_RECORD_SUCCESS)
13126 arm_record_unsupported_insn (arm_record);
13132 /* Any other record type is a programming error. */
13133 gdb_assert_not_reached ("not a valid instruction, could not decode");
13140 namespace selftests {
13142 /* Provide both 16-bit and 32-bit thumb instructions. */
13144 class instruction_reader_thumb : public abstract_memory_reader
13147 template<size_t SIZE>
13148 instruction_reader_thumb (enum bfd_endian endian,
13149 const uint16_t (&insns)[SIZE])
13150 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13153 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13155 SELF_CHECK (len == 4 || len == 2);
13156 SELF_CHECK (memaddr % 2 == 0);
13157 SELF_CHECK ((memaddr / 2) < m_insns_size);
13159 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13162 store_unsigned_integer (&buf[2], 2, m_endian,
13163 m_insns[memaddr / 2 + 1]);
13169 enum bfd_endian m_endian;
13170 const uint16_t *m_insns;
13171 size_t m_insns_size;
13175 arm_record_test (void)
13177 struct gdbarch_info info;
13178 gdbarch_info_init (&info);
13179 info.bfd_arch_info = bfd_scan_arch ("arm");
13181 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13183 SELF_CHECK (gdbarch != NULL);
13185 /* 16-bit Thumb instructions. */
13187 insn_decode_record arm_record;
13189 memset (&arm_record, 0, sizeof (insn_decode_record));
13190 arm_record.gdbarch = gdbarch;
13192 static const uint16_t insns[] = {
13193 /* db b2 uxtb r3, r3 */
13195 /* cd 58 ldr r5, [r1, r3] */
13199 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13200 instruction_reader_thumb reader (endian, insns);
13201 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13202 THUMB_INSN_SIZE_BYTES);
13204 SELF_CHECK (ret == 0);
13205 SELF_CHECK (arm_record.mem_rec_count == 0);
13206 SELF_CHECK (arm_record.reg_rec_count == 1);
13207 SELF_CHECK (arm_record.arm_regs[0] == 3);
13209 arm_record.this_addr += 2;
13210 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13211 THUMB_INSN_SIZE_BYTES);
13213 SELF_CHECK (ret == 0);
13214 SELF_CHECK (arm_record.mem_rec_count == 0);
13215 SELF_CHECK (arm_record.reg_rec_count == 1);
13216 SELF_CHECK (arm_record.arm_regs[0] == 5);
13219 /* 32-bit Thumb-2 instructions. */
13221 insn_decode_record arm_record;
13223 memset (&arm_record, 0, sizeof (insn_decode_record));
13224 arm_record.gdbarch = gdbarch;
13226 static const uint16_t insns[] = {
13227 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13231 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13232 instruction_reader_thumb reader (endian, insns);
13233 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13234 THUMB2_INSN_SIZE_BYTES);
13236 SELF_CHECK (ret == 0);
13237 SELF_CHECK (arm_record.mem_rec_count == 0);
13238 SELF_CHECK (arm_record.reg_rec_count == 1);
13239 SELF_CHECK (arm_record.arm_regs[0] == 7);
13242 } // namespace selftests
13243 #endif /* GDB_SELF_TEST */
13245 /* Cleans up local record registers and memory allocations. */
13248 deallocate_reg_mem (insn_decode_record *record)
13250 xfree (record->arm_regs);
13251 xfree (record->arm_mems);
13255 /* Parse the current instruction and record the values of the registers and
13256 memory that will be changed by the current instruction to "record_arch_list".
13257 Return -1 if something is wrong. */
13260 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13261 CORE_ADDR insn_addr)
13264 uint32_t no_of_rec = 0;
13265 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13266 ULONGEST t_bit = 0, insn_id = 0;
13268 ULONGEST u_regval = 0;
13270 insn_decode_record arm_record;
13272 memset (&arm_record, 0, sizeof (insn_decode_record));
13273 arm_record.regcache = regcache;
13274 arm_record.this_addr = insn_addr;
13275 arm_record.gdbarch = gdbarch;
13278 if (record_debug > 1)
13280 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13282 paddress (gdbarch, arm_record.this_addr));
13285 instruction_reader reader;
13286 if (extract_arm_insn (reader, &arm_record, 2))
13290 printf_unfiltered (_("Process record: error reading memory at "
13291 "addr %s len = %d.\n"),
13292 paddress (arm_record.gdbarch,
13293 arm_record.this_addr), 2);
13298 /* Check whether the insn is a thumb or an arm one. */
13300 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13301 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13304 if (!(u_regval & t_bit))
13306 /* We are decoding an arm insn. */
13307 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13311 insn_id = bits (arm_record.arm_insn, 11, 15);
13312 /* Is it a 32-bit thumb2 insn?  The first halfword of a 32-bit Thumb encoding has bits 15:11 equal to 0b11101 (0x1D), 0b11110 (0x1E) or 0b11111 (0x1F). */
13313 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13315 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13316 THUMB2_INSN_SIZE_BYTES);
13320 /* We are decoding a thumb insn. */
13321 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13322 THUMB_INSN_SIZE_BYTES);
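/* Note that decode_insn re-reads the instruction at this_addr with the proper
   size (2 or 4 bytes), so the 2-byte read above is only used to classify the
   encoding.  */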
13328 /* Record registers. */
13329 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13330 if (arm_record.arm_regs)
13332 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13334 if (record_full_arch_list_add_reg
13335 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13339 /* Record memories. */
13340 if (arm_record.arm_mems)
13342 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13344 if (record_full_arch_list_add_mem
13345 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13346 arm_record.arm_mems[no_of_rec].len))
13351 if (record_full_arch_list_add_end ())
13356 deallocate_reg_mem (&arm_record);
13361 /* See arm-tdep.h. */
13363 const target_desc *
13364 arm_read_description (arm_fp_type fp_type)
13366 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13368 if (tdesc == nullptr)
13370 tdesc = arm_create_target_description (fp_type);
13371 tdesc_arm_list[fp_type] = tdesc;
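/* The freshly created description is cached in tdesc_arm_list, so later calls
   with the same fp_type return the same target_desc instance.  */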
13377 /* See arm-tdep.h. */
13379 const target_desc *
13380 arm_read_mprofile_description (arm_m_profile_type m_type)
13382 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13384 if (tdesc == nullptr)
13386 tdesc = arm_create_mprofile_target_description (m_type);
13387 tdesc_arm_mprofile_list[m_type] = tdesc;