1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "gdb/sim-arm.h"
53 #include "coff/internal.h"
56 #include "gdb_assert.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
240 static int thumb_insn_size (unsigned short inst1);
242 struct arm_prologue_cache
244 /* The stack pointer at the time this frame was created; i.e. the
245 caller's stack pointer when this function was called. It is used
246 to identify this frame. */
249 /* The frame base for this frame is just prev_sp - frame size.
250 FRAMESIZE is the distance from the frame pointer to the
251 initial stack pointer. */
255 /* The register used to hold the frame pointer for this frame. */
258 /* Saved register offsets. */
259 struct trad_frame_saved_reg *saved_regs;
262 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
263 CORE_ADDR prologue_start,
264 CORE_ADDR prologue_end,
265 struct arm_prologue_cache *cache);
267 /* Architecture version for displaced stepping. This effects the behaviour of
268 certain instructions, and really should not be hard-wired. */
270 #define DISPLACED_STEPPING_ARCH_VERSION 5
272 /* Addresses for calling Thumb functions have the bit 0 set.
273 Here are some macros to test, set, or clear bit 0 of addresses. */
274 #define IS_THUMB_ADDR(addr) ((addr) & 1)
275 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
276 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
278 /* Set to true if the 32-bit mode is in use. */
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
285 arm_psr_thumb_bit (struct gdbarch *gdbarch)
287 if (gdbarch_tdep (gdbarch)->is_m)
293 /* Determine if FRAME is executing in Thumb mode. */
296 arm_frame_is_thumb (struct frame_info *frame)
299 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
301 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
302 directly (from a signal frame or dummy frame) or by interpreting
303 the saved LR (from a prologue or DWARF frame). So consult it and
304 trust the unwinders. */
305 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
307 return (cpsr & t_bit) != 0;
310 /* Callback for VEC_lower_bound. */
313 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
314 const struct arm_mapping_symbol *rhs)
316 return lhs->value < rhs->value;
319 /* Search for the mapping symbol covering MEMADDR. If one is found,
320 return its type. Otherwise, return 0. If START is non-NULL,
321 set *START to the location of the mapping symbol. */
324 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
326 struct obj_section *sec;
328 /* If there are mapping symbols, consult them. */
329 sec = find_pc_section (memaddr);
332 struct arm_per_objfile *data;
333 VEC(arm_mapping_symbol_s) *map;
334 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
338 data = objfile_data (sec->objfile, arm_objfile_data_key);
341 map = data->section_maps[sec->the_bfd_section->index];
342 if (!VEC_empty (arm_mapping_symbol_s, map))
344 struct arm_mapping_symbol *map_sym;
346 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
347 arm_compare_mapping_symbols);
349 /* VEC_lower_bound finds the earliest ordered insertion
350 point. If the following symbol starts at this exact
351 address, we use that; otherwise, the preceding
352 mapping symbol covers this address. */
353 if (idx < VEC_length (arm_mapping_symbol_s, map))
355 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
356 if (map_sym->value == map_key.value)
359 *start = map_sym->value + obj_section_addr (sec);
360 return map_sym->type;
366 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
368 *start = map_sym->value + obj_section_addr (sec);
369 return map_sym->type;
378 /* Determine if the program counter specified in MEMADDR is in a Thumb
379 function. This function should be called for addresses unrelated to
380 any executing frame; otherwise, prefer arm_frame_is_thumb. */
383 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
385 struct bound_minimal_symbol sym;
387 struct displaced_step_closure* dsc
388 = get_displaced_step_closure_by_addr(memaddr);
390 /* If checking the mode of displaced instruction in copy area, the mode
391 should be determined by instruction on the original address. */
395 fprintf_unfiltered (gdb_stdlog,
396 "displaced: check mode of %.8lx instead of %.8lx\n",
397 (unsigned long) dsc->insn_addr,
398 (unsigned long) memaddr);
399 memaddr = dsc->insn_addr;
402 /* If bit 0 of the address is set, assume this is a Thumb address. */
403 if (IS_THUMB_ADDR (memaddr))
406 /* Respect internal mode override if active. */
407 if (arm_override_mode != -1)
408 return arm_override_mode;
410 /* If the user wants to override the symbol table, let him. */
411 if (strcmp (arm_force_mode_string, "arm") == 0)
413 if (strcmp (arm_force_mode_string, "thumb") == 0)
416 /* ARM v6-M and v7-M are always in Thumb mode. */
417 if (gdbarch_tdep (gdbarch)->is_m)
420 /* If there are mapping symbols, consult them. */
421 type = arm_find_mapping_symbol (memaddr, NULL);
425 /* Thumb functions have a "special" bit set in minimal symbols. */
426 sym = lookup_minimal_symbol_by_pc (memaddr);
428 return (MSYMBOL_IS_SPECIAL (sym.minsym));
430 /* If the user wants to override the fallback mode, let them. */
431 if (strcmp (arm_fallback_mode_string, "arm") == 0)
433 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
436 /* If we couldn't find any symbol, but we're talking to a running
437 target, then trust the current value of $cpsr. This lets
438 "display/i $pc" always show the correct mode (though if there is
439 a symbol table we will not reach here, so it still may not be
440 displayed in the mode it will be executed). */
441 if (target_has_registers)
442 return arm_frame_is_thumb (get_current_frame ());
444 /* Otherwise we're out of luck; we assume ARM. */
448 /* Remove useless bits from addresses in a running program. */
450 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
452 /* On M-profile devices, do not strip the low bit from EXC_RETURN
453 (the magic exception return address). */
454 if (gdbarch_tdep (gdbarch)->is_m
455 && (val & 0xfffffff0) == 0xfffffff0)
459 return UNMAKE_THUMB_ADDR (val);
461 return (val & 0x03fffffc);
464 /* Return 1 if PC is the start of a compiler helper function which
465 can be safely ignored during prologue skipping. IS_THUMB is true
466 if the function is known to be a Thumb function due to the way it
469 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
471 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
472 struct bound_minimal_symbol msym;
474 msym = lookup_minimal_symbol_by_pc (pc);
475 if (msym.minsym != NULL
476 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
477 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
479 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
481 /* The GNU linker's Thumb call stub to foo is named
483 if (strstr (name, "_from_thumb") != NULL)
486 /* On soft-float targets, __truncdfsf2 is called to convert promoted
487 arguments to their argument types in non-prototyped
489 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
491 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
494 /* Internal functions related to thread-local storage. */
495 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
497 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
502 /* If we run against a stripped glibc, we may be unable to identify
503 special functions by name. Check for one important case,
504 __aeabi_read_tp, by comparing the *code* against the default
505 implementation (this is hand-written ARM assembler in glibc). */
508 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
509 == 0xe3e00a0f /* mov r0, #0xffff0fff */
510 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
511 == 0xe240f01f) /* sub pc, r0, #31 */
518 /* Support routines for instruction parsing. */
519 #define submask(x) ((1L << ((x) + 1)) - 1)
520 #define bit(obj,st) (((obj) >> (st)) & 1)
521 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
522 #define sbits(obj,st,fn) \
523 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
524 #define BranchDest(addr,instr) \
525 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
527 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
528 the first 16-bit of instruction, and INSN2 is the second 16-bit of
530 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
531 ((bits ((insn1), 0, 3) << 12) \
532 | (bits ((insn1), 10, 10) << 11) \
533 | (bits ((insn2), 12, 14) << 8) \
534 | bits ((insn2), 0, 7))
536 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
537 the 32-bit instruction. */
538 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
539 ((bits ((insn), 16, 19) << 12) \
540 | bits ((insn), 0, 11))
542 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
545 thumb_expand_immediate (unsigned int imm)
547 unsigned int count = imm >> 7;
555 return (imm & 0xff) | ((imm & 0xff) << 16);
557 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
559 return (imm & 0xff) | ((imm & 0xff) << 8)
560 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
563 return (0x80 | (imm & 0x7f)) << (32 - count);
566 /* Return 1 if the 16-bit Thumb instruction INST might change
567 control flow, 0 otherwise. */
570 thumb_instruction_changes_pc (unsigned short inst)
572 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
575 if ((inst & 0xf000) == 0xd000) /* conditional branch */
578 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
581 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
584 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
587 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
593 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
594 might change control flow, 0 otherwise. */
597 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
599 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
601 /* Branches and miscellaneous control instructions. */
603 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
608 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
610 /* SUBS PC, LR, #imm8. */
613 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
615 /* Conditional branch. */
622 if ((inst1 & 0xfe50) == 0xe810)
624 /* Load multiple or RFE. */
626 if (bit (inst1, 7) && !bit (inst1, 8))
632 else if (!bit (inst1, 7) && bit (inst1, 8))
638 else if (bit (inst1, 7) && bit (inst1, 8))
643 else if (!bit (inst1, 7) && !bit (inst1, 8))
652 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
654 /* MOV PC or MOVS PC. */
658 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
661 if (bits (inst1, 0, 3) == 15)
667 if ((inst2 & 0x0fc0) == 0x0000)
673 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
679 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
688 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
689 epilogue, 0 otherwise. */
692 thumb_instruction_restores_sp (unsigned short insn)
694 return (insn == 0x46bd /* mov sp, r7 */
695 || (insn & 0xff80) == 0xb000 /* add sp, imm */
696 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
699 /* Analyze a Thumb prologue, looking for a recognizable stack frame
700 and frame pointer. Scan until we encounter a store that could
701 clobber the stack frame unexpectedly, or an unknown instruction.
702 Return the last address which is definitely safe to skip for an
703 initial breakpoint. */
706 thumb_analyze_prologue (struct gdbarch *gdbarch,
707 CORE_ADDR start, CORE_ADDR limit,
708 struct arm_prologue_cache *cache)
710 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
711 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
714 struct pv_area *stack;
715 struct cleanup *back_to;
717 CORE_ADDR unrecognized_pc = 0;
719 for (i = 0; i < 16; i++)
720 regs[i] = pv_register (i, 0);
721 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
722 back_to = make_cleanup_free_pv_area (stack);
724 while (start < limit)
728 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
730 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
735 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
738 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
739 whether to save LR (R14). */
740 mask = (insn & 0xff) | ((insn & 0x100) << 6);
742 /* Calculate offsets of saved R0-R7 and LR. */
743 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
744 if (mask & (1 << regno))
746 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
748 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
751 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
753 offset = (insn & 0x7f) << 2; /* get scaled offset */
754 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
757 else if (thumb_instruction_restores_sp (insn))
759 /* Don't scan past the epilogue. */
762 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
763 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
765 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
766 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
767 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
769 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
770 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
771 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
773 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
774 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
775 && pv_is_constant (regs[bits (insn, 3, 5)]))
776 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
777 regs[bits (insn, 6, 8)]);
778 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
779 && pv_is_constant (regs[bits (insn, 3, 6)]))
781 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
782 int rm = bits (insn, 3, 6);
783 regs[rd] = pv_add (regs[rd], regs[rm]);
785 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
787 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
788 int src_reg = (insn & 0x78) >> 3;
789 regs[dst_reg] = regs[src_reg];
791 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
793 /* Handle stores to the stack. Normally pushes are used,
794 but with GCC -mtpcs-frame, there may be other stores
795 in the prologue to create the frame. */
796 int regno = (insn >> 8) & 0x7;
799 offset = (insn & 0xff) << 2;
800 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
802 if (pv_area_store_would_trash (stack, addr))
805 pv_area_store (stack, addr, 4, regs[regno]);
807 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
809 int rd = bits (insn, 0, 2);
810 int rn = bits (insn, 3, 5);
813 offset = bits (insn, 6, 10) << 2;
814 addr = pv_add_constant (regs[rn], offset);
816 if (pv_area_store_would_trash (stack, addr))
819 pv_area_store (stack, addr, 4, regs[rd]);
821 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
822 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
823 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
824 /* Ignore stores of argument registers to the stack. */
826 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
827 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
828 /* Ignore block loads from the stack, potentially copying
829 parameters from memory. */
831 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
832 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
833 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
834 /* Similarly ignore single loads from the stack. */
836 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
837 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
838 /* Skip register copies, i.e. saves to another register
839 instead of the stack. */
841 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
842 /* Recognize constant loads; even with small stacks these are necessary
844 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
845 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
847 /* Constant pool loads, for the same reason. */
848 unsigned int constant;
851 loc = start + 4 + bits (insn, 0, 7) * 4;
852 constant = read_memory_unsigned_integer (loc, 4, byte_order);
853 regs[bits (insn, 8, 10)] = pv_constant (constant);
855 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
857 unsigned short inst2;
859 inst2 = read_memory_unsigned_integer (start + 2, 2,
860 byte_order_for_code);
862 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
864 /* BL, BLX. Allow some special function calls when
865 skipping the prologue; GCC generates these before
866 storing arguments to the stack. */
868 int j1, j2, imm1, imm2;
870 imm1 = sbits (insn, 0, 10);
871 imm2 = bits (inst2, 0, 10);
872 j1 = bit (inst2, 13);
873 j2 = bit (inst2, 11);
875 offset = ((imm1 << 12) + (imm2 << 1));
876 offset ^= ((!j2) << 22) | ((!j1) << 23);
878 nextpc = start + 4 + offset;
879 /* For BLX make sure to clear the low bits. */
880 if (bit (inst2, 12) == 0)
881 nextpc = nextpc & 0xfffffffc;
883 if (!skip_prologue_function (gdbarch, nextpc,
884 bit (inst2, 12) != 0))
888 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
890 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
892 pv_t addr = regs[bits (insn, 0, 3)];
895 if (pv_area_store_would_trash (stack, addr))
898 /* Calculate offsets of saved registers. */
899 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
900 if (inst2 & (1 << regno))
902 addr = pv_add_constant (addr, -4);
903 pv_area_store (stack, addr, 4, regs[regno]);
907 regs[bits (insn, 0, 3)] = addr;
910 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
912 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
914 int regno1 = bits (inst2, 12, 15);
915 int regno2 = bits (inst2, 8, 11);
916 pv_t addr = regs[bits (insn, 0, 3)];
918 offset = inst2 & 0xff;
920 addr = pv_add_constant (addr, offset);
922 addr = pv_add_constant (addr, -offset);
924 if (pv_area_store_would_trash (stack, addr))
927 pv_area_store (stack, addr, 4, regs[regno1]);
928 pv_area_store (stack, pv_add_constant (addr, 4),
932 regs[bits (insn, 0, 3)] = addr;
935 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
936 && (inst2 & 0x0c00) == 0x0c00
937 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 int regno = bits (inst2, 12, 15);
940 pv_t addr = regs[bits (insn, 0, 3)];
942 offset = inst2 & 0xff;
944 addr = pv_add_constant (addr, offset);
946 addr = pv_add_constant (addr, -offset);
948 if (pv_area_store_would_trash (stack, addr))
951 pv_area_store (stack, addr, 4, regs[regno]);
954 regs[bits (insn, 0, 3)] = addr;
957 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
958 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
960 int regno = bits (inst2, 12, 15);
963 offset = inst2 & 0xfff;
964 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
966 if (pv_area_store_would_trash (stack, addr))
969 pv_area_store (stack, addr, 4, regs[regno]);
972 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
973 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
974 /* Ignore stores of argument registers to the stack. */
977 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
978 && (inst2 & 0x0d00) == 0x0c00
979 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
980 /* Ignore stores of argument registers to the stack. */
983 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
985 && (inst2 & 0x8000) == 0x0000
986 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
987 /* Ignore block loads from the stack, potentially copying
988 parameters from memory. */
991 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore dual loads from the stack. */
997 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
998 && (inst2 & 0x0d00) == 0x0c00
999 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1000 /* Similarly ignore single loads from the stack. */
1003 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1004 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1005 /* Similarly ignore single loads from the stack. */
1008 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1009 && (inst2 & 0x8000) == 0x0000)
1011 unsigned int imm = ((bits (insn, 10, 10) << 11)
1012 | (bits (inst2, 12, 14) << 8)
1013 | bits (inst2, 0, 7));
1015 regs[bits (inst2, 8, 11)]
1016 = pv_add_constant (regs[bits (insn, 0, 3)],
1017 thumb_expand_immediate (imm));
1020 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1031 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1032 && (inst2 & 0x8000) == 0x0000)
1034 unsigned int imm = ((bits (insn, 10, 10) << 11)
1035 | (bits (inst2, 12, 14) << 8)
1036 | bits (inst2, 0, 7));
1038 regs[bits (inst2, 8, 11)]
1039 = pv_add_constant (regs[bits (insn, 0, 3)],
1040 - (CORE_ADDR) thumb_expand_immediate (imm));
1043 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1044 && (inst2 & 0x8000) == 0x0000)
1046 unsigned int imm = ((bits (insn, 10, 10) << 11)
1047 | (bits (inst2, 12, 14) << 8)
1048 | bits (inst2, 0, 7));
1050 regs[bits (inst2, 8, 11)]
1051 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1054 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1056 unsigned int imm = ((bits (insn, 10, 10) << 11)
1057 | (bits (inst2, 12, 14) << 8)
1058 | bits (inst2, 0, 7));
1060 regs[bits (inst2, 8, 11)]
1061 = pv_constant (thumb_expand_immediate (imm));
1064 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1067 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1069 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1072 else if (insn == 0xea5f /* mov.w Rd,Rm */
1073 && (inst2 & 0xf0f0) == 0)
1075 int dst_reg = (inst2 & 0x0f00) >> 8;
1076 int src_reg = inst2 & 0xf;
1077 regs[dst_reg] = regs[src_reg];
1080 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1082 /* Constant pool loads. */
1083 unsigned int constant;
1086 offset = bits (inst2, 0, 11);
1088 loc = start + 4 + offset;
1090 loc = start + 4 - offset;
1092 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1093 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1096 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1098 /* Constant pool loads. */
1099 unsigned int constant;
1102 offset = bits (inst2, 0, 7) << 2;
1104 loc = start + 4 + offset;
1106 loc = start + 4 - offset;
1108 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1109 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1111 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1112 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1115 else if (thumb2_instruction_changes_pc (insn, inst2))
1117 /* Don't scan past anything that might change control flow. */
1122 /* The optimizer might shove anything into the prologue,
1123 so we just skip what we don't recognize. */
1124 unrecognized_pc = start;
1129 else if (thumb_instruction_changes_pc (insn))
1131 /* Don't scan past anything that might change control flow. */
1136 /* The optimizer might shove anything into the prologue,
1137 so we just skip what we don't recognize. */
1138 unrecognized_pc = start;
1145 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1146 paddress (gdbarch, start));
1148 if (unrecognized_pc == 0)
1149 unrecognized_pc = start;
1153 do_cleanups (back_to);
1154 return unrecognized_pc;
1157 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1159 /* Frame pointer is fp. Frame size is constant. */
1160 cache->framereg = ARM_FP_REGNUM;
1161 cache->framesize = -regs[ARM_FP_REGNUM].k;
1163 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1165 /* Frame pointer is r7. Frame size is constant. */
1166 cache->framereg = THUMB_FP_REGNUM;
1167 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1171 /* Try the stack pointer... this is a bit desperate. */
1172 cache->framereg = ARM_SP_REGNUM;
1173 cache->framesize = -regs[ARM_SP_REGNUM].k;
1176 for (i = 0; i < 16; i++)
1177 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1178 cache->saved_regs[i].addr = offset;
1180 do_cleanups (back_to);
1181 return unrecognized_pc;
1185 /* Try to analyze the instructions starting from PC, which load symbol
1186 __stack_chk_guard. Return the address of instruction after loading this
1187 symbol, set the dest register number to *BASEREG, and set the size of
1188 instructions for loading symbol in OFFSET. Return 0 if instructions are
1192 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1193 unsigned int *destreg, int *offset)
1195 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1196 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1197 unsigned int low, high, address;
1202 unsigned short insn1
1203 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1205 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1207 *destreg = bits (insn1, 8, 10);
1209 address = bits (insn1, 0, 7);
1211 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1213 unsigned short insn2
1214 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1216 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1219 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1221 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1223 /* movt Rd, #const */
1224 if ((insn1 & 0xfbc0) == 0xf2c0)
1226 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1227 *destreg = bits (insn2, 8, 11);
1229 address = (high << 16 | low);
1236 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1238 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1240 address = bits (insn, 0, 11);
1241 *destreg = bits (insn, 12, 15);
1244 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1246 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1249 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1251 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1253 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1254 *destreg = bits (insn, 12, 15);
1256 address = (high << 16 | low);
1264 /* Try to skip a sequence of instructions used for stack protector. If PC
1265 points to the first instruction of this sequence, return the address of
1266 first instruction after this sequence, otherwise, return original PC.
1268 On arm, this sequence of instructions is composed of mainly three steps,
1269 Step 1: load symbol __stack_chk_guard,
1270 Step 2: load from address of __stack_chk_guard,
1271 Step 3: store it to somewhere else.
1273 Usually, instructions on step 2 and step 3 are the same on various ARM
1274 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1275 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1276 instructions in step 1 vary from different ARM architectures. On ARMv7,
1279 movw Rn, #:lower16:__stack_chk_guard
1280 movt Rn, #:upper16:__stack_chk_guard
1287 .word __stack_chk_guard
1289 Since ldr/str is a very popular instruction, we can't use them as
1290 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1291 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1292 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1295 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1297 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1298 unsigned int basereg;
1299 struct bound_minimal_symbol stack_chk_guard;
1301 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1304 /* Try to parse the instructions in Step 1. */
1305 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1310 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1311 /* If name of symbol doesn't start with '__stack_chk_guard', this
1312 instruction sequence is not for stack protector. If symbol is
1313 removed, we conservatively think this sequence is for stack protector. */
1314 if (stack_chk_guard.minsym
1315 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1316 "__stack_chk_guard",
1317 strlen ("__stack_chk_guard")) != 0)
1322 unsigned int destreg;
1324 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1326 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1327 if ((insn & 0xf800) != 0x6800)
1329 if (bits (insn, 3, 5) != basereg)
1331 destreg = bits (insn, 0, 2);
1333 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1334 byte_order_for_code);
1335 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1336 if ((insn & 0xf800) != 0x6000)
1338 if (destreg != bits (insn, 0, 2))
1343 unsigned int destreg;
1345 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1347 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1348 if ((insn & 0x0e500000) != 0x04100000)
1350 if (bits (insn, 16, 19) != basereg)
1352 destreg = bits (insn, 12, 15);
1353 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1354 insn = read_memory_unsigned_integer (pc + offset + 4,
1355 4, byte_order_for_code);
1356 if ((insn & 0x0e500000) != 0x04000000)
1358 if (bits (insn, 12, 15) != destreg)
1361 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1364 return pc + offset + 4;
1366 return pc + offset + 8;
1369 /* Advance the PC across any function entry prologue instructions to
1370 reach some "real" code.
1372 The APCS (ARM Procedure Call Standard) defines the following
1376 [stmfd sp!, {a1,a2,a3,a4}]
1377 stmfd sp!, {...,fp,ip,lr,pc}
1378 [stfe f7, [sp, #-12]!]
1379 [stfe f6, [sp, #-12]!]
1380 [stfe f5, [sp, #-12]!]
1381 [stfe f4, [sp, #-12]!]
1382 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1385 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1387 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1390 CORE_ADDR func_addr, limit_pc;
1392 /* See if we can determine the end of the prologue via the symbol table.
1393 If so, then return either PC, or the PC after the prologue, whichever
1395 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1397 CORE_ADDR post_prologue_pc
1398 = skip_prologue_using_sal (gdbarch, func_addr);
1399 struct symtab *s = find_pc_symtab (func_addr);
1401 if (post_prologue_pc)
1403 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1406 /* GCC always emits a line note before the prologue and another
1407 one after, even if the two are at the same address or on the
1408 same line. Take advantage of this so that we do not need to
1409 know every instruction that might appear in the prologue. We
1410 will have producer information for most binaries; if it is
1411 missing (e.g. for -gstabs), assuming the GNU tools. */
1412 if (post_prologue_pc
1414 || s->producer == NULL
1415 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1416 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1417 return post_prologue_pc;
1419 if (post_prologue_pc != 0)
1421 CORE_ADDR analyzed_limit;
1423 /* For non-GCC compilers, make sure the entire line is an
1424 acceptable prologue; GDB will round this function's
1425 return value up to the end of the following line so we
1426 can not skip just part of a line (and we do not want to).
1428 RealView does not treat the prologue specially, but does
1429 associate prologue code with the opening brace; so this
1430 lets us skip the first line if we think it is the opening
1432 if (arm_pc_is_thumb (gdbarch, func_addr))
1433 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1434 post_prologue_pc, NULL);
1436 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1437 post_prologue_pc, NULL);
1439 if (analyzed_limit != post_prologue_pc)
1442 return post_prologue_pc;
1446 /* Can't determine prologue from the symbol table, need to examine
1449 /* Find an upper limit on the function prologue using the debug
1450 information. If the debug information could not be used to provide
1451 that bound, then use an arbitrary large number as the upper bound. */
1452 /* Like arm_scan_prologue, stop no later than pc + 64. */
1453 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1455 limit_pc = pc + 64; /* Magic. */
1458 /* Check if this is Thumb code. */
1459 if (arm_pc_is_thumb (gdbarch, pc))
1460 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1462 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1464 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1466 /* "mov ip, sp" is no longer a required part of the prologue. */
1467 if (inst == 0xe1a0c00d) /* mov ip, sp */
1470 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1473 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1476 /* Some prologues begin with "str lr, [sp, #-4]!". */
1477 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1480 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1483 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1486 /* Any insns after this point may float into the code, if it makes
1487 for better instruction scheduling, so we skip them only if we
1488 find them, but still consider the function to be frame-ful. */
1490 /* We may have either one sfmfd instruction here, or several stfe
1491 insns, depending on the version of floating point code we
1493 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1496 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1499 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1502 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1505 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1506 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1507 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1510 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1511 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1512 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1515 /* Un-recognized instruction; stop scanning. */
1519 return skip_pc; /* End of prologue. */
1523 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1524 This function decodes a Thumb function prologue to determine:
1525 1) the size of the stack frame
1526 2) which registers are saved on it
1527 3) the offsets of saved regs
1528 4) the offset from the stack pointer to the frame pointer
1530 A typical Thumb function prologue would create this stack frame
1531 (offsets relative to FP)
1532 old SP -> 24 stack parameters
1535 R7 -> 0 local variables (16 bytes)
1536 SP -> -12 additional stack space (12 bytes)
1537 The frame size would thus be 36 bytes, and the frame offset would be
1538 12 bytes. The frame register is R7.
1540 The comments for thumb_skip_prolog() describe the algorithm we use
1541 to detect the end of the prolog. */
1545 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1546 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1548 CORE_ADDR prologue_start;
1549 CORE_ADDR prologue_end;
1551 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1554 /* See comment in arm_scan_prologue for an explanation of
1556 if (prologue_end > prologue_start + 64)
1558 prologue_end = prologue_start + 64;
1562 /* We're in the boondocks: we have no idea where the start of the
1566 prologue_end = min (prologue_end, prev_pc);
1568 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1571 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1574 arm_instruction_changes_pc (uint32_t this_instr)
1576 if (bits (this_instr, 28, 31) == INST_NV)
1577 /* Unconditional instructions. */
1578 switch (bits (this_instr, 24, 27))
1582 /* Branch with Link and change to Thumb. */
1587 /* Coprocessor register transfer. */
1588 if (bits (this_instr, 12, 15) == 15)
1589 error (_("Invalid update to pc in instruction"));
1595 switch (bits (this_instr, 25, 27))
1598 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1600 /* Multiplies and extra load/stores. */
1601 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1602 /* Neither multiplies nor extension load/stores are allowed
1606 /* Otherwise, miscellaneous instructions. */
1608 /* BX <reg>, BXJ <reg>, BLX <reg> */
1609 if (bits (this_instr, 4, 27) == 0x12fff1
1610 || bits (this_instr, 4, 27) == 0x12fff2
1611 || bits (this_instr, 4, 27) == 0x12fff3)
1614 /* Other miscellaneous instructions are unpredictable if they
1618 /* Data processing instruction. Fall through. */
1621 if (bits (this_instr, 12, 15) == 15)
1628 /* Media instructions and architecturally undefined instructions. */
1629 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1633 if (bit (this_instr, 20) == 0)
1637 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1643 /* Load/store multiple. */
1644 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1650 /* Branch and branch with link. */
1655 /* Coprocessor transfers or SWIs can not affect PC. */
1659 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1663 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1664 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1665 fill it in. Return the first address not recognized as a prologue
1668 We recognize all the instructions typically found in ARM prologues,
1669 plus harmless instructions which can be skipped (either for analysis
1670 purposes, or a more restrictive set that can be skipped when finding
1671 the end of the prologue). */
1674 arm_analyze_prologue (struct gdbarch *gdbarch,
1675 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1676 struct arm_prologue_cache *cache)
1678 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1679 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1681 CORE_ADDR offset, current_pc;
1682 pv_t regs[ARM_FPS_REGNUM];
1683 struct pv_area *stack;
1684 struct cleanup *back_to;
1685 int framereg, framesize;
1686 CORE_ADDR unrecognized_pc = 0;
1688 /* Search the prologue looking for instructions that set up the
1689 frame pointer, adjust the stack pointer, and save registers.
1691 Be careful, however, and if it doesn't look like a prologue,
1692 don't try to scan it. If, for instance, a frameless function
1693 begins with stmfd sp!, then we will tell ourselves there is
1694 a frame, which will confuse stack traceback, as well as "finish"
1695 and other operations that rely on a knowledge of the stack
1698 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1699 regs[regno] = pv_register (regno, 0);
1700 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1701 back_to = make_cleanup_free_pv_area (stack);
1703 for (current_pc = prologue_start;
1704 current_pc < prologue_end;
1708 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1710 if (insn == 0xe1a0c00d) /* mov ip, sp */
1712 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1715 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1716 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1718 unsigned imm = insn & 0xff; /* immediate value */
1719 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1720 int rd = bits (insn, 12, 15);
1721 imm = (imm >> rot) | (imm << (32 - rot));
1722 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1725 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1726 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1728 unsigned imm = insn & 0xff; /* immediate value */
1729 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1730 int rd = bits (insn, 12, 15);
1731 imm = (imm >> rot) | (imm << (32 - rot));
1732 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1735 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1738 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1740 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1741 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1742 regs[bits (insn, 12, 15)]);
1745 else if ((insn & 0xffff0000) == 0xe92d0000)
1746 /* stmfd sp!, {..., fp, ip, lr, pc}
1748 stmfd sp!, {a1, a2, a3, a4} */
1750 int mask = insn & 0xffff;
1752 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1755 /* Calculate offsets of saved registers. */
1756 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1757 if (mask & (1 << regno))
1760 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1761 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1764 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1765 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1766 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1768 /* No need to add this to saved_regs -- it's just an arg reg. */
1771 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1772 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1773 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1775 /* No need to add this to saved_regs -- it's just an arg reg. */
1778 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1780 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1782 /* No need to add this to saved_regs -- it's just arg regs. */
1785 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1787 unsigned imm = insn & 0xff; /* immediate value */
1788 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1789 imm = (imm >> rot) | (imm << (32 - rot));
1790 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1792 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1794 unsigned imm = insn & 0xff; /* immediate value */
1795 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1796 imm = (imm >> rot) | (imm << (32 - rot));
1797 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1799 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1801 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1803 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1806 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1807 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1808 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1810 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1812 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1814 int n_saved_fp_regs;
1815 unsigned int fp_start_reg, fp_bound_reg;
1817 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1820 if ((insn & 0x800) == 0x800) /* N0 is set */
1822 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1823 n_saved_fp_regs = 3;
1825 n_saved_fp_regs = 1;
1829 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1830 n_saved_fp_regs = 2;
1832 n_saved_fp_regs = 4;
1835 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1836 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1837 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1839 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1840 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1841 regs[fp_start_reg++]);
1844 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1846 /* Allow some special function calls when skipping the
1847 prologue; GCC generates these before storing arguments to
1849 CORE_ADDR dest = BranchDest (current_pc, insn);
1851 if (skip_prologue_function (gdbarch, dest, 0))
1856 else if ((insn & 0xf0000000) != 0xe0000000)
1857 break; /* Condition not true, exit early. */
1858 else if (arm_instruction_changes_pc (insn))
1859 /* Don't scan past anything that might change control flow. */
1861 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1862 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1863 /* Ignore block loads from the stack, potentially copying
1864 parameters from memory. */
1866 else if ((insn & 0xfc500000) == 0xe4100000
1867 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1868 /* Similarly ignore single loads from the stack. */
1870 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1871 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1872 register instead of the stack. */
1876 /* The optimizer might shove anything into the prologue,
1877 so we just skip what we don't recognize. */
1878 unrecognized_pc = current_pc;
1883 if (unrecognized_pc == 0)
1884 unrecognized_pc = current_pc;
1886 /* The frame size is just the distance from the frame register
1887 to the original stack pointer. */
1888 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1890 /* Frame pointer is fp. */
1891 framereg = ARM_FP_REGNUM;
1892 framesize = -regs[ARM_FP_REGNUM].k;
1896 /* Try the stack pointer... this is a bit desperate. */
1897 framereg = ARM_SP_REGNUM;
1898 framesize = -regs[ARM_SP_REGNUM].k;
1903 cache->framereg = framereg;
1904 cache->framesize = framesize;
1906 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1907 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1908 cache->saved_regs[regno].addr = offset;
1912 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1913 paddress (gdbarch, unrecognized_pc));
1915 do_cleanups (back_to);
1916 return unrecognized_pc;
1920 arm_scan_prologue (struct frame_info *this_frame,
1921 struct arm_prologue_cache *cache)
1923 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1924 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1926 CORE_ADDR prologue_start, prologue_end, current_pc;
1927 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1928 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1929 pv_t regs[ARM_FPS_REGNUM];
1930 struct pv_area *stack;
1931 struct cleanup *back_to;
1934 /* Assume there is no frame until proven otherwise. */
1935 cache->framereg = ARM_SP_REGNUM;
1936 cache->framesize = 0;
1938 /* Check for Thumb prologue. */
1939 if (arm_frame_is_thumb (this_frame))
1941 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1945 /* Find the function prologue. If we can't find the function in
1946 the symbol table, peek in the stack frame to find the PC. */
1947 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1950 /* One way to find the end of the prologue (which works well
1951 for unoptimized code) is to do the following:
1953 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1956 prologue_end = prev_pc;
1957 else if (sal.end < prologue_end)
1958 prologue_end = sal.end;
1960 This mechanism is very accurate so long as the optimizer
1961 doesn't move any instructions from the function body into the
1962 prologue. If this happens, sal.end will be the last
1963 instruction in the first hunk of prologue code just before
1964 the first instruction that the scheduler has moved from
1965 the body to the prologue.
1967 In order to make sure that we scan all of the prologue
1968 instructions, we use a slightly less accurate mechanism which
1969 may scan more than necessary. To help compensate for this
1970 lack of accuracy, the prologue scanning loop below contains
1971 several clauses which'll cause the loop to terminate early if
1972 an implausible prologue instruction is encountered.
1978 is a suitable endpoint since it accounts for the largest
1979 possible prologue plus up to five instructions inserted by
1982 if (prologue_end > prologue_start + 64)
1984 prologue_end = prologue_start + 64; /* See above. */
1989 /* We have no symbol information. Our only option is to assume this
1990 function has a standard stack frame and the normal frame register.
1991 Then, we can find the value of our frame pointer on entrance to
1992 the callee (or at the present moment if this is the innermost frame).
1993 The value stored there should be the address of the stmfd + 8. */
1994 CORE_ADDR frame_loc;
1995 LONGEST return_value;
1997 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1998 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
2002 prologue_start = gdbarch_addr_bits_remove
2003 (gdbarch, return_value) - 8;
2004 prologue_end = prologue_start + 64; /* See above. */
2008 if (prev_pc < prologue_end)
2009 prologue_end = prev_pc;
2011 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2014 static struct arm_prologue_cache *
2015 arm_make_prologue_cache (struct frame_info *this_frame)
2018 struct arm_prologue_cache *cache;
2019 CORE_ADDR unwound_fp;
2021 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2022 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2024 arm_scan_prologue (this_frame, cache);
2026 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2027 if (unwound_fp == 0)
2030 cache->prev_sp = unwound_fp + cache->framesize;
2032 /* Calculate actual addresses of saved registers using offsets
2033 determined by arm_scan_prologue. */
2034 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2035 if (trad_frame_addr_p (cache->saved_regs, reg))
2036 cache->saved_regs[reg].addr += cache->prev_sp;
2041 /* Our frame ID for a normal frame is the current function's starting PC
2042 and the caller's SP when we were called. */
2045 arm_prologue_this_id (struct frame_info *this_frame,
2047 struct frame_id *this_id)
2049 struct arm_prologue_cache *cache;
2053 if (*this_cache == NULL)
2054 *this_cache = arm_make_prologue_cache (this_frame);
2055 cache = *this_cache;
2057 /* This is meant to halt the backtrace at "_start". */
2058 pc = get_frame_pc (this_frame);
2059 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2062 /* If we've hit a wall, stop. */
2063 if (cache->prev_sp == 0)
2066 /* Use function start address as part of the frame ID. If we cannot
2067 identify the start address (due to missing symbol information),
2068 fall back to just using the current PC. */
2069 func = get_frame_func (this_frame);
2073 id = frame_id_build (cache->prev_sp, func);
2077 static struct value *
2078 arm_prologue_prev_register (struct frame_info *this_frame,
2082 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2083 struct arm_prologue_cache *cache;
2085 if (*this_cache == NULL)
2086 *this_cache = arm_make_prologue_cache (this_frame);
2087 cache = *this_cache;
2089 /* If we are asked to unwind the PC, then we need to return the LR
2090 instead. The prologue may save PC, but it will point into this
2091 frame's prologue, not the next frame's resume location. Also
2092 strip the saved T bit. A valid LR may have the low bit set, but
2093 a valid PC never does. */
2094 if (prev_regnum == ARM_PC_REGNUM)
2098 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2099 return frame_unwind_got_constant (this_frame, prev_regnum,
2100 arm_addr_bits_remove (gdbarch, lr));
2103 /* SP is generally not saved to the stack, but this frame is
2104 identified by the next frame's stack pointer at the time of the call.
2105 The value was already reconstructed into PREV_SP. */
2106 if (prev_regnum == ARM_SP_REGNUM)
2107 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2109 /* The CPSR may have been changed by the call instruction and by the
2110 called function. The only bit we can reconstruct is the T bit,
2111 by checking the low bit of LR as of the call. This is a reliable
2112 indicator of Thumb-ness except for some ARM v4T pre-interworking
2113 Thumb code, which could get away with a clear low bit as long as
2114 the called function did not use bx. Guess that all other
2115 bits are unchanged; the condition flags are presumably lost,
2116 but the processor status is likely valid. */
2117 if (prev_regnum == ARM_PS_REGNUM)
2120 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2122 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2123 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2124 if (IS_THUMB_ADDR (lr))
2128 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2131 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2135 struct frame_unwind arm_prologue_unwind = {
2137 default_frame_unwind_stop_reason,
2138 arm_prologue_this_id,
2139 arm_prologue_prev_register,
2141 default_frame_sniffer
2144 /* Maintain a list of ARM exception table entries per objfile, similar to the
2145 list of mapping symbols. We only cache entries for standard ARM-defined
2146 personality routines; the cache will contain only the frame unwinding
2147 instructions associated with the entry (not the descriptors). */
2149 static const struct objfile_data *arm_exidx_data_key;
2151 struct arm_exidx_entry
2156 typedef struct arm_exidx_entry arm_exidx_entry_s;
2157 DEF_VEC_O(arm_exidx_entry_s);
2159 struct arm_exidx_data
2161 VEC(arm_exidx_entry_s) **section_maps;
2165 arm_exidx_data_free (struct objfile *objfile, void *arg)
2167 struct arm_exidx_data *data = arg;
2170 for (i = 0; i < objfile->obfd->section_count; i++)
2171 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2175 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2176 const struct arm_exidx_entry *rhs)
2178 return lhs->addr < rhs->addr;
2181 static struct obj_section *
2182 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2184 struct obj_section *osect;
2186 ALL_OBJFILE_OSECTIONS (objfile, osect)
2187 if (bfd_get_section_flags (objfile->obfd,
2188 osect->the_bfd_section) & SEC_ALLOC)
2190 bfd_vma start, size;
2191 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2192 size = bfd_get_section_size (osect->the_bfd_section);
2194 if (start <= vma && vma < start + size)
2201 /* Parse contents of exception table and exception index sections
2202 of OBJFILE, and fill in the exception table entry cache.
2204 For each entry that refers to a standard ARM-defined personality
2205 routine, extract the frame unwinding instructions (from either
2206 the index or the table section). The unwinding instructions
2208 - extracting them from the rest of the table data
2209 - converting to host endianness
2210 - appending the implicit 0xb0 ("Finish") code
2212 The extracted and normalized instructions are stored for later
2213 retrieval by the arm_find_exidx_entry routine. */
2216 arm_exidx_new_objfile (struct objfile *objfile)
2218 struct cleanup *cleanups;
2219 struct arm_exidx_data *data;
2220 asection *exidx, *extab;
2221 bfd_vma exidx_vma = 0, extab_vma = 0;
2222 bfd_size_type exidx_size = 0, extab_size = 0;
2223 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2226 /* If we've already touched this file, do nothing. */
2227 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2229 cleanups = make_cleanup (null_cleanup, NULL);
2231 /* Read contents of exception table and index. */
2232 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2235 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2236 exidx_size = bfd_get_section_size (exidx);
2237 exidx_data = xmalloc (exidx_size);
2238 make_cleanup (xfree, exidx_data);
2240 if (!bfd_get_section_contents (objfile->obfd, exidx,
2241 exidx_data, 0, exidx_size))
2243 do_cleanups (cleanups);
2248 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2251 extab_vma = bfd_section_vma (objfile->obfd, extab);
2252 extab_size = bfd_get_section_size (extab);
2253 extab_data = xmalloc (extab_size);
2254 make_cleanup (xfree, extab_data);
2256 if (!bfd_get_section_contents (objfile->obfd, extab,
2257 extab_data, 0, extab_size))
2259 do_cleanups (cleanups);
2264 /* Allocate exception table data structure. */
2265 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2266 set_objfile_data (objfile, arm_exidx_data_key, data);
2267 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2268 objfile->obfd->section_count,
2269 VEC(arm_exidx_entry_s) *);
2271 /* Fill in exception table. */
2272 for (i = 0; i < exidx_size / 8; i++)
2274 struct arm_exidx_entry new_exidx_entry;
2275 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2276 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2277 bfd_vma addr = 0, word = 0;
2278 int n_bytes = 0, n_words = 0;
2279 struct obj_section *sec;
2280 gdb_byte *entry = NULL;
2282 /* Extract address of start of function. */
2283 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2284 idx += exidx_vma + i * 8;
2286 /* Find section containing function and compute section offset. */
2287 sec = arm_obj_section_from_vma (objfile, idx);
2290 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2292 /* Determine address of exception table entry. */
2295 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2297 else if ((val & 0xff000000) == 0x80000000)
2299 /* Exception table entry embedded in .ARM.exidx
2300 -- must be short form. */
2304 else if (!(val & 0x80000000))
2306 /* Exception table entry in .ARM.extab. */
2307 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2308 addr += exidx_vma + i * 8 + 4;
2310 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2312 word = bfd_h_get_32 (objfile->obfd,
2313 extab_data + addr - extab_vma);
2316 if ((word & 0xff000000) == 0x80000000)
2321 else if ((word & 0xff000000) == 0x81000000
2322 || (word & 0xff000000) == 0x82000000)
2326 n_words = ((word >> 16) & 0xff);
2328 else if (!(word & 0x80000000))
2331 struct obj_section *pers_sec;
2332 int gnu_personality = 0;
2334 /* Custom personality routine. */
2335 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2336 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2338 /* Check whether we've got one of the variants of the
2339 GNU personality routines. */
2340 pers_sec = arm_obj_section_from_vma (objfile, pers);
2343 static const char *personality[] =
2345 "__gcc_personality_v0",
2346 "__gxx_personality_v0",
2347 "__gcj_personality_v0",
2348 "__gnu_objc_personality_v0",
2352 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2355 for (k = 0; personality[k]; k++)
2356 if (lookup_minimal_symbol_by_pc_name
2357 (pc, personality[k], objfile))
2359 gnu_personality = 1;
2364 /* If so, the next word contains a word count in the high
2365 byte, followed by the same unwind instructions as the
2366 pre-defined forms. */
2368 && addr + 4 <= extab_vma + extab_size)
2370 word = bfd_h_get_32 (objfile->obfd,
2371 extab_data + addr - extab_vma);
2374 n_words = ((word >> 24) & 0xff);
2380 /* Sanity check address. */
2382 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2383 n_words = n_bytes = 0;
2385 /* The unwind instructions reside in WORD (only the N_BYTES least
2386 significant bytes are valid), followed by N_WORDS words in the
2387 extab section starting at ADDR. */
2388 if (n_bytes || n_words)
2390 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2391 n_bytes + n_words * 4 + 1);
2394 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2398 word = bfd_h_get_32 (objfile->obfd,
2399 extab_data + addr - extab_vma);
2402 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2403 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2404 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2405 *p++ = (gdb_byte) (word & 0xff);
2408 /* Implied "Finish" to terminate the list. */
2412 /* Push entry onto vector. They are guaranteed to always
2413 appear in order of increasing addresses. */
2414 new_exidx_entry.addr = idx;
2415 new_exidx_entry.entry = entry;
2416 VEC_safe_push (arm_exidx_entry_s,
2417 data->section_maps[sec->the_bfd_section->index],
2421 do_cleanups (cleanups);
2424 /* Search for the exception table entry covering MEMADDR. If one is found,
2425 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2426 set *START to the start of the region covered by this entry. */
2429 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2431 struct obj_section *sec;
2433 sec = find_pc_section (memaddr);
2436 struct arm_exidx_data *data;
2437 VEC(arm_exidx_entry_s) *map;
2438 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2441 data = objfile_data (sec->objfile, arm_exidx_data_key);
2444 map = data->section_maps[sec->the_bfd_section->index];
2445 if (!VEC_empty (arm_exidx_entry_s, map))
2447 struct arm_exidx_entry *map_sym;
2449 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2450 arm_compare_exidx_entries);
2452 /* VEC_lower_bound finds the earliest ordered insertion
2453 point. If the following symbol starts at this exact
2454 address, we use that; otherwise, the preceding
2455 exception table entry covers this address. */
2456 if (idx < VEC_length (arm_exidx_entry_s, map))
2458 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2459 if (map_sym->addr == map_key.addr)
2462 *start = map_sym->addr + obj_section_addr (sec);
2463 return map_sym->entry;
2469 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2471 *start = map_sym->addr + obj_section_addr (sec);
2472 return map_sym->entry;
2481 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2482 instruction list from the ARM exception table entry ENTRY, allocate and
2483 return a prologue cache structure describing how to unwind this frame.
2485 Return NULL if the unwinding instruction list contains a "spare",
2486 "reserved" or "refuse to unwind" instruction as defined in section
2487 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2488 for the ARM Architecture" document. */
2490 static struct arm_prologue_cache *
2491 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2496 struct arm_prologue_cache *cache;
2497 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2498 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2504 /* Whenever we reload SP, we actually have to retrieve its
2505 actual value in the current frame. */
2508 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2510 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2511 vsp = get_frame_register_unsigned (this_frame, reg);
2515 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2516 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2522 /* Decode next unwind instruction. */
2525 if ((insn & 0xc0) == 0)
2527 int offset = insn & 0x3f;
2528 vsp += (offset << 2) + 4;
2530 else if ((insn & 0xc0) == 0x40)
2532 int offset = insn & 0x3f;
2533 vsp -= (offset << 2) + 4;
2535 else if ((insn & 0xf0) == 0x80)
2537 int mask = ((insn & 0xf) << 8) | *entry++;
2540 /* The special case of an all-zero mask identifies
2541 "Refuse to unwind". We return NULL to fall back
2542 to the prologue analyzer. */
2546 /* Pop registers r4..r15 under mask. */
2547 for (i = 0; i < 12; i++)
2548 if (mask & (1 << i))
2550 cache->saved_regs[4 + i].addr = vsp;
2554 /* Special-case popping SP -- we need to reload vsp. */
2555 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2558 else if ((insn & 0xf0) == 0x90)
2560 int reg = insn & 0xf;
2562 /* Reserved cases. */
2563 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2566 /* Set SP from another register and mark VSP for reload. */
2567 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2570 else if ((insn & 0xf0) == 0xa0)
2572 int count = insn & 0x7;
2573 int pop_lr = (insn & 0x8) != 0;
2576 /* Pop r4..r[4+count]. */
2577 for (i = 0; i <= count; i++)
2579 cache->saved_regs[4 + i].addr = vsp;
2583 /* If indicated by flag, pop LR as well. */
2586 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2590 else if (insn == 0xb0)
2592 /* We could only have updated PC by popping into it; if so, it
2593 will show up as address. Otherwise, copy LR into PC. */
2594 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2595 cache->saved_regs[ARM_PC_REGNUM]
2596 = cache->saved_regs[ARM_LR_REGNUM];
2601 else if (insn == 0xb1)
2603 int mask = *entry++;
2606 /* All-zero mask and mask >= 16 is "spare". */
2607 if (mask == 0 || mask >= 16)
2610 /* Pop r0..r3 under mask. */
2611 for (i = 0; i < 4; i++)
2612 if (mask & (1 << i))
2614 cache->saved_regs[i].addr = vsp;
2618 else if (insn == 0xb2)
2620 ULONGEST offset = 0;
2625 offset |= (*entry & 0x7f) << shift;
2628 while (*entry++ & 0x80);
2630 vsp += 0x204 + (offset << 2);
2632 else if (insn == 0xb3)
2634 int start = *entry >> 4;
2635 int count = (*entry++) & 0xf;
2638 /* Only registers D0..D15 are valid here. */
2639 if (start + count >= 16)
2642 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2643 for (i = 0; i <= count; i++)
2645 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2649 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2652 else if ((insn & 0xf8) == 0xb8)
2654 int count = insn & 0x7;
2657 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2658 for (i = 0; i <= count; i++)
2660 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2664 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2667 else if (insn == 0xc6)
2669 int start = *entry >> 4;
2670 int count = (*entry++) & 0xf;
2673 /* Only registers WR0..WR15 are valid. */
2674 if (start + count >= 16)
2677 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2678 for (i = 0; i <= count; i++)
2680 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2684 else if (insn == 0xc7)
2686 int mask = *entry++;
2689 /* All-zero mask and mask >= 16 is "spare". */
2690 if (mask == 0 || mask >= 16)
2693 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2694 for (i = 0; i < 4; i++)
2695 if (mask & (1 << i))
2697 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2701 else if ((insn & 0xf8) == 0xc0)
2703 int count = insn & 0x7;
2706 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2707 for (i = 0; i <= count; i++)
2709 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2713 else if (insn == 0xc8)
2715 int start = *entry >> 4;
2716 int count = (*entry++) & 0xf;
2719 /* Only registers D0..D31 are valid. */
2720 if (start + count >= 16)
2723 /* Pop VFP double-precision registers
2724 D[16+start]..D[16+start+count]. */
2725 for (i = 0; i <= count; i++)
2727 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2731 else if (insn == 0xc9)
2733 int start = *entry >> 4;
2734 int count = (*entry++) & 0xf;
2737 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2738 for (i = 0; i <= count; i++)
2740 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2744 else if ((insn & 0xf8) == 0xd0)
2746 int count = insn & 0x7;
2749 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2750 for (i = 0; i <= count; i++)
2752 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2758 /* Everything else is "spare". */
2763 /* If we restore SP from a register, assume this was the frame register.
2764 Otherwise just fall back to SP as frame register. */
2765 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2766 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2768 cache->framereg = ARM_SP_REGNUM;
2770 /* Determine offset to previous frame. */
2772 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2774 /* We already got the previous SP. */
2775 cache->prev_sp = vsp;
2780 /* Unwinding via ARM exception table entries. Note that the sniffer
2781 already computes a filled-in prologue cache, which is then used
2782 with the same arm_prologue_this_id and arm_prologue_prev_register
2783 routines also used for prologue-parsing based unwinding. */
2786 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2787 struct frame_info *this_frame,
2788 void **this_prologue_cache)
2790 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2791 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2792 CORE_ADDR addr_in_block, exidx_region, func_start;
2793 struct arm_prologue_cache *cache;
2796 /* See if we have an ARM exception table entry covering this address. */
2797 addr_in_block = get_frame_address_in_block (this_frame);
2798 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2802 /* The ARM exception table does not describe unwind information
2803 for arbitrary PC values, but is guaranteed to be correct only
2804 at call sites. We have to decide here whether we want to use
2805 ARM exception table information for this frame, or fall back
2806 to using prologue parsing. (Note that if we have DWARF CFI,
2807 this sniffer isn't even called -- CFI is always preferred.)
2809 Before we make this decision, however, we check whether we
2810 actually have *symbol* information for the current frame.
2811 If not, prologue parsing would not work anyway, so we might
2812 as well use the exception table and hope for the best. */
2813 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2817 /* If the next frame is "normal", we are at a call site in this
2818 frame, so exception information is guaranteed to be valid. */
2819 if (get_next_frame (this_frame)
2820 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2823 /* We also assume exception information is valid if we're currently
2824 blocked in a system call. The system library is supposed to
2825 ensure this, so that e.g. pthread cancellation works. */
2826 if (arm_frame_is_thumb (this_frame))
2830 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2831 byte_order_for_code, &insn)
2832 && (insn & 0xff00) == 0xdf00 /* svc */)
2839 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2840 byte_order_for_code, &insn)
2841 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2845 /* Bail out if we don't know that exception information is valid. */
2849 /* The ARM exception index does not mark the *end* of the region
2850 covered by the entry, and some functions will not have any entry.
2851 To correctly recognize the end of the covered region, the linker
2852 should have inserted dummy records with a CANTUNWIND marker.
2854 Unfortunately, current versions of GNU ld do not reliably do
2855 this, and thus we may have found an incorrect entry above.
2856 As a (temporary) sanity check, we only use the entry if it
2857 lies *within* the bounds of the function. Note that this check
2858 might reject perfectly valid entries that just happen to cover
2859 multiple functions; therefore this check ought to be removed
2860 once the linker is fixed. */
2861 if (func_start > exidx_region)
2865 /* Decode the list of unwinding instructions into a prologue cache.
2866 Note that this may fail due to e.g. a "refuse to unwind" code. */
2867 cache = arm_exidx_fill_cache (this_frame, entry);
2871 *this_prologue_cache = cache;
2875 struct frame_unwind arm_exidx_unwind = {
2877 default_frame_unwind_stop_reason,
2878 arm_prologue_this_id,
2879 arm_prologue_prev_register,
2881 arm_exidx_unwind_sniffer
2884 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2885 trampoline, return the target PC. Otherwise return 0.
2887 void call0a (char c, short s, int i, long l) {}
2891 (*pointer_to_call0a) (c, s, i, l);
2894 Instead of calling a stub library function _call_via_xx (xx is
2895 the register name), GCC may inline the trampoline in the object
2896 file as below (register r2 has the address of call0a).
2899 .type main, %function
2908 The trampoline 'bx r2' doesn't belong to main. */
2911 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2913 /* The heuristics of recognizing such trampoline is that FRAME is
2914 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2915 if (arm_frame_is_thumb (frame))
2919 if (target_read_memory (pc, buf, 2) == 0)
2921 struct gdbarch *gdbarch = get_frame_arch (frame);
2922 enum bfd_endian byte_order_for_code
2923 = gdbarch_byte_order_for_code (gdbarch);
2925 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2927 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2930 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2932 /* Clear the LSB so that gdb core sets step-resume
2933 breakpoint at the right address. */
2934 return UNMAKE_THUMB_ADDR (dest);
2942 static struct arm_prologue_cache *
2943 arm_make_stub_cache (struct frame_info *this_frame)
2945 struct arm_prologue_cache *cache;
2947 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2948 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2950 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2955 /* Our frame ID for a stub frame is the current SP and LR. */
2958 arm_stub_this_id (struct frame_info *this_frame,
2960 struct frame_id *this_id)
2962 struct arm_prologue_cache *cache;
2964 if (*this_cache == NULL)
2965 *this_cache = arm_make_stub_cache (this_frame);
2966 cache = *this_cache;
2968 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2972 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2973 struct frame_info *this_frame,
2974 void **this_prologue_cache)
2976 CORE_ADDR addr_in_block;
2978 CORE_ADDR pc, start_addr;
2981 addr_in_block = get_frame_address_in_block (this_frame);
2982 pc = get_frame_pc (this_frame);
2983 if (in_plt_section (addr_in_block)
2984 /* We also use the stub winder if the target memory is unreadable
2985 to avoid having the prologue unwinder trying to read it. */
2986 || target_read_memory (pc, dummy, 4) != 0)
2989 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2990 && arm_skip_bx_reg (this_frame, pc) != 0)
2996 struct frame_unwind arm_stub_unwind = {
2998 default_frame_unwind_stop_reason,
3000 arm_prologue_prev_register,
3002 arm_stub_unwind_sniffer
3005 /* Put here the code to store, into CACHE->saved_regs, the addresses
3006 of the saved registers of frame described by THIS_FRAME. CACHE is
3009 static struct arm_prologue_cache *
3010 arm_m_exception_cache (struct frame_info *this_frame)
3012 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3013 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3014 struct arm_prologue_cache *cache;
3015 CORE_ADDR unwound_sp;
3018 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3019 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3021 unwound_sp = get_frame_register_unsigned (this_frame,
3024 /* The hardware saves eight 32-bit words, comprising xPSR,
3025 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3026 "B1.5.6 Exception entry behavior" in
3027 "ARMv7-M Architecture Reference Manual". */
3028 cache->saved_regs[0].addr = unwound_sp;
3029 cache->saved_regs[1].addr = unwound_sp + 4;
3030 cache->saved_regs[2].addr = unwound_sp + 8;
3031 cache->saved_regs[3].addr = unwound_sp + 12;
3032 cache->saved_regs[12].addr = unwound_sp + 16;
3033 cache->saved_regs[14].addr = unwound_sp + 20;
3034 cache->saved_regs[15].addr = unwound_sp + 24;
3035 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3037 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3038 aligner between the top of the 32-byte stack frame and the
3039 previous context's stack pointer. */
3040 cache->prev_sp = unwound_sp + 32;
3041 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3042 && (xpsr & (1 << 9)) != 0)
3043 cache->prev_sp += 4;
3048 /* Implementation of function hook 'this_id' in
3049 'struct frame_uwnind'. */
3052 arm_m_exception_this_id (struct frame_info *this_frame,
3054 struct frame_id *this_id)
3056 struct arm_prologue_cache *cache;
3058 if (*this_cache == NULL)
3059 *this_cache = arm_m_exception_cache (this_frame);
3060 cache = *this_cache;
3062 /* Our frame ID for a stub frame is the current SP and LR. */
3063 *this_id = frame_id_build (cache->prev_sp,
3064 get_frame_pc (this_frame));
3067 /* Implementation of function hook 'prev_register' in
3068 'struct frame_uwnind'. */
3070 static struct value *
3071 arm_m_exception_prev_register (struct frame_info *this_frame,
3075 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3076 struct arm_prologue_cache *cache;
3078 if (*this_cache == NULL)
3079 *this_cache = arm_m_exception_cache (this_frame);
3080 cache = *this_cache;
3082 /* The value was already reconstructed into PREV_SP. */
3083 if (prev_regnum == ARM_SP_REGNUM)
3084 return frame_unwind_got_constant (this_frame, prev_regnum,
3087 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3091 /* Implementation of function hook 'sniffer' in
3092 'struct frame_uwnind'. */
3095 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3096 struct frame_info *this_frame,
3097 void **this_prologue_cache)
3099 CORE_ADDR this_pc = get_frame_pc (this_frame);
3101 /* No need to check is_m; this sniffer is only registered for
3102 M-profile architectures. */
3104 /* Exception frames return to one of these magic PCs. Other values
3105 are not defined as of v7-M. See details in "B1.5.8 Exception
3106 return behavior" in "ARMv7-M Architecture Reference Manual". */
3107 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3108 || this_pc == 0xfffffffd)
3114 /* Frame unwinder for M-profile exceptions. */
3116 struct frame_unwind arm_m_exception_unwind =
3119 default_frame_unwind_stop_reason,
3120 arm_m_exception_this_id,
3121 arm_m_exception_prev_register,
3123 arm_m_exception_unwind_sniffer
3127 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3129 struct arm_prologue_cache *cache;
3131 if (*this_cache == NULL)
3132 *this_cache = arm_make_prologue_cache (this_frame);
3133 cache = *this_cache;
3135 return cache->prev_sp - cache->framesize;
3138 struct frame_base arm_normal_base = {
3139 &arm_prologue_unwind,
3140 arm_normal_frame_base,
3141 arm_normal_frame_base,
3142 arm_normal_frame_base
3145 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3146 dummy frame. The frame ID's base needs to match the TOS value
3147 saved by save_dummy_frame_tos() and returned from
3148 arm_push_dummy_call, and the PC needs to match the dummy frame's
3151 static struct frame_id
3152 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3154 return frame_id_build (get_frame_register_unsigned (this_frame,
3156 get_frame_pc (this_frame));
3159 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3160 be used to construct the previous frame's ID, after looking up the
3161 containing function). */
3164 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3167 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3168 return arm_addr_bits_remove (gdbarch, pc);
3172 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3174 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3177 static struct value *
3178 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3181 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3183 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3188 /* The PC is normally copied from the return column, which
3189 describes saves of LR. However, that version may have an
3190 extra bit set to indicate Thumb state. The bit is not
3192 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3193 return frame_unwind_got_constant (this_frame, regnum,
3194 arm_addr_bits_remove (gdbarch, lr));
3197 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3198 cpsr = get_frame_register_unsigned (this_frame, regnum);
3199 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3200 if (IS_THUMB_ADDR (lr))
3204 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3207 internal_error (__FILE__, __LINE__,
3208 _("Unexpected register %d"), regnum);
3213 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3214 struct dwarf2_frame_state_reg *reg,
3215 struct frame_info *this_frame)
3221 reg->how = DWARF2_FRAME_REG_FN;
3222 reg->loc.fn = arm_dwarf2_prev_register;
3225 reg->how = DWARF2_FRAME_REG_CFA;
3230 /* Return true if we are in the function's epilogue, i.e. after the
3231 instruction that destroyed the function's stack frame. */
3234 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3236 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3237 unsigned int insn, insn2;
3238 int found_return = 0, found_stack_adjust = 0;
3239 CORE_ADDR func_start, func_end;
3243 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3246 /* The epilogue is a sequence of instructions along the following lines:
3248 - add stack frame size to SP or FP
3249 - [if frame pointer used] restore SP from FP
3250 - restore registers from SP [may include PC]
3251 - a return-type instruction [if PC wasn't already restored]
3253 In a first pass, we scan forward from the current PC and verify the
3254 instructions we find as compatible with this sequence, ending in a
3257 However, this is not sufficient to distinguish indirect function calls
3258 within a function from indirect tail calls in the epilogue in some cases.
3259 Therefore, if we didn't already find any SP-changing instruction during
3260 forward scan, we add a backward scanning heuristic to ensure we actually
3261 are in the epilogue. */
3264 while (scan_pc < func_end && !found_return)
3266 if (target_read_memory (scan_pc, buf, 2))
3270 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3272 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3274 else if (insn == 0x46f7) /* mov pc, lr */
3276 else if (thumb_instruction_restores_sp (insn))
3278 found_stack_adjust = 1;
3279 if ((insn & 0xfe00) == 0xbd00) /* pop <registers, PC> */
3282 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3284 if (target_read_memory (scan_pc, buf, 2))
3288 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3290 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3292 found_stack_adjust = 1;
3293 if (insn2 & 0x8000) /* <registers> include PC. */
3296 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3297 && (insn2 & 0x0fff) == 0x0b04)
3299 found_stack_adjust = 1;
3300 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3303 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3304 && (insn2 & 0x0e00) == 0x0a00)
3305 found_stack_adjust = 1;
3316 /* Since any instruction in the epilogue sequence, with the possible
3317 exception of return itself, updates the stack pointer, we need to
3318 scan backwards for at most one instruction. Try either a 16-bit or
3319 a 32-bit instruction. This is just a heuristic, so we do not worry
3320 too much about false positives. */
3322 if (!found_stack_adjust)
3324 if (pc - 4 < func_start)
3326 if (target_read_memory (pc - 4, buf, 4))
3329 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3330 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3332 if (thumb_instruction_restores_sp (insn2))
3333 found_stack_adjust = 1;
3334 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3335 found_stack_adjust = 1;
3336 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3337 && (insn2 & 0x0fff) == 0x0b04)
3338 found_stack_adjust = 1;
3339 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3340 && (insn2 & 0x0e00) == 0x0a00)
3341 found_stack_adjust = 1;
3344 return found_stack_adjust;
3347 /* Return true if we are in the function's epilogue, i.e. after the
3348 instruction that destroyed the function's stack frame. */
3351 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3353 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3355 int found_return, found_stack_adjust;
3356 CORE_ADDR func_start, func_end;
3358 if (arm_pc_is_thumb (gdbarch, pc))
3359 return thumb_in_function_epilogue_p (gdbarch, pc);
3361 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3364 /* We are in the epilogue if the previous instruction was a stack
3365 adjustment and the next instruction is a possible return (bx, mov
3366 pc, or pop). We could have to scan backwards to find the stack
3367 adjustment, or forwards to find the return, but this is a decent
3368 approximation. First scan forwards. */
3371 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3372 if (bits (insn, 28, 31) != INST_NV)
3374 if ((insn & 0x0ffffff0) == 0x012fff10)
3377 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3380 else if ((insn & 0x0fff0000) == 0x08bd0000
3381 && (insn & 0x0000c000) != 0)
3382 /* POP (LDMIA), including PC or LR. */
3389 /* Scan backwards. This is just a heuristic, so do not worry about
3390 false positives from mode changes. */
3392 if (pc < func_start + 4)
3395 found_stack_adjust = 0;
3396 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3397 if (bits (insn, 28, 31) != INST_NV)
3399 if ((insn & 0x0df0f000) == 0x0080d000)
3400 /* ADD SP (register or immediate). */
3401 found_stack_adjust = 1;
3402 else if ((insn & 0x0df0f000) == 0x0040d000)
3403 /* SUB SP (register or immediate). */
3404 found_stack_adjust = 1;
3405 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3407 found_stack_adjust = 1;
3408 else if ((insn & 0x0fff0000) == 0x08bd0000)
3410 found_stack_adjust = 1;
3411 else if ((insn & 0x0fff0000) == 0x049d0000)
3412 /* POP of a single register. */
3413 found_stack_adjust = 1;
3416 if (found_stack_adjust)
3423 /* When arguments must be pushed onto the stack, they go on in reverse
3424 order. The code below implements a FILO (stack) to do this. */
3429 struct stack_item *prev;
3433 static struct stack_item *
3434 push_stack_item (struct stack_item *prev, const void *contents, int len)
3436 struct stack_item *si;
3437 si = xmalloc (sizeof (struct stack_item));
3438 si->data = xmalloc (len);
3441 memcpy (si->data, contents, len);
3445 static struct stack_item *
3446 pop_stack_item (struct stack_item *si)
3448 struct stack_item *dead = si;
3456 /* Return the alignment (in bytes) of the given type. */
3459 arm_type_align (struct type *t)
3465 t = check_typedef (t);
3466 switch (TYPE_CODE (t))
3469 /* Should never happen. */
3470 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3474 case TYPE_CODE_ENUM:
3478 case TYPE_CODE_RANGE:
3480 case TYPE_CODE_CHAR:
3481 case TYPE_CODE_BOOL:
3482 return TYPE_LENGTH (t);
3484 case TYPE_CODE_ARRAY:
3485 case TYPE_CODE_COMPLEX:
3486 /* TODO: What about vector types? */
3487 return arm_type_align (TYPE_TARGET_TYPE (t));
3489 case TYPE_CODE_STRUCT:
3490 case TYPE_CODE_UNION:
3492 for (n = 0; n < TYPE_NFIELDS (t); n++)
3494 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3502 /* Possible base types for a candidate for passing and returning in
3505 enum arm_vfp_cprc_base_type
3514 /* The length of one element of base type B. */
3517 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3521 case VFP_CPRC_SINGLE:
3523 case VFP_CPRC_DOUBLE:
3525 case VFP_CPRC_VEC64:
3527 case VFP_CPRC_VEC128:
3530 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3535 /* The character ('s', 'd' or 'q') for the type of VFP register used
3536 for passing base type B. */
3539 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3543 case VFP_CPRC_SINGLE:
3545 case VFP_CPRC_DOUBLE:
3547 case VFP_CPRC_VEC64:
3549 case VFP_CPRC_VEC128:
3552 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3557 /* Determine whether T may be part of a candidate for passing and
3558 returning in VFP registers, ignoring the limit on the total number
3559 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3560 classification of the first valid component found; if it is not
3561 VFP_CPRC_UNKNOWN, all components must have the same classification
3562 as *BASE_TYPE. If it is found that T contains a type not permitted
3563 for passing and returning in VFP registers, a type differently
3564 classified from *BASE_TYPE, or two types differently classified
3565 from each other, return -1, otherwise return the total number of
3566 base-type elements found (possibly 0 in an empty structure or
3567 array). Vectors and complex types are not currently supported,
3568 matching the generic AAPCS support. */
3571 arm_vfp_cprc_sub_candidate (struct type *t,
3572 enum arm_vfp_cprc_base_type *base_type)
3574 t = check_typedef (t);
3575 switch (TYPE_CODE (t))
3578 switch (TYPE_LENGTH (t))
3581 if (*base_type == VFP_CPRC_UNKNOWN)
3582 *base_type = VFP_CPRC_SINGLE;
3583 else if (*base_type != VFP_CPRC_SINGLE)
3588 if (*base_type == VFP_CPRC_UNKNOWN)
3589 *base_type = VFP_CPRC_DOUBLE;
3590 else if (*base_type != VFP_CPRC_DOUBLE)
3599 case TYPE_CODE_ARRAY:
3603 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3606 if (TYPE_LENGTH (t) == 0)
3608 gdb_assert (count == 0);
3611 else if (count == 0)
3613 unitlen = arm_vfp_cprc_unit_length (*base_type);
3614 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3615 return TYPE_LENGTH (t) / unitlen;
3619 case TYPE_CODE_STRUCT:
3624 for (i = 0; i < TYPE_NFIELDS (t); i++)
3626 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3628 if (sub_count == -1)
3632 if (TYPE_LENGTH (t) == 0)
3634 gdb_assert (count == 0);
3637 else if (count == 0)
3639 unitlen = arm_vfp_cprc_unit_length (*base_type);
3640 if (TYPE_LENGTH (t) != unitlen * count)
3645 case TYPE_CODE_UNION:
3650 for (i = 0; i < TYPE_NFIELDS (t); i++)
3652 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3654 if (sub_count == -1)
3656 count = (count > sub_count ? count : sub_count);
3658 if (TYPE_LENGTH (t) == 0)
3660 gdb_assert (count == 0);
3663 else if (count == 0)
3665 unitlen = arm_vfp_cprc_unit_length (*base_type);
3666 if (TYPE_LENGTH (t) != unitlen * count)
3678 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3679 if passed to or returned from a non-variadic function with the VFP
3680 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3681 *BASE_TYPE to the base type for T and *COUNT to the number of
3682 elements of that base type before returning. */
3685 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3688 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3689 int c = arm_vfp_cprc_sub_candidate (t, &b);
3690 if (c <= 0 || c > 4)
3697 /* Return 1 if the VFP ABI should be used for passing arguments to and
3698 returning values from a function of type FUNC_TYPE, 0
3702 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3704 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3705 /* Variadic functions always use the base ABI. Assume that functions
3706 without debug info are not variadic. */
3707 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3709 /* The VFP ABI is only supported as a variant of AAPCS. */
3710 if (tdep->arm_abi != ARM_ABI_AAPCS)
3712 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3715 /* We currently only support passing parameters in integer registers, which
3716 conforms with GCC's default model, and VFP argument passing following
3717 the VFP variant of AAPCS. Several other variants exist and
3718 we should probably support some of them based on the selected ABI. */
3721 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3722 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3723 struct value **args, CORE_ADDR sp, int struct_return,
3724 CORE_ADDR struct_addr)
3726 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3730 struct stack_item *si = NULL;
3733 unsigned vfp_regs_free = (1 << 16) - 1;
3735 /* Determine the type of this function and whether the VFP ABI
3737 ftype = check_typedef (value_type (function));
3738 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3739 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3740 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3742 /* Set the return address. For the ARM, the return breakpoint is
3743 always at BP_ADDR. */
3744 if (arm_pc_is_thumb (gdbarch, bp_addr))
3746 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3748 /* Walk through the list of args and determine how large a temporary
3749 stack is required. Need to take care here as structs may be
3750 passed on the stack, and we have to push them. */
3753 argreg = ARM_A1_REGNUM;
3756 /* The struct_return pointer occupies the first parameter
3757 passing register. */
3761 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3762 gdbarch_register_name (gdbarch, argreg),
3763 paddress (gdbarch, struct_addr));
3764 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3768 for (argnum = 0; argnum < nargs; argnum++)
3771 struct type *arg_type;
3772 struct type *target_type;
3773 enum type_code typecode;
3774 const bfd_byte *val;
3776 enum arm_vfp_cprc_base_type vfp_base_type;
3778 int may_use_core_reg = 1;
3780 arg_type = check_typedef (value_type (args[argnum]));
3781 len = TYPE_LENGTH (arg_type);
3782 target_type = TYPE_TARGET_TYPE (arg_type);
3783 typecode = TYPE_CODE (arg_type);
3784 val = value_contents (args[argnum]);
3786 align = arm_type_align (arg_type);
3787 /* Round alignment up to a whole number of words. */
3788 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3789 /* Different ABIs have different maximum alignments. */
3790 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3792 /* The APCS ABI only requires word alignment. */
3793 align = INT_REGISTER_SIZE;
3797 /* The AAPCS requires at most doubleword alignment. */
3798 if (align > INT_REGISTER_SIZE * 2)
3799 align = INT_REGISTER_SIZE * 2;
3803 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3811 /* Because this is a CPRC it cannot go in a core register or
3812 cause a core register to be skipped for alignment.
3813 Either it goes in VFP registers and the rest of this loop
3814 iteration is skipped for this argument, or it goes on the
3815 stack (and the stack alignment code is correct for this
3817 may_use_core_reg = 0;
3819 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3820 shift = unit_length / 4;
3821 mask = (1 << (shift * vfp_base_count)) - 1;
3822 for (regno = 0; regno < 16; regno += shift)
3823 if (((vfp_regs_free >> regno) & mask) == mask)
3832 vfp_regs_free &= ~(mask << regno);
3833 reg_scaled = regno / shift;
3834 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3835 for (i = 0; i < vfp_base_count; i++)
3839 if (reg_char == 'q')
3840 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3841 val + i * unit_length);
3844 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3845 reg_char, reg_scaled + i);
3846 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3848 regcache_cooked_write (regcache, regnum,
3849 val + i * unit_length);
3856 /* This CPRC could not go in VFP registers, so all VFP
3857 registers are now marked as used. */
3862 /* Push stack padding for dowubleword alignment. */
3863 if (nstack & (align - 1))
3865 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3866 nstack += INT_REGISTER_SIZE;
3869 /* Doubleword aligned quantities must go in even register pairs. */
3870 if (may_use_core_reg
3871 && argreg <= ARM_LAST_ARG_REGNUM
3872 && align > INT_REGISTER_SIZE
3876 /* If the argument is a pointer to a function, and it is a
3877 Thumb function, create a LOCAL copy of the value and set
3878 the THUMB bit in it. */
3879 if (TYPE_CODE_PTR == typecode
3880 && target_type != NULL
3881 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3883 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3884 if (arm_pc_is_thumb (gdbarch, regval))
3886 bfd_byte *copy = alloca (len);
3887 store_unsigned_integer (copy, len, byte_order,
3888 MAKE_THUMB_ADDR (regval));
3893 /* Copy the argument to general registers or the stack in
3894 register-sized pieces. Large arguments are split between
3895 registers and stack. */
3898 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3900 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3902 /* The argument is being passed in a general purpose
3905 = extract_unsigned_integer (val, partial_len, byte_order);
3906 if (byte_order == BFD_ENDIAN_BIG)
3907 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3909 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3911 gdbarch_register_name
3913 phex (regval, INT_REGISTER_SIZE));
3914 regcache_cooked_write_unsigned (regcache, argreg, regval);
3919 /* Push the arguments onto the stack. */
3921 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3923 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3924 nstack += INT_REGISTER_SIZE;
3931 /* If we have an odd number of words to push, then decrement the stack
3932 by one word now, so first stack argument will be dword aligned. */
3939 write_memory (sp, si->data, si->len);
3940 si = pop_stack_item (si);
3943 /* Finally, update teh SP register. */
3944 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3950 /* Always align the frame to an 8-byte boundary. This is required on
3951 some platforms and harmless on the rest. */
3954 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3956 /* Align the stack to eight bytes. */
3957 return sp & ~ (CORE_ADDR) 7;
3961 print_fpu_flags (struct ui_file *file, int flags)
3963 if (flags & (1 << 0))
3964 fputs_filtered ("IVO ", file);
3965 if (flags & (1 << 1))
3966 fputs_filtered ("DVZ ", file);
3967 if (flags & (1 << 2))
3968 fputs_filtered ("OFL ", file);
3969 if (flags & (1 << 3))
3970 fputs_filtered ("UFL ", file);
3971 if (flags & (1 << 4))
3972 fputs_filtered ("INX ", file);
3973 fputc_filtered ('\n', file);
3976 /* Print interesting information about the floating point processor
3977 (if present) or emulator. */
3979 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3980 struct frame_info *frame, const char *args)
3982 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3985 type = (status >> 24) & 127;
3986 if (status & (1 << 31))
3987 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3989 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3990 /* i18n: [floating point unit] mask */
3991 fputs_filtered (_("mask: "), file);
3992 print_fpu_flags (file, status >> 16);
3993 /* i18n: [floating point unit] flags */
3994 fputs_filtered (_("flags: "), file);
3995 print_fpu_flags (file, status);
3998 /* Construct the ARM extended floating point type. */
3999 static struct type *
4000 arm_ext_type (struct gdbarch *gdbarch)
4002 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4004 if (!tdep->arm_ext_type)
4006 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4007 floatformats_arm_ext);
4009 return tdep->arm_ext_type;
4012 static struct type *
4013 arm_neon_double_type (struct gdbarch *gdbarch)
4015 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4017 if (tdep->neon_double_type == NULL)
4019 struct type *t, *elem;
4021 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4023 elem = builtin_type (gdbarch)->builtin_uint8;
4024 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4025 elem = builtin_type (gdbarch)->builtin_uint16;
4026 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4027 elem = builtin_type (gdbarch)->builtin_uint32;
4028 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4029 elem = builtin_type (gdbarch)->builtin_uint64;
4030 append_composite_type_field (t, "u64", elem);
4031 elem = builtin_type (gdbarch)->builtin_float;
4032 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4033 elem = builtin_type (gdbarch)->builtin_double;
4034 append_composite_type_field (t, "f64", elem);
4036 TYPE_VECTOR (t) = 1;
4037 TYPE_NAME (t) = "neon_d";
4038 tdep->neon_double_type = t;
4041 return tdep->neon_double_type;
4044 /* FIXME: The vector types are not correctly ordered on big-endian
4045 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4046 bits of d0 - regardless of what unit size is being held in d0. So
4047 the offset of the first uint8 in d0 is 7, but the offset of the
4048 first float is 4. This code works as-is for little-endian
4051 static struct type *
4052 arm_neon_quad_type (struct gdbarch *gdbarch)
4054 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4056 if (tdep->neon_quad_type == NULL)
4058 struct type *t, *elem;
4060 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4062 elem = builtin_type (gdbarch)->builtin_uint8;
4063 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4064 elem = builtin_type (gdbarch)->builtin_uint16;
4065 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4066 elem = builtin_type (gdbarch)->builtin_uint32;
4067 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4068 elem = builtin_type (gdbarch)->builtin_uint64;
4069 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4070 elem = builtin_type (gdbarch)->builtin_float;
4071 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4072 elem = builtin_type (gdbarch)->builtin_double;
4073 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4075 TYPE_VECTOR (t) = 1;
4076 TYPE_NAME (t) = "neon_q";
4077 tdep->neon_quad_type = t;
4080 return tdep->neon_quad_type;
4083 /* Return the GDB type object for the "standard" data type of data in
4086 static struct type *
4087 arm_register_type (struct gdbarch *gdbarch, int regnum)
4089 int num_regs = gdbarch_num_regs (gdbarch);
4091 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4092 && regnum >= num_regs && regnum < num_regs + 32)
4093 return builtin_type (gdbarch)->builtin_float;
4095 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4096 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4097 return arm_neon_quad_type (gdbarch);
4099 /* If the target description has register information, we are only
4100 in this function so that we can override the types of
4101 double-precision registers for NEON. */
4102 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4104 struct type *t = tdesc_register_type (gdbarch, regnum);
4106 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4107 && TYPE_CODE (t) == TYPE_CODE_FLT
4108 && gdbarch_tdep (gdbarch)->have_neon)
4109 return arm_neon_double_type (gdbarch);
4114 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4116 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4117 return builtin_type (gdbarch)->builtin_void;
4119 return arm_ext_type (gdbarch);
4121 else if (regnum == ARM_SP_REGNUM)
4122 return builtin_type (gdbarch)->builtin_data_ptr;
4123 else if (regnum == ARM_PC_REGNUM)
4124 return builtin_type (gdbarch)->builtin_func_ptr;
4125 else if (regnum >= ARRAY_SIZE (arm_register_names))
4126 /* These registers are only supported on targets which supply
4127 an XML description. */
4128 return builtin_type (gdbarch)->builtin_int0;
4130 return builtin_type (gdbarch)->builtin_uint32;
4133 /* Map a DWARF register REGNUM onto the appropriate GDB register
4137 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4139 /* Core integer regs. */
4140 if (reg >= 0 && reg <= 15)
4143 /* Legacy FPA encoding. These were once used in a way which
4144 overlapped with VFP register numbering, so their use is
4145 discouraged, but GDB doesn't support the ARM toolchain
4146 which used them for VFP. */
4147 if (reg >= 16 && reg <= 23)
4148 return ARM_F0_REGNUM + reg - 16;
4150 /* New assignments for the FPA registers. */
4151 if (reg >= 96 && reg <= 103)
4152 return ARM_F0_REGNUM + reg - 96;
4154 /* WMMX register assignments. */
4155 if (reg >= 104 && reg <= 111)
4156 return ARM_WCGR0_REGNUM + reg - 104;
4158 if (reg >= 112 && reg <= 127)
4159 return ARM_WR0_REGNUM + reg - 112;
4161 if (reg >= 192 && reg <= 199)
4162 return ARM_WC0_REGNUM + reg - 192;
4164 /* VFP v2 registers. A double precision value is actually
4165 in d1 rather than s2, but the ABI only defines numbering
4166 for the single precision registers. This will "just work"
4167 in GDB for little endian targets (we'll read eight bytes,
4168 starting in s0 and then progressing to s1), but will be
4169 reversed on big endian targets with VFP. This won't
4170 be a problem for the new Neon quad registers; you're supposed
4171 to use DW_OP_piece for those. */
4172 if (reg >= 64 && reg <= 95)
4176 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4177 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4181 /* VFP v3 / Neon registers. This range is also used for VFP v2
4182 registers, except that it now describes d0 instead of s0. */
4183 if (reg >= 256 && reg <= 287)
4187 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4188 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4195 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4197 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4200 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4202 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4203 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4205 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4206 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4208 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4209 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4211 if (reg < NUM_GREGS)
4212 return SIM_ARM_R0_REGNUM + reg;
4215 if (reg < NUM_FREGS)
4216 return SIM_ARM_FP0_REGNUM + reg;
4219 if (reg < NUM_SREGS)
4220 return SIM_ARM_FPS_REGNUM + reg;
4223 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4226 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4227 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4228 It is thought that this is is the floating-point register format on
4229 little-endian systems. */
4232 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4233 void *dbl, int endianess)
4237 if (endianess == BFD_ENDIAN_BIG)
4238 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4240 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4242 floatformat_from_doublest (fmt, &d, dbl);
4246 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4251 floatformat_to_doublest (fmt, ptr, &d);
4252 if (endianess == BFD_ENDIAN_BIG)
4253 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4255 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4260 condition_true (unsigned long cond, unsigned long status_reg)
4262 if (cond == INST_AL || cond == INST_NV)
4268 return ((status_reg & FLAG_Z) != 0);
4270 return ((status_reg & FLAG_Z) == 0);
4272 return ((status_reg & FLAG_C) != 0);
4274 return ((status_reg & FLAG_C) == 0);
4276 return ((status_reg & FLAG_N) != 0);
4278 return ((status_reg & FLAG_N) == 0);
4280 return ((status_reg & FLAG_V) != 0);
4282 return ((status_reg & FLAG_V) == 0);
4284 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4286 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4288 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4290 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4292 return (((status_reg & FLAG_Z) == 0)
4293 && (((status_reg & FLAG_N) == 0)
4294 == ((status_reg & FLAG_V) == 0)));
4296 return (((status_reg & FLAG_Z) != 0)
4297 || (((status_reg & FLAG_N) == 0)
4298 != ((status_reg & FLAG_V) == 0)));
4303 static unsigned long
4304 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4305 unsigned long pc_val, unsigned long status_reg)
4307 unsigned long res, shift;
4308 int rm = bits (inst, 0, 3);
4309 unsigned long shifttype = bits (inst, 5, 6);
4313 int rs = bits (inst, 8, 11);
4314 shift = (rs == 15 ? pc_val + 8
4315 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4318 shift = bits (inst, 7, 11);
4320 res = (rm == ARM_PC_REGNUM
4321 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4322 : get_frame_register_unsigned (frame, rm));
4327 res = shift >= 32 ? 0 : res << shift;
4331 res = shift >= 32 ? 0 : res >> shift;
4337 res = ((res & 0x80000000L)
4338 ? ~((~res) >> shift) : res >> shift);
4341 case 3: /* ROR/RRX */
4344 res = (res >> 1) | (carry ? 0x80000000L : 0);
4346 res = (res >> shift) | (res << (32 - shift));
4350 return res & 0xffffffff;
4353 /* Return number of 1-bits in VAL. */
4356 bitcount (unsigned long val)
4359 for (nbits = 0; val != 0; nbits++)
4360 val &= val - 1; /* Delete rightmost 1-bit in val. */
4364 /* Return the size in bytes of the complete Thumb instruction whose
4365 first halfword is INST1. */
4368 thumb_insn_size (unsigned short inst1)
4370 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4377 thumb_advance_itstate (unsigned int itstate)
4379 /* Preserve IT[7:5], the first three bits of the condition. Shift
4380 the upcoming condition flags left by one bit. */
4381 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4383 /* If we have finished the IT block, clear the state. */
4384 if ((itstate & 0x0f) == 0)
4390 /* Find the next PC after the current instruction executes. In some
4391 cases we can not statically determine the answer (see the IT state
4392 handling in this function); in that case, a breakpoint may be
4393 inserted in addition to the returned PC, which will be used to set
4394 another breakpoint by our caller. */
4397 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4399 struct gdbarch *gdbarch = get_frame_arch (frame);
4400 struct address_space *aspace = get_frame_address_space (frame);
4401 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4402 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4403 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4404 unsigned short inst1;
4405 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4406 unsigned long offset;
4407 ULONGEST status, itstate;
4409 nextpc = MAKE_THUMB_ADDR (nextpc);
4410 pc_val = MAKE_THUMB_ADDR (pc_val);
4412 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4414 /* Thumb-2 conditional execution support. There are eight bits in
4415 the CPSR which describe conditional execution state. Once
4416 reconstructed (they're in a funny order), the low five bits
4417 describe the low bit of the condition for each instruction and
4418 how many instructions remain. The high three bits describe the
4419 base condition. One of the low four bits will be set if an IT
4420 block is active. These bits read as zero on earlier
4422 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4423 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4425 /* If-Then handling. On GNU/Linux, where this routine is used, we
4426 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4427 can disable execution of the undefined instruction. So we might
4428 miss the breakpoint if we set it on a skipped conditional
4429 instruction. Because conditional instructions can change the
4430 flags, affecting the execution of further instructions, we may
4431 need to set two breakpoints. */
4433 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4435 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4437 /* An IT instruction. Because this instruction does not
4438 modify the flags, we can accurately predict the next
4439 executed instruction. */
4440 itstate = inst1 & 0x00ff;
4441 pc += thumb_insn_size (inst1);
4443 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4445 inst1 = read_memory_unsigned_integer (pc, 2,
4446 byte_order_for_code);
4447 pc += thumb_insn_size (inst1);
4448 itstate = thumb_advance_itstate (itstate);
4451 return MAKE_THUMB_ADDR (pc);
4453 else if (itstate != 0)
4455 /* We are in a conditional block. Check the condition. */
4456 if (! condition_true (itstate >> 4, status))
4458 /* Advance to the next executed instruction. */
4459 pc += thumb_insn_size (inst1);
4460 itstate = thumb_advance_itstate (itstate);
4462 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4464 inst1 = read_memory_unsigned_integer (pc, 2,
4465 byte_order_for_code);
4466 pc += thumb_insn_size (inst1);
4467 itstate = thumb_advance_itstate (itstate);
4470 return MAKE_THUMB_ADDR (pc);
4472 else if ((itstate & 0x0f) == 0x08)
4474 /* This is the last instruction of the conditional
4475 block, and it is executed. We can handle it normally
4476 because the following instruction is not conditional,
4477 and we must handle it normally because it is
4478 permitted to branch. Fall through. */
4484 /* There are conditional instructions after this one.
4485 If this instruction modifies the flags, then we can
4486 not predict what the next executed instruction will
4487 be. Fortunately, this instruction is architecturally
4488 forbidden to branch; we know it will fall through.
4489 Start by skipping past it. */
4490 pc += thumb_insn_size (inst1);
4491 itstate = thumb_advance_itstate (itstate);
4493 /* Set a breakpoint on the following instruction. */
4494 gdb_assert ((itstate & 0x0f) != 0);
4495 arm_insert_single_step_breakpoint (gdbarch, aspace,
4496 MAKE_THUMB_ADDR (pc));
4497 cond_negated = (itstate >> 4) & 1;
4499 /* Skip all following instructions with the same
4500 condition. If there is a later instruction in the IT
4501 block with the opposite condition, set the other
4502 breakpoint there. If not, then set a breakpoint on
4503 the instruction after the IT block. */
4506 inst1 = read_memory_unsigned_integer (pc, 2,
4507 byte_order_for_code);
4508 pc += thumb_insn_size (inst1);
4509 itstate = thumb_advance_itstate (itstate);
4511 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4513 return MAKE_THUMB_ADDR (pc);
4517 else if (itstate & 0x0f)
4519 /* We are in a conditional block. Check the condition. */
4520 int cond = itstate >> 4;
4522 if (! condition_true (cond, status))
4523 /* Advance to the next instruction. All the 32-bit
4524 instructions share a common prefix. */
4525 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4527 /* Otherwise, handle the instruction normally. */
4530 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4534 /* Fetch the saved PC from the stack. It's stored above
4535 all of the other registers. */
4536 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4537 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4538 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4540 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4542 unsigned long cond = bits (inst1, 8, 11);
4543 if (cond == 0x0f) /* 0x0f = SWI */
4545 struct gdbarch_tdep *tdep;
4546 tdep = gdbarch_tdep (gdbarch);
4548 if (tdep->syscall_next_pc != NULL)
4549 nextpc = tdep->syscall_next_pc (frame);
4552 else if (cond != 0x0f && condition_true (cond, status))
4553 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4555 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4557 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4559 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4561 unsigned short inst2;
4562 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4564 /* Default to the next instruction. */
4566 nextpc = MAKE_THUMB_ADDR (nextpc);
4568 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4570 /* Branches and miscellaneous control instructions. */
4572 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4575 int j1, j2, imm1, imm2;
4577 imm1 = sbits (inst1, 0, 10);
4578 imm2 = bits (inst2, 0, 10);
4579 j1 = bit (inst2, 13);
4580 j2 = bit (inst2, 11);
4582 offset = ((imm1 << 12) + (imm2 << 1));
4583 offset ^= ((!j2) << 22) | ((!j1) << 23);
4585 nextpc = pc_val + offset;
4586 /* For BLX make sure to clear the low bits. */
4587 if (bit (inst2, 12) == 0)
4588 nextpc = nextpc & 0xfffffffc;
4590 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4592 /* SUBS PC, LR, #imm8. */
4593 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4594 nextpc -= inst2 & 0x00ff;
4596 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4598 /* Conditional branch. */
4599 if (condition_true (bits (inst1, 6, 9), status))
4601 int sign, j1, j2, imm1, imm2;
4603 sign = sbits (inst1, 10, 10);
4604 imm1 = bits (inst1, 0, 5);
4605 imm2 = bits (inst2, 0, 10);
4606 j1 = bit (inst2, 13);
4607 j2 = bit (inst2, 11);
4609 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4610 offset += (imm1 << 12) + (imm2 << 1);
4612 nextpc = pc_val + offset;
4616 else if ((inst1 & 0xfe50) == 0xe810)
4618 /* Load multiple or RFE. */
4619 int rn, offset, load_pc = 1;
4621 rn = bits (inst1, 0, 3);
4622 if (bit (inst1, 7) && !bit (inst1, 8))
4625 if (!bit (inst2, 15))
4627 offset = bitcount (inst2) * 4 - 4;
4629 else if (!bit (inst1, 7) && bit (inst1, 8))
4632 if (!bit (inst2, 15))
4636 else if (bit (inst1, 7) && bit (inst1, 8))
4641 else if (!bit (inst1, 7) && !bit (inst1, 8))
4651 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4652 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4655 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4657 /* MOV PC or MOVS PC. */
4658 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4659 nextpc = MAKE_THUMB_ADDR (nextpc);
4661 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4665 int rn, load_pc = 1;
4667 rn = bits (inst1, 0, 3);
4668 base = get_frame_register_unsigned (frame, rn);
4669 if (rn == ARM_PC_REGNUM)
4671 base = (base + 4) & ~(CORE_ADDR) 0x3;
4673 base += bits (inst2, 0, 11);
4675 base -= bits (inst2, 0, 11);
4677 else if (bit (inst1, 7))
4678 base += bits (inst2, 0, 11);
4679 else if (bit (inst2, 11))
4681 if (bit (inst2, 10))
4684 base += bits (inst2, 0, 7);
4686 base -= bits (inst2, 0, 7);
4689 else if ((inst2 & 0x0fc0) == 0x0000)
4691 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4692 base += get_frame_register_unsigned (frame, rm) << shift;
4699 nextpc = get_frame_memory_unsigned (frame, base, 4);
4701 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4704 CORE_ADDR tbl_reg, table, offset, length;
4706 tbl_reg = bits (inst1, 0, 3);
4707 if (tbl_reg == 0x0f)
4708 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4710 table = get_frame_register_unsigned (frame, tbl_reg);
4712 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4713 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4714 nextpc = pc_val + length;
4716 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4719 CORE_ADDR tbl_reg, table, offset, length;
4721 tbl_reg = bits (inst1, 0, 3);
4722 if (tbl_reg == 0x0f)
4723 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4725 table = get_frame_register_unsigned (frame, tbl_reg);
4727 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4728 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4729 nextpc = pc_val + length;
4732 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4734 if (bits (inst1, 3, 6) == 0x0f)
4735 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4737 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4739 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4741 if (bits (inst1, 3, 6) == 0x0f)
4744 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4746 nextpc = MAKE_THUMB_ADDR (nextpc);
4748 else if ((inst1 & 0xf500) == 0xb100)
4751 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4752 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4754 if (bit (inst1, 11) && reg != 0)
4755 nextpc = pc_val + imm;
4756 else if (!bit (inst1, 11) && reg == 0)
4757 nextpc = pc_val + imm;
4762 /* Get the raw next address. PC is the current program counter, in
4763 FRAME, which is assumed to be executing in ARM mode.
4765 The value returned has the execution state of the next instruction
4766 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4767 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4771 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4773 struct gdbarch *gdbarch = get_frame_arch (frame);
4774 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4775 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4776 unsigned long pc_val;
4777 unsigned long this_instr;
4778 unsigned long status;
4781 pc_val = (unsigned long) pc;
4782 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4784 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4785 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4787 if (bits (this_instr, 28, 31) == INST_NV)
4788 switch (bits (this_instr, 24, 27))
4793 /* Branch with Link and change to Thumb. */
4794 nextpc = BranchDest (pc, this_instr);
4795 nextpc |= bit (this_instr, 24) << 1;
4796 nextpc = MAKE_THUMB_ADDR (nextpc);
4802 /* Coprocessor register transfer. */
4803 if (bits (this_instr, 12, 15) == 15)
4804 error (_("Invalid update to pc in instruction"));
4807 else if (condition_true (bits (this_instr, 28, 31), status))
4809 switch (bits (this_instr, 24, 27))
4812 case 0x1: /* data processing */
4816 unsigned long operand1, operand2, result = 0;
4820 if (bits (this_instr, 12, 15) != 15)
4823 if (bits (this_instr, 22, 25) == 0
4824 && bits (this_instr, 4, 7) == 9) /* multiply */
4825 error (_("Invalid update to pc in instruction"));
4827 /* BX <reg>, BLX <reg> */
4828 if (bits (this_instr, 4, 27) == 0x12fff1
4829 || bits (this_instr, 4, 27) == 0x12fff3)
4831 rn = bits (this_instr, 0, 3);
4832 nextpc = ((rn == ARM_PC_REGNUM)
4834 : get_frame_register_unsigned (frame, rn));
4839 /* Multiply into PC. */
4840 c = (status & FLAG_C) ? 1 : 0;
4841 rn = bits (this_instr, 16, 19);
4842 operand1 = ((rn == ARM_PC_REGNUM)
4844 : get_frame_register_unsigned (frame, rn));
4846 if (bit (this_instr, 25))
4848 unsigned long immval = bits (this_instr, 0, 7);
4849 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4850 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4853 else /* operand 2 is a shifted register. */
4854 operand2 = shifted_reg_val (frame, this_instr, c,
4857 switch (bits (this_instr, 21, 24))
4860 result = operand1 & operand2;
4864 result = operand1 ^ operand2;
4868 result = operand1 - operand2;
4872 result = operand2 - operand1;
4876 result = operand1 + operand2;
4880 result = operand1 + operand2 + c;
4884 result = operand1 - operand2 + c;
4888 result = operand2 - operand1 + c;
4894 case 0xb: /* tst, teq, cmp, cmn */
4895 result = (unsigned long) nextpc;
4899 result = operand1 | operand2;
4903 /* Always step into a function. */
4908 result = operand1 & ~operand2;
4916 /* In 26-bit APCS the bottom two bits of the result are
4917 ignored, and we always end up in ARM state. */
4919 nextpc = arm_addr_bits_remove (gdbarch, result);
4927 case 0x5: /* data transfer */
4930 if (bit (this_instr, 20))
4933 if (bits (this_instr, 12, 15) == 15)
4939 if (bit (this_instr, 22))
4940 error (_("Invalid update to pc in instruction"));
4942 /* byte write to PC */
4943 rn = bits (this_instr, 16, 19);
4944 base = ((rn == ARM_PC_REGNUM)
4946 : get_frame_register_unsigned (frame, rn));
4948 if (bit (this_instr, 24))
4951 int c = (status & FLAG_C) ? 1 : 0;
4952 unsigned long offset =
4953 (bit (this_instr, 25)
4954 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4955 : bits (this_instr, 0, 11));
4957 if (bit (this_instr, 23))
4963 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4970 case 0x9: /* block transfer */
4971 if (bit (this_instr, 20))
4974 if (bit (this_instr, 15))
4978 unsigned long rn_val
4979 = get_frame_register_unsigned (frame,
4980 bits (this_instr, 16, 19));
4982 if (bit (this_instr, 23))
4985 unsigned long reglist = bits (this_instr, 0, 14);
4986 offset = bitcount (reglist) * 4;
4987 if (bit (this_instr, 24)) /* pre */
4990 else if (bit (this_instr, 24))
4994 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5001 case 0xb: /* branch & link */
5002 case 0xa: /* branch */
5004 nextpc = BranchDest (pc, this_instr);
5010 case 0xe: /* coproc ops */
5014 struct gdbarch_tdep *tdep;
5015 tdep = gdbarch_tdep (gdbarch);
5017 if (tdep->syscall_next_pc != NULL)
5018 nextpc = tdep->syscall_next_pc (frame);
5024 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5032 /* Determine next PC after current instruction executes. Will call either
5033 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5034 loop is detected. */
5037 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5041 if (arm_frame_is_thumb (frame))
5042 nextpc = thumb_get_next_pc_raw (frame, pc);
5044 nextpc = arm_get_next_pc_raw (frame, pc);
5049 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5050 of the appropriate mode (as encoded in the PC value), even if this
5051 differs from what would be expected according to the symbol tables. */
5054 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5055 struct address_space *aspace,
5058 struct cleanup *old_chain
5059 = make_cleanup_restore_integer (&arm_override_mode);
5061 arm_override_mode = IS_THUMB_ADDR (pc);
5062 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5064 insert_single_step_breakpoint (gdbarch, aspace, pc);
5066 do_cleanups (old_chain);
5069 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5070 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5071 is found, attempt to step through it. A breakpoint is placed at the end of
5075 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5077 struct gdbarch *gdbarch = get_frame_arch (frame);
5078 struct address_space *aspace = get_frame_address_space (frame);
5079 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5080 CORE_ADDR pc = get_frame_pc (frame);
5081 CORE_ADDR breaks[2] = {-1, -1};
5083 unsigned short insn1, insn2;
5086 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5087 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5088 ULONGEST status, itstate;
5090 /* We currently do not support atomic sequences within an IT block. */
5091 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5092 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5096 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5097 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5099 if (thumb_insn_size (insn1) != 4)
5102 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5104 if (!((insn1 & 0xfff0) == 0xe850
5105 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5108 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5110 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5112 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5115 if (thumb_insn_size (insn1) != 4)
5117 /* Assume that there is at most one conditional branch in the
5118 atomic sequence. If a conditional branch is found, put a
5119 breakpoint in its destination address. */
5120 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5122 if (last_breakpoint > 0)
5123 return 0; /* More than one conditional branch found,
5124 fallback to the standard code. */
5126 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5130 /* We do not support atomic sequences that use any *other*
5131 instructions but conditional branches to change the PC.
5132 Fall back to standard code to avoid losing control of
5134 else if (thumb_instruction_changes_pc (insn1))
5139 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5142 /* Assume that there is at most one conditional branch in the
5143 atomic sequence. If a conditional branch is found, put a
5144 breakpoint in its destination address. */
5145 if ((insn1 & 0xf800) == 0xf000
5146 && (insn2 & 0xd000) == 0x8000
5147 && (insn1 & 0x0380) != 0x0380)
5149 int sign, j1, j2, imm1, imm2;
5150 unsigned int offset;
5152 sign = sbits (insn1, 10, 10);
5153 imm1 = bits (insn1, 0, 5);
5154 imm2 = bits (insn2, 0, 10);
5155 j1 = bit (insn2, 13);
5156 j2 = bit (insn2, 11);
5158 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5159 offset += (imm1 << 12) + (imm2 << 1);
5161 if (last_breakpoint > 0)
5162 return 0; /* More than one conditional branch found,
5163 fallback to the standard code. */
5165 breaks[1] = loc + offset;
5169 /* We do not support atomic sequences that use any *other*
5170 instructions but conditional branches to change the PC.
5171 Fall back to standard code to avoid losing control of
5173 else if (thumb2_instruction_changes_pc (insn1, insn2))
5176 /* If we find a strex{,b,h,d}, we're done. */
5177 if ((insn1 & 0xfff0) == 0xe840
5178 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5183 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5184 if (insn_count == atomic_sequence_length)
5187 /* Insert a breakpoint right after the end of the atomic sequence. */
5190 /* Check for duplicated breakpoints. Check also for a breakpoint
5191 placed (branch instruction's destination) anywhere in sequence. */
5193 && (breaks[1] == breaks[0]
5194 || (breaks[1] >= pc && breaks[1] < loc)))
5195 last_breakpoint = 0;
5197 /* Effectively inserts the breakpoints. */
5198 for (index = 0; index <= last_breakpoint; index++)
5199 arm_insert_single_step_breakpoint (gdbarch, aspace,
5200 MAKE_THUMB_ADDR (breaks[index]));
5206 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5208 struct gdbarch *gdbarch = get_frame_arch (frame);
5209 struct address_space *aspace = get_frame_address_space (frame);
5210 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5211 CORE_ADDR pc = get_frame_pc (frame);
5212 CORE_ADDR breaks[2] = {-1, -1};
5217 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5218 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5220 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5221 Note that we do not currently support conditionally executed atomic
5223 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5225 if ((insn & 0xff9000f0) != 0xe1900090)
5228 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5230 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5232 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5235 /* Assume that there is at most one conditional branch in the atomic
5236 sequence. If a conditional branch is found, put a breakpoint in
5237 its destination address. */
5238 if (bits (insn, 24, 27) == 0xa)
5240 if (last_breakpoint > 0)
5241 return 0; /* More than one conditional branch found, fallback
5242 to the standard single-step code. */
5244 breaks[1] = BranchDest (loc - 4, insn);
5248 /* We do not support atomic sequences that use any *other* instructions
5249 but conditional branches to change the PC. Fall back to standard
5250 code to avoid losing control of execution. */
5251 else if (arm_instruction_changes_pc (insn))
5254 /* If we find a strex{,b,h,d}, we're done. */
5255 if ((insn & 0xff9000f0) == 0xe1800090)
5259 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5260 if (insn_count == atomic_sequence_length)
5263 /* Insert a breakpoint right after the end of the atomic sequence. */
5266 /* Check for duplicated breakpoints. Check also for a breakpoint
5267 placed (branch instruction's destination) anywhere in sequence. */
5269 && (breaks[1] == breaks[0]
5270 || (breaks[1] >= pc && breaks[1] < loc)))
5271 last_breakpoint = 0;
5273 /* Effectively inserts the breakpoints. */
5274 for (index = 0; index <= last_breakpoint; index++)
5275 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5281 arm_deal_with_atomic_sequence (struct frame_info *frame)
5283 if (arm_frame_is_thumb (frame))
5284 return thumb_deal_with_atomic_sequence_raw (frame);
5286 return arm_deal_with_atomic_sequence_raw (frame);
5289 /* single_step() is called just before we want to resume the inferior,
5290 if we want to single-step it but there is no hardware or kernel
5291 single-step support. We find the target of the coming instruction
5292 and breakpoint it. */
5295 arm_software_single_step (struct frame_info *frame)
5297 struct gdbarch *gdbarch = get_frame_arch (frame);
5298 struct address_space *aspace = get_frame_address_space (frame);
5301 if (arm_deal_with_atomic_sequence (frame))
5304 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5305 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5310 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5311 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5312 NULL if an error occurs. BUF is freed. */
5315 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5316 int old_len, int new_len)
5319 int bytes_to_read = new_len - old_len;
5321 new_buf = xmalloc (new_len);
5322 memcpy (new_buf + bytes_to_read, buf, old_len);
5324 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5332 /* An IT block is at most the 2-byte IT instruction followed by
5333 four 4-byte instructions. The furthest back we must search to
5334 find an IT block that affects the current instruction is thus
5335 2 + 3 * 4 == 14 bytes. */
5336 #define MAX_IT_BLOCK_PREFIX 14
5338 /* Use a quick scan if there are more than this many bytes of
5340 #define IT_SCAN_THRESHOLD 32
5342 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5343 A breakpoint in an IT block may not be hit, depending on the
5346 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5350 CORE_ADDR boundary, func_start;
5352 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5353 int i, any, last_it, last_it_count;
5355 /* If we are using BKPT breakpoints, none of this is necessary. */
5356 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5359 /* ARM mode does not have this problem. */
5360 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5363 /* We are setting a breakpoint in Thumb code that could potentially
5364 contain an IT block. The first step is to find how much Thumb
5365 code there is; we do not need to read outside of known Thumb
5367 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5369 /* Thumb-2 code must have mapping symbols to have a chance. */
5372 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5374 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5375 && func_start > boundary)
5376 boundary = func_start;
5378 /* Search for a candidate IT instruction. We have to do some fancy
5379 footwork to distinguish a real IT instruction from the second
5380 half of a 32-bit instruction, but there is no need for that if
5381 there's no candidate. */
5382 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5384 /* No room for an IT instruction. */
5387 buf = xmalloc (buf_len);
5388 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5391 for (i = 0; i < buf_len; i += 2)
5393 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5394 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5406 /* OK, the code bytes before this instruction contain at least one
5407 halfword which resembles an IT instruction. We know that it's
5408 Thumb code, but there are still two possibilities. Either the
5409 halfword really is an IT instruction, or it is the second half of
5410 a 32-bit Thumb instruction. The only way we can tell is to
5411 scan forwards from a known instruction boundary. */
5412 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5416 /* There's a lot of code before this instruction. Start with an
5417 optimistic search; it's easy to recognize halfwords that can
5418 not be the start of a 32-bit instruction, and use that to
5419 lock on to the instruction boundaries. */
5420 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5423 buf_len = IT_SCAN_THRESHOLD;
5426 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5428 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5429 if (thumb_insn_size (inst1) == 2)
5436 /* At this point, if DEFINITE, BUF[I] is the first place we
5437 are sure that we know the instruction boundaries, and it is far
5438 enough from BPADDR that we could not miss an IT instruction
5439 affecting BPADDR. If ! DEFINITE, give up - start from a
5443 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5447 buf_len = bpaddr - boundary;
5453 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5456 buf_len = bpaddr - boundary;
5460 /* Scan forwards. Find the last IT instruction before BPADDR. */
5465 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5467 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5472 else if (inst1 & 0x0002)
5474 else if (inst1 & 0x0004)
5479 i += thumb_insn_size (inst1);
5485 /* There wasn't really an IT instruction after all. */
5488 if (last_it_count < 1)
5489 /* It was too far away. */
5492 /* This really is a trouble spot. Move the breakpoint to the IT
5494 return bpaddr - buf_len + last_it;
5497 /* ARM displaced stepping support.
5499 Generally ARM displaced stepping works as follows:
5501 1. When an instruction is to be single-stepped, it is first decoded by
5502 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5503 Depending on the type of instruction, it is then copied to a scratch
5504 location, possibly in a modified form. The copy_* set of functions
5505 performs such modification, as necessary. A breakpoint is placed after
5506 the modified instruction in the scratch space to return control to GDB.
5507 Note in particular that instructions which modify the PC will no longer
5508 do so after modification.
5510 2. The instruction is single-stepped, by setting the PC to the scratch
5511 location address, and resuming. Control returns to GDB when the
5514 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5515 function used for the current instruction. This function's job is to
5516 put the CPU/memory state back to what it would have been if the
5517 instruction had been executed unmodified in its original location. */
5519 /* NOP instruction (mov r0, r0). */
5520 #define ARM_NOP 0xe1a00000
5521 #define THUMB_NOP 0x4600
5523 /* Helper for register reads for displaced stepping. In particular, this
5524 returns the PC as it would be seen by the instruction at its original
5528 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5532 CORE_ADDR from = dsc->insn_addr;
5534 if (regno == ARM_PC_REGNUM)
5536 /* Compute pipeline offset:
5537 - When executing an ARM instruction, PC reads as the address of the
5538 current instruction plus 8.
5539 - When executing a Thumb instruction, PC reads as the address of the
5540 current instruction plus 4. */
5547 if (debug_displaced)
5548 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5549 (unsigned long) from);
5550 return (ULONGEST) from;
5554 regcache_cooked_read_unsigned (regs, regno, &ret);
5555 if (debug_displaced)
5556 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5557 regno, (unsigned long) ret);
5563 displaced_in_arm_mode (struct regcache *regs)
5566 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5568 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5570 return (ps & t_bit) == 0;
5573 /* Write to the PC as from a branch instruction. */
5576 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5580 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5581 architecture versions < 6. */
5582 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5583 val & ~(ULONGEST) 0x3);
5585 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5586 val & ~(ULONGEST) 0x1);
5589 /* Write to the PC as from a branch-exchange instruction. */
5592 bx_write_pc (struct regcache *regs, ULONGEST val)
5595 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5597 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5601 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5602 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5604 else if ((val & 2) == 0)
5606 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5607 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5611 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5612 mode, align dest to 4 bytes). */
5613 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5614 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5615 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5619 /* Write to the PC as if from a load instruction. */
5622 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5625 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5626 bx_write_pc (regs, val);
5628 branch_write_pc (regs, dsc, val);
5631 /* Write to the PC as if from an ALU instruction. */
5634 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5637 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5638 bx_write_pc (regs, val);
5640 branch_write_pc (regs, dsc, val);
5643 /* Helper for writing to registers for displaced stepping. Writing to the PC
5644 has a varying effects depending on the instruction which does the write:
5645 this is controlled by the WRITE_PC argument. */
5648 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5649 int regno, ULONGEST val, enum pc_write_style write_pc)
5651 if (regno == ARM_PC_REGNUM)
5653 if (debug_displaced)
5654 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5655 (unsigned long) val);
5658 case BRANCH_WRITE_PC:
5659 branch_write_pc (regs, dsc, val);
5663 bx_write_pc (regs, val);
5667 load_write_pc (regs, dsc, val);
5671 alu_write_pc (regs, dsc, val);
5674 case CANNOT_WRITE_PC:
5675 warning (_("Instruction wrote to PC in an unexpected way when "
5676 "single-stepping"));
5680 internal_error (__FILE__, __LINE__,
5681 _("Invalid argument to displaced_write_reg"));
5684 dsc->wrote_to_pc = 1;
5688 if (debug_displaced)
5689 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5690 regno, (unsigned long) val);
5691 regcache_cooked_write_unsigned (regs, regno, val);
5695 /* This function is used to concisely determine if an instruction INSN
5696 references PC. Register fields of interest in INSN should have the
5697 corresponding fields of BITMASK set to 0b1111. The function
5698 returns return 1 if any of these fields in INSN reference the PC
5699 (also 0b1111, r15), else it returns 0. */
5702 insn_references_pc (uint32_t insn, uint32_t bitmask)
5704 uint32_t lowbit = 1;
5706 while (bitmask != 0)
5710 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5716 mask = lowbit * 0xf;
5718 if ((insn & mask) == mask)
5727 /* The simplest copy function. Many instructions have the same effect no
5728 matter what address they are executed at: in those cases, use this. */
5731 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5732 const char *iname, struct displaced_step_closure *dsc)
5734 if (debug_displaced)
5735 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5736 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5739 dsc->modinsn[0] = insn;
5745 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5746 uint16_t insn2, const char *iname,
5747 struct displaced_step_closure *dsc)
5749 if (debug_displaced)
5750 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5751 "opcode/class '%s' unmodified\n", insn1, insn2,
5754 dsc->modinsn[0] = insn1;
5755 dsc->modinsn[1] = insn2;
5761 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5764 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5766 struct displaced_step_closure *dsc)
5768 if (debug_displaced)
5769 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5770 "opcode/class '%s' unmodified\n", insn,
5773 dsc->modinsn[0] = insn;
5778 /* Preload instructions with immediate offset. */
5781 cleanup_preload (struct gdbarch *gdbarch,
5782 struct regcache *regs, struct displaced_step_closure *dsc)
5784 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5785 if (!dsc->u.preload.immed)
5786 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5790 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5791 struct displaced_step_closure *dsc, unsigned int rn)
5794 /* Preload instructions:
5796 {pli/pld} [rn, #+/-imm]
5798 {pli/pld} [r0, #+/-imm]. */
5800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5801 rn_val = displaced_read_reg (regs, dsc, rn);
5802 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5803 dsc->u.preload.immed = 1;
5805 dsc->cleanup = &cleanup_preload;
5809 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5810 struct displaced_step_closure *dsc)
5812 unsigned int rn = bits (insn, 16, 19);
5814 if (!insn_references_pc (insn, 0x000f0000ul))
5815 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5817 if (debug_displaced)
5818 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5819 (unsigned long) insn);
5821 dsc->modinsn[0] = insn & 0xfff0ffff;
5823 install_preload (gdbarch, regs, dsc, rn);
5829 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5830 struct regcache *regs, struct displaced_step_closure *dsc)
5832 unsigned int rn = bits (insn1, 0, 3);
5833 unsigned int u_bit = bit (insn1, 7);
5834 int imm12 = bits (insn2, 0, 11);
5837 if (rn != ARM_PC_REGNUM)
5838 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5840 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5841 PLD (literal) Encoding T1. */
5842 if (debug_displaced)
5843 fprintf_unfiltered (gdb_stdlog,
5844 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5845 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5851 /* Rewrite instruction {pli/pld} PC imm12 into:
5852 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5856 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5858 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5859 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5861 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5863 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5864 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5865 dsc->u.preload.immed = 0;
5867 /* {pli/pld} [r0, r1] */
5868 dsc->modinsn[0] = insn1 & 0xfff0;
5869 dsc->modinsn[1] = 0xf001;
5872 dsc->cleanup = &cleanup_preload;
5876 /* Preload instructions with register offset. */
5879 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5880 struct displaced_step_closure *dsc, unsigned int rn,
5883 ULONGEST rn_val, rm_val;
5885 /* Preload register-offset instructions:
5887 {pli/pld} [rn, rm {, shift}]
5889 {pli/pld} [r0, r1 {, shift}]. */
5891 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5892 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5893 rn_val = displaced_read_reg (regs, dsc, rn);
5894 rm_val = displaced_read_reg (regs, dsc, rm);
5895 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5896 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5897 dsc->u.preload.immed = 0;
5899 dsc->cleanup = &cleanup_preload;
5903 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5904 struct regcache *regs,
5905 struct displaced_step_closure *dsc)
5907 unsigned int rn = bits (insn, 16, 19);
5908 unsigned int rm = bits (insn, 0, 3);
5911 if (!insn_references_pc (insn, 0x000f000ful))
5912 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5914 if (debug_displaced)
5915 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5916 (unsigned long) insn);
5918 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5920 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5924 /* Copy/cleanup coprocessor load and store instructions. */
5927 cleanup_copro_load_store (struct gdbarch *gdbarch,
5928 struct regcache *regs,
5929 struct displaced_step_closure *dsc)
5931 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5933 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5935 if (dsc->u.ldst.writeback)
5936 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5940 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5941 struct displaced_step_closure *dsc,
5942 int writeback, unsigned int rn)
5946 /* Coprocessor load/store instructions:
5948 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5950 {stc/stc2} [r0, #+/-imm].
5952 ldc/ldc2 are handled identically. */
5954 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5955 rn_val = displaced_read_reg (regs, dsc, rn);
5956 /* PC should be 4-byte aligned. */
5957 rn_val = rn_val & 0xfffffffc;
5958 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5960 dsc->u.ldst.writeback = writeback;
5961 dsc->u.ldst.rn = rn;
5963 dsc->cleanup = &cleanup_copro_load_store;
5967 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5968 struct regcache *regs,
5969 struct displaced_step_closure *dsc)
5971 unsigned int rn = bits (insn, 16, 19);
5973 if (!insn_references_pc (insn, 0x000f0000ul))
5974 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5976 if (debug_displaced)
5977 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5978 "load/store insn %.8lx\n", (unsigned long) insn);
5980 dsc->modinsn[0] = insn & 0xfff0ffff;
5982 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5988 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5989 uint16_t insn2, struct regcache *regs,
5990 struct displaced_step_closure *dsc)
5992 unsigned int rn = bits (insn1, 0, 3);
5994 if (rn != ARM_PC_REGNUM)
5995 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5996 "copro load/store", dsc);
5998 if (debug_displaced)
5999 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6000 "load/store insn %.4x%.4x\n", insn1, insn2);
6002 dsc->modinsn[0] = insn1 & 0xfff0;
6003 dsc->modinsn[1] = insn2;
6006 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6007 doesn't support writeback, so pass 0. */
6008 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6013 /* Clean up branch instructions (actually perform the branch, by setting
6017 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6018 struct displaced_step_closure *dsc)
6020 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6021 int branch_taken = condition_true (dsc->u.branch.cond, status);
6022 enum pc_write_style write_pc = dsc->u.branch.exchange
6023 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6028 if (dsc->u.branch.link)
6030 /* The value of LR should be the next insn of current one. In order
6031 not to confuse logic hanlding later insn `bx lr', if current insn mode
6032 is Thumb, the bit 0 of LR value should be set to 1. */
6033 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6036 next_insn_addr |= 0x1;
6038 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6042 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6045 /* Copy B/BL/BLX instructions with immediate destinations. */
6048 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6049 struct displaced_step_closure *dsc,
6050 unsigned int cond, int exchange, int link, long offset)
6052 /* Implement "BL<cond> <label>" as:
6054 Preparation: cond <- instruction condition
6055 Insn: mov r0, r0 (nop)
6056 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6058 B<cond> similar, but don't set r14 in cleanup. */
6060 dsc->u.branch.cond = cond;
6061 dsc->u.branch.link = link;
6062 dsc->u.branch.exchange = exchange;
6064 dsc->u.branch.dest = dsc->insn_addr;
6065 if (link && exchange)
6066 /* For BLX, offset is computed from the Align (PC, 4). */
6067 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6070 dsc->u.branch.dest += 4 + offset;
6072 dsc->u.branch.dest += 8 + offset;
6074 dsc->cleanup = &cleanup_branch;
6077 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6078 struct regcache *regs, struct displaced_step_closure *dsc)
6080 unsigned int cond = bits (insn, 28, 31);
6081 int exchange = (cond == 0xf);
6082 int link = exchange || bit (insn, 24);
6085 if (debug_displaced)
6086 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6087 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6088 (unsigned long) insn);
6090 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6091 then arrange the switch into Thumb mode. */
6092 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6094 offset = bits (insn, 0, 23) << 2;
6096 if (bit (offset, 25))
6097 offset = offset | ~0x3ffffff;
6099 dsc->modinsn[0] = ARM_NOP;
6101 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6106 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6107 uint16_t insn2, struct regcache *regs,
6108 struct displaced_step_closure *dsc)
6110 int link = bit (insn2, 14);
6111 int exchange = link && !bit (insn2, 12);
6114 int j1 = bit (insn2, 13);
6115 int j2 = bit (insn2, 11);
6116 int s = sbits (insn1, 10, 10);
6117 int i1 = !(j1 ^ bit (insn1, 10));
6118 int i2 = !(j2 ^ bit (insn1, 10));
6120 if (!link && !exchange) /* B */
6122 offset = (bits (insn2, 0, 10) << 1);
6123 if (bit (insn2, 12)) /* Encoding T4 */
6125 offset |= (bits (insn1, 0, 9) << 12)
6131 else /* Encoding T3 */
6133 offset |= (bits (insn1, 0, 5) << 12)
6137 cond = bits (insn1, 6, 9);
6142 offset = (bits (insn1, 0, 9) << 12);
6143 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6144 offset |= exchange ?
6145 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6148 if (debug_displaced)
6149 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6150 "%.4x %.4x with offset %.8lx\n",
6151 link ? (exchange) ? "blx" : "bl" : "b",
6152 insn1, insn2, offset);
6154 dsc->modinsn[0] = THUMB_NOP;
6156 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6160 /* Copy B Thumb instructions. */
6162 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6163 struct displaced_step_closure *dsc)
6165 unsigned int cond = 0;
6167 unsigned short bit_12_15 = bits (insn, 12, 15);
6168 CORE_ADDR from = dsc->insn_addr;
6170 if (bit_12_15 == 0xd)
6172 /* offset = SignExtend (imm8:0, 32) */
6173 offset = sbits ((insn << 1), 0, 8);
6174 cond = bits (insn, 8, 11);
6176 else if (bit_12_15 == 0xe) /* Encoding T2 */
6178 offset = sbits ((insn << 1), 0, 11);
6182 if (debug_displaced)
6183 fprintf_unfiltered (gdb_stdlog,
6184 "displaced: copying b immediate insn %.4x "
6185 "with offset %d\n", insn, offset);
6187 dsc->u.branch.cond = cond;
6188 dsc->u.branch.link = 0;
6189 dsc->u.branch.exchange = 0;
6190 dsc->u.branch.dest = from + 4 + offset;
6192 dsc->modinsn[0] = THUMB_NOP;
6194 dsc->cleanup = &cleanup_branch;
6199 /* Copy BX/BLX with register-specified destinations. */
6202 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6203 struct displaced_step_closure *dsc, int link,
6204 unsigned int cond, unsigned int rm)
6206 /* Implement {BX,BLX}<cond> <reg>" as:
6208 Preparation: cond <- instruction condition
6209 Insn: mov r0, r0 (nop)
6210 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6212 Don't set r14 in cleanup for BX. */
6214 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6216 dsc->u.branch.cond = cond;
6217 dsc->u.branch.link = link;
6219 dsc->u.branch.exchange = 1;
6221 dsc->cleanup = &cleanup_branch;
6225 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6226 struct regcache *regs, struct displaced_step_closure *dsc)
6228 unsigned int cond = bits (insn, 28, 31);
6231 int link = bit (insn, 5);
6232 unsigned int rm = bits (insn, 0, 3);
6234 if (debug_displaced)
6235 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6236 (unsigned long) insn);
6238 dsc->modinsn[0] = ARM_NOP;
6240 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6245 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6246 struct regcache *regs,
6247 struct displaced_step_closure *dsc)
6249 int link = bit (insn, 7);
6250 unsigned int rm = bits (insn, 3, 6);
6252 if (debug_displaced)
6253 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6254 (unsigned short) insn);
6256 dsc->modinsn[0] = THUMB_NOP;
6258 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6264 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6267 cleanup_alu_imm (struct gdbarch *gdbarch,
6268 struct regcache *regs, struct displaced_step_closure *dsc)
6270 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6271 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6272 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6273 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6277 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6278 struct displaced_step_closure *dsc)
6280 unsigned int rn = bits (insn, 16, 19);
6281 unsigned int rd = bits (insn, 12, 15);
6282 unsigned int op = bits (insn, 21, 24);
6283 int is_mov = (op == 0xd);
6284 ULONGEST rd_val, rn_val;
6286 if (!insn_references_pc (insn, 0x000ff000ul))
6287 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6289 if (debug_displaced)
6290 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6291 "%.8lx\n", is_mov ? "move" : "ALU",
6292 (unsigned long) insn);
6294 /* Instruction is of form:
6296 <op><cond> rd, [rn,] #imm
6300 Preparation: tmp1, tmp2 <- r0, r1;
6302 Insn: <op><cond> r0, r1, #imm
6303 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6306 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6307 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6308 rn_val = displaced_read_reg (regs, dsc, rn);
6309 rd_val = displaced_read_reg (regs, dsc, rd);
6310 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6311 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6315 dsc->modinsn[0] = insn & 0xfff00fff;
6317 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6319 dsc->cleanup = &cleanup_alu_imm;
6325 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6326 uint16_t insn2, struct regcache *regs,
6327 struct displaced_step_closure *dsc)
6329 unsigned int op = bits (insn1, 5, 8);
6330 unsigned int rn, rm, rd;
6331 ULONGEST rd_val, rn_val;
6333 rn = bits (insn1, 0, 3); /* Rn */
6334 rm = bits (insn2, 0, 3); /* Rm */
6335 rd = bits (insn2, 8, 11); /* Rd */
6337 /* This routine is only called for instruction MOV. */
6338 gdb_assert (op == 0x2 && rn == 0xf);
6340 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6341 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6343 if (debug_displaced)
6344 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6345 "ALU", insn1, insn2);
6347 /* Instruction is of form:
6349 <op><cond> rd, [rn,] #imm
6353 Preparation: tmp1, tmp2 <- r0, r1;
6355 Insn: <op><cond> r0, r1, #imm
6356 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6359 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6360 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6361 rn_val = displaced_read_reg (regs, dsc, rn);
6362 rd_val = displaced_read_reg (regs, dsc, rd);
6363 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6364 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6367 dsc->modinsn[0] = insn1;
6368 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6371 dsc->cleanup = &cleanup_alu_imm;
6376 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6379 cleanup_alu_reg (struct gdbarch *gdbarch,
6380 struct regcache *regs, struct displaced_step_closure *dsc)
6385 rd_val = displaced_read_reg (regs, dsc, 0);
6387 for (i = 0; i < 3; i++)
6388 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6390 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6394 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6395 struct displaced_step_closure *dsc,
6396 unsigned int rd, unsigned int rn, unsigned int rm)
6398 ULONGEST rd_val, rn_val, rm_val;
6400 /* Instruction is of form:
6402 <op><cond> rd, [rn,] rm [, <shift>]
6406 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6407 r0, r1, r2 <- rd, rn, rm
6408 Insn: <op><cond> r0, r1, r2 [, <shift>]
6409 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6412 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6413 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6414 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6415 rd_val = displaced_read_reg (regs, dsc, rd);
6416 rn_val = displaced_read_reg (regs, dsc, rn);
6417 rm_val = displaced_read_reg (regs, dsc, rm);
6418 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6419 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6420 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6423 dsc->cleanup = &cleanup_alu_reg;
6427 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6428 struct displaced_step_closure *dsc)
6430 unsigned int op = bits (insn, 21, 24);
6431 int is_mov = (op == 0xd);
6433 if (!insn_references_pc (insn, 0x000ff00ful))
6434 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6436 if (debug_displaced)
6437 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6438 is_mov ? "move" : "ALU", (unsigned long) insn);
6441 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6443 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6445 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6451 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6452 struct regcache *regs,
6453 struct displaced_step_closure *dsc)
6455 unsigned rn, rm, rd;
6457 rd = bits (insn, 3, 6);
6458 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6461 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6462 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6464 if (debug_displaced)
6465 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6466 "ALU", (unsigned short) insn);
6468 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6470 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6475 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6478 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6479 struct regcache *regs,
6480 struct displaced_step_closure *dsc)
6482 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6485 for (i = 0; i < 4; i++)
6486 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6488 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6492 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6493 struct displaced_step_closure *dsc,
6494 unsigned int rd, unsigned int rn, unsigned int rm,
6498 ULONGEST rd_val, rn_val, rm_val, rs_val;
6500 /* Instruction is of form:
6502 <op><cond> rd, [rn,] rm, <shift> rs
6506 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6507 r0, r1, r2, r3 <- rd, rn, rm, rs
6508 Insn: <op><cond> r0, r1, r2, <shift> r3
6510 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6514 for (i = 0; i < 4; i++)
6515 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6517 rd_val = displaced_read_reg (regs, dsc, rd);
6518 rn_val = displaced_read_reg (regs, dsc, rn);
6519 rm_val = displaced_read_reg (regs, dsc, rm);
6520 rs_val = displaced_read_reg (regs, dsc, rs);
6521 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6522 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6523 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6524 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6526 dsc->cleanup = &cleanup_alu_shifted_reg;
6530 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6531 struct regcache *regs,
6532 struct displaced_step_closure *dsc)
6534 unsigned int op = bits (insn, 21, 24);
6535 int is_mov = (op == 0xd);
6536 unsigned int rd, rn, rm, rs;
6538 if (!insn_references_pc (insn, 0x000fff0ful))
6539 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6541 if (debug_displaced)
6542 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6543 "%.8lx\n", is_mov ? "move" : "ALU",
6544 (unsigned long) insn);
6546 rn = bits (insn, 16, 19);
6547 rm = bits (insn, 0, 3);
6548 rs = bits (insn, 8, 11);
6549 rd = bits (insn, 12, 15);
6552 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6554 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6556 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6561 /* Clean up load instructions. */
6564 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6565 struct displaced_step_closure *dsc)
6567 ULONGEST rt_val, rt_val2 = 0, rn_val;
6569 rt_val = displaced_read_reg (regs, dsc, 0);
6570 if (dsc->u.ldst.xfersize == 8)
6571 rt_val2 = displaced_read_reg (regs, dsc, 1);
6572 rn_val = displaced_read_reg (regs, dsc, 2);
6574 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6575 if (dsc->u.ldst.xfersize > 4)
6576 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6577 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6578 if (!dsc->u.ldst.immed)
6579 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6581 /* Handle register writeback. */
6582 if (dsc->u.ldst.writeback)
6583 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6584 /* Put result in right place. */
6585 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6586 if (dsc->u.ldst.xfersize == 8)
6587 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6590 /* Clean up store instructions. */
6593 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6594 struct displaced_step_closure *dsc)
6596 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6598 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6599 if (dsc->u.ldst.xfersize > 4)
6600 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6601 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6602 if (!dsc->u.ldst.immed)
6603 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6604 if (!dsc->u.ldst.restore_r4)
6605 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6608 if (dsc->u.ldst.writeback)
6609 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6612 /* Copy "extra" load/store instructions. These are halfword/doubleword
6613 transfers, which have a different encoding to byte/word transfers. */
6616 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6617 struct regcache *regs, struct displaced_step_closure *dsc)
6619 unsigned int op1 = bits (insn, 20, 24);
6620 unsigned int op2 = bits (insn, 5, 6);
6621 unsigned int rt = bits (insn, 12, 15);
6622 unsigned int rn = bits (insn, 16, 19);
6623 unsigned int rm = bits (insn, 0, 3);
6624 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6625 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6626 int immed = (op1 & 0x4) != 0;
6628 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6630 if (!insn_references_pc (insn, 0x000ff00ful))
6631 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6633 if (debug_displaced)
6634 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6635 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6636 (unsigned long) insn);
6638 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6641 internal_error (__FILE__, __LINE__,
6642 _("copy_extra_ld_st: instruction decode error"));
6644 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6645 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6646 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6648 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6650 rt_val = displaced_read_reg (regs, dsc, rt);
6651 if (bytesize[opcode] == 8)
6652 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6653 rn_val = displaced_read_reg (regs, dsc, rn);
6655 rm_val = displaced_read_reg (regs, dsc, rm);
6657 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6658 if (bytesize[opcode] == 8)
6659 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6660 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6662 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6665 dsc->u.ldst.xfersize = bytesize[opcode];
6666 dsc->u.ldst.rn = rn;
6667 dsc->u.ldst.immed = immed;
6668 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6669 dsc->u.ldst.restore_r4 = 0;
6672 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6674 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6675 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6677 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6679 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6680 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6682 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6687 /* Copy byte/half word/word loads and stores. */
6690 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6691 struct displaced_step_closure *dsc, int load,
6692 int immed, int writeback, int size, int usermode,
6693 int rt, int rm, int rn)
6695 ULONGEST rt_val, rn_val, rm_val = 0;
6697 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6698 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6700 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6702 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6704 rt_val = displaced_read_reg (regs, dsc, rt);
6705 rn_val = displaced_read_reg (regs, dsc, rn);
6707 rm_val = displaced_read_reg (regs, dsc, rm);
6709 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6710 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6712 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6714 dsc->u.ldst.xfersize = size;
6715 dsc->u.ldst.rn = rn;
6716 dsc->u.ldst.immed = immed;
6717 dsc->u.ldst.writeback = writeback;
6719 /* To write PC we can do:
6721 Before this sequence of instructions:
6722 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6723 r2 is the Rn value got from dispalced_read_reg.
6725 Insn1: push {pc} Write address of STR instruction + offset on stack
6726 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6727 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6728 = addr(Insn1) + offset - addr(Insn3) - 8
6730 Insn4: add r4, r4, #8 r4 = offset - 8
6731 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6733 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6735 Otherwise we don't know what value to write for PC, since the offset is
6736 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6737 of this can be found in Section "Saving from r15" in
6738 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6740 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6745 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6746 uint16_t insn2, struct regcache *regs,
6747 struct displaced_step_closure *dsc, int size)
6749 unsigned int u_bit = bit (insn1, 7);
6750 unsigned int rt = bits (insn2, 12, 15);
6751 int imm12 = bits (insn2, 0, 11);
6754 if (debug_displaced)
6755 fprintf_unfiltered (gdb_stdlog,
6756 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6757 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6763 /* Rewrite instruction LDR Rt imm12 into:
6765 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6769 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6772 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6773 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6774 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6776 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6778 pc_val = pc_val & 0xfffffffc;
6780 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6781 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6785 dsc->u.ldst.xfersize = size;
6786 dsc->u.ldst.immed = 0;
6787 dsc->u.ldst.writeback = 0;
6788 dsc->u.ldst.restore_r4 = 0;
6790 /* LDR R0, R2, R3 */
6791 dsc->modinsn[0] = 0xf852;
6792 dsc->modinsn[1] = 0x3;
6795 dsc->cleanup = &cleanup_load;
6801 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6802 uint16_t insn2, struct regcache *regs,
6803 struct displaced_step_closure *dsc,
6804 int writeback, int immed)
6806 unsigned int rt = bits (insn2, 12, 15);
6807 unsigned int rn = bits (insn1, 0, 3);
6808 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6809 /* In LDR (register), there is also a register Rm, which is not allowed to
6810 be PC, so we don't have to check it. */
6812 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6813 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6816 if (debug_displaced)
6817 fprintf_unfiltered (gdb_stdlog,
6818 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6819 rt, rn, insn1, insn2);
6821 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6824 dsc->u.ldst.restore_r4 = 0;
6827 /* ldr[b]<cond> rt, [rn, #imm], etc.
6829 ldr[b]<cond> r0, [r2, #imm]. */
6831 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6832 dsc->modinsn[1] = insn2 & 0x0fff;
6835 /* ldr[b]<cond> rt, [rn, rm], etc.
6837 ldr[b]<cond> r0, [r2, r3]. */
6839 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6840 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6850 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6851 struct regcache *regs,
6852 struct displaced_step_closure *dsc,
6853 int load, int size, int usermode)
6855 int immed = !bit (insn, 25);
6856 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6857 unsigned int rt = bits (insn, 12, 15);
6858 unsigned int rn = bits (insn, 16, 19);
6859 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6861 if (!insn_references_pc (insn, 0x000ff00ful))
6862 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6864 if (debug_displaced)
6865 fprintf_unfiltered (gdb_stdlog,
6866 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6867 load ? (size == 1 ? "ldrb" : "ldr")
6868 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6870 (unsigned long) insn);
6872 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6873 usermode, rt, rm, rn);
6875 if (load || rt != ARM_PC_REGNUM)
6877 dsc->u.ldst.restore_r4 = 0;
6880 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6882 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6883 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6885 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6887 {ldr,str}[b]<cond> r0, [r2, r3]. */
6888 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6892 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6893 dsc->u.ldst.restore_r4 = 1;
6894 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6895 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6896 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6897 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6898 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6902 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6904 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6909 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6914 /* Cleanup LDM instructions with fully-populated register list. This is an
6915 unfortunate corner case: it's impossible to implement correctly by modifying
6916 the instruction. The issue is as follows: we have an instruction,
6920 which we must rewrite to avoid loading PC. A possible solution would be to
6921 do the load in two halves, something like (with suitable cleanup
6925 ldm[id][ab] r8!, {r0-r7}
6927 ldm[id][ab] r8, {r7-r14}
6930 but at present there's no suitable place for <temp>, since the scratch space
6931 is overwritten before the cleanup routine is called. For now, we simply
6932 emulate the instruction. */
6935 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6936 struct displaced_step_closure *dsc)
6938 int inc = dsc->u.block.increment;
6939 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6940 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6941 uint32_t regmask = dsc->u.block.regmask;
6942 int regno = inc ? 0 : 15;
6943 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6944 int exception_return = dsc->u.block.load && dsc->u.block.user
6945 && (regmask & 0x8000) != 0;
6946 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6947 int do_transfer = condition_true (dsc->u.block.cond, status);
6948 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6953 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6954 sensible we can do here. Complain loudly. */
6955 if (exception_return)
6956 error (_("Cannot single-step exception return"));
6958 /* We don't handle any stores here for now. */
6959 gdb_assert (dsc->u.block.load != 0);
6961 if (debug_displaced)
6962 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6963 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6964 dsc->u.block.increment ? "inc" : "dec",
6965 dsc->u.block.before ? "before" : "after");
6972 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6975 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6978 xfer_addr += bump_before;
6980 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6981 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6983 xfer_addr += bump_after;
6985 regmask &= ~(1 << regno);
6988 if (dsc->u.block.writeback)
6989 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6993 /* Clean up an STM which included the PC in the register list. */
6996 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6997 struct displaced_step_closure *dsc)
6999 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7000 int store_executed = condition_true (dsc->u.block.cond, status);
7001 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7002 CORE_ADDR stm_insn_addr;
7005 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7007 /* If condition code fails, there's nothing else to do. */
7008 if (!store_executed)
7011 if (dsc->u.block.increment)
7013 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7015 if (dsc->u.block.before)
7020 pc_stored_at = dsc->u.block.xfer_addr;
7022 if (dsc->u.block.before)
7026 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7027 stm_insn_addr = dsc->scratch_base;
7028 offset = pc_val - stm_insn_addr;
7030 if (debug_displaced)
7031 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7032 "STM instruction\n", offset);
7034 /* Rewrite the stored PC to the proper value for the non-displaced original
7036 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7037 dsc->insn_addr + offset);
7040 /* Clean up an LDM which includes the PC in the register list. We clumped all
7041 the registers in the transferred list into a contiguous range r0...rX (to
7042 avoid loading PC directly and losing control of the debugged program), so we
7043 must undo that here. */
7046 cleanup_block_load_pc (struct gdbarch *gdbarch,
7047 struct regcache *regs,
7048 struct displaced_step_closure *dsc)
7050 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7051 int load_executed = condition_true (dsc->u.block.cond, status);
7052 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7053 unsigned int regs_loaded = bitcount (mask);
7054 unsigned int num_to_shuffle = regs_loaded, clobbered;
7056 /* The method employed here will fail if the register list is fully populated
7057 (we need to avoid loading PC directly). */
7058 gdb_assert (num_to_shuffle < 16);
7063 clobbered = (1 << num_to_shuffle) - 1;
7065 while (num_to_shuffle > 0)
7067 if ((mask & (1 << write_reg)) != 0)
7069 unsigned int read_reg = num_to_shuffle - 1;
7071 if (read_reg != write_reg)
7073 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7074 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7075 if (debug_displaced)
7076 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7077 "loaded register r%d to r%d\n"), read_reg,
7080 else if (debug_displaced)
7081 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7082 "r%d already in the right place\n"),
7085 clobbered &= ~(1 << write_reg);
7093 /* Restore any registers we scribbled over. */
7094 for (write_reg = 0; clobbered != 0; write_reg++)
7096 if ((clobbered & (1 << write_reg)) != 0)
7098 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7100 if (debug_displaced)
7101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7102 "clobbered register r%d\n"), write_reg);
7103 clobbered &= ~(1 << write_reg);
7107 /* Perform register writeback manually. */
7108 if (dsc->u.block.writeback)
7110 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7112 if (dsc->u.block.increment)
7113 new_rn_val += regs_loaded * 4;
7115 new_rn_val -= regs_loaded * 4;
7117 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7122 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7123 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7126 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7127 struct regcache *regs,
7128 struct displaced_step_closure *dsc)
7130 int load = bit (insn, 20);
7131 int user = bit (insn, 22);
7132 int increment = bit (insn, 23);
7133 int before = bit (insn, 24);
7134 int writeback = bit (insn, 21);
7135 int rn = bits (insn, 16, 19);
7137 /* Block transfers which don't mention PC can be run directly
7139 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7140 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7142 if (rn == ARM_PC_REGNUM)
7144 warning (_("displaced: Unpredictable LDM or STM with "
7145 "base register r15"));
7146 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7149 if (debug_displaced)
7150 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7151 "%.8lx\n", (unsigned long) insn);
7153 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7154 dsc->u.block.rn = rn;
7156 dsc->u.block.load = load;
7157 dsc->u.block.user = user;
7158 dsc->u.block.increment = increment;
7159 dsc->u.block.before = before;
7160 dsc->u.block.writeback = writeback;
7161 dsc->u.block.cond = bits (insn, 28, 31);
7163 dsc->u.block.regmask = insn & 0xffff;
7167 if ((insn & 0xffff) == 0xffff)
7169 /* LDM with a fully-populated register list. This case is
7170 particularly tricky. Implement for now by fully emulating the
7171 instruction (which might not behave perfectly in all cases, but
7172 these instructions should be rare enough for that not to matter
7174 dsc->modinsn[0] = ARM_NOP;
7176 dsc->cleanup = &cleanup_block_load_all;
7180 /* LDM of a list of registers which includes PC. Implement by
7181 rewriting the list of registers to be transferred into a
7182 contiguous chunk r0...rX before doing the transfer, then shuffling
7183 registers into the correct places in the cleanup routine. */
7184 unsigned int regmask = insn & 0xffff;
7185 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7186 unsigned int to = 0, from = 0, i, new_rn;
7188 for (i = 0; i < num_in_list; i++)
7189 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7191 /* Writeback makes things complicated. We need to avoid clobbering
7192 the base register with one of the registers in our modified
7193 register list, but just using a different register can't work in
7196 ldm r14!, {r0-r13,pc}
7198 which would need to be rewritten as:
7202 but that can't work, because there's no free register for N.
7204 Solve this by turning off the writeback bit, and emulating
7205 writeback manually in the cleanup routine. */
7210 new_regmask = (1 << num_in_list) - 1;
7212 if (debug_displaced)
7213 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7214 "{..., pc}: original reg list %.4x, modified "
7215 "list %.4x\n"), rn, writeback ? "!" : "",
7216 (int) insn & 0xffff, new_regmask);
7218 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7220 dsc->cleanup = &cleanup_block_load_pc;
7225 /* STM of a list of registers which includes PC. Run the instruction
7226 as-is, but out of line: this will store the wrong value for the PC,
7227 so we must manually fix up the memory in the cleanup routine.
7228 Doing things this way has the advantage that we can auto-detect
7229 the offset of the PC write (which is architecture-dependent) in
7230 the cleanup routine. */
7231 dsc->modinsn[0] = insn;
7233 dsc->cleanup = &cleanup_block_store_pc;
7240 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7241 struct regcache *regs,
7242 struct displaced_step_closure *dsc)
7244 int rn = bits (insn1, 0, 3);
7245 int load = bit (insn1, 4);
7246 int writeback = bit (insn1, 5);
7248 /* Block transfers which don't mention PC can be run directly
7250 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7251 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7253 if (rn == ARM_PC_REGNUM)
7255 warning (_("displaced: Unpredictable LDM or STM with "
7256 "base register r15"));
7257 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7258 "unpredictable ldm/stm", dsc);
7261 if (debug_displaced)
7262 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7263 "%.4x%.4x\n", insn1, insn2);
7265 /* Clear bit 13, since it should be always zero. */
7266 dsc->u.block.regmask = (insn2 & 0xdfff);
7267 dsc->u.block.rn = rn;
7269 dsc->u.block.load = load;
7270 dsc->u.block.user = 0;
7271 dsc->u.block.increment = bit (insn1, 7);
7272 dsc->u.block.before = bit (insn1, 8);
7273 dsc->u.block.writeback = writeback;
7274 dsc->u.block.cond = INST_AL;
7275 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7279 if (dsc->u.block.regmask == 0xffff)
7281 /* This branch is impossible to happen. */
7286 unsigned int regmask = dsc->u.block.regmask;
7287 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7288 unsigned int to = 0, from = 0, i, new_rn;
7290 for (i = 0; i < num_in_list; i++)
7291 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7296 new_regmask = (1 << num_in_list) - 1;
7298 if (debug_displaced)
7299 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7300 "{..., pc}: original reg list %.4x, modified "
7301 "list %.4x\n"), rn, writeback ? "!" : "",
7302 (int) dsc->u.block.regmask, new_regmask);
7304 dsc->modinsn[0] = insn1;
7305 dsc->modinsn[1] = (new_regmask & 0xffff);
7308 dsc->cleanup = &cleanup_block_load_pc;
7313 dsc->modinsn[0] = insn1;
7314 dsc->modinsn[1] = insn2;
7316 dsc->cleanup = &cleanup_block_store_pc;
7321 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7322 for Linux, where some SVC instructions must be treated specially. */
7325 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7326 struct displaced_step_closure *dsc)
7328 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7330 if (debug_displaced)
7331 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7332 "%.8lx\n", (unsigned long) resume_addr);
7334 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7338 /* Common copy routine for svc instruciton. */
7341 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7342 struct displaced_step_closure *dsc)
7344 /* Preparation: none.
7345 Insn: unmodified svc.
7346 Cleanup: pc <- insn_addr + insn_size. */
7348 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7350 dsc->wrote_to_pc = 1;
7352 /* Allow OS-specific code to override SVC handling. */
7353 if (dsc->u.svc.copy_svc_os)
7354 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7357 dsc->cleanup = &cleanup_svc;
7363 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7364 struct regcache *regs, struct displaced_step_closure *dsc)
7367 if (debug_displaced)
7368 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7369 (unsigned long) insn);
7371 dsc->modinsn[0] = insn;
7373 return install_svc (gdbarch, regs, dsc);
7377 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7378 struct regcache *regs, struct displaced_step_closure *dsc)
7381 if (debug_displaced)
7382 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7385 dsc->modinsn[0] = insn;
7387 return install_svc (gdbarch, regs, dsc);
7390 /* Copy undefined instructions. */
7393 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7394 struct displaced_step_closure *dsc)
7396 if (debug_displaced)
7397 fprintf_unfiltered (gdb_stdlog,
7398 "displaced: copying undefined insn %.8lx\n",
7399 (unsigned long) insn);
7401 dsc->modinsn[0] = insn;
7407 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7408 struct displaced_step_closure *dsc)
7411 if (debug_displaced)
7412 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7413 "%.4x %.4x\n", (unsigned short) insn1,
7414 (unsigned short) insn2);
7416 dsc->modinsn[0] = insn1;
7417 dsc->modinsn[1] = insn2;
7423 /* Copy unpredictable instructions. */
7426 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7427 struct displaced_step_closure *dsc)
7429 if (debug_displaced)
7430 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7431 "%.8lx\n", (unsigned long) insn);
7433 dsc->modinsn[0] = insn;
7438 /* The decode_* functions are instruction decoding helpers. They mostly follow
7439 the presentation in the ARM ARM. */
7442 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7443 struct regcache *regs,
7444 struct displaced_step_closure *dsc)
7446 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7447 unsigned int rn = bits (insn, 16, 19);
7449 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7450 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7451 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7452 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7453 else if ((op1 & 0x60) == 0x20)
7454 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7455 else if ((op1 & 0x71) == 0x40)
7456 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7458 else if ((op1 & 0x77) == 0x41)
7459 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7460 else if ((op1 & 0x77) == 0x45)
7461 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7462 else if ((op1 & 0x77) == 0x51)
7465 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7467 return arm_copy_unpred (gdbarch, insn, dsc);
7469 else if ((op1 & 0x77) == 0x55)
7470 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7471 else if (op1 == 0x57)
7474 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7475 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7476 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7477 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7478 default: return arm_copy_unpred (gdbarch, insn, dsc);
7480 else if ((op1 & 0x63) == 0x43)
7481 return arm_copy_unpred (gdbarch, insn, dsc);
7482 else if ((op2 & 0x1) == 0x0)
7483 switch (op1 & ~0x80)
7486 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7488 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7489 case 0x71: case 0x75:
7491 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7492 case 0x63: case 0x67: case 0x73: case 0x77:
7493 return arm_copy_unpred (gdbarch, insn, dsc);
7495 return arm_copy_undef (gdbarch, insn, dsc);
7498 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7502 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7503 struct regcache *regs,
7504 struct displaced_step_closure *dsc)
7506 if (bit (insn, 27) == 0)
7507 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7508 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7509 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7512 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7515 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7517 case 0x4: case 0x5: case 0x6: case 0x7:
7518 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7521 switch ((insn & 0xe00000) >> 21)
7523 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7525 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7528 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7531 return arm_copy_undef (gdbarch, insn, dsc);
7536 int rn_f = (bits (insn, 16, 19) == 0xf);
7537 switch ((insn & 0xe00000) >> 21)
7540 /* ldc/ldc2 imm (undefined for rn == pc). */
7541 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7542 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7545 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7547 case 0x4: case 0x5: case 0x6: case 0x7:
7548 /* ldc/ldc2 lit (undefined for rn != pc). */
7549 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7550 : arm_copy_undef (gdbarch, insn, dsc);
7553 return arm_copy_undef (gdbarch, insn, dsc);
7558 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7561 if (bits (insn, 16, 19) == 0xf)
7563 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7565 return arm_copy_undef (gdbarch, insn, dsc);
7569 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7571 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7575 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7577 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7580 return arm_copy_undef (gdbarch, insn, dsc);
7584 /* Decode miscellaneous instructions in dp/misc encoding space. */
7587 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7588 struct regcache *regs,
7589 struct displaced_step_closure *dsc)
7591 unsigned int op2 = bits (insn, 4, 6);
7592 unsigned int op = bits (insn, 21, 22);
7593 unsigned int op1 = bits (insn, 16, 19);
7598 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7601 if (op == 0x1) /* bx. */
7602 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7604 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7606 return arm_copy_undef (gdbarch, insn, dsc);
7610 /* Not really supported. */
7611 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7613 return arm_copy_undef (gdbarch, insn, dsc);
7617 return arm_copy_bx_blx_reg (gdbarch, insn,
7618 regs, dsc); /* blx register. */
7620 return arm_copy_undef (gdbarch, insn, dsc);
7623 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7627 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7629 /* Not really supported. */
7630 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7633 return arm_copy_undef (gdbarch, insn, dsc);
7638 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7639 struct regcache *regs,
7640 struct displaced_step_closure *dsc)
7643 switch (bits (insn, 20, 24))
7646 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7649 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7651 case 0x12: case 0x16:
7652 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7655 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7659 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7661 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7662 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7663 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7664 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7665 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7666 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7667 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7668 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7669 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7670 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7671 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7672 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7673 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7674 /* 2nd arg means "unpriveleged". */
7675 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7679 /* Should be unreachable. */
7684 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7685 struct regcache *regs,
7686 struct displaced_step_closure *dsc)
7688 int a = bit (insn, 25), b = bit (insn, 4);
7689 uint32_t op1 = bits (insn, 20, 24);
7690 int rn_f = bits (insn, 16, 19) == 0xf;
7692 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7693 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7694 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7695 else if ((!a && (op1 & 0x17) == 0x02)
7696 || (a && (op1 & 0x17) == 0x02 && !b))
7697 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7698 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7699 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7700 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7701 else if ((!a && (op1 & 0x17) == 0x03)
7702 || (a && (op1 & 0x17) == 0x03 && !b))
7703 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7704 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7705 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7706 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7707 else if ((!a && (op1 & 0x17) == 0x06)
7708 || (a && (op1 & 0x17) == 0x06 && !b))
7709 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7710 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7711 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7712 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7713 else if ((!a && (op1 & 0x17) == 0x07)
7714 || (a && (op1 & 0x17) == 0x07 && !b))
7715 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7717 /* Should be unreachable. */
7722 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7723 struct displaced_step_closure *dsc)
7725 switch (bits (insn, 20, 24))
7727 case 0x00: case 0x01: case 0x02: case 0x03:
7728 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7730 case 0x04: case 0x05: case 0x06: case 0x07:
7731 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7733 case 0x08: case 0x09: case 0x0a: case 0x0b:
7734 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7735 return arm_copy_unmodified (gdbarch, insn,
7736 "decode/pack/unpack/saturate/reverse", dsc);
7739 if (bits (insn, 5, 7) == 0) /* op2. */
7741 if (bits (insn, 12, 15) == 0xf)
7742 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7744 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7747 return arm_copy_undef (gdbarch, insn, dsc);
7749 case 0x1a: case 0x1b:
7750 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7751 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7753 return arm_copy_undef (gdbarch, insn, dsc);
7755 case 0x1c: case 0x1d:
7756 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7758 if (bits (insn, 0, 3) == 0xf)
7759 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7761 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7764 return arm_copy_undef (gdbarch, insn, dsc);
7766 case 0x1e: case 0x1f:
7767 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7768 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7770 return arm_copy_undef (gdbarch, insn, dsc);
7773 /* Should be unreachable. */
7778 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7779 struct regcache *regs,
7780 struct displaced_step_closure *dsc)
7783 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7785 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7789 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7790 struct regcache *regs,
7791 struct displaced_step_closure *dsc)
7793 unsigned int opcode = bits (insn, 20, 24);
7797 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7798 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7800 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7801 case 0x12: case 0x16:
7802 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7804 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7805 case 0x13: case 0x17:
7806 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7808 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7809 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7810 /* Note: no writeback for these instructions. Bit 25 will always be
7811 zero though (via caller), so the following works OK. */
7812 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7815 /* Should be unreachable. */
7819 /* Decode shifted register instructions. */
7822 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7823 uint16_t insn2, struct regcache *regs,
7824 struct displaced_step_closure *dsc)
7826 /* PC is only allowed to be used in instruction MOV. */
7828 unsigned int op = bits (insn1, 5, 8);
7829 unsigned int rn = bits (insn1, 0, 3);
7831 if (op == 0x2 && rn == 0xf) /* MOV */
7832 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7835 "dp (shift reg)", dsc);
7839 /* Decode extension register load/store. Exactly the same as
7840 arm_decode_ext_reg_ld_st. */
7843 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7844 uint16_t insn2, struct regcache *regs,
7845 struct displaced_step_closure *dsc)
7847 unsigned int opcode = bits (insn1, 4, 8);
7851 case 0x04: case 0x05:
7852 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7853 "vfp/neon vmov", dsc);
7855 case 0x08: case 0x0c: /* 01x00 */
7856 case 0x0a: case 0x0e: /* 01x10 */
7857 case 0x12: case 0x16: /* 10x10 */
7858 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7859 "vfp/neon vstm/vpush", dsc);
7861 case 0x09: case 0x0d: /* 01x01 */
7862 case 0x0b: case 0x0f: /* 01x11 */
7863 case 0x13: case 0x17: /* 10x11 */
7864 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7865 "vfp/neon vldm/vpop", dsc);
7867 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7868 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7870 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7871 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7874 /* Should be unreachable. */
7879 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7880 struct regcache *regs, struct displaced_step_closure *dsc)
7882 unsigned int op1 = bits (insn, 20, 25);
7883 int op = bit (insn, 4);
7884 unsigned int coproc = bits (insn, 8, 11);
7885 unsigned int rn = bits (insn, 16, 19);
7887 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7888 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7889 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7890 && (coproc & 0xe) != 0xa)
7892 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7893 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7894 && (coproc & 0xe) != 0xa)
7895 /* ldc/ldc2 imm/lit. */
7896 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7897 else if ((op1 & 0x3e) == 0x00)
7898 return arm_copy_undef (gdbarch, insn, dsc);
7899 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7900 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7901 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7902 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7903 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7904 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7905 else if ((op1 & 0x30) == 0x20 && !op)
7907 if ((coproc & 0xe) == 0xa)
7908 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7910 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7912 else if ((op1 & 0x30) == 0x20 && op)
7913 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7914 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7915 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7916 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7917 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7918 else if ((op1 & 0x30) == 0x30)
7919 return arm_copy_svc (gdbarch, insn, regs, dsc);
7921 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7925 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7926 uint16_t insn2, struct regcache *regs,
7927 struct displaced_step_closure *dsc)
7929 unsigned int coproc = bits (insn2, 8, 11);
7930 unsigned int op1 = bits (insn1, 4, 9);
7931 unsigned int bit_5_8 = bits (insn1, 5, 8);
7932 unsigned int bit_9 = bit (insn1, 9);
7933 unsigned int bit_4 = bit (insn1, 4);
7934 unsigned int rn = bits (insn1, 0, 3);
7939 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7940 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7942 else if (bit_5_8 == 0) /* UNDEFINED. */
7943 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7946 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7947 if ((coproc & 0xe) == 0xa)
7948 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7950 else /* coproc is not 101x. */
7952 if (bit_4 == 0) /* STC/STC2. */
7953 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7955 else /* LDC/LDC2 {literal, immeidate}. */
7956 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7962 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7968 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7969 struct displaced_step_closure *dsc, int rd)
7975 Preparation: Rd <- PC
7981 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7982 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7986 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7987 struct displaced_step_closure *dsc,
7988 int rd, unsigned int imm)
7991 /* Encoding T2: ADDS Rd, #imm */
7992 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7994 install_pc_relative (gdbarch, regs, dsc, rd);
8000 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8001 struct regcache *regs,
8002 struct displaced_step_closure *dsc)
8004 unsigned int rd = bits (insn, 8, 10);
8005 unsigned int imm8 = bits (insn, 0, 7);
8007 if (debug_displaced)
8008 fprintf_unfiltered (gdb_stdlog,
8009 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8012 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8016 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8017 uint16_t insn2, struct regcache *regs,
8018 struct displaced_step_closure *dsc)
8020 unsigned int rd = bits (insn2, 8, 11);
8021 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8022 extract raw immediate encoding rather than computing immediate. When
8023 generating ADD or SUB instruction, we can simply perform OR operation to
8024 set immediate into ADD. */
8025 unsigned int imm_3_8 = insn2 & 0x70ff;
8026 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8028 if (debug_displaced)
8029 fprintf_unfiltered (gdb_stdlog,
8030 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8031 rd, imm_i, imm_3_8, insn1, insn2);
8033 if (bit (insn1, 7)) /* Encoding T2 */
8035 /* Encoding T3: SUB Rd, Rd, #imm */
8036 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8037 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8039 else /* Encoding T3 */
8041 /* Encoding T3: ADD Rd, Rd, #imm */
8042 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8043 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8047 install_pc_relative (gdbarch, regs, dsc, rd);
8053 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8054 struct regcache *regs,
8055 struct displaced_step_closure *dsc)
8057 unsigned int rt = bits (insn1, 8, 10);
8059 int imm8 = (bits (insn1, 0, 7) << 2);
8060 CORE_ADDR from = dsc->insn_addr;
8066 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8068 Insn: LDR R0, [R2, R3];
8069 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8071 if (debug_displaced)
8072 fprintf_unfiltered (gdb_stdlog,
8073 "displaced: copying thumb ldr r%d [pc #%d]\n"
8076 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8077 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8078 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8079 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8080 /* The assembler calculates the required value of the offset from the
8081 Align(PC,4) value of this instruction to the label. */
8082 pc = pc & 0xfffffffc;
8084 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8085 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8088 dsc->u.ldst.xfersize = 4;
8090 dsc->u.ldst.immed = 0;
8091 dsc->u.ldst.writeback = 0;
8092 dsc->u.ldst.restore_r4 = 0;
8094 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8096 dsc->cleanup = &cleanup_load;
8101 /* Copy Thumb cbnz/cbz insruction. */
8104 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8105 struct regcache *regs,
8106 struct displaced_step_closure *dsc)
8108 int non_zero = bit (insn1, 11);
8109 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8110 CORE_ADDR from = dsc->insn_addr;
8111 int rn = bits (insn1, 0, 2);
8112 int rn_val = displaced_read_reg (regs, dsc, rn);
8114 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8115 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8116 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8117 condition is false, let it be, cleanup_branch will do nothing. */
8118 if (dsc->u.branch.cond)
8120 dsc->u.branch.cond = INST_AL;
8121 dsc->u.branch.dest = from + 4 + imm5;
8124 dsc->u.branch.dest = from + 2;
8126 dsc->u.branch.link = 0;
8127 dsc->u.branch.exchange = 0;
8129 if (debug_displaced)
8130 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8131 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8132 rn, rn_val, insn1, dsc->u.branch.dest);
8134 dsc->modinsn[0] = THUMB_NOP;
8136 dsc->cleanup = &cleanup_branch;
8140 /* Copy Table Branch Byte/Halfword */
8142 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8143 uint16_t insn2, struct regcache *regs,
8144 struct displaced_step_closure *dsc)
8146 ULONGEST rn_val, rm_val;
8147 int is_tbh = bit (insn2, 4);
8148 CORE_ADDR halfwords = 0;
8149 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8151 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8152 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8158 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8159 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8165 target_read_memory (rn_val + rm_val, buf, 1);
8166 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8169 if (debug_displaced)
8170 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8171 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8172 (unsigned int) rn_val, (unsigned int) rm_val,
8173 (unsigned int) halfwords);
8175 dsc->u.branch.cond = INST_AL;
8176 dsc->u.branch.link = 0;
8177 dsc->u.branch.exchange = 0;
8178 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8180 dsc->cleanup = &cleanup_branch;
8186 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8187 struct displaced_step_closure *dsc)
8190 int val = displaced_read_reg (regs, dsc, 7);
8191 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8194 val = displaced_read_reg (regs, dsc, 8);
8195 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8198 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8203 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8204 struct regcache *regs,
8205 struct displaced_step_closure *dsc)
8207 dsc->u.block.regmask = insn1 & 0x00ff;
8209 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8212 (1) register list is full, that is, r0-r7 are used.
8213 Prepare: tmp[0] <- r8
8215 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8216 MOV r8, r7; Move value of r7 to r8;
8217 POP {r7}; Store PC value into r7.
8219 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8221 (2) register list is not full, supposing there are N registers in
8222 register list (except PC, 0 <= N <= 7).
8223 Prepare: for each i, 0 - N, tmp[i] <- ri.
8225 POP {r0, r1, ...., rN};
8227 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8228 from tmp[] properly.
8230 if (debug_displaced)
8231 fprintf_unfiltered (gdb_stdlog,
8232 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8233 dsc->u.block.regmask, insn1);
8235 if (dsc->u.block.regmask == 0xff)
8237 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8239 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8240 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8241 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8244 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8248 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8249 unsigned int new_regmask, bit = 1;
8250 unsigned int to = 0, from = 0, i, new_rn;
8252 for (i = 0; i < num_in_list + 1; i++)
8253 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8255 new_regmask = (1 << (num_in_list + 1)) - 1;
8257 if (debug_displaced)
8258 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8259 "{..., pc}: original reg list %.4x,"
8260 " modified list %.4x\n"),
8261 (int) dsc->u.block.regmask, new_regmask);
8263 dsc->u.block.regmask |= 0x8000;
8264 dsc->u.block.writeback = 0;
8265 dsc->u.block.cond = INST_AL;
8267 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8269 dsc->cleanup = &cleanup_block_load_pc;
8276 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8277 struct regcache *regs,
8278 struct displaced_step_closure *dsc)
8280 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8281 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8284 /* 16-bit thumb instructions. */
8285 switch (op_bit_12_15)
8287 /* Shift (imme), add, subtract, move and compare. */
8288 case 0: case 1: case 2: case 3:
8289 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8290 "shift/add/sub/mov/cmp",
8294 switch (op_bit_10_11)
8296 case 0: /* Data-processing */
8297 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8301 case 1: /* Special data instructions and branch and exchange. */
8303 unsigned short op = bits (insn1, 7, 9);
8304 if (op == 6 || op == 7) /* BX or BLX */
8305 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8306 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8307 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8309 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8313 default: /* LDR (literal) */
8314 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8317 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8318 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8321 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8322 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8323 else /* Generate SP-relative address */
8324 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8326 case 11: /* Misc 16-bit instructions */
8328 switch (bits (insn1, 8, 11))
8330 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8331 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8333 case 12: case 13: /* POP */
8334 if (bit (insn1, 8)) /* PC is in register list. */
8335 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8337 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8339 case 15: /* If-Then, and hints */
8340 if (bits (insn1, 0, 3))
8341 /* If-Then makes up to four following instructions conditional.
8342 IT instruction itself is not conditional, so handle it as a
8343 common unmodified instruction. */
8344 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8347 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8350 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8355 if (op_bit_10_11 < 2) /* Store multiple registers */
8356 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8357 else /* Load multiple registers */
8358 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8360 case 13: /* Conditional branch and supervisor call */
8361 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8362 err = thumb_copy_b (gdbarch, insn1, dsc);
8364 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8366 case 14: /* Unconditional branch */
8367 err = thumb_copy_b (gdbarch, insn1, dsc);
8374 internal_error (__FILE__, __LINE__,
8375 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8379 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8380 uint16_t insn1, uint16_t insn2,
8381 struct regcache *regs,
8382 struct displaced_step_closure *dsc)
8384 int rt = bits (insn2, 12, 15);
8385 int rn = bits (insn1, 0, 3);
8386 int op1 = bits (insn1, 7, 8);
8389 switch (bits (insn1, 5, 6))
8391 case 0: /* Load byte and memory hints */
8392 if (rt == 0xf) /* PLD/PLI */
8395 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8396 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8398 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8403 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8404 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8407 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8408 "ldrb{reg, immediate}/ldrbt",
8413 case 1: /* Load halfword and memory hints. */
8414 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8415 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8416 "pld/unalloc memhint", dsc);
8420 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8423 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8427 case 2: /* Load word */
8429 int insn2_bit_8_11 = bits (insn2, 8, 11);
8432 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8433 else if (op1 == 0x1) /* Encoding T3 */
8434 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8436 else /* op1 == 0x0 */
8438 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8439 /* LDR (immediate) */
8440 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8441 dsc, bit (insn2, 8), 1);
8442 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8443 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8446 /* LDR (register) */
8447 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8453 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8460 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8461 uint16_t insn2, struct regcache *regs,
8462 struct displaced_step_closure *dsc)
8465 unsigned short op = bit (insn2, 15);
8466 unsigned int op1 = bits (insn1, 11, 12);
8472 switch (bits (insn1, 9, 10))
8477 /* Load/store {dual, execlusive}, table branch. */
8478 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8479 && bits (insn2, 5, 7) == 0)
8480 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8483 /* PC is not allowed to use in load/store {dual, exclusive}
8485 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8486 "load/store dual/ex", dsc);
8488 else /* load/store multiple */
8490 switch (bits (insn1, 7, 8))
8492 case 0: case 3: /* SRS, RFE */
8493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8496 case 1: case 2: /* LDM/STM/PUSH/POP */
8497 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8504 /* Data-processing (shift register). */
8505 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8508 default: /* Coprocessor instructions. */
8509 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8514 case 2: /* op1 = 2 */
8515 if (op) /* Branch and misc control. */
8517 if (bit (insn2, 14) /* BLX/BL */
8518 || bit (insn2, 12) /* Unconditional branch */
8519 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8520 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8522 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8527 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8529 int op = bits (insn1, 4, 8);
8530 int rn = bits (insn1, 0, 3);
8531 if ((op == 0 || op == 0xa) && rn == 0xf)
8532 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8535 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8538 else /* Data processing (modified immeidate) */
8539 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8543 case 3: /* op1 = 3 */
8544 switch (bits (insn1, 9, 10))
8548 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8550 else /* NEON Load/Store and Store single data item */
8551 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8552 "neon elt/struct load/store",
8555 case 1: /* op1 = 3, bits (9, 10) == 1 */
8556 switch (bits (insn1, 7, 8))
8558 case 0: case 1: /* Data processing (register) */
8559 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8562 case 2: /* Multiply and absolute difference */
8563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8564 "mul/mua/diff", dsc);
8566 case 3: /* Long multiply and divide */
8567 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8572 default: /* Coprocessor instructions */
8573 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8582 internal_error (__FILE__, __LINE__,
8583 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8588 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8589 CORE_ADDR to, struct regcache *regs,
8590 struct displaced_step_closure *dsc)
8592 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8594 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8596 if (debug_displaced)
8597 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8598 "at %.8lx\n", insn1, (unsigned long) from);
8601 dsc->insn_size = thumb_insn_size (insn1);
8602 if (thumb_insn_size (insn1) == 4)
8605 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8606 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8609 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8613 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8614 CORE_ADDR to, struct regcache *regs,
8615 struct displaced_step_closure *dsc)
8618 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8621 /* Most displaced instructions use a 1-instruction scratch space, so set this
8622 here and override below if/when necessary. */
8624 dsc->insn_addr = from;
8625 dsc->scratch_base = to;
8626 dsc->cleanup = NULL;
8627 dsc->wrote_to_pc = 0;
8629 if (!displaced_in_arm_mode (regs))
8630 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8634 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8635 if (debug_displaced)
8636 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8637 "at %.8lx\n", (unsigned long) insn,
8638 (unsigned long) from);
8640 if ((insn & 0xf0000000) == 0xf0000000)
8641 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8642 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8644 case 0x0: case 0x1: case 0x2: case 0x3:
8645 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8648 case 0x4: case 0x5: case 0x6:
8649 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8653 err = arm_decode_media (gdbarch, insn, dsc);
8656 case 0x8: case 0x9: case 0xa: case 0xb:
8657 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8660 case 0xc: case 0xd: case 0xe: case 0xf:
8661 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8666 internal_error (__FILE__, __LINE__,
8667 _("arm_process_displaced_insn: Instruction decode error"));
8670 /* Actually set up the scratch space for a displaced instruction. */
8673 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8674 CORE_ADDR to, struct displaced_step_closure *dsc)
8676 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8677 unsigned int i, len, offset;
8678 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8679 int size = dsc->is_thumb? 2 : 4;
8680 const gdb_byte *bkp_insn;
8683 /* Poke modified instruction(s). */
8684 for (i = 0; i < dsc->numinsns; i++)
8686 if (debug_displaced)
8688 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8690 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8693 fprintf_unfiltered (gdb_stdlog, "%.4x",
8694 (unsigned short)dsc->modinsn[i]);
8696 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8697 (unsigned long) to + offset);
8700 write_memory_unsigned_integer (to + offset, size,
8701 byte_order_for_code,
8706 /* Choose the correct breakpoint instruction. */
8709 bkp_insn = tdep->thumb_breakpoint;
8710 len = tdep->thumb_breakpoint_size;
8714 bkp_insn = tdep->arm_breakpoint;
8715 len = tdep->arm_breakpoint_size;
8718 /* Put breakpoint afterwards. */
8719 write_memory (to + offset, bkp_insn, len);
8721 if (debug_displaced)
8722 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8723 paddress (gdbarch, from), paddress (gdbarch, to));
8726 /* Entry point for copying an instruction into scratch space for displaced
8729 struct displaced_step_closure *
8730 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8731 CORE_ADDR from, CORE_ADDR to,
8732 struct regcache *regs)
8734 struct displaced_step_closure *dsc
8735 = xmalloc (sizeof (struct displaced_step_closure));
8736 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8737 arm_displaced_init_closure (gdbarch, from, to, dsc);
8742 /* Entry point for cleaning things up after a displaced instruction has been
8746 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8747 struct displaced_step_closure *dsc,
8748 CORE_ADDR from, CORE_ADDR to,
8749 struct regcache *regs)
8752 dsc->cleanup (gdbarch, regs, dsc);
8754 if (!dsc->wrote_to_pc)
8755 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8756 dsc->insn_addr + dsc->insn_size);
8760 #include "bfd-in2.h"
8761 #include "libcoff.h"
8764 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8766 struct gdbarch *gdbarch = info->application_data;
8768 if (arm_pc_is_thumb (gdbarch, memaddr))
8770 static asymbol *asym;
8771 static combined_entry_type ce;
8772 static struct coff_symbol_struct csym;
8773 static struct bfd fake_bfd;
8774 static bfd_target fake_target;
8776 if (csym.native == NULL)
8778 /* Create a fake symbol vector containing a Thumb symbol.
8779 This is solely so that the code in print_insn_little_arm()
8780 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8781 the presence of a Thumb symbol and switch to decoding
8782 Thumb instructions. */
8784 fake_target.flavour = bfd_target_coff_flavour;
8785 fake_bfd.xvec = &fake_target;
8786 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8788 csym.symbol.the_bfd = &fake_bfd;
8789 csym.symbol.name = "fake";
8790 asym = (asymbol *) & csym;
8793 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8794 info->symbols = &asym;
8797 info->symbols = NULL;
8799 if (info->endian == BFD_ENDIAN_BIG)
8800 return print_insn_big_arm (memaddr, info);
8802 return print_insn_little_arm (memaddr, info);
8805 /* The following define instruction sequences that will cause ARM
8806 cpu's to take an undefined instruction trap. These are used to
8807 signal a breakpoint to GDB.
8809 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8810 modes. A different instruction is required for each mode. The ARM
8811 cpu's can also be big or little endian. Thus four different
8812 instructions are needed to support all cases.
8814 Note: ARMv4 defines several new instructions that will take the
8815 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8816 not in fact add the new instructions. The new undefined
8817 instructions in ARMv4 are all instructions that had no defined
8818 behaviour in earlier chips. There is no guarantee that they will
8819 raise an exception, but may be treated as NOP's. In practice, it
8820 may only safe to rely on instructions matching:
8822 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8823 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8824 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8826 Even this may only true if the condition predicate is true. The
8827 following use a condition predicate of ALWAYS so it is always TRUE.
8829 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8830 and NetBSD all use a software interrupt rather than an undefined
8831 instruction to force a trap. This can be handled by by the
8832 abi-specific code during establishment of the gdbarch vector. */
8834 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8835 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8836 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8837 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8839 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8840 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8841 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8842 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8844 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8845 the program counter value to determine whether a 16-bit or 32-bit
8846 breakpoint should be used. It returns a pointer to a string of
8847 bytes that encode a breakpoint instruction, stores the length of
8848 the string to *lenptr, and adjusts the program counter (if
8849 necessary) to point to the actual memory location where the
8850 breakpoint should be inserted. */
8852 static const unsigned char *
8853 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8855 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8856 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8858 if (arm_pc_is_thumb (gdbarch, *pcptr))
8860 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8862 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8863 check whether we are replacing a 32-bit instruction. */
8864 if (tdep->thumb2_breakpoint != NULL)
8867 if (target_read_memory (*pcptr, buf, 2) == 0)
8869 unsigned short inst1;
8870 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8871 if (thumb_insn_size (inst1) == 4)
8873 *lenptr = tdep->thumb2_breakpoint_size;
8874 return tdep->thumb2_breakpoint;
8879 *lenptr = tdep->thumb_breakpoint_size;
8880 return tdep->thumb_breakpoint;
8884 *lenptr = tdep->arm_breakpoint_size;
8885 return tdep->arm_breakpoint;
8890 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8893 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8895 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8896 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8897 that this is not confused with a 32-bit ARM breakpoint. */
8901 /* Extract from an array REGBUF containing the (raw) register state a
8902 function return value of type TYPE, and copy that, in virtual
8903 format, into VALBUF. */
8906 arm_extract_return_value (struct type *type, struct regcache *regs,
8909 struct gdbarch *gdbarch = get_regcache_arch (regs);
8910 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8912 if (TYPE_CODE_FLT == TYPE_CODE (type))
8914 switch (gdbarch_tdep (gdbarch)->fp_model)
8918 /* The value is in register F0 in internal format. We need to
8919 extract the raw value and then convert it to the desired
8921 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8923 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8924 convert_from_extended (floatformat_from_type (type), tmpbuf,
8925 valbuf, gdbarch_byte_order (gdbarch));
8929 case ARM_FLOAT_SOFT_FPA:
8930 case ARM_FLOAT_SOFT_VFP:
8931 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8932 not using the VFP ABI code. */
8934 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8935 if (TYPE_LENGTH (type) > 4)
8936 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8937 valbuf + INT_REGISTER_SIZE);
8941 internal_error (__FILE__, __LINE__,
8942 _("arm_extract_return_value: "
8943 "Floating point model not supported"));
8947 else if (TYPE_CODE (type) == TYPE_CODE_INT
8948 || TYPE_CODE (type) == TYPE_CODE_CHAR
8949 || TYPE_CODE (type) == TYPE_CODE_BOOL
8950 || TYPE_CODE (type) == TYPE_CODE_PTR
8951 || TYPE_CODE (type) == TYPE_CODE_REF
8952 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8954 /* If the type is a plain integer, then the access is
8955 straight-forward. Otherwise we have to play around a bit
8957 int len = TYPE_LENGTH (type);
8958 int regno = ARM_A1_REGNUM;
8963 /* By using store_unsigned_integer we avoid having to do
8964 anything special for small big-endian values. */
8965 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8966 store_unsigned_integer (valbuf,
8967 (len > INT_REGISTER_SIZE
8968 ? INT_REGISTER_SIZE : len),
8970 len -= INT_REGISTER_SIZE;
8971 valbuf += INT_REGISTER_SIZE;
8976 /* For a structure or union the behaviour is as if the value had
8977 been stored to word-aligned memory and then loaded into
8978 registers with 32-bit load instruction(s). */
8979 int len = TYPE_LENGTH (type);
8980 int regno = ARM_A1_REGNUM;
8981 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8985 regcache_cooked_read (regs, regno++, tmpbuf);
8986 memcpy (valbuf, tmpbuf,
8987 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8988 len -= INT_REGISTER_SIZE;
8989 valbuf += INT_REGISTER_SIZE;
8995 /* Will a function return an aggregate type in memory or in a
8996 register? Return 0 if an aggregate type can be returned in a
8997 register, 1 if it must be returned in memory. */
9000 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9003 enum type_code code;
9005 CHECK_TYPEDEF (type);
9007 /* In the ARM ABI, "integer" like aggregate types are returned in
9008 registers. For an aggregate type to be integer like, its size
9009 must be less than or equal to INT_REGISTER_SIZE and the
9010 offset of each addressable subfield must be zero. Note that bit
9011 fields are not addressable, and all addressable subfields of
9012 unions always start at offset zero.
9014 This function is based on the behaviour of GCC 2.95.1.
9015 See: gcc/arm.c: arm_return_in_memory() for details.
9017 Note: All versions of GCC before GCC 2.95.2 do not set up the
9018 parameters correctly for a function returning the following
9019 structure: struct { float f;}; This should be returned in memory,
9020 not a register. Richard Earnshaw sent me a patch, but I do not
9021 know of any way to detect if a function like the above has been
9022 compiled with the correct calling convention. */
9024 /* All aggregate types that won't fit in a register must be returned
9026 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9031 /* The AAPCS says all aggregates not larger than a word are returned
9033 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9036 /* The only aggregate types that can be returned in a register are
9037 structs and unions. Arrays must be returned in memory. */
9038 code = TYPE_CODE (type);
9039 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9044 /* Assume all other aggregate types can be returned in a register.
9045 Run a check for structures, unions and arrays. */
9048 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9051 /* Need to check if this struct/union is "integer" like. For
9052 this to be true, its size must be less than or equal to
9053 INT_REGISTER_SIZE and the offset of each addressable
9054 subfield must be zero. Note that bit fields are not
9055 addressable, and unions always start at offset zero. If any
9056 of the subfields is a floating point type, the struct/union
9057 cannot be an integer type. */
9059 /* For each field in the object, check:
9060 1) Is it FP? --> yes, nRc = 1;
9061 2) Is it addressable (bitpos != 0) and
9062 not packed (bitsize == 0)?
9066 for (i = 0; i < TYPE_NFIELDS (type); i++)
9068 enum type_code field_type_code;
9069 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9072 /* Is it a floating point type field? */
9073 if (field_type_code == TYPE_CODE_FLT)
9079 /* If bitpos != 0, then we have to care about it. */
9080 if (TYPE_FIELD_BITPOS (type, i) != 0)
9082 /* Bitfields are not addressable. If the field bitsize is
9083 zero, then the field is not packed. Hence it cannot be
9084 a bitfield or any other packed type. */
9085 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9097 /* Write into appropriate registers a function return value of type
9098 TYPE, given in virtual format. */
9101 arm_store_return_value (struct type *type, struct regcache *regs,
9102 const gdb_byte *valbuf)
9104 struct gdbarch *gdbarch = get_regcache_arch (regs);
9105 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9107 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9109 gdb_byte buf[MAX_REGISTER_SIZE];
9111 switch (gdbarch_tdep (gdbarch)->fp_model)
9115 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9116 gdbarch_byte_order (gdbarch));
9117 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9120 case ARM_FLOAT_SOFT_FPA:
9121 case ARM_FLOAT_SOFT_VFP:
9122 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9123 not using the VFP ABI code. */
9125 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9126 if (TYPE_LENGTH (type) > 4)
9127 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9128 valbuf + INT_REGISTER_SIZE);
9132 internal_error (__FILE__, __LINE__,
9133 _("arm_store_return_value: Floating "
9134 "point model not supported"));
9138 else if (TYPE_CODE (type) == TYPE_CODE_INT
9139 || TYPE_CODE (type) == TYPE_CODE_CHAR
9140 || TYPE_CODE (type) == TYPE_CODE_BOOL
9141 || TYPE_CODE (type) == TYPE_CODE_PTR
9142 || TYPE_CODE (type) == TYPE_CODE_REF
9143 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9145 if (TYPE_LENGTH (type) <= 4)
9147 /* Values of one word or less are zero/sign-extended and
9149 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9150 LONGEST val = unpack_long (type, valbuf);
9152 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9153 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9157 /* Integral values greater than one word are stored in consecutive
9158 registers starting with r0. This will always be a multiple of
9159 the regiser size. */
9160 int len = TYPE_LENGTH (type);
9161 int regno = ARM_A1_REGNUM;
9165 regcache_cooked_write (regs, regno++, valbuf);
9166 len -= INT_REGISTER_SIZE;
9167 valbuf += INT_REGISTER_SIZE;
9173 /* For a structure or union the behaviour is as if the value had
9174 been stored to word-aligned memory and then loaded into
9175 registers with 32-bit load instruction(s). */
9176 int len = TYPE_LENGTH (type);
9177 int regno = ARM_A1_REGNUM;
9178 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9182 memcpy (tmpbuf, valbuf,
9183 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9184 regcache_cooked_write (regs, regno++, tmpbuf);
9185 len -= INT_REGISTER_SIZE;
9186 valbuf += INT_REGISTER_SIZE;
9192 /* Handle function return values. */
9194 static enum return_value_convention
9195 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9196 struct type *valtype, struct regcache *regcache,
9197 gdb_byte *readbuf, const gdb_byte *writebuf)
9199 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9200 struct type *func_type = function ? value_type (function) : NULL;
9201 enum arm_vfp_cprc_base_type vfp_base_type;
9204 if (arm_vfp_abi_for_function (gdbarch, func_type)
9205 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9207 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9208 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9210 for (i = 0; i < vfp_base_count; i++)
9212 if (reg_char == 'q')
9215 arm_neon_quad_write (gdbarch, regcache, i,
9216 writebuf + i * unit_length);
9219 arm_neon_quad_read (gdbarch, regcache, i,
9220 readbuf + i * unit_length);
9227 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9228 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9231 regcache_cooked_write (regcache, regnum,
9232 writebuf + i * unit_length);
9234 regcache_cooked_read (regcache, regnum,
9235 readbuf + i * unit_length);
9238 return RETURN_VALUE_REGISTER_CONVENTION;
9241 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9242 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9243 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9245 if (tdep->struct_return == pcc_struct_return
9246 || arm_return_in_memory (gdbarch, valtype))
9247 return RETURN_VALUE_STRUCT_CONVENTION;
9250 /* AAPCS returns complex types longer than a register in memory. */
9251 if (tdep->arm_abi != ARM_ABI_APCS
9252 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9253 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9254 return RETURN_VALUE_STRUCT_CONVENTION;
9257 arm_store_return_value (valtype, regcache, writebuf);
9260 arm_extract_return_value (valtype, regcache, readbuf);
9262 return RETURN_VALUE_REGISTER_CONVENTION;
9267 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9269 struct gdbarch *gdbarch = get_frame_arch (frame);
9270 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9271 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9273 gdb_byte buf[INT_REGISTER_SIZE];
9275 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9277 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9281 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9285 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9286 return the target PC. Otherwise return 0. */
9289 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9293 CORE_ADDR start_addr;
9295 /* Find the starting address and name of the function containing the PC. */
9296 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9298 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9300 start_addr = arm_skip_bx_reg (frame, pc);
9301 if (start_addr != 0)
9307 /* If PC is in a Thumb call or return stub, return the address of the
9308 target PC, which is in a register. The thunk functions are called
9309 _call_via_xx, where x is the register name. The possible names
9310 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9311 functions, named __ARM_call_via_r[0-7]. */
9312 if (strncmp (name, "_call_via_", 10) == 0
9313 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9315 /* Use the name suffix to determine which register contains the
9317 static char *table[15] =
9318 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9319 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9322 int offset = strlen (name) - 2;
9324 for (regno = 0; regno <= 14; regno++)
9325 if (strcmp (&name[offset], table[regno]) == 0)
9326 return get_frame_register_unsigned (frame, regno);
9329 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9330 non-interworking calls to foo. We could decode the stubs
9331 to find the target but it's easier to use the symbol table. */
9332 namelen = strlen (name);
9333 if (name[0] == '_' && name[1] == '_'
9334 && ((namelen > 2 + strlen ("_from_thumb")
9335 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9336 strlen ("_from_thumb")) == 0)
9337 || (namelen > 2 + strlen ("_from_arm")
9338 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9339 strlen ("_from_arm")) == 0)))
9342 int target_len = namelen - 2;
9343 struct bound_minimal_symbol minsym;
9344 struct objfile *objfile;
9345 struct obj_section *sec;
9347 if (name[namelen - 1] == 'b')
9348 target_len -= strlen ("_from_thumb");
9350 target_len -= strlen ("_from_arm");
9352 target_name = alloca (target_len + 1);
9353 memcpy (target_name, name + 2, target_len);
9354 target_name[target_len] = '\0';
9356 sec = find_pc_section (pc);
9357 objfile = (sec == NULL) ? NULL : sec->objfile;
9358 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9359 if (minsym.minsym != NULL)
9360 return BMSYMBOL_VALUE_ADDRESS (minsym);
9365 return 0; /* not a stub */
9369 set_arm_command (char *args, int from_tty)
9371 printf_unfiltered (_("\
9372 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9373 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9377 show_arm_command (char *args, int from_tty)
9379 cmd_show_list (showarmcmdlist, from_tty, "");
9383 arm_update_current_architecture (void)
9385 struct gdbarch_info info;
9387 /* If the current architecture is not ARM, we have nothing to do. */
9388 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9391 /* Update the architecture. */
9392 gdbarch_info_init (&info);
9394 if (!gdbarch_update_p (info))
9395 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9399 set_fp_model_sfunc (char *args, int from_tty,
9400 struct cmd_list_element *c)
9402 enum arm_float_model fp_model;
9404 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9405 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9407 arm_fp_model = fp_model;
9411 if (fp_model == ARM_FLOAT_LAST)
9412 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9415 arm_update_current_architecture ();
9419 show_fp_model (struct ui_file *file, int from_tty,
9420 struct cmd_list_element *c, const char *value)
9422 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9424 if (arm_fp_model == ARM_FLOAT_AUTO
9425 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9426 fprintf_filtered (file, _("\
9427 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9428 fp_model_strings[tdep->fp_model]);
9430 fprintf_filtered (file, _("\
9431 The current ARM floating point model is \"%s\".\n"),
9432 fp_model_strings[arm_fp_model]);
9436 arm_set_abi (char *args, int from_tty,
9437 struct cmd_list_element *c)
9439 enum arm_abi_kind arm_abi;
9441 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9442 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9444 arm_abi_global = arm_abi;
9448 if (arm_abi == ARM_ABI_LAST)
9449 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9452 arm_update_current_architecture ();
9456 arm_show_abi (struct ui_file *file, int from_tty,
9457 struct cmd_list_element *c, const char *value)
9459 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9461 if (arm_abi_global == ARM_ABI_AUTO
9462 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9463 fprintf_filtered (file, _("\
9464 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9465 arm_abi_strings[tdep->arm_abi]);
9467 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9472 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9473 struct cmd_list_element *c, const char *value)
9475 fprintf_filtered (file,
9476 _("The current execution mode assumed "
9477 "(when symbols are unavailable) is \"%s\".\n"),
9478 arm_fallback_mode_string);
9482 arm_show_force_mode (struct ui_file *file, int from_tty,
9483 struct cmd_list_element *c, const char *value)
9485 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9487 fprintf_filtered (file,
9488 _("The current execution mode assumed "
9489 "(even when symbols are available) is \"%s\".\n"),
9490 arm_force_mode_string);
9493 /* If the user changes the register disassembly style used for info
9494 register and other commands, we have to also switch the style used
9495 in opcodes for disassembly output. This function is run in the "set
9496 arm disassembly" command, and does that. */
9499 set_disassembly_style_sfunc (char *args, int from_tty,
9500 struct cmd_list_element *c)
9502 set_disassembly_style ();
9505 /* Return the ARM register name corresponding to register I. */
9507 arm_register_name (struct gdbarch *gdbarch, int i)
9509 const int num_regs = gdbarch_num_regs (gdbarch);
9511 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9512 && i >= num_regs && i < num_regs + 32)
9514 static const char *const vfp_pseudo_names[] = {
9515 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9516 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9517 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9518 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9521 return vfp_pseudo_names[i - num_regs];
9524 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9525 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9527 static const char *const neon_pseudo_names[] = {
9528 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9529 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9532 return neon_pseudo_names[i - num_regs - 32];
9535 if (i >= ARRAY_SIZE (arm_register_names))
9536 /* These registers are only supported on targets which supply
9537 an XML description. */
9540 return arm_register_names[i];
9544 set_disassembly_style (void)
9548 /* Find the style that the user wants. */
9549 for (current = 0; current < num_disassembly_options; current++)
9550 if (disassembly_style == valid_disassembly_styles[current])
9552 gdb_assert (current < num_disassembly_options);
9554 /* Synchronize the disassembler. */
9555 set_arm_regname_option (current);
9558 /* Test whether the coff symbol specific value corresponds to a Thumb
9562 coff_sym_is_thumb (int val)
9564 return (val == C_THUMBEXT
9565 || val == C_THUMBSTAT
9566 || val == C_THUMBEXTFUNC
9567 || val == C_THUMBSTATFUNC
9568 || val == C_THUMBLABEL);
9571 /* arm_coff_make_msymbol_special()
9572 arm_elf_make_msymbol_special()
9574 These functions test whether the COFF or ELF symbol corresponds to
9575 an address in thumb code, and set a "special" bit in a minimal
9576 symbol to indicate that it does. */
9579 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9581 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9582 == ST_BRANCH_TO_THUMB)
9583 MSYMBOL_SET_SPECIAL (msym);
9587 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9589 if (coff_sym_is_thumb (val))
9590 MSYMBOL_SET_SPECIAL (msym);
9594 arm_objfile_data_free (struct objfile *objfile, void *arg)
9596 struct arm_per_objfile *data = arg;
9599 for (i = 0; i < objfile->obfd->section_count; i++)
9600 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9604 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9607 const char *name = bfd_asymbol_name (sym);
9608 struct arm_per_objfile *data;
9609 VEC(arm_mapping_symbol_s) **map_p;
9610 struct arm_mapping_symbol new_map_sym;
9612 gdb_assert (name[0] == '$');
9613 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9616 data = objfile_data (objfile, arm_objfile_data_key);
9619 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9620 struct arm_per_objfile);
9621 set_objfile_data (objfile, arm_objfile_data_key, data);
9622 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9623 objfile->obfd->section_count,
9624 VEC(arm_mapping_symbol_s) *);
9626 map_p = &data->section_maps[bfd_get_section (sym)->index];
9628 new_map_sym.value = sym->value;
9629 new_map_sym.type = name[1];
9631 /* Assume that most mapping symbols appear in order of increasing
9632 value. If they were randomly distributed, it would be faster to
9633 always push here and then sort at first use. */
9634 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9636 struct arm_mapping_symbol *prev_map_sym;
9638 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9639 if (prev_map_sym->value >= sym->value)
9642 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9643 arm_compare_mapping_symbols);
9644 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9649 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9653 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9655 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9656 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9658 /* If necessary, set the T bit. */
9661 ULONGEST val, t_bit;
9662 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9663 t_bit = arm_psr_thumb_bit (gdbarch);
9664 if (arm_pc_is_thumb (gdbarch, pc))
9665 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9668 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9673 /* Read the contents of a NEON quad register, by reading from two
9674 double registers. This is used to implement the quad pseudo
9675 registers, and for argument passing in case the quad registers are
9676 missing; vectors are passed in quad registers when using the VFP
9677 ABI, even if a NEON unit is not present. REGNUM is the index of
9678 the quad register, in [0, 15]. */
9680 static enum register_status
9681 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9682 int regnum, gdb_byte *buf)
9685 gdb_byte reg_buf[8];
9686 int offset, double_regnum;
9687 enum register_status status;
9689 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9690 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9693 /* d0 is always the least significant half of q0. */
9694 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9699 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9700 if (status != REG_VALID)
9702 memcpy (buf + offset, reg_buf, 8);
9704 offset = 8 - offset;
9705 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9706 if (status != REG_VALID)
9708 memcpy (buf + offset, reg_buf, 8);
9713 static enum register_status
9714 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9715 int regnum, gdb_byte *buf)
9717 const int num_regs = gdbarch_num_regs (gdbarch);
9719 gdb_byte reg_buf[8];
9720 int offset, double_regnum;
9722 gdb_assert (regnum >= num_regs);
9725 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9726 /* Quad-precision register. */
9727 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9730 enum register_status status;
9732 /* Single-precision register. */
9733 gdb_assert (regnum < 32);
9735 /* s0 is always the least significant half of d0. */
9736 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9737 offset = (regnum & 1) ? 0 : 4;
9739 offset = (regnum & 1) ? 4 : 0;
9741 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9742 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9745 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9746 if (status == REG_VALID)
9747 memcpy (buf, reg_buf + offset, 4);
9752 /* Store the contents of BUF to a NEON quad register, by writing to
9753 two double registers. This is used to implement the quad pseudo
9754 registers, and for argument passing in case the quad registers are
9755 missing; vectors are passed in quad registers when using the VFP
9756 ABI, even if a NEON unit is not present. REGNUM is the index
9757 of the quad register, in [0, 15]. */
9760 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9761 int regnum, const gdb_byte *buf)
9764 int offset, double_regnum;
9766 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9767 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9770 /* d0 is always the least significant half of q0. */
9771 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9776 regcache_raw_write (regcache, double_regnum, buf + offset);
9777 offset = 8 - offset;
9778 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9782 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9783 int regnum, const gdb_byte *buf)
9785 const int num_regs = gdbarch_num_regs (gdbarch);
9787 gdb_byte reg_buf[8];
9788 int offset, double_regnum;
9790 gdb_assert (regnum >= num_regs);
9793 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9794 /* Quad-precision register. */
9795 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9798 /* Single-precision register. */
9799 gdb_assert (regnum < 32);
9801 /* s0 is always the least significant half of d0. */
9802 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9803 offset = (regnum & 1) ? 0 : 4;
9805 offset = (regnum & 1) ? 4 : 0;
9807 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9808 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9811 regcache_raw_read (regcache, double_regnum, reg_buf);
9812 memcpy (reg_buf + offset, buf, 4);
9813 regcache_raw_write (regcache, double_regnum, reg_buf);
9817 static struct value *
9818 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9820 const int *reg_p = baton;
9821 return value_of_register (*reg_p, frame);
9824 static enum gdb_osabi
9825 arm_elf_osabi_sniffer (bfd *abfd)
9827 unsigned int elfosabi;
9828 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9830 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9832 if (elfosabi == ELFOSABI_ARM)
9833 /* GNU tools use this value. Check note sections in this case,
9835 bfd_map_over_sections (abfd,
9836 generic_elf_osabi_sniff_abi_tag_sections,
9839 /* Anything else will be handled by the generic ELF sniffer. */
9844 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9845 struct reggroup *group)
9847 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9848 this, FPS register belongs to save_regroup, restore_reggroup, and
9849 all_reggroup, of course. */
9850 if (regnum == ARM_FPS_REGNUM)
9851 return (group == float_reggroup
9852 || group == save_reggroup
9853 || group == restore_reggroup
9854 || group == all_reggroup);
9856 return default_register_reggroup_p (gdbarch, regnum, group);
9860 /* For backward-compatibility we allow two 'g' packet lengths with
9861 the remote protocol depending on whether FPA registers are
9862 supplied. M-profile targets do not have FPA registers, but some
9863 stubs already exist in the wild which use a 'g' packet which
9864 supplies them albeit with dummy values. The packet format which
9865 includes FPA registers should be considered deprecated for
9866 M-profile targets. */
9869 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9871 if (gdbarch_tdep (gdbarch)->is_m)
9873 /* If we know from the executable this is an M-profile target,
9874 cater for remote targets whose register set layout is the
9875 same as the FPA layout. */
9876 register_remote_g_packet_guess (gdbarch,
9877 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9878 (16 * INT_REGISTER_SIZE)
9879 + (8 * FP_REGISTER_SIZE)
9880 + (2 * INT_REGISTER_SIZE),
9881 tdesc_arm_with_m_fpa_layout);
9883 /* The regular M-profile layout. */
9884 register_remote_g_packet_guess (gdbarch,
9885 /* r0-r12,sp,lr,pc; xpsr */
9886 (16 * INT_REGISTER_SIZE)
9887 + INT_REGISTER_SIZE,
9890 /* M-profile plus M4F VFP. */
9891 register_remote_g_packet_guess (gdbarch,
9892 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9893 (16 * INT_REGISTER_SIZE)
9894 + (16 * VFP_REGISTER_SIZE)
9895 + (2 * INT_REGISTER_SIZE),
9896 tdesc_arm_with_m_vfp_d16);
9899 /* Otherwise we don't have a useful guess. */
9903 /* Initialize the current architecture based on INFO. If possible,
9904 re-use an architecture from ARCHES, which is a list of
9905 architectures already created during this debugging session.
9907 Called e.g. at program startup, when reading a core file, and when
9908 reading a binary file. */
9910 static struct gdbarch *
9911 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9913 struct gdbarch_tdep *tdep;
9914 struct gdbarch *gdbarch;
9915 struct gdbarch_list *best_arch;
9916 enum arm_abi_kind arm_abi = arm_abi_global;
9917 enum arm_float_model fp_model = arm_fp_model;
9918 struct tdesc_arch_data *tdesc_data = NULL;
9920 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9922 int have_fpa_registers = 1;
9923 const struct target_desc *tdesc = info.target_desc;
9925 /* If we have an object to base this architecture on, try to determine
9928 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9930 int ei_osabi, e_flags;
9932 switch (bfd_get_flavour (info.abfd))
9934 case bfd_target_aout_flavour:
9935 /* Assume it's an old APCS-style ABI. */
9936 arm_abi = ARM_ABI_APCS;
9939 case bfd_target_coff_flavour:
9940 /* Assume it's an old APCS-style ABI. */
9942 arm_abi = ARM_ABI_APCS;
9945 case bfd_target_elf_flavour:
9946 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9947 e_flags = elf_elfheader (info.abfd)->e_flags;
9949 if (ei_osabi == ELFOSABI_ARM)
9951 /* GNU tools used to use this value, but do not for EABI
9952 objects. There's nowhere to tag an EABI version
9953 anyway, so assume APCS. */
9954 arm_abi = ARM_ABI_APCS;
9956 else if (ei_osabi == ELFOSABI_NONE)
9958 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9959 int attr_arch, attr_profile;
9963 case EF_ARM_EABI_UNKNOWN:
9964 /* Assume GNU tools. */
9965 arm_abi = ARM_ABI_APCS;
9968 case EF_ARM_EABI_VER4:
9969 case EF_ARM_EABI_VER5:
9970 arm_abi = ARM_ABI_AAPCS;
9971 /* EABI binaries default to VFP float ordering.
9972 They may also contain build attributes that can
9973 be used to identify if the VFP argument-passing
9975 if (fp_model == ARM_FLOAT_AUTO)
9978 switch (bfd_elf_get_obj_attr_int (info.abfd,
9983 /* "The user intended FP parameter/result
9984 passing to conform to AAPCS, base
9986 fp_model = ARM_FLOAT_SOFT_VFP;
9989 /* "The user intended FP parameter/result
9990 passing to conform to AAPCS, VFP
9992 fp_model = ARM_FLOAT_VFP;
9995 /* "The user intended FP parameter/result
9996 passing to conform to tool chain-specific
9997 conventions" - we don't know any such
9998 conventions, so leave it as "auto". */
10001 /* Attribute value not mentioned in the
10002 October 2008 ABI, so leave it as
10007 fp_model = ARM_FLOAT_SOFT_VFP;
10013 /* Leave it as "auto". */
10014 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10019 /* Detect M-profile programs. This only works if the
10020 executable file includes build attributes; GCC does
10021 copy them to the executable, but e.g. RealView does
10023 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10025 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10027 Tag_CPU_arch_profile);
10028 /* GCC specifies the profile for v6-M; RealView only
10029 specifies the profile for architectures starting with
10030 V7 (as opposed to architectures with a tag
10031 numerically greater than TAG_CPU_ARCH_V7). */
10032 if (!tdesc_has_registers (tdesc)
10033 && (attr_arch == TAG_CPU_ARCH_V6_M
10034 || attr_arch == TAG_CPU_ARCH_V6S_M
10035 || attr_profile == 'M'))
10040 if (fp_model == ARM_FLOAT_AUTO)
10042 int e_flags = elf_elfheader (info.abfd)->e_flags;
10044 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10047 /* Leave it as "auto". Strictly speaking this case
10048 means FPA, but almost nobody uses that now, and
10049 many toolchains fail to set the appropriate bits
10050 for the floating-point model they use. */
10052 case EF_ARM_SOFT_FLOAT:
10053 fp_model = ARM_FLOAT_SOFT_FPA;
10055 case EF_ARM_VFP_FLOAT:
10056 fp_model = ARM_FLOAT_VFP;
10058 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10059 fp_model = ARM_FLOAT_SOFT_VFP;
10064 if (e_flags & EF_ARM_BE8)
10065 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10070 /* Leave it as "auto". */
10075 /* Check any target description for validity. */
10076 if (tdesc_has_registers (tdesc))
10078 /* For most registers we require GDB's default names; but also allow
10079 the numeric names for sp / lr / pc, as a convenience. */
10080 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10081 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10082 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10084 const struct tdesc_feature *feature;
10087 feature = tdesc_find_feature (tdesc,
10088 "org.gnu.gdb.arm.core");
10089 if (feature == NULL)
10091 feature = tdesc_find_feature (tdesc,
10092 "org.gnu.gdb.arm.m-profile");
10093 if (feature == NULL)
10099 tdesc_data = tdesc_data_alloc ();
10102 for (i = 0; i < ARM_SP_REGNUM; i++)
10103 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10104 arm_register_names[i]);
10105 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10108 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10111 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10115 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10116 ARM_PS_REGNUM, "xpsr");
10118 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10119 ARM_PS_REGNUM, "cpsr");
10123 tdesc_data_cleanup (tdesc_data);
10127 feature = tdesc_find_feature (tdesc,
10128 "org.gnu.gdb.arm.fpa");
10129 if (feature != NULL)
10132 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10133 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10134 arm_register_names[i]);
10137 tdesc_data_cleanup (tdesc_data);
10142 have_fpa_registers = 0;
10144 feature = tdesc_find_feature (tdesc,
10145 "org.gnu.gdb.xscale.iwmmxt");
10146 if (feature != NULL)
10148 static const char *const iwmmxt_names[] = {
10149 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10150 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10151 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10152 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10156 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10158 &= tdesc_numbered_register (feature, tdesc_data, i,
10159 iwmmxt_names[i - ARM_WR0_REGNUM]);
10161 /* Check for the control registers, but do not fail if they
10163 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10164 tdesc_numbered_register (feature, tdesc_data, i,
10165 iwmmxt_names[i - ARM_WR0_REGNUM]);
10167 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10169 &= tdesc_numbered_register (feature, tdesc_data, i,
10170 iwmmxt_names[i - ARM_WR0_REGNUM]);
10174 tdesc_data_cleanup (tdesc_data);
10179 /* If we have a VFP unit, check whether the single precision registers
10180 are present. If not, then we will synthesize them as pseudo
10182 feature = tdesc_find_feature (tdesc,
10183 "org.gnu.gdb.arm.vfp");
10184 if (feature != NULL)
10186 static const char *const vfp_double_names[] = {
10187 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10188 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10189 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10190 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10193 /* Require the double precision registers. There must be either
10196 for (i = 0; i < 32; i++)
10198 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10200 vfp_double_names[i]);
10204 if (!valid_p && i == 16)
10207 /* Also require FPSCR. */
10208 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10209 ARM_FPSCR_REGNUM, "fpscr");
10212 tdesc_data_cleanup (tdesc_data);
10216 if (tdesc_unnumbered_register (feature, "s0") == 0)
10217 have_vfp_pseudos = 1;
10219 have_vfp_registers = 1;
10221 /* If we have VFP, also check for NEON. The architecture allows
10222 NEON without VFP (integer vector operations only), but GDB
10223 does not support that. */
10224 feature = tdesc_find_feature (tdesc,
10225 "org.gnu.gdb.arm.neon");
10226 if (feature != NULL)
10228 /* NEON requires 32 double-precision registers. */
10231 tdesc_data_cleanup (tdesc_data);
10235 /* If there are quad registers defined by the stub, use
10236 their type; otherwise (normally) provide them with
10237 the default type. */
10238 if (tdesc_unnumbered_register (feature, "q0") == 0)
10239 have_neon_pseudos = 1;
10246 /* If there is already a candidate, use it. */
10247 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10249 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10251 if (arm_abi != ARM_ABI_AUTO
10252 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10255 if (fp_model != ARM_FLOAT_AUTO
10256 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10259 /* There are various other properties in tdep that we do not
10260 need to check here: those derived from a target description,
10261 since gdbarches with a different target description are
10262 automatically disqualified. */
10264 /* Do check is_m, though, since it might come from the binary. */
10265 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10268 /* Found a match. */
10272 if (best_arch != NULL)
10274 if (tdesc_data != NULL)
10275 tdesc_data_cleanup (tdesc_data);
10276 return best_arch->gdbarch;
10279 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10280 gdbarch = gdbarch_alloc (&info, tdep);
10282 /* Record additional information about the architecture we are defining.
10283 These are gdbarch discriminators, like the OSABI. */
10284 tdep->arm_abi = arm_abi;
10285 tdep->fp_model = fp_model;
10287 tdep->have_fpa_registers = have_fpa_registers;
10288 tdep->have_vfp_registers = have_vfp_registers;
10289 tdep->have_vfp_pseudos = have_vfp_pseudos;
10290 tdep->have_neon_pseudos = have_neon_pseudos;
10291 tdep->have_neon = have_neon;
10293 arm_register_g_packet_guesses (gdbarch);
10296 switch (info.byte_order_for_code)
10298 case BFD_ENDIAN_BIG:
10299 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10300 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10301 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10302 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10306 case BFD_ENDIAN_LITTLE:
10307 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10308 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10309 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10310 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10315 internal_error (__FILE__, __LINE__,
10316 _("arm_gdbarch_init: bad byte order for float format"));
10319 /* On ARM targets char defaults to unsigned. */
10320 set_gdbarch_char_signed (gdbarch, 0);
10322 /* Note: for displaced stepping, this includes the breakpoint, and one word
10323 of additional scratch space. This setting isn't used for anything beside
10324 displaced stepping at present. */
10325 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10327 /* This should be low enough for everything. */
10328 tdep->lowest_pc = 0x20;
10329 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10331 /* The default, for both APCS and AAPCS, is to return small
10332 structures in registers. */
10333 tdep->struct_return = reg_struct_return;
10335 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10336 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10338 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10340 /* Frame handling. */
10341 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10342 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10343 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10345 frame_base_set_default (gdbarch, &arm_normal_base);
10347 /* Address manipulation. */
10348 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10350 /* Advance PC across function entry code. */
10351 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10353 /* Detect whether PC is in function epilogue. */
10354 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10356 /* Skip trampolines. */
10357 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10359 /* The stack grows downward. */
10360 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10362 /* Breakpoint manipulation. */
10363 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10364 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10365 arm_remote_breakpoint_from_pc);
10367 /* Information about registers, etc. */
10368 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10369 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10370 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10371 set_gdbarch_register_type (gdbarch, arm_register_type);
10372 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10374 /* This "info float" is FPA-specific. Use the generic version if we
10375 do not have FPA. */
10376 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10377 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10379 /* Internal <-> external register number maps. */
10380 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10381 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10383 set_gdbarch_register_name (gdbarch, arm_register_name);
10385 /* Returning results. */
10386 set_gdbarch_return_value (gdbarch, arm_return_value);
10389 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10391 /* Minsymbol frobbing. */
10392 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10393 set_gdbarch_coff_make_msymbol_special (gdbarch,
10394 arm_coff_make_msymbol_special);
10395 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10397 /* Thumb-2 IT block support. */
10398 set_gdbarch_adjust_breakpoint_address (gdbarch,
10399 arm_adjust_breakpoint_address);
10401 /* Virtual tables. */
10402 set_gdbarch_vbit_in_delta (gdbarch, 1);
10404 /* Hook in the ABI-specific overrides, if they have been registered. */
10405 gdbarch_init_osabi (info, gdbarch);
10407 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10409 /* Add some default predicates. */
10411 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10412 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10413 dwarf2_append_unwinders (gdbarch);
10414 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10415 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10417 /* Now we have tuned the configuration, set a few final things,
10418 based on what the OS ABI has told us. */
10420 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10421 binaries are always marked. */
10422 if (tdep->arm_abi == ARM_ABI_AUTO)
10423 tdep->arm_abi = ARM_ABI_APCS;
10425 /* Watchpoints are not steppable. */
10426 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10428 /* We used to default to FPA for generic ARM, but almost nobody
10429 uses that now, and we now provide a way for the user to force
10430 the model. So default to the most useful variant. */
10431 if (tdep->fp_model == ARM_FLOAT_AUTO)
10432 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10434 if (tdep->jb_pc >= 0)
10435 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10437 /* Floating point sizes and format. */
10438 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10439 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10441 set_gdbarch_double_format
10442 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10443 set_gdbarch_long_double_format
10444 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10448 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10449 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10452 if (have_vfp_pseudos)
10454 /* NOTE: These are the only pseudo registers used by
10455 the ARM target at the moment. If more are added, a
10456 little more care in numbering will be needed. */
10458 int num_pseudos = 32;
10459 if (have_neon_pseudos)
10461 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10462 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10463 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10468 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10470 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10472 /* Override tdesc_register_type to adjust the types of VFP
10473 registers for NEON. */
10474 set_gdbarch_register_type (gdbarch, arm_register_type);
10477 /* Add standard register aliases. We add aliases even for those
10478 nanes which are used by the current architecture - it's simpler,
10479 and does no harm, since nothing ever lists user registers. */
10480 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10481 user_reg_add (gdbarch, arm_register_aliases[i].name,
10482 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10488 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10490 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10495 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10496 (unsigned long) tdep->lowest_pc);
10499 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10502 _initialize_arm_tdep (void)
10504 struct ui_file *stb;
10506 struct cmd_list_element *new_set, *new_show;
10507 const char *setname;
10508 const char *setdesc;
10509 const char *const *regnames;
10511 static char *helptext;
10512 char regdesc[1024], *rdptr = regdesc;
10513 size_t rest = sizeof (regdesc);
10515 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10517 arm_objfile_data_key
10518 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10520 /* Add ourselves to objfile event chain. */
10521 observer_attach_new_objfile (arm_exidx_new_objfile);
10523 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10525 /* Register an ELF OS ABI sniffer for ARM binaries. */
10526 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10527 bfd_target_elf_flavour,
10528 arm_elf_osabi_sniffer);
10530 /* Initialize the standard target descriptions. */
10531 initialize_tdesc_arm_with_m ();
10532 initialize_tdesc_arm_with_m_fpa_layout ();
10533 initialize_tdesc_arm_with_m_vfp_d16 ();
10534 initialize_tdesc_arm_with_iwmmxt ();
10535 initialize_tdesc_arm_with_vfpv2 ();
10536 initialize_tdesc_arm_with_vfpv3 ();
10537 initialize_tdesc_arm_with_neon ();
10539 /* Get the number of possible sets of register names defined in opcodes. */
10540 num_disassembly_options = get_arm_regname_num_options ();
10542 /* Add root prefix command for all "set arm"/"show arm" commands. */
10543 add_prefix_cmd ("arm", no_class, set_arm_command,
10544 _("Various ARM-specific commands."),
10545 &setarmcmdlist, "set arm ", 0, &setlist);
10547 add_prefix_cmd ("arm", no_class, show_arm_command,
10548 _("Various ARM-specific commands."),
10549 &showarmcmdlist, "show arm ", 0, &showlist);
10551 /* Sync the opcode insn printer with our register viewer. */
10552 parse_arm_disassembler_option ("reg-names-std");
10554 /* Initialize the array that will be passed to
10555 add_setshow_enum_cmd(). */
10556 valid_disassembly_styles
10557 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10558 for (i = 0; i < num_disassembly_options; i++)
10560 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10561 valid_disassembly_styles[i] = setname;
10562 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10565 /* When we find the default names, tell the disassembler to use
10567 if (!strcmp (setname, "std"))
10569 disassembly_style = setname;
10570 set_arm_regname_option (i);
10573 /* Mark the end of valid options. */
10574 valid_disassembly_styles[num_disassembly_options] = NULL;
10576 /* Create the help text. */
10577 stb = mem_fileopen ();
10578 fprintf_unfiltered (stb, "%s%s%s",
10579 _("The valid values are:\n"),
10581 _("The default is \"std\"."));
10582 helptext = ui_file_xstrdup (stb, NULL);
10583 ui_file_delete (stb);
10585 add_setshow_enum_cmd("disassembler", no_class,
10586 valid_disassembly_styles, &disassembly_style,
10587 _("Set the disassembly style."),
10588 _("Show the disassembly style."),
10590 set_disassembly_style_sfunc,
10591 NULL, /* FIXME: i18n: The disassembly style is
10593 &setarmcmdlist, &showarmcmdlist);
10595 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10596 _("Set usage of ARM 32-bit mode."),
10597 _("Show usage of ARM 32-bit mode."),
10598 _("When off, a 26-bit PC will be used."),
10600 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10602 &setarmcmdlist, &showarmcmdlist);
10604 /* Add a command to allow the user to force the FPU model. */
10605 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10606 _("Set the floating point type."),
10607 _("Show the floating point type."),
10608 _("auto - Determine the FP typefrom the OS-ABI.\n\
10609 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10610 fpa - FPA co-processor (GCC compiled).\n\
10611 softvfp - Software FP with pure-endian doubles.\n\
10612 vfp - VFP co-processor."),
10613 set_fp_model_sfunc, show_fp_model,
10614 &setarmcmdlist, &showarmcmdlist);
10616 /* Add a command to allow the user to force the ABI. */
10617 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10619 _("Show the ABI."),
10620 NULL, arm_set_abi, arm_show_abi,
10621 &setarmcmdlist, &showarmcmdlist);
10623 /* Add two commands to allow the user to force the assumed
10625 add_setshow_enum_cmd ("fallback-mode", class_support,
10626 arm_mode_strings, &arm_fallback_mode_string,
10627 _("Set the mode assumed when symbols are unavailable."),
10628 _("Show the mode assumed when symbols are unavailable."),
10629 NULL, NULL, arm_show_fallback_mode,
10630 &setarmcmdlist, &showarmcmdlist);
10631 add_setshow_enum_cmd ("force-mode", class_support,
10632 arm_mode_strings, &arm_force_mode_string,
10633 _("Set the mode assumed even when symbols are available."),
10634 _("Show the mode assumed even when symbols are available."),
10635 NULL, NULL, arm_show_force_mode,
10636 &setarmcmdlist, &showarmcmdlist);
10638 /* Debugging flag. */
10639 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10640 _("Set ARM debugging."),
10641 _("Show ARM debugging."),
10642 _("When on, arm-specific debugging is enabled."),
10644 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10645 &setdebuglist, &showdebuglist);
10648 /* ARM-reversible process record data structures. */
10650 #define ARM_INSN_SIZE_BYTES 4
10651 #define THUMB_INSN_SIZE_BYTES 2
10652 #define THUMB2_INSN_SIZE_BYTES 4
10655 #define INSN_S_L_BIT_NUM 20
10657 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10660 unsigned int reg_len = LENGTH; \
10663 REGS = XNEWVEC (uint32_t, reg_len); \
10664 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10669 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10672 unsigned int mem_len = LENGTH; \
10675 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10676 memcpy(&MEMS->len, &RECORD_BUF[0], \
10677 sizeof(struct arm_mem_r) * LENGTH); \
10682 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10683 #define INSN_RECORDED(ARM_RECORD) \
10684 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10686 /* ARM memory record structure. */
10689 uint32_t len; /* Record length. */
10690 uint32_t addr; /* Memory address. */
10693 /* ARM instruction record contains opcode of current insn
10694 and execution state (before entry to decode_insn()),
10695 contains list of to-be-modified registers and
10696 memory blocks (on return from decode_insn()). */
10698 typedef struct insn_decode_record_t
10700 struct gdbarch *gdbarch;
10701 struct regcache *regcache;
10702 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10703 uint32_t arm_insn; /* Should accommodate thumb. */
10704 uint32_t cond; /* Condition code. */
10705 uint32_t opcode; /* Insn opcode. */
10706 uint32_t decode; /* Insn decode bits. */
10707 uint32_t mem_rec_count; /* No of mem records. */
10708 uint32_t reg_rec_count; /* No of reg records. */
10709 uint32_t *arm_regs; /* Registers to be saved for this record. */
10710 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10711 } insn_decode_record;
10714 /* Checks ARM SBZ and SBO mandatory fields. */
10717 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10719 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10738 enum arm_record_result
10740 ARM_RECORD_SUCCESS = 0,
10741 ARM_RECORD_FAILURE = 1
10748 } arm_record_strx_t;
10759 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10760 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10763 struct regcache *reg_cache = arm_insn_r->regcache;
10764 ULONGEST u_regval[2]= {0};
10766 uint32_t reg_src1 = 0, reg_src2 = 0;
10767 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10768 uint32_t opcode1 = 0;
10770 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10771 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10772 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10775 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10777 /* 1) Handle misc store, immediate offset. */
10778 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10779 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10780 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10781 regcache_raw_read_unsigned (reg_cache, reg_src1,
10783 if (ARM_PC_REGNUM == reg_src1)
10785 /* If R15 was used as Rn, hence current PC+8. */
10786 u_regval[0] = u_regval[0] + 8;
10788 offset_8 = (immed_high << 4) | immed_low;
10789 /* Calculate target store address. */
10790 if (14 == arm_insn_r->opcode)
10792 tgt_mem_addr = u_regval[0] + offset_8;
10796 tgt_mem_addr = u_regval[0] - offset_8;
10798 if (ARM_RECORD_STRH == str_type)
10800 record_buf_mem[0] = 2;
10801 record_buf_mem[1] = tgt_mem_addr;
10802 arm_insn_r->mem_rec_count = 1;
10804 else if (ARM_RECORD_STRD == str_type)
10806 record_buf_mem[0] = 4;
10807 record_buf_mem[1] = tgt_mem_addr;
10808 record_buf_mem[2] = 4;
10809 record_buf_mem[3] = tgt_mem_addr + 4;
10810 arm_insn_r->mem_rec_count = 2;
10813 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10815 /* 2) Store, register offset. */
10817 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10819 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10820 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10821 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10822 if (15 == reg_src2)
10824 /* If R15 was used as Rn, hence current PC+8. */
10825 u_regval[0] = u_regval[0] + 8;
10827 /* Calculate target store address, Rn +/- Rm, register offset. */
10828 if (12 == arm_insn_r->opcode)
10830 tgt_mem_addr = u_regval[0] + u_regval[1];
10834 tgt_mem_addr = u_regval[1] - u_regval[0];
10836 if (ARM_RECORD_STRH == str_type)
10838 record_buf_mem[0] = 2;
10839 record_buf_mem[1] = tgt_mem_addr;
10840 arm_insn_r->mem_rec_count = 1;
10842 else if (ARM_RECORD_STRD == str_type)
10844 record_buf_mem[0] = 4;
10845 record_buf_mem[1] = tgt_mem_addr;
10846 record_buf_mem[2] = 4;
10847 record_buf_mem[3] = tgt_mem_addr + 4;
10848 arm_insn_r->mem_rec_count = 2;
10851 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10852 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10854 /* 3) Store, immediate pre-indexed. */
10855 /* 5) Store, immediate post-indexed. */
10856 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10857 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10858 offset_8 = (immed_high << 4) | immed_low;
10859 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10860 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10861 /* Calculate target store address, Rn +/- Rm, register offset. */
10862 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10864 tgt_mem_addr = u_regval[0] + offset_8;
10868 tgt_mem_addr = u_regval[0] - offset_8;
10870 if (ARM_RECORD_STRH == str_type)
10872 record_buf_mem[0] = 2;
10873 record_buf_mem[1] = tgt_mem_addr;
10874 arm_insn_r->mem_rec_count = 1;
10876 else if (ARM_RECORD_STRD == str_type)
10878 record_buf_mem[0] = 4;
10879 record_buf_mem[1] = tgt_mem_addr;
10880 record_buf_mem[2] = 4;
10881 record_buf_mem[3] = tgt_mem_addr + 4;
10882 arm_insn_r->mem_rec_count = 2;
10884 /* Record Rn also as it changes. */
10885 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10886 arm_insn_r->reg_rec_count = 1;
10888 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10889 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10891 /* 4) Store, register pre-indexed. */
10892 /* 6) Store, register post -indexed. */
10893 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10894 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10895 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10896 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10897 /* Calculate target store address, Rn +/- Rm, register offset. */
10898 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10900 tgt_mem_addr = u_regval[0] + u_regval[1];
10904 tgt_mem_addr = u_regval[1] - u_regval[0];
10906 if (ARM_RECORD_STRH == str_type)
10908 record_buf_mem[0] = 2;
10909 record_buf_mem[1] = tgt_mem_addr;
10910 arm_insn_r->mem_rec_count = 1;
10912 else if (ARM_RECORD_STRD == str_type)
10914 record_buf_mem[0] = 4;
10915 record_buf_mem[1] = tgt_mem_addr;
10916 record_buf_mem[2] = 4;
10917 record_buf_mem[3] = tgt_mem_addr + 4;
10918 arm_insn_r->mem_rec_count = 2;
10920 /* Record Rn also as it changes. */
10921 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10922 arm_insn_r->reg_rec_count = 1;
10927 /* Handling ARM extension space insns. */
10930 arm_record_extension_space (insn_decode_record *arm_insn_r)
10932 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10933 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10934 uint32_t record_buf[8], record_buf_mem[8];
10935 uint32_t reg_src1 = 0;
10936 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10937 struct regcache *reg_cache = arm_insn_r->regcache;
10938 ULONGEST u_regval = 0;
10940 gdb_assert (!INSN_RECORDED(arm_insn_r));
10941 /* Handle unconditional insn extension space. */
10943 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10944 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10945 if (arm_insn_r->cond)
10947 /* PLD has no affect on architectural state, it just affects
10949 if (5 == ((opcode1 & 0xE0) >> 5))
10952 record_buf[0] = ARM_PS_REGNUM;
10953 record_buf[1] = ARM_LR_REGNUM;
10954 arm_insn_r->reg_rec_count = 2;
10956 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10960 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10961 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10964 /* Undefined instruction on ARM V5; need to handle if later
10965 versions define it. */
10968 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10969 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10970 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10972 /* Handle arithmetic insn extension space. */
10973 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10974 && !INSN_RECORDED(arm_insn_r))
10976 /* Handle MLA(S) and MUL(S). */
10977 if (0 <= insn_op1 && 3 >= insn_op1)
10979 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10980 record_buf[1] = ARM_PS_REGNUM;
10981 arm_insn_r->reg_rec_count = 2;
10983 else if (4 <= insn_op1 && 15 >= insn_op1)
10985 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10986 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10987 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10988 record_buf[2] = ARM_PS_REGNUM;
10989 arm_insn_r->reg_rec_count = 3;
10993 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10994 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10995 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10997 /* Handle control insn extension space. */
10999 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11000 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11002 if (!bit (arm_insn_r->arm_insn,25))
11004 if (!bits (arm_insn_r->arm_insn, 4, 7))
11006 if ((0 == insn_op1) || (2 == insn_op1))
11009 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11010 arm_insn_r->reg_rec_count = 1;
11012 else if (1 == insn_op1)
11014 /* CSPR is going to be changed. */
11015 record_buf[0] = ARM_PS_REGNUM;
11016 arm_insn_r->reg_rec_count = 1;
11018 else if (3 == insn_op1)
11020 /* SPSR is going to be changed. */
11021 /* We need to get SPSR value, which is yet to be done. */
11022 printf_unfiltered (_("Process record does not support "
11023 "instruction 0x%0x at address %s.\n"),
11024 arm_insn_r->arm_insn,
11025 paddress (arm_insn_r->gdbarch,
11026 arm_insn_r->this_addr));
11030 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11035 record_buf[0] = ARM_PS_REGNUM;
11036 arm_insn_r->reg_rec_count = 1;
11038 else if (3 == insn_op1)
11041 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11042 arm_insn_r->reg_rec_count = 1;
11045 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11048 record_buf[0] = ARM_PS_REGNUM;
11049 record_buf[1] = ARM_LR_REGNUM;
11050 arm_insn_r->reg_rec_count = 2;
11052 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11054 /* QADD, QSUB, QDADD, QDSUB */
11055 record_buf[0] = ARM_PS_REGNUM;
11056 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11057 arm_insn_r->reg_rec_count = 2;
11059 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11062 record_buf[0] = ARM_PS_REGNUM;
11063 record_buf[1] = ARM_LR_REGNUM;
11064 arm_insn_r->reg_rec_count = 2;
11066 /* Save SPSR also;how? */
11067 printf_unfiltered (_("Process record does not support "
11068 "instruction 0x%0x at address %s.\n"),
11069 arm_insn_r->arm_insn,
11070 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11073 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11074 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11075 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11076 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11079 if (0 == insn_op1 || 1 == insn_op1)
11081 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11082 /* We dont do optimization for SMULW<y> where we
11084 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11085 record_buf[1] = ARM_PS_REGNUM;
11086 arm_insn_r->reg_rec_count = 2;
11088 else if (2 == insn_op1)
11091 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11092 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11093 arm_insn_r->reg_rec_count = 2;
11095 else if (3 == insn_op1)
11098 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11099 arm_insn_r->reg_rec_count = 1;
11105 /* MSR : immediate form. */
11108 /* CSPR is going to be changed. */
11109 record_buf[0] = ARM_PS_REGNUM;
11110 arm_insn_r->reg_rec_count = 1;
11112 else if (3 == insn_op1)
11114 /* SPSR is going to be changed. */
11115 /* we need to get SPSR value, which is yet to be done */
11116 printf_unfiltered (_("Process record does not support "
11117 "instruction 0x%0x at address %s.\n"),
11118 arm_insn_r->arm_insn,
11119 paddress (arm_insn_r->gdbarch,
11120 arm_insn_r->this_addr));
11126 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11127 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11128 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11130 /* Handle load/store insn extension space. */
11132 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11133 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11134 && !INSN_RECORDED(arm_insn_r))
11139 /* These insn, changes register and memory as well. */
11140 /* SWP or SWPB insn. */
11141 /* Get memory address given by Rn. */
11142 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11143 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11144 /* SWP insn ?, swaps word. */
11145 if (8 == arm_insn_r->opcode)
11147 record_buf_mem[0] = 4;
11151 /* SWPB insn, swaps only byte. */
11152 record_buf_mem[0] = 1;
11154 record_buf_mem[1] = u_regval;
11155 arm_insn_r->mem_rec_count = 1;
11156 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11157 arm_insn_r->reg_rec_count = 1;
11159 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11162 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11165 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11169 record_buf[1] = record_buf[0] + 1;
11170 arm_insn_r->reg_rec_count = 2;
11172 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11175 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11178 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11180 /* LDRH, LDRSB, LDRSH. */
11181 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11182 arm_insn_r->reg_rec_count = 1;
11187 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11188 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11189 && !INSN_RECORDED(arm_insn_r))
11192 /* Handle coprocessor insn extension space. */
11195 /* To be done for ARMv5 and later; as of now we return -1. */
11197 printf_unfiltered (_("Process record does not support instruction x%0x "
11198 "at address %s.\n"),arm_insn_r->arm_insn,
11199 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11202 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11203 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11208 /* Handling opcode 000 insns. */
11211 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11213 struct regcache *reg_cache = arm_insn_r->regcache;
11214 uint32_t record_buf[8], record_buf_mem[8];
11215 ULONGEST u_regval[2] = {0};
11217 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11218 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11219 uint32_t opcode1 = 0;
11221 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11222 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11223 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11225 /* Data processing insn /multiply insn. */
11226 if (9 == arm_insn_r->decode
11227 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11228 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11230 /* Handle multiply instructions. */
11231 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11232 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11234 /* Handle MLA and MUL. */
11235 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11236 record_buf[1] = ARM_PS_REGNUM;
11237 arm_insn_r->reg_rec_count = 2;
11239 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11241 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11242 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11243 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11244 record_buf[2] = ARM_PS_REGNUM;
11245 arm_insn_r->reg_rec_count = 3;
11248 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11249 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11251 /* Handle misc load insns, as 20th bit (L = 1). */
11252 /* LDR insn has a capability to do branching, if
11253 MOV LR, PC is precceded by LDR insn having Rn as R15
11254 in that case, it emulates branch and link insn, and hence we
11255 need to save CSPR and PC as well. I am not sure this is right
11256 place; as opcode = 010 LDR insn make this happen, if R15 was
11258 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11259 if (15 != reg_dest)
11261 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11262 arm_insn_r->reg_rec_count = 1;
11266 record_buf[0] = reg_dest;
11267 record_buf[1] = ARM_PS_REGNUM;
11268 arm_insn_r->reg_rec_count = 2;
11271 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11272 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11273 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11274 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11276 /* Handle MSR insn. */
11277 if (9 == arm_insn_r->opcode)
11279 /* CSPR is going to be changed. */
11280 record_buf[0] = ARM_PS_REGNUM;
11281 arm_insn_r->reg_rec_count = 1;
11285 /* SPSR is going to be changed. */
11286 /* How to read SPSR value? */
11287 printf_unfiltered (_("Process record does not support instruction "
11288 "0x%0x at address %s.\n"),
11289 arm_insn_r->arm_insn,
11290 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11294 else if (9 == arm_insn_r->decode
11295 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11296 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11298 /* Handling SWP, SWPB. */
11299 /* These insn, changes register and memory as well. */
11300 /* SWP or SWPB insn. */
11302 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11303 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11304 /* SWP insn ?, swaps word. */
11305 if (8 == arm_insn_r->opcode)
11307 record_buf_mem[0] = 4;
11311 /* SWPB insn, swaps only byte. */
11312 record_buf_mem[0] = 1;
11314 record_buf_mem[1] = u_regval[0];
11315 arm_insn_r->mem_rec_count = 1;
11316 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11317 arm_insn_r->reg_rec_count = 1;
11319 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11320 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11322 /* Handle BLX, branch and link/exchange. */
11323 if (9 == arm_insn_r->opcode)
11325 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11326 and R14 stores the return address. */
11327 record_buf[0] = ARM_PS_REGNUM;
11328 record_buf[1] = ARM_LR_REGNUM;
11329 arm_insn_r->reg_rec_count = 2;
11332 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11334 /* Handle enhanced software breakpoint insn, BKPT. */
11335 /* CPSR is changed to be executed in ARM state, disabling normal
11336 interrupts, entering abort mode. */
11337 /* According to high vector configuration PC is set. */
11338 /* user hit breakpoint and type reverse, in
11339 that case, we need to go back with previous CPSR and
11340 Program Counter. */
11341 record_buf[0] = ARM_PS_REGNUM;
11342 record_buf[1] = ARM_LR_REGNUM;
11343 arm_insn_r->reg_rec_count = 2;
11345 /* Save SPSR also; how? */
11346 printf_unfiltered (_("Process record does not support instruction "
11347 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11348 paddress (arm_insn_r->gdbarch,
11349 arm_insn_r->this_addr));
11352 else if (11 == arm_insn_r->decode
11353 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11355 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11357 /* Handle str(x) insn */
11358 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11361 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11362 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11364 /* Handle BX, branch and link/exchange. */
11365 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11366 record_buf[0] = ARM_PS_REGNUM;
11367 arm_insn_r->reg_rec_count = 1;
11369 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11370 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11371 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11373 /* Count leading zeros: CLZ. */
11374 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11375 arm_insn_r->reg_rec_count = 1;
11377 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11378 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11379 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11380 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11383 /* Handle MRS insn. */
11384 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11385 arm_insn_r->reg_rec_count = 1;
11387 else if (arm_insn_r->opcode <= 15)
11389 /* Normal data processing insns. */
11390 /* Out of 11 shifter operands mode, all the insn modifies destination
11391 register, which is specified by 13-16 decode. */
11392 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11393 record_buf[1] = ARM_PS_REGNUM;
11394 arm_insn_r->reg_rec_count = 2;
11401 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11402 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11406 /* Handling opcode 001 insns. */
11409 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11411 uint32_t record_buf[8], record_buf_mem[8];
11413 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11414 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11416 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11417 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11418 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11421 /* Handle MSR insn. */
11422 if (9 == arm_insn_r->opcode)
11424 /* CSPR is going to be changed. */
11425 record_buf[0] = ARM_PS_REGNUM;
11426 arm_insn_r->reg_rec_count = 1;
11430 /* SPSR is going to be changed. */
11433 else if (arm_insn_r->opcode <= 15)
11435 /* Normal data processing insns. */
11436 /* Out of 11 shifter operands mode, all the insn modifies destination
11437 register, which is specified by 13-16 decode. */
11438 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11439 record_buf[1] = ARM_PS_REGNUM;
11440 arm_insn_r->reg_rec_count = 2;
11447 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11448 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11452 /* Handling opcode 010 insns. */
11455 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11457 struct regcache *reg_cache = arm_insn_r->regcache;
11459 uint32_t reg_src1 = 0 , reg_dest = 0;
11460 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11461 uint32_t record_buf[8], record_buf_mem[8];
11463 ULONGEST u_regval = 0;
11465 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11466 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11468 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11470 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11471 /* LDR insn has a capability to do branching, if
11472 MOV LR, PC is precedded by LDR insn having Rn as R15
11473 in that case, it emulates branch and link insn, and hence we
11474 need to save CSPR and PC as well. */
11475 if (ARM_PC_REGNUM != reg_dest)
11477 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11478 arm_insn_r->reg_rec_count = 1;
11482 record_buf[0] = reg_dest;
11483 record_buf[1] = ARM_PS_REGNUM;
11484 arm_insn_r->reg_rec_count = 2;
11489 /* Store, immediate offset, immediate pre-indexed,
11490 immediate post-indexed. */
11491 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11492 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11493 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11495 if (bit (arm_insn_r->arm_insn, 23))
11497 tgt_mem_addr = u_regval + offset_12;
11501 tgt_mem_addr = u_regval - offset_12;
11504 switch (arm_insn_r->opcode)
11518 record_buf_mem[0] = 4;
11533 record_buf_mem[0] = 1;
11537 gdb_assert_not_reached ("no decoding pattern found");
11540 record_buf_mem[1] = tgt_mem_addr;
11541 arm_insn_r->mem_rec_count = 1;
11543 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11544 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11545 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11546 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11547 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11548 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11551 /* We are handling pre-indexed mode; post-indexed mode;
11552 where Rn is going to be changed. */
11553 record_buf[0] = reg_src1;
11554 arm_insn_r->reg_rec_count = 1;
11558 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11559 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11563 /* Handling opcode 011 insns. */
11566 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11568 struct regcache *reg_cache = arm_insn_r->regcache;
11570 uint32_t shift_imm = 0;
11571 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11572 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11573 uint32_t record_buf[8], record_buf_mem[8];
11576 ULONGEST u_regval[2];
11578 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11579 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11581 /* Handle enhanced store insns and LDRD DSP insn,
11582 order begins according to addressing modes for store insns
11586 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11588 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11589 /* LDR insn has a capability to do branching, if
11590 MOV LR, PC is precedded by LDR insn having Rn as R15
11591 in that case, it emulates branch and link insn, and hence we
11592 need to save CSPR and PC as well. */
11593 if (15 != reg_dest)
11595 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11596 arm_insn_r->reg_rec_count = 1;
11600 record_buf[0] = reg_dest;
11601 record_buf[1] = ARM_PS_REGNUM;
11602 arm_insn_r->reg_rec_count = 2;
11607 if (! bits (arm_insn_r->arm_insn, 4, 11))
11609 /* Store insn, register offset and register pre-indexed,
11610 register post-indexed. */
11612 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11614 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11615 regcache_raw_read_unsigned (reg_cache, reg_src1
11617 regcache_raw_read_unsigned (reg_cache, reg_src2
11619 if (15 == reg_src2)
11621 /* If R15 was used as Rn, hence current PC+8. */
11622 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11623 u_regval[0] = u_regval[0] + 8;
11625 /* Calculate target store address, Rn +/- Rm, register offset. */
11627 if (bit (arm_insn_r->arm_insn, 23))
11629 tgt_mem_addr = u_regval[0] + u_regval[1];
11633 tgt_mem_addr = u_regval[1] - u_regval[0];
11636 switch (arm_insn_r->opcode)
11650 record_buf_mem[0] = 4;
11665 record_buf_mem[0] = 1;
11669 gdb_assert_not_reached ("no decoding pattern found");
11672 record_buf_mem[1] = tgt_mem_addr;
11673 arm_insn_r->mem_rec_count = 1;
11675 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11676 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11677 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11678 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11679 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11680 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11683 /* Rn is going to be changed in pre-indexed mode and
11684 post-indexed mode as well. */
11685 record_buf[0] = reg_src2;
11686 arm_insn_r->reg_rec_count = 1;
11691 /* Store insn, scaled register offset; scaled pre-indexed. */
11692 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11694 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11696 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11697 /* Get shift_imm. */
11698 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11699 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11700 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11701 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11702 /* Offset_12 used as shift. */
11706 /* Offset_12 used as index. */
11707 offset_12 = u_regval[0] << shift_imm;
11711 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11717 if (bit (u_regval[0], 31))
11719 offset_12 = 0xFFFFFFFF;
11728 /* This is arithmetic shift. */
11729 offset_12 = s_word >> shift_imm;
11736 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11738 /* Get C flag value and shift it by 31. */
11739 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11740 | (u_regval[0]) >> 1);
11744 offset_12 = (u_regval[0] >> shift_imm) \
11746 (sizeof(uint32_t) - shift_imm));
11751 gdb_assert_not_reached ("no decoding pattern found");
11755 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11757 if (bit (arm_insn_r->arm_insn, 23))
11759 tgt_mem_addr = u_regval[1] + offset_12;
11763 tgt_mem_addr = u_regval[1] - offset_12;
11766 switch (arm_insn_r->opcode)
11780 record_buf_mem[0] = 4;
11795 record_buf_mem[0] = 1;
11799 gdb_assert_not_reached ("no decoding pattern found");
11802 record_buf_mem[1] = tgt_mem_addr;
11803 arm_insn_r->mem_rec_count = 1;
11805 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11806 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11807 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11808 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11809 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11810 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11813 /* Rn is going to be changed in register scaled pre-indexed
11814 mode,and scaled post indexed mode. */
11815 record_buf[0] = reg_src2;
11816 arm_insn_r->reg_rec_count = 1;
11821 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11822 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11826 /* Handling opcode 100 insns. */
11829 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11831 struct regcache *reg_cache = arm_insn_r->regcache;
11833 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11834 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11835 uint32_t start_address = 0, index = 0;
11836 uint32_t record_buf[24], record_buf_mem[48];
11838 ULONGEST u_regval[2] = {0};
11840 /* This mode is exclusively for load and store multiple. */
11841 /* Handle incremenrt after/before and decrment after.before mode;
11842 Rn is changing depending on W bit, but as of now we store Rn too
11843 without optimization. */
11845 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11847 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11849 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11851 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11856 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11860 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11861 while (register_bits)
11863 if (register_bits & 0x00000001)
11864 record_buf[index++] = register_count;
11865 register_bits = register_bits >> 1;
11869 /* Extra space for Base Register and CPSR; wihtout optimization. */
11870 record_buf[index++] = reg_src1;
11871 record_buf[index++] = ARM_PS_REGNUM;
11872 arm_insn_r->reg_rec_count = index;
11876 /* It handles both STM(1) and STM(2). */
11877 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11879 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11881 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11882 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11883 while (register_bits)
11885 if (register_bits & 0x00000001)
11887 register_bits = register_bits >> 1;
11892 /* Decrement after. */
11894 start_address = (u_regval[0]) - (register_count * 4) + 4;
11895 arm_insn_r->mem_rec_count = register_count;
11896 while (register_count)
11898 record_buf_mem[(register_count * 2) - 1] = start_address;
11899 record_buf_mem[(register_count * 2) - 2] = 4;
11900 start_address = start_address + 4;
11905 /* Increment after. */
11907 start_address = u_regval[0];
11908 arm_insn_r->mem_rec_count = register_count;
11909 while (register_count)
11911 record_buf_mem[(register_count * 2) - 1] = start_address;
11912 record_buf_mem[(register_count * 2) - 2] = 4;
11913 start_address = start_address + 4;
11918 /* Decrement before. */
11921 start_address = (u_regval[0]) - (register_count * 4);
11922 arm_insn_r->mem_rec_count = register_count;
11923 while (register_count)
11925 record_buf_mem[(register_count * 2) - 1] = start_address;
11926 record_buf_mem[(register_count * 2) - 2] = 4;
11927 start_address = start_address + 4;
11932 /* Increment before. */
11934 start_address = u_regval[0] + 4;
11935 arm_insn_r->mem_rec_count = register_count;
11936 while (register_count)
11938 record_buf_mem[(register_count * 2) - 1] = start_address;
11939 record_buf_mem[(register_count * 2) - 2] = 4;
11940 start_address = start_address + 4;
11946 gdb_assert_not_reached ("no decoding pattern found");
11950 /* Base register also changes; based on condition and W bit. */
11951 /* We save it anyway without optimization. */
11952 record_buf[0] = reg_src1;
11953 arm_insn_r->reg_rec_count = 1;
11956 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11957 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11961 /* Handling opcode 101 insns. */
11964 arm_record_b_bl (insn_decode_record *arm_insn_r)
11966 uint32_t record_buf[8];
11968 /* Handle B, BL, BLX(1) insns. */
11969 /* B simply branches so we do nothing here. */
11970 /* Note: BLX(1) doesnt fall here but instead it falls into
11971 extension space. */
11972 if (bit (arm_insn_r->arm_insn, 24))
11974 record_buf[0] = ARM_LR_REGNUM;
11975 arm_insn_r->reg_rec_count = 1;
11978 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11983 /* Handling opcode 110 insns. */
11986 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11988 printf_unfiltered (_("Process record does not support instruction "
11989 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11990 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11995 /* Handling opcode 111 insns. */
11998 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12000 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12001 struct regcache *reg_cache = arm_insn_r->regcache;
12002 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12003 ULONGEST u_regval = 0;
12005 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12007 /* Handle arm SWI/SVC system call instructions. */
12008 if (15 == arm_insn_r->opcode)
12010 if (tdep->arm_syscall_record != NULL)
12012 ULONGEST svc_operand, svc_number;
12014 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12016 if (svc_operand) /* OABI. */
12017 svc_number = svc_operand - 0x900000;
12019 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12021 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12025 printf_unfiltered (_("no syscall record support\n"));
12031 arm_record_unsupported_insn (arm_insn_r);
12038 /* Handling opcode 000 insns. */
12041 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12043 uint32_t record_buf[8];
12044 uint32_t reg_src1 = 0;
12046 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12048 record_buf[0] = ARM_PS_REGNUM;
12049 record_buf[1] = reg_src1;
12050 thumb_insn_r->reg_rec_count = 2;
12052 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12058 /* Handling opcode 001 insns. */
12061 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12063 uint32_t record_buf[8];
12064 uint32_t reg_src1 = 0;
12066 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12068 record_buf[0] = ARM_PS_REGNUM;
12069 record_buf[1] = reg_src1;
12070 thumb_insn_r->reg_rec_count = 2;
12072 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12077 /* Handling opcode 010 insns. */
12080 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12082 struct regcache *reg_cache = thumb_insn_r->regcache;
12083 uint32_t record_buf[8], record_buf_mem[8];
12085 uint32_t reg_src1 = 0, reg_src2 = 0;
12086 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12088 ULONGEST u_regval[2] = {0};
12090 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12092 if (bit (thumb_insn_r->arm_insn, 12))
12094 /* Handle load/store register offset. */
12095 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12096 if (opcode2 >= 12 && opcode2 <= 15)
12098 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12099 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12100 record_buf[0] = reg_src1;
12101 thumb_insn_r->reg_rec_count = 1;
12103 else if (opcode2 >= 8 && opcode2 <= 10)
12105 /* STR(2), STRB(2), STRH(2) . */
12106 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12107 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12108 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12109 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12111 record_buf_mem[0] = 4; /* STR (2). */
12112 else if (10 == opcode2)
12113 record_buf_mem[0] = 1; /* STRB (2). */
12114 else if (9 == opcode2)
12115 record_buf_mem[0] = 2; /* STRH (2). */
12116 record_buf_mem[1] = u_regval[0] + u_regval[1];
12117 thumb_insn_r->mem_rec_count = 1;
12120 else if (bit (thumb_insn_r->arm_insn, 11))
12122 /* Handle load from literal pool. */
12124 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12125 record_buf[0] = reg_src1;
12126 thumb_insn_r->reg_rec_count = 1;
12130 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12131 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12132 if ((3 == opcode2) && (!opcode3))
12134 /* Branch with exchange. */
12135 record_buf[0] = ARM_PS_REGNUM;
12136 thumb_insn_r->reg_rec_count = 1;
12140 /* Format 8; special data processing insns. */
12141 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12142 record_buf[0] = ARM_PS_REGNUM;
12143 record_buf[1] = reg_src1;
12144 thumb_insn_r->reg_rec_count = 2;
12149 /* Format 5; data processing insns. */
12150 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12151 if (bit (thumb_insn_r->arm_insn, 7))
12153 reg_src1 = reg_src1 + 8;
12155 record_buf[0] = ARM_PS_REGNUM;
12156 record_buf[1] = reg_src1;
12157 thumb_insn_r->reg_rec_count = 2;
12160 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12161 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12167 /* Handling opcode 001 insns. */
12170 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12172 struct regcache *reg_cache = thumb_insn_r->regcache;
12173 uint32_t record_buf[8], record_buf_mem[8];
12175 uint32_t reg_src1 = 0;
12176 uint32_t opcode = 0, immed_5 = 0;
12178 ULONGEST u_regval = 0;
12180 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12185 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12186 record_buf[0] = reg_src1;
12187 thumb_insn_r->reg_rec_count = 1;
12192 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12193 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12194 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12195 record_buf_mem[0] = 4;
12196 record_buf_mem[1] = u_regval + (immed_5 * 4);
12197 thumb_insn_r->mem_rec_count = 1;
12200 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12201 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12207 /* Handling opcode 100 insns. */
12210 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12212 struct regcache *reg_cache = thumb_insn_r->regcache;
12213 uint32_t record_buf[8], record_buf_mem[8];
12215 uint32_t reg_src1 = 0;
12216 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12218 ULONGEST u_regval = 0;
12220 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12225 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12226 record_buf[0] = reg_src1;
12227 thumb_insn_r->reg_rec_count = 1;
12229 else if (1 == opcode)
12232 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12233 record_buf[0] = reg_src1;
12234 thumb_insn_r->reg_rec_count = 1;
12236 else if (2 == opcode)
12239 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12240 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12241 record_buf_mem[0] = 4;
12242 record_buf_mem[1] = u_regval + (immed_8 * 4);
12243 thumb_insn_r->mem_rec_count = 1;
12245 else if (0 == opcode)
12248 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12249 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12250 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12251 record_buf_mem[0] = 2;
12252 record_buf_mem[1] = u_regval + (immed_5 * 2);
12253 thumb_insn_r->mem_rec_count = 1;
12256 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12257 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12263 /* Handling opcode 101 insns. */
12266 thumb_record_misc (insn_decode_record *thumb_insn_r)
12268 struct regcache *reg_cache = thumb_insn_r->regcache;
12270 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12271 uint32_t register_bits = 0, register_count = 0;
12272 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12273 uint32_t record_buf[24], record_buf_mem[48];
12276 ULONGEST u_regval = 0;
12278 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12279 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12280 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12285 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12286 while (register_bits)
12288 if (register_bits & 0x00000001)
12289 record_buf[index++] = register_count;
12290 register_bits = register_bits >> 1;
12293 record_buf[index++] = ARM_PS_REGNUM;
12294 record_buf[index++] = ARM_SP_REGNUM;
12295 thumb_insn_r->reg_rec_count = index;
12297 else if (10 == opcode2)
12300 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12301 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12302 while (register_bits)
12304 if (register_bits & 0x00000001)
12306 register_bits = register_bits >> 1;
12308 start_address = u_regval - \
12309 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12310 thumb_insn_r->mem_rec_count = register_count;
12311 while (register_count)
12313 record_buf_mem[(register_count * 2) - 1] = start_address;
12314 record_buf_mem[(register_count * 2) - 2] = 4;
12315 start_address = start_address + 4;
12318 record_buf[0] = ARM_SP_REGNUM;
12319 thumb_insn_r->reg_rec_count = 1;
12321 else if (0x1E == opcode1)
12324 /* Handle enhanced software breakpoint insn, BKPT. */
12325 /* CPSR is changed to be executed in ARM state, disabling normal
12326 interrupts, entering abort mode. */
12327 /* According to high vector configuration PC is set. */
12328 /* User hits breakpoint and type reverse, in that case, we need to go back with
12329 previous CPSR and Program Counter. */
12330 record_buf[0] = ARM_PS_REGNUM;
12331 record_buf[1] = ARM_LR_REGNUM;
12332 thumb_insn_r->reg_rec_count = 2;
12333 /* We need to save SPSR value, which is not yet done. */
12334 printf_unfiltered (_("Process record does not support instruction "
12335 "0x%0x at address %s.\n"),
12336 thumb_insn_r->arm_insn,
12337 paddress (thumb_insn_r->gdbarch,
12338 thumb_insn_r->this_addr));
12341 else if ((0 == opcode) || (1 == opcode))
12343 /* ADD(5), ADD(6). */
12344 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12345 record_buf[0] = reg_src1;
12346 thumb_insn_r->reg_rec_count = 1;
12348 else if (2 == opcode)
12350 /* ADD(7), SUB(4). */
12351 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12352 record_buf[0] = ARM_SP_REGNUM;
12353 thumb_insn_r->reg_rec_count = 1;
12356 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12357 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12363 /* Handling opcode 110 insns. */
12366 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12368 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12369 struct regcache *reg_cache = thumb_insn_r->regcache;
12371 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12372 uint32_t reg_src1 = 0;
12373 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12374 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12375 uint32_t record_buf[24], record_buf_mem[48];
12377 ULONGEST u_regval = 0;
12379 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12380 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12386 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12388 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12389 while (register_bits)
12391 if (register_bits & 0x00000001)
12392 record_buf[index++] = register_count;
12393 register_bits = register_bits >> 1;
12396 record_buf[index++] = reg_src1;
12397 thumb_insn_r->reg_rec_count = index;
12399 else if (0 == opcode2)
12401 /* It handles both STMIA. */
12402 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12404 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12405 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12406 while (register_bits)
12408 if (register_bits & 0x00000001)
12410 register_bits = register_bits >> 1;
12412 start_address = u_regval;
12413 thumb_insn_r->mem_rec_count = register_count;
12414 while (register_count)
12416 record_buf_mem[(register_count * 2) - 1] = start_address;
12417 record_buf_mem[(register_count * 2) - 2] = 4;
12418 start_address = start_address + 4;
12422 else if (0x1F == opcode1)
12424 /* Handle arm syscall insn. */
12425 if (tdep->arm_syscall_record != NULL)
12427 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12428 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12432 printf_unfiltered (_("no syscall record support\n"));
12437 /* B (1), conditional branch is automatically taken care in process_record,
12438 as PC is saved there. */
12440 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12441 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12447 /* Handling opcode 111 insns. */
12450 thumb_record_branch (insn_decode_record *thumb_insn_r)
12452 uint32_t record_buf[8];
12453 uint32_t bits_h = 0;
12455 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12457 if (2 == bits_h || 3 == bits_h)
12460 record_buf[0] = ARM_LR_REGNUM;
12461 thumb_insn_r->reg_rec_count = 1;
12463 else if (1 == bits_h)
12466 record_buf[0] = ARM_PS_REGNUM;
12467 record_buf[1] = ARM_LR_REGNUM;
12468 thumb_insn_r->reg_rec_count = 2;
12471 /* B(2) is automatically taken care in process_record, as PC is
12474 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12479 /* Handler for thumb2 load/store multiple instructions. */
12482 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12484 struct regcache *reg_cache = thumb2_insn_r->regcache;
12486 uint32_t reg_rn, op;
12487 uint32_t register_bits = 0, register_count = 0;
12488 uint32_t index = 0, start_address = 0;
12489 uint32_t record_buf[24], record_buf_mem[48];
12491 ULONGEST u_regval = 0;
12493 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12494 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12496 if (0 == op || 3 == op)
12498 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12500 /* Handle RFE instruction. */
12501 record_buf[0] = ARM_PS_REGNUM;
12502 thumb2_insn_r->reg_rec_count = 1;
12506 /* Handle SRS instruction after reading banked SP. */
12507 return arm_record_unsupported_insn (thumb2_insn_r);
12510 else if (1 == op || 2 == op)
12512 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12514 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12515 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12516 while (register_bits)
12518 if (register_bits & 0x00000001)
12519 record_buf[index++] = register_count;
12522 register_bits = register_bits >> 1;
12524 record_buf[index++] = reg_rn;
12525 record_buf[index++] = ARM_PS_REGNUM;
12526 thumb2_insn_r->reg_rec_count = index;
12530 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12531 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12532 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12533 while (register_bits)
12535 if (register_bits & 0x00000001)
12538 register_bits = register_bits >> 1;
12543 /* Start address calculation for LDMDB/LDMEA. */
12544 start_address = u_regval;
12548 /* Start address calculation for LDMDB/LDMEA. */
12549 start_address = u_regval - register_count * 4;
12552 thumb2_insn_r->mem_rec_count = register_count;
12553 while (register_count)
12555 record_buf_mem[register_count * 2 - 1] = start_address;
12556 record_buf_mem[register_count * 2 - 2] = 4;
12557 start_address = start_address + 4;
12560 record_buf[0] = reg_rn;
12561 record_buf[1] = ARM_PS_REGNUM;
12562 thumb2_insn_r->reg_rec_count = 2;
12566 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12568 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12570 return ARM_RECORD_SUCCESS;
12573 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12577 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12579 struct regcache *reg_cache = thumb2_insn_r->regcache;
12581 uint32_t reg_rd, reg_rn, offset_imm;
12582 uint32_t reg_dest1, reg_dest2;
12583 uint32_t address, offset_addr;
12584 uint32_t record_buf[8], record_buf_mem[8];
12585 uint32_t op1, op2, op3;
12588 ULONGEST u_regval[2];
12590 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12591 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12592 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12594 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12596 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12598 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12599 record_buf[0] = reg_dest1;
12600 record_buf[1] = ARM_PS_REGNUM;
12601 thumb2_insn_r->reg_rec_count = 2;
12604 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12606 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12607 record_buf[2] = reg_dest2;
12608 thumb2_insn_r->reg_rec_count = 3;
12613 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12614 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12616 if (0 == op1 && 0 == op2)
12618 /* Handle STREX. */
12619 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12620 address = u_regval[0] + (offset_imm * 4);
12621 record_buf_mem[0] = 4;
12622 record_buf_mem[1] = address;
12623 thumb2_insn_r->mem_rec_count = 1;
12624 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12625 record_buf[0] = reg_rd;
12626 thumb2_insn_r->reg_rec_count = 1;
12628 else if (1 == op1 && 0 == op2)
12630 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12631 record_buf[0] = reg_rd;
12632 thumb2_insn_r->reg_rec_count = 1;
12633 address = u_regval[0];
12634 record_buf_mem[1] = address;
12638 /* Handle STREXB. */
12639 record_buf_mem[0] = 1;
12640 thumb2_insn_r->mem_rec_count = 1;
12644 /* Handle STREXH. */
12645 record_buf_mem[0] = 2 ;
12646 thumb2_insn_r->mem_rec_count = 1;
12650 /* Handle STREXD. */
12651 address = u_regval[0];
12652 record_buf_mem[0] = 4;
12653 record_buf_mem[2] = 4;
12654 record_buf_mem[3] = address + 4;
12655 thumb2_insn_r->mem_rec_count = 2;
12660 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12662 if (bit (thumb2_insn_r->arm_insn, 24))
12664 if (bit (thumb2_insn_r->arm_insn, 23))
12665 offset_addr = u_regval[0] + (offset_imm * 4);
12667 offset_addr = u_regval[0] - (offset_imm * 4);
12669 address = offset_addr;
12672 address = u_regval[0];
12674 record_buf_mem[0] = 4;
12675 record_buf_mem[1] = address;
12676 record_buf_mem[2] = 4;
12677 record_buf_mem[3] = address + 4;
12678 thumb2_insn_r->mem_rec_count = 2;
12679 record_buf[0] = reg_rn;
12680 thumb2_insn_r->reg_rec_count = 1;
12684 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12686 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12688 return ARM_RECORD_SUCCESS;
12691 /* Handler for thumb2 data processing (shift register and modified immediate)
12695 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12697 uint32_t reg_rd, op;
12698 uint32_t record_buf[8];
12700 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12701 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12703 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12705 record_buf[0] = ARM_PS_REGNUM;
12706 thumb2_insn_r->reg_rec_count = 1;
12710 record_buf[0] = reg_rd;
12711 record_buf[1] = ARM_PS_REGNUM;
12712 thumb2_insn_r->reg_rec_count = 2;
12715 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12717 return ARM_RECORD_SUCCESS;
12720 /* Generic handler for thumb2 instructions which effect destination and PS
12724 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12727 uint32_t record_buf[8];
12729 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12731 record_buf[0] = reg_rd;
12732 record_buf[1] = ARM_PS_REGNUM;
12733 thumb2_insn_r->reg_rec_count = 2;
12735 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12737 return ARM_RECORD_SUCCESS;
12740 /* Handler for thumb2 branch and miscellaneous control instructions. */
12743 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12745 uint32_t op, op1, op2;
12746 uint32_t record_buf[8];
12748 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12749 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12750 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12752 /* Handle MSR insn. */
12753 if (!(op1 & 0x2) && 0x38 == op)
12757 /* CPSR is going to be changed. */
12758 record_buf[0] = ARM_PS_REGNUM;
12759 thumb2_insn_r->reg_rec_count = 1;
12763 arm_record_unsupported_insn(thumb2_insn_r);
12767 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12770 record_buf[0] = ARM_PS_REGNUM;
12771 record_buf[1] = ARM_LR_REGNUM;
12772 thumb2_insn_r->reg_rec_count = 2;
12775 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12777 return ARM_RECORD_SUCCESS;
12780 /* Handler for thumb2 store single data item instructions. */
12783 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12785 struct regcache *reg_cache = thumb2_insn_r->regcache;
12787 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12788 uint32_t address, offset_addr;
12789 uint32_t record_buf[8], record_buf_mem[8];
12792 ULONGEST u_regval[2];
12794 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12795 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12796 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12797 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12799 if (bit (thumb2_insn_r->arm_insn, 23))
12802 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12803 offset_addr = u_regval[0] + offset_imm;
12804 address = offset_addr;
12809 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12811 /* Handle STRB (register). */
12812 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12813 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12814 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12815 offset_addr = u_regval[1] << shift_imm;
12816 address = u_regval[0] + offset_addr;
12820 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12821 if (bit (thumb2_insn_r->arm_insn, 10))
12823 if (bit (thumb2_insn_r->arm_insn, 9))
12824 offset_addr = u_regval[0] + offset_imm;
12826 offset_addr = u_regval[0] - offset_imm;
12828 address = offset_addr;
12831 address = u_regval[0];
12837 /* Store byte instructions. */
12840 record_buf_mem[0] = 1;
12842 /* Store half word instructions. */
12845 record_buf_mem[0] = 2;
12847 /* Store word instructions. */
12850 record_buf_mem[0] = 4;
12854 gdb_assert_not_reached ("no decoding pattern found");
12858 record_buf_mem[1] = address;
12859 thumb2_insn_r->mem_rec_count = 1;
12860 record_buf[0] = reg_rn;
12861 thumb2_insn_r->reg_rec_count = 1;
12863 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12865 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12867 return ARM_RECORD_SUCCESS;
12870 /* Handler for thumb2 load memory hints instructions. */
12873 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12875 uint32_t record_buf[8];
12876 uint32_t reg_rt, reg_rn;
12878 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12879 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12881 if (ARM_PC_REGNUM != reg_rt)
12883 record_buf[0] = reg_rt;
12884 record_buf[1] = reg_rn;
12885 record_buf[2] = ARM_PS_REGNUM;
12886 thumb2_insn_r->reg_rec_count = 3;
12888 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12890 return ARM_RECORD_SUCCESS;
12893 return ARM_RECORD_FAILURE;
12896 /* Handler for thumb2 load word instructions. */
12899 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12901 uint32_t opcode1 = 0, opcode2 = 0;
12902 uint32_t record_buf[8];
12904 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12905 record_buf[1] = ARM_PS_REGNUM;
12906 thumb2_insn_r->reg_rec_count = 2;
12908 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12910 return ARM_RECORD_SUCCESS;
12913 /* Handler for thumb2 long multiply, long multiply accumulate, and
12914 divide instructions. */
12917 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12919 uint32_t opcode1 = 0, opcode2 = 0;
12920 uint32_t record_buf[8];
12921 uint32_t reg_src1 = 0;
12923 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12924 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12926 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12928 /* Handle SMULL, UMULL, SMULAL. */
12929 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12930 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12931 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12932 record_buf[2] = ARM_PS_REGNUM;
12933 thumb2_insn_r->reg_rec_count = 3;
12935 else if (1 == opcode1 || 3 == opcode2)
12937 /* Handle SDIV and UDIV. */
12938 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12939 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12940 record_buf[2] = ARM_PS_REGNUM;
12941 thumb2_insn_r->reg_rec_count = 3;
12944 return ARM_RECORD_FAILURE;
12946 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12948 return ARM_RECORD_SUCCESS;
12951 /* Decodes thumb2 instruction type and invokes its record handler. */
12953 static unsigned int
12954 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12956 uint32_t op, op1, op2;
12958 op = bit (thumb2_insn_r->arm_insn, 15);
12959 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12960 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12964 if (!(op2 & 0x64 ))
12966 /* Load/store multiple instruction. */
12967 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12969 else if (!((op2 & 0x64) ^ 0x04))
12971 /* Load/store (dual/exclusive) and table branch instruction. */
12972 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12974 else if (!((op2 & 0x20) ^ 0x20))
12976 /* Data-processing (shifted register). */
12977 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12979 else if (op2 & 0x40)
12981 /* Co-processor instructions. */
12982 arm_record_unsupported_insn (thumb2_insn_r);
12985 else if (op1 == 0x02)
12989 /* Branches and miscellaneous control instructions. */
12990 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12992 else if (op2 & 0x20)
12994 /* Data-processing (plain binary immediate) instruction. */
12995 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12999 /* Data-processing (modified immediate). */
13000 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13003 else if (op1 == 0x03)
13005 if (!(op2 & 0x71 ))
13007 /* Store single data item. */
13008 return thumb2_record_str_single_data (thumb2_insn_r);
13010 else if (!((op2 & 0x71) ^ 0x10))
13012 /* Advanced SIMD or structure load/store instructions. */
13013 return arm_record_unsupported_insn (thumb2_insn_r);
13015 else if (!((op2 & 0x67) ^ 0x01))
13017 /* Load byte, memory hints instruction. */
13018 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13020 else if (!((op2 & 0x67) ^ 0x03))
13022 /* Load halfword, memory hints instruction. */
13023 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13025 else if (!((op2 & 0x67) ^ 0x05))
13027 /* Load word instruction. */
13028 return thumb2_record_ld_word (thumb2_insn_r);
13030 else if (!((op2 & 0x70) ^ 0x20))
13032 /* Data-processing (register) instruction. */
13033 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13035 else if (!((op2 & 0x78) ^ 0x30))
13037 /* Multiply, multiply accumulate, abs diff instruction. */
13038 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13040 else if (!((op2 & 0x78) ^ 0x38))
13042 /* Long multiply, long multiply accumulate, and divide. */
13043 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13045 else if (op2 & 0x40)
13047 /* Co-processor instructions. */
13048 return arm_record_unsupported_insn (thumb2_insn_r);
13055 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13056 and positive val on fauilure. */
13059 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13061 gdb_byte buf[insn_size];
13063 memset (&buf[0], 0, insn_size);
13065 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13067 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13069 gdbarch_byte_order (insn_record->gdbarch));
13073 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13075 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13079 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13080 uint32_t insn_size)
13083 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13084 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13086 arm_record_data_proc_misc_ld_str, /* 000. */
13087 arm_record_data_proc_imm, /* 001. */
13088 arm_record_ld_st_imm_offset, /* 010. */
13089 arm_record_ld_st_reg_offset, /* 011. */
13090 arm_record_ld_st_multiple, /* 100. */
13091 arm_record_b_bl, /* 101. */
13092 arm_record_unsupported_insn, /* 110. */
13093 arm_record_coproc_data_proc /* 111. */
13096 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13097 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13099 thumb_record_shift_add_sub, /* 000. */
13100 thumb_record_add_sub_cmp_mov, /* 001. */
13101 thumb_record_ld_st_reg_offset, /* 010. */
13102 thumb_record_ld_st_imm_offset, /* 011. */
13103 thumb_record_ld_st_stack, /* 100. */
13104 thumb_record_misc, /* 101. */
13105 thumb_record_ldm_stm_swi, /* 110. */
13106 thumb_record_branch /* 111. */
13109 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13110 uint32_t insn_id = 0;
13112 if (extract_arm_insn (arm_record, insn_size))
13116 printf_unfiltered (_("Process record: error reading memory at "
13117 "addr %s len = %d.\n"),
13118 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13122 else if (ARM_RECORD == record_type)
13124 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13125 insn_id = bits (arm_record->arm_insn, 25, 27);
13126 ret = arm_record_extension_space (arm_record);
13127 /* If this insn has fallen into extension space
13128 then we need not decode it anymore. */
13129 if (ret != -1 && !INSN_RECORDED(arm_record))
13131 ret = arm_handle_insn[insn_id] (arm_record);
13134 else if (THUMB_RECORD == record_type)
13136 /* As thumb does not have condition codes, we set negative. */
13137 arm_record->cond = -1;
13138 insn_id = bits (arm_record->arm_insn, 13, 15);
13139 ret = thumb_handle_insn[insn_id] (arm_record);
13141 else if (THUMB2_RECORD == record_type)
13143 /* As thumb does not have condition codes, we set negative. */
13144 arm_record->cond = -1;
13146 /* Swap first half of 32bit thumb instruction with second half. */
13147 arm_record->arm_insn
13148 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13150 insn_id = thumb2_record_decode_insn_handler (arm_record);
13152 if (insn_id != ARM_RECORD_SUCCESS)
13154 arm_record_unsupported_insn (arm_record);
13160 /* Throw assertion. */
13161 gdb_assert_not_reached ("not a valid instruction, could not decode");
13168 /* Cleans up local record registers and memory allocations. */
13171 deallocate_reg_mem (insn_decode_record *record)
13173 xfree (record->arm_regs);
13174 xfree (record->arm_mems);
13178 /* Parse the current instruction and record the values of the registers and
13179 memory that will be changed in current instruction to record_arch_list".
13180 Return -1 if something is wrong. */
13183 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13184 CORE_ADDR insn_addr)
13187 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13188 uint32_t no_of_rec = 0;
13189 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13190 ULONGEST t_bit = 0, insn_id = 0;
13192 ULONGEST u_regval = 0;
13194 insn_decode_record arm_record;
13196 memset (&arm_record, 0, sizeof (insn_decode_record));
13197 arm_record.regcache = regcache;
13198 arm_record.this_addr = insn_addr;
13199 arm_record.gdbarch = gdbarch;
13202 if (record_debug > 1)
13204 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13206 paddress (gdbarch, arm_record.this_addr));
13209 if (extract_arm_insn (&arm_record, 2))
13213 printf_unfiltered (_("Process record: error reading memory at "
13214 "addr %s len = %d.\n"),
13215 paddress (arm_record.gdbarch,
13216 arm_record.this_addr), 2);
13221 /* Check the insn, whether it is thumb or arm one. */
13223 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13224 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13227 if (!(u_regval & t_bit))
13229 /* We are decoding arm insn. */
13230 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13234 insn_id = bits (arm_record.arm_insn, 11, 15);
13235 /* is it thumb2 insn? */
13236 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13238 ret = decode_insn (&arm_record, THUMB2_RECORD,
13239 THUMB2_INSN_SIZE_BYTES);
13243 /* We are decoding thumb insn. */
13244 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13250 /* Record registers. */
13251 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13252 if (arm_record.arm_regs)
13254 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13256 if (record_full_arch_list_add_reg
13257 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13261 /* Record memories. */
13262 if (arm_record.arm_mems)
13264 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13266 if (record_full_arch_list_add_mem
13267 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13268 arm_record.arm_mems[no_of_rec].len))
13273 if (record_full_arch_list_add_end ())
13278 deallocate_reg_mem (&arm_record);