1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "gdb/sim-arm.h"
53 #include "coff/internal.h"
56 #include "gdb_assert.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
240 static int thumb_insn_size (unsigned short inst1);
242 struct arm_prologue_cache
244 /* The stack pointer at the time this frame was created; i.e. the
245 caller's stack pointer when this function was called. It is used
246 to identify this frame. */
249 /* The frame base for this frame is just prev_sp - frame size.
250 FRAMESIZE is the distance from the frame pointer to the
251 initial stack pointer. */
255 /* The register used to hold the frame pointer for this frame. */
258 /* Saved register offsets. */
259 struct trad_frame_saved_reg *saved_regs;
262 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
263 CORE_ADDR prologue_start,
264 CORE_ADDR prologue_end,
265 struct arm_prologue_cache *cache);
267 /* Architecture version for displaced stepping. This effects the behaviour of
268 certain instructions, and really should not be hard-wired. */
270 #define DISPLACED_STEPPING_ARCH_VERSION 5
272 /* Addresses for calling Thumb functions have the bit 0 set.
273 Here are some macros to test, set, or clear bit 0 of addresses. */
274 #define IS_THUMB_ADDR(addr) ((addr) & 1)
275 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
276 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
278 /* Set to true if the 32-bit mode is in use. */
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
285 arm_psr_thumb_bit (struct gdbarch *gdbarch)
287 if (gdbarch_tdep (gdbarch)->is_m)
293 /* Determine if FRAME is executing in Thumb mode. */
296 arm_frame_is_thumb (struct frame_info *frame)
299 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
301 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
302 directly (from a signal frame or dummy frame) or by interpreting
303 the saved LR (from a prologue or DWARF frame). So consult it and
304 trust the unwinders. */
305 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
307 return (cpsr & t_bit) != 0;
310 /* Callback for VEC_lower_bound. */
313 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
314 const struct arm_mapping_symbol *rhs)
316 return lhs->value < rhs->value;
319 /* Search for the mapping symbol covering MEMADDR. If one is found,
320 return its type. Otherwise, return 0. If START is non-NULL,
321 set *START to the location of the mapping symbol. */
324 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
326 struct obj_section *sec;
328 /* If there are mapping symbols, consult them. */
329 sec = find_pc_section (memaddr);
332 struct arm_per_objfile *data;
333 VEC(arm_mapping_symbol_s) *map;
334 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
338 data = objfile_data (sec->objfile, arm_objfile_data_key);
341 map = data->section_maps[sec->the_bfd_section->index];
342 if (!VEC_empty (arm_mapping_symbol_s, map))
344 struct arm_mapping_symbol *map_sym;
346 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
347 arm_compare_mapping_symbols);
349 /* VEC_lower_bound finds the earliest ordered insertion
350 point. If the following symbol starts at this exact
351 address, we use that; otherwise, the preceding
352 mapping symbol covers this address. */
353 if (idx < VEC_length (arm_mapping_symbol_s, map))
355 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
356 if (map_sym->value == map_key.value)
359 *start = map_sym->value + obj_section_addr (sec);
360 return map_sym->type;
366 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
368 *start = map_sym->value + obj_section_addr (sec);
369 return map_sym->type;
378 /* Determine if the program counter specified in MEMADDR is in a Thumb
379 function. This function should be called for addresses unrelated to
380 any executing frame; otherwise, prefer arm_frame_is_thumb. */
383 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
385 struct bound_minimal_symbol sym;
387 struct displaced_step_closure* dsc
388 = get_displaced_step_closure_by_addr(memaddr);
390 /* If checking the mode of displaced instruction in copy area, the mode
391 should be determined by instruction on the original address. */
395 fprintf_unfiltered (gdb_stdlog,
396 "displaced: check mode of %.8lx instead of %.8lx\n",
397 (unsigned long) dsc->insn_addr,
398 (unsigned long) memaddr);
399 memaddr = dsc->insn_addr;
402 /* If bit 0 of the address is set, assume this is a Thumb address. */
403 if (IS_THUMB_ADDR (memaddr))
406 /* Respect internal mode override if active. */
407 if (arm_override_mode != -1)
408 return arm_override_mode;
410 /* If the user wants to override the symbol table, let him. */
411 if (strcmp (arm_force_mode_string, "arm") == 0)
413 if (strcmp (arm_force_mode_string, "thumb") == 0)
416 /* ARM v6-M and v7-M are always in Thumb mode. */
417 if (gdbarch_tdep (gdbarch)->is_m)
420 /* If there are mapping symbols, consult them. */
421 type = arm_find_mapping_symbol (memaddr, NULL);
425 /* Thumb functions have a "special" bit set in minimal symbols. */
426 sym = lookup_minimal_symbol_by_pc (memaddr);
428 return (MSYMBOL_IS_SPECIAL (sym.minsym));
430 /* If the user wants to override the fallback mode, let them. */
431 if (strcmp (arm_fallback_mode_string, "arm") == 0)
433 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
436 /* If we couldn't find any symbol, but we're talking to a running
437 target, then trust the current value of $cpsr. This lets
438 "display/i $pc" always show the correct mode (though if there is
439 a symbol table we will not reach here, so it still may not be
440 displayed in the mode it will be executed). */
441 if (target_has_registers)
442 return arm_frame_is_thumb (get_current_frame ());
444 /* Otherwise we're out of luck; we assume ARM. */
448 /* Remove useless bits from addresses in a running program. */
450 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
452 /* On M-profile devices, do not strip the low bit from EXC_RETURN
453 (the magic exception return address). */
454 if (gdbarch_tdep (gdbarch)->is_m
455 && (val & 0xfffffff0) == 0xfffffff0)
459 return UNMAKE_THUMB_ADDR (val);
461 return (val & 0x03fffffc);
464 /* Return 1 if PC is the start of a compiler helper function which
465 can be safely ignored during prologue skipping. IS_THUMB is true
466 if the function is known to be a Thumb function due to the way it
469 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
471 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
472 struct bound_minimal_symbol msym;
474 msym = lookup_minimal_symbol_by_pc (pc);
475 if (msym.minsym != NULL
476 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
477 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
479 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
481 /* The GNU linker's Thumb call stub to foo is named
483 if (strstr (name, "_from_thumb") != NULL)
486 /* On soft-float targets, __truncdfsf2 is called to convert promoted
487 arguments to their argument types in non-prototyped
489 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
491 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
494 /* Internal functions related to thread-local storage. */
495 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
497 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
502 /* If we run against a stripped glibc, we may be unable to identify
503 special functions by name. Check for one important case,
504 __aeabi_read_tp, by comparing the *code* against the default
505 implementation (this is hand-written ARM assembler in glibc). */
508 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
509 == 0xe3e00a0f /* mov r0, #0xffff0fff */
510 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
511 == 0xe240f01f) /* sub pc, r0, #31 */
518 /* Support routines for instruction parsing. */
519 #define submask(x) ((1L << ((x) + 1)) - 1)
520 #define bit(obj,st) (((obj) >> (st)) & 1)
521 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
522 #define sbits(obj,st,fn) \
523 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
524 #define BranchDest(addr,instr) \
525 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
527 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
528 the first 16-bit of instruction, and INSN2 is the second 16-bit of
530 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
531 ((bits ((insn1), 0, 3) << 12) \
532 | (bits ((insn1), 10, 10) << 11) \
533 | (bits ((insn2), 12, 14) << 8) \
534 | bits ((insn2), 0, 7))
536 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
537 the 32-bit instruction. */
538 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
539 ((bits ((insn), 16, 19) << 12) \
540 | bits ((insn), 0, 11))
542 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
545 thumb_expand_immediate (unsigned int imm)
547 unsigned int count = imm >> 7;
555 return (imm & 0xff) | ((imm & 0xff) << 16);
557 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
559 return (imm & 0xff) | ((imm & 0xff) << 8)
560 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
563 return (0x80 | (imm & 0x7f)) << (32 - count);
566 /* Return 1 if the 16-bit Thumb instruction INST might change
567 control flow, 0 otherwise. */
570 thumb_instruction_changes_pc (unsigned short inst)
572 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
575 if ((inst & 0xf000) == 0xd000) /* conditional branch */
578 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
581 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
584 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
587 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
593 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
594 might change control flow, 0 otherwise. */
597 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
599 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
601 /* Branches and miscellaneous control instructions. */
603 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
608 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
610 /* SUBS PC, LR, #imm8. */
613 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
615 /* Conditional branch. */
622 if ((inst1 & 0xfe50) == 0xe810)
624 /* Load multiple or RFE. */
626 if (bit (inst1, 7) && !bit (inst1, 8))
632 else if (!bit (inst1, 7) && bit (inst1, 8))
638 else if (bit (inst1, 7) && bit (inst1, 8))
643 else if (!bit (inst1, 7) && !bit (inst1, 8))
652 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
654 /* MOV PC or MOVS PC. */
658 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
661 if (bits (inst1, 0, 3) == 15)
667 if ((inst2 & 0x0fc0) == 0x0000)
673 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
679 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
688 /* Analyze a Thumb prologue, looking for a recognizable stack frame
689 and frame pointer. Scan until we encounter a store that could
690 clobber the stack frame unexpectedly, or an unknown instruction.
691 Return the last address which is definitely safe to skip for an
692 initial breakpoint. */
695 thumb_analyze_prologue (struct gdbarch *gdbarch,
696 CORE_ADDR start, CORE_ADDR limit,
697 struct arm_prologue_cache *cache)
699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
703 struct pv_area *stack;
704 struct cleanup *back_to;
706 CORE_ADDR unrecognized_pc = 0;
708 for (i = 0; i < 16; i++)
709 regs[i] = pv_register (i, 0);
710 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
711 back_to = make_cleanup_free_pv_area (stack);
713 while (start < limit)
717 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
724 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
737 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
740 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
743 offset = (insn & 0x7f) << 2; /* get scaled offset */
744 if (insn & 0x80) /* Check for SUB. */
745 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
748 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
751 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
754 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
756 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
758 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
760 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
762 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
764 && pv_is_constant (regs[bits (insn, 3, 5)]))
765 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
766 regs[bits (insn, 6, 8)]);
767 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs[bits (insn, 3, 6)]))
770 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
771 int rm = bits (insn, 3, 6);
772 regs[rd] = pv_add (regs[rd], regs[rm]);
774 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
776 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
777 int src_reg = (insn & 0x78) >> 3;
778 regs[dst_reg] = regs[src_reg];
780 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno = (insn >> 8) & 0x7;
788 offset = (insn & 0xff) << 2;
789 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
791 if (pv_area_store_would_trash (stack, addr))
794 pv_area_store (stack, addr, 4, regs[regno]);
796 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
798 int rd = bits (insn, 0, 2);
799 int rn = bits (insn, 3, 5);
802 offset = bits (insn, 6, 10) << 2;
803 addr = pv_add_constant (regs[rn], offset);
805 if (pv_area_store_would_trash (stack, addr))
808 pv_area_store (stack, addr, 4, regs[rd]);
810 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
813 /* Ignore stores of argument registers to the stack. */
815 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
820 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
823 /* Similarly ignore single loads from the stack. */
825 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
830 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
833 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
834 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant;
840 loc = start + 4 + bits (insn, 0, 7) * 4;
841 constant = read_memory_unsigned_integer (loc, 4, byte_order);
842 regs[bits (insn, 8, 10)] = pv_constant (constant);
844 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
846 unsigned short inst2;
848 inst2 = read_memory_unsigned_integer (start + 2, 2,
849 byte_order_for_code);
851 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
857 int j1, j2, imm1, imm2;
859 imm1 = sbits (insn, 0, 10);
860 imm2 = bits (inst2, 0, 10);
861 j1 = bit (inst2, 13);
862 j2 = bit (inst2, 11);
864 offset = ((imm1 << 12) + (imm2 << 1));
865 offset ^= ((!j2) << 22) | ((!j1) << 23);
867 nextpc = start + 4 + offset;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2, 12) == 0)
870 nextpc = nextpc & 0xfffffffc;
872 if (!skip_prologue_function (gdbarch, nextpc,
873 bit (inst2, 12) != 0))
877 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
881 pv_t addr = regs[bits (insn, 0, 3)];
884 if (pv_area_store_would_trash (stack, addr))
887 /* Calculate offsets of saved registers. */
888 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
889 if (inst2 & (1 << regno))
891 addr = pv_add_constant (addr, -4);
892 pv_area_store (stack, addr, 4, regs[regno]);
896 regs[bits (insn, 0, 3)] = addr;
899 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
903 int regno1 = bits (inst2, 12, 15);
904 int regno2 = bits (inst2, 8, 11);
905 pv_t addr = regs[bits (insn, 0, 3)];
907 offset = inst2 & 0xff;
909 addr = pv_add_constant (addr, offset);
911 addr = pv_add_constant (addr, -offset);
913 if (pv_area_store_would_trash (stack, addr))
916 pv_area_store (stack, addr, 4, regs[regno1]);
917 pv_area_store (stack, pv_add_constant (addr, 4),
921 regs[bits (insn, 0, 3)] = addr;
924 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2 & 0x0c00) == 0x0c00
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 int regno = bits (inst2, 12, 15);
929 pv_t addr = regs[bits (insn, 0, 3)];
931 offset = inst2 & 0xff;
933 addr = pv_add_constant (addr, offset);
935 addr = pv_add_constant (addr, -offset);
937 if (pv_area_store_would_trash (stack, addr))
940 pv_area_store (stack, addr, 4, regs[regno]);
943 regs[bits (insn, 0, 3)] = addr;
946 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 int regno = bits (inst2, 12, 15);
952 offset = inst2 & 0xfff;
953 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
955 if (pv_area_store_would_trash (stack, addr))
958 pv_area_store (stack, addr, 4, regs[regno]);
961 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
963 /* Ignore stores of argument registers to the stack. */
966 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2 & 0x0d00) == 0x0c00
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
972 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
974 && (inst2 & 0x8000) == 0x0000
975 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
980 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore dual loads from the stack. */
986 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2 & 0x0d00) == 0x0c00
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
992 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore single loads from the stack. */
997 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2 & 0x8000) == 0x0000)
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1004 regs[bits (inst2, 8, 11)]
1005 = pv_add_constant (regs[bits (insn, 0, 3)],
1006 thumb_expand_immediate (imm));
1009 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1020 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)],
1029 - (CORE_ADDR) thumb_expand_immediate (imm));
1032 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2 & 0x8000) == 0x0000)
1035 unsigned int imm = ((bits (insn, 10, 10) << 11)
1036 | (bits (inst2, 12, 14) << 8)
1037 | bits (inst2, 0, 7));
1039 regs[bits (inst2, 8, 11)]
1040 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1043 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1049 regs[bits (inst2, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm));
1053 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1058 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1061 else if (insn == 0xea5f /* mov.w Rd,Rm */
1062 && (inst2 & 0xf0f0) == 0)
1064 int dst_reg = (inst2 & 0x0f00) >> 8;
1065 int src_reg = inst2 & 0xf;
1066 regs[dst_reg] = regs[src_reg];
1069 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1071 /* Constant pool loads. */
1072 unsigned int constant;
1075 offset = bits (inst2, 0, 11);
1077 loc = start + 4 + offset;
1079 loc = start + 4 - offset;
1081 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1082 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1085 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1087 /* Constant pool loads. */
1088 unsigned int constant;
1091 offset = bits (inst2, 0, 7) << 2;
1093 loc = start + 4 + offset;
1095 loc = start + 4 - offset;
1097 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1098 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1100 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1101 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1104 else if (thumb2_instruction_changes_pc (insn, inst2))
1106 /* Don't scan past anything that might change control flow. */
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc = start;
1118 else if (thumb_instruction_changes_pc (insn))
1120 /* Don't scan past anything that might change control flow. */
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc = start;
1134 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1135 paddress (gdbarch, start));
1137 if (unrecognized_pc == 0)
1138 unrecognized_pc = start;
1142 do_cleanups (back_to);
1143 return unrecognized_pc;
1146 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1148 /* Frame pointer is fp. Frame size is constant. */
1149 cache->framereg = ARM_FP_REGNUM;
1150 cache->framesize = -regs[ARM_FP_REGNUM].k;
1152 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1154 /* Frame pointer is r7. Frame size is constant. */
1155 cache->framereg = THUMB_FP_REGNUM;
1156 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1160 /* Try the stack pointer... this is a bit desperate. */
1161 cache->framereg = ARM_SP_REGNUM;
1162 cache->framesize = -regs[ARM_SP_REGNUM].k;
1165 for (i = 0; i < 16; i++)
1166 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1167 cache->saved_regs[i].addr = offset;
1169 do_cleanups (back_to);
1170 return unrecognized_pc;
1174 /* Try to analyze the instructions starting from PC, which load symbol
1175 __stack_chk_guard. Return the address of instruction after loading this
1176 symbol, set the dest register number to *BASEREG, and set the size of
1177 instructions for loading symbol in OFFSET. Return 0 if instructions are
1181 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1182 unsigned int *destreg, int *offset)
1184 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1185 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1186 unsigned int low, high, address;
1191 unsigned short insn1
1192 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1194 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1196 *destreg = bits (insn1, 8, 10);
1198 address = bits (insn1, 0, 7);
1200 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1202 unsigned short insn2
1203 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1205 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1208 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1210 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1212 /* movt Rd, #const */
1213 if ((insn1 & 0xfbc0) == 0xf2c0)
1215 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1216 *destreg = bits (insn2, 8, 11);
1218 address = (high << 16 | low);
1225 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1227 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1229 address = bits (insn, 0, 11);
1230 *destreg = bits (insn, 12, 15);
1233 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1235 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1238 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1240 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1242 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1243 *destreg = bits (insn, 12, 15);
1245 address = (high << 16 | low);
1253 /* Try to skip a sequence of instructions used for stack protector. If PC
1254 points to the first instruction of this sequence, return the address of
1255 first instruction after this sequence, otherwise, return original PC.
1257 On arm, this sequence of instructions is composed of mainly three steps,
1258 Step 1: load symbol __stack_chk_guard,
1259 Step 2: load from address of __stack_chk_guard,
1260 Step 3: store it to somewhere else.
1262 Usually, instructions on step 2 and step 3 are the same on various ARM
1263 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1264 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1265 instructions in step 1 vary from different ARM architectures. On ARMv7,
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1276 .word __stack_chk_guard
1278 Since ldr/str is a very popular instruction, we can't use them as
1279 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1280 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1281 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1284 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1286 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1287 unsigned int basereg;
1288 struct bound_minimal_symbol stack_chk_guard;
1290 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1293 /* Try to parse the instructions in Step 1. */
1294 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1299 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1300 /* If name of symbol doesn't start with '__stack_chk_guard', this
1301 instruction sequence is not for stack protector. If symbol is
1302 removed, we conservatively think this sequence is for stack protector. */
1303 if (stack_chk_guard.minsym
1304 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1305 "__stack_chk_guard",
1306 strlen ("__stack_chk_guard")) != 0)
1311 unsigned int destreg;
1313 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1315 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1316 if ((insn & 0xf800) != 0x6800)
1318 if (bits (insn, 3, 5) != basereg)
1320 destreg = bits (insn, 0, 2);
1322 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1323 byte_order_for_code);
1324 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1325 if ((insn & 0xf800) != 0x6000)
1327 if (destreg != bits (insn, 0, 2))
1332 unsigned int destreg;
1334 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1336 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1337 if ((insn & 0x0e500000) != 0x04100000)
1339 if (bits (insn, 16, 19) != basereg)
1341 destreg = bits (insn, 12, 15);
1342 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1343 insn = read_memory_unsigned_integer (pc + offset + 4,
1344 4, byte_order_for_code);
1345 if ((insn & 0x0e500000) != 0x04000000)
1347 if (bits (insn, 12, 15) != destreg)
1350 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1353 return pc + offset + 4;
1355 return pc + offset + 8;
1358 /* Advance the PC across any function entry prologue instructions to
1359 reach some "real" code.
1361 The APCS (ARM Procedure Call Standard) defines the following
1365 [stmfd sp!, {a1,a2,a3,a4}]
1366 stmfd sp!, {...,fp,ip,lr,pc}
1367 [stfe f7, [sp, #-12]!]
1368 [stfe f6, [sp, #-12]!]
1369 [stfe f5, [sp, #-12]!]
1370 [stfe f4, [sp, #-12]!]
1371 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1374 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1376 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1379 CORE_ADDR func_addr, limit_pc;
1381 /* See if we can determine the end of the prologue via the symbol table.
1382 If so, then return either PC, or the PC after the prologue, whichever
1384 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1386 CORE_ADDR post_prologue_pc
1387 = skip_prologue_using_sal (gdbarch, func_addr);
1388 struct symtab *s = find_pc_symtab (func_addr);
1390 if (post_prologue_pc)
1392 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1395 /* GCC always emits a line note before the prologue and another
1396 one after, even if the two are at the same address or on the
1397 same line. Take advantage of this so that we do not need to
1398 know every instruction that might appear in the prologue. We
1399 will have producer information for most binaries; if it is
1400 missing (e.g. for -gstabs), assuming the GNU tools. */
1401 if (post_prologue_pc
1403 || s->producer == NULL
1404 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1405 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1406 return post_prologue_pc;
1408 if (post_prologue_pc != 0)
1410 CORE_ADDR analyzed_limit;
1412 /* For non-GCC compilers, make sure the entire line is an
1413 acceptable prologue; GDB will round this function's
1414 return value up to the end of the following line so we
1415 can not skip just part of a line (and we do not want to).
1417 RealView does not treat the prologue specially, but does
1418 associate prologue code with the opening brace; so this
1419 lets us skip the first line if we think it is the opening
1421 if (arm_pc_is_thumb (gdbarch, func_addr))
1422 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1423 post_prologue_pc, NULL);
1425 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1426 post_prologue_pc, NULL);
1428 if (analyzed_limit != post_prologue_pc)
1431 return post_prologue_pc;
1435 /* Can't determine prologue from the symbol table, need to examine
1438 /* Find an upper limit on the function prologue using the debug
1439 information. If the debug information could not be used to provide
1440 that bound, then use an arbitrary large number as the upper bound. */
1441 /* Like arm_scan_prologue, stop no later than pc + 64. */
1442 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1444 limit_pc = pc + 64; /* Magic. */
1447 /* Check if this is Thumb code. */
1448 if (arm_pc_is_thumb (gdbarch, pc))
1449 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1451 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1453 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1455 /* "mov ip, sp" is no longer a required part of the prologue. */
1456 if (inst == 0xe1a0c00d) /* mov ip, sp */
1459 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1462 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1465 /* Some prologues begin with "str lr, [sp, #-4]!". */
1466 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1469 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1472 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1475 /* Any insns after this point may float into the code, if it makes
1476 for better instruction scheduling, so we skip them only if we
1477 find them, but still consider the function to be frame-ful. */
1479 /* We may have either one sfmfd instruction here, or several stfe
1480 insns, depending on the version of floating point code we
1482 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1485 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1488 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1491 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1494 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1495 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1496 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1499 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1500 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1501 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1504 /* Un-recognized instruction; stop scanning. */
1508 return skip_pc; /* End of prologue. */
1512 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1513 This function decodes a Thumb function prologue to determine:
1514 1) the size of the stack frame
1515 2) which registers are saved on it
1516 3) the offsets of saved regs
1517 4) the offset from the stack pointer to the frame pointer
1519 A typical Thumb function prologue would create this stack frame
1520 (offsets relative to FP)
1521 old SP -> 24 stack parameters
1524 R7 -> 0 local variables (16 bytes)
1525 SP -> -12 additional stack space (12 bytes)
1526 The frame size would thus be 36 bytes, and the frame offset would be
1527 12 bytes. The frame register is R7.
1529 The comments for thumb_skip_prolog() describe the algorithm we use
1530 to detect the end of the prolog. */
1534 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1535 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1537 CORE_ADDR prologue_start;
1538 CORE_ADDR prologue_end;
1540 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1543 /* See comment in arm_scan_prologue for an explanation of
1545 if (prologue_end > prologue_start + 64)
1547 prologue_end = prologue_start + 64;
1551 /* We're in the boondocks: we have no idea where the start of the
1555 prologue_end = min (prologue_end, prev_pc);
1557 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1560 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1563 arm_instruction_changes_pc (uint32_t this_instr)
1565 if (bits (this_instr, 28, 31) == INST_NV)
1566 /* Unconditional instructions. */
1567 switch (bits (this_instr, 24, 27))
1571 /* Branch with Link and change to Thumb. */
1576 /* Coprocessor register transfer. */
1577 if (bits (this_instr, 12, 15) == 15)
1578 error (_("Invalid update to pc in instruction"));
1584 switch (bits (this_instr, 25, 27))
1587 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1589 /* Multiplies and extra load/stores. */
1590 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1591 /* Neither multiplies nor extension load/stores are allowed
1595 /* Otherwise, miscellaneous instructions. */
1597 /* BX <reg>, BXJ <reg>, BLX <reg> */
1598 if (bits (this_instr, 4, 27) == 0x12fff1
1599 || bits (this_instr, 4, 27) == 0x12fff2
1600 || bits (this_instr, 4, 27) == 0x12fff3)
1603 /* Other miscellaneous instructions are unpredictable if they
1607 /* Data processing instruction. Fall through. */
1610 if (bits (this_instr, 12, 15) == 15)
1617 /* Media instructions and architecturally undefined instructions. */
1618 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1622 if (bit (this_instr, 20) == 0)
1626 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1632 /* Load/store multiple. */
1633 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1639 /* Branch and branch with link. */
1644 /* Coprocessor transfers or SWIs can not affect PC. */
1648 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1652 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1653 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1654 fill it in. Return the first address not recognized as a prologue
1657 We recognize all the instructions typically found in ARM prologues,
1658 plus harmless instructions which can be skipped (either for analysis
1659 purposes, or a more restrictive set that can be skipped when finding
1660 the end of the prologue). */
1663 arm_analyze_prologue (struct gdbarch *gdbarch,
1664 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1665 struct arm_prologue_cache *cache)
1667 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1668 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1670 CORE_ADDR offset, current_pc;
1671 pv_t regs[ARM_FPS_REGNUM];
1672 struct pv_area *stack;
1673 struct cleanup *back_to;
1674 int framereg, framesize;
1675 CORE_ADDR unrecognized_pc = 0;
1677 /* Search the prologue looking for instructions that set up the
1678 frame pointer, adjust the stack pointer, and save registers.
1680 Be careful, however, and if it doesn't look like a prologue,
1681 don't try to scan it. If, for instance, a frameless function
1682 begins with stmfd sp!, then we will tell ourselves there is
1683 a frame, which will confuse stack traceback, as well as "finish"
1684 and other operations that rely on a knowledge of the stack
1687 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1688 regs[regno] = pv_register (regno, 0);
1689 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1690 back_to = make_cleanup_free_pv_area (stack);
1692 for (current_pc = prologue_start;
1693 current_pc < prologue_end;
1697 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1699 if (insn == 0xe1a0c00d) /* mov ip, sp */
1701 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1704 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1705 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1707 unsigned imm = insn & 0xff; /* immediate value */
1708 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1709 int rd = bits (insn, 12, 15);
1710 imm = (imm >> rot) | (imm << (32 - rot));
1711 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1714 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1715 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1717 unsigned imm = insn & 0xff; /* immediate value */
1718 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1719 int rd = bits (insn, 12, 15);
1720 imm = (imm >> rot) | (imm << (32 - rot));
1721 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1724 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1727 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1729 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1730 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1731 regs[bits (insn, 12, 15)]);
1734 else if ((insn & 0xffff0000) == 0xe92d0000)
1735 /* stmfd sp!, {..., fp, ip, lr, pc}
1737 stmfd sp!, {a1, a2, a3, a4} */
1739 int mask = insn & 0xffff;
1741 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1744 /* Calculate offsets of saved registers. */
1745 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1746 if (mask & (1 << regno))
1749 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1750 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1753 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1754 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1755 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1757 /* No need to add this to saved_regs -- it's just an arg reg. */
1760 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1761 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1762 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1764 /* No need to add this to saved_regs -- it's just an arg reg. */
1767 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1769 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1771 /* No need to add this to saved_regs -- it's just arg regs. */
1774 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1776 unsigned imm = insn & 0xff; /* immediate value */
1777 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1778 imm = (imm >> rot) | (imm << (32 - rot));
1779 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1781 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1783 unsigned imm = insn & 0xff; /* immediate value */
1784 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1785 imm = (imm >> rot) | (imm << (32 - rot));
1786 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1788 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1790 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1792 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1795 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1796 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1797 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1799 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1801 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1803 int n_saved_fp_regs;
1804 unsigned int fp_start_reg, fp_bound_reg;
1806 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1809 if ((insn & 0x800) == 0x800) /* N0 is set */
1811 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1812 n_saved_fp_regs = 3;
1814 n_saved_fp_regs = 1;
1818 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1819 n_saved_fp_regs = 2;
1821 n_saved_fp_regs = 4;
1824 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1825 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1826 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1828 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1829 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1830 regs[fp_start_reg++]);
1833 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1835 /* Allow some special function calls when skipping the
1836 prologue; GCC generates these before storing arguments to
1838 CORE_ADDR dest = BranchDest (current_pc, insn);
1840 if (skip_prologue_function (gdbarch, dest, 0))
1845 else if ((insn & 0xf0000000) != 0xe0000000)
1846 break; /* Condition not true, exit early. */
1847 else if (arm_instruction_changes_pc (insn))
1848 /* Don't scan past anything that might change control flow. */
1850 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1851 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1852 /* Ignore block loads from the stack, potentially copying
1853 parameters from memory. */
1855 else if ((insn & 0xfc500000) == 0xe4100000
1856 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1857 /* Similarly ignore single loads from the stack. */
1859 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1860 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1861 register instead of the stack. */
1865 /* The optimizer might shove anything into the prologue,
1866 so we just skip what we don't recognize. */
1867 unrecognized_pc = current_pc;
1872 if (unrecognized_pc == 0)
1873 unrecognized_pc = current_pc;
1875 /* The frame size is just the distance from the frame register
1876 to the original stack pointer. */
1877 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1879 /* Frame pointer is fp. */
1880 framereg = ARM_FP_REGNUM;
1881 framesize = -regs[ARM_FP_REGNUM].k;
1885 /* Try the stack pointer... this is a bit desperate. */
1886 framereg = ARM_SP_REGNUM;
1887 framesize = -regs[ARM_SP_REGNUM].k;
1892 cache->framereg = framereg;
1893 cache->framesize = framesize;
1895 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1896 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1897 cache->saved_regs[regno].addr = offset;
1901 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1902 paddress (gdbarch, unrecognized_pc));
1904 do_cleanups (back_to);
1905 return unrecognized_pc;
1909 arm_scan_prologue (struct frame_info *this_frame,
1910 struct arm_prologue_cache *cache)
1912 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1915 CORE_ADDR prologue_start, prologue_end, current_pc;
1916 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1917 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1918 pv_t regs[ARM_FPS_REGNUM];
1919 struct pv_area *stack;
1920 struct cleanup *back_to;
1923 /* Assume there is no frame until proven otherwise. */
1924 cache->framereg = ARM_SP_REGNUM;
1925 cache->framesize = 0;
1927 /* Check for Thumb prologue. */
1928 if (arm_frame_is_thumb (this_frame))
1930 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1934 /* Find the function prologue. If we can't find the function in
1935 the symbol table, peek in the stack frame to find the PC. */
1936 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1939 /* One way to find the end of the prologue (which works well
1940 for unoptimized code) is to do the following:
1942 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1945 prologue_end = prev_pc;
1946 else if (sal.end < prologue_end)
1947 prologue_end = sal.end;
1949 This mechanism is very accurate so long as the optimizer
1950 doesn't move any instructions from the function body into the
1951 prologue. If this happens, sal.end will be the last
1952 instruction in the first hunk of prologue code just before
1953 the first instruction that the scheduler has moved from
1954 the body to the prologue.
1956 In order to make sure that we scan all of the prologue
1957 instructions, we use a slightly less accurate mechanism which
1958 may scan more than necessary. To help compensate for this
1959 lack of accuracy, the prologue scanning loop below contains
1960 several clauses which'll cause the loop to terminate early if
1961 an implausible prologue instruction is encountered.
1967 is a suitable endpoint since it accounts for the largest
1968 possible prologue plus up to five instructions inserted by
1971 if (prologue_end > prologue_start + 64)
1973 prologue_end = prologue_start + 64; /* See above. */
1978 /* We have no symbol information. Our only option is to assume this
1979 function has a standard stack frame and the normal frame register.
1980 Then, we can find the value of our frame pointer on entrance to
1981 the callee (or at the present moment if this is the innermost frame).
1982 The value stored there should be the address of the stmfd + 8. */
1983 CORE_ADDR frame_loc;
1984 LONGEST return_value;
1986 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1987 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1991 prologue_start = gdbarch_addr_bits_remove
1992 (gdbarch, return_value) - 8;
1993 prologue_end = prologue_start + 64; /* See above. */
1997 if (prev_pc < prologue_end)
1998 prologue_end = prev_pc;
2000 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2003 static struct arm_prologue_cache *
2004 arm_make_prologue_cache (struct frame_info *this_frame)
2007 struct arm_prologue_cache *cache;
2008 CORE_ADDR unwound_fp;
2010 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2011 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2013 arm_scan_prologue (this_frame, cache);
2015 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2016 if (unwound_fp == 0)
2019 cache->prev_sp = unwound_fp + cache->framesize;
2021 /* Calculate actual addresses of saved registers using offsets
2022 determined by arm_scan_prologue. */
2023 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2024 if (trad_frame_addr_p (cache->saved_regs, reg))
2025 cache->saved_regs[reg].addr += cache->prev_sp;
2030 /* Our frame ID for a normal frame is the current function's starting PC
2031 and the caller's SP when we were called. */
2034 arm_prologue_this_id (struct frame_info *this_frame,
2036 struct frame_id *this_id)
2038 struct arm_prologue_cache *cache;
2042 if (*this_cache == NULL)
2043 *this_cache = arm_make_prologue_cache (this_frame);
2044 cache = *this_cache;
2046 /* This is meant to halt the backtrace at "_start". */
2047 pc = get_frame_pc (this_frame);
2048 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2051 /* If we've hit a wall, stop. */
2052 if (cache->prev_sp == 0)
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 func = get_frame_func (this_frame);
2062 id = frame_id_build (cache->prev_sp, func);
2066 static struct value *
2067 arm_prologue_prev_register (struct frame_info *this_frame,
2071 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2072 struct arm_prologue_cache *cache;
2074 if (*this_cache == NULL)
2075 *this_cache = arm_make_prologue_cache (this_frame);
2076 cache = *this_cache;
2078 /* If we are asked to unwind the PC, then we need to return the LR
2079 instead. The prologue may save PC, but it will point into this
2080 frame's prologue, not the next frame's resume location. Also
2081 strip the saved T bit. A valid LR may have the low bit set, but
2082 a valid PC never does. */
2083 if (prev_regnum == ARM_PC_REGNUM)
2087 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2088 return frame_unwind_got_constant (this_frame, prev_regnum,
2089 arm_addr_bits_remove (gdbarch, lr));
2092 /* SP is generally not saved to the stack, but this frame is
2093 identified by the next frame's stack pointer at the time of the call.
2094 The value was already reconstructed into PREV_SP. */
2095 if (prev_regnum == ARM_SP_REGNUM)
2096 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2098 /* The CPSR may have been changed by the call instruction and by the
2099 called function. The only bit we can reconstruct is the T bit,
2100 by checking the low bit of LR as of the call. This is a reliable
2101 indicator of Thumb-ness except for some ARM v4T pre-interworking
2102 Thumb code, which could get away with a clear low bit as long as
2103 the called function did not use bx. Guess that all other
2104 bits are unchanged; the condition flags are presumably lost,
2105 but the processor status is likely valid. */
2106 if (prev_regnum == ARM_PS_REGNUM)
2109 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2111 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2112 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2113 if (IS_THUMB_ADDR (lr))
2117 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2120 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2124 struct frame_unwind arm_prologue_unwind = {
2126 default_frame_unwind_stop_reason,
2127 arm_prologue_this_id,
2128 arm_prologue_prev_register,
2130 default_frame_sniffer
2133 /* Maintain a list of ARM exception table entries per objfile, similar to the
2134 list of mapping symbols. We only cache entries for standard ARM-defined
2135 personality routines; the cache will contain only the frame unwinding
2136 instructions associated with the entry (not the descriptors). */
2138 static const struct objfile_data *arm_exidx_data_key;
2140 struct arm_exidx_entry
2145 typedef struct arm_exidx_entry arm_exidx_entry_s;
2146 DEF_VEC_O(arm_exidx_entry_s);
2148 struct arm_exidx_data
2150 VEC(arm_exidx_entry_s) **section_maps;
2154 arm_exidx_data_free (struct objfile *objfile, void *arg)
2156 struct arm_exidx_data *data = arg;
2159 for (i = 0; i < objfile->obfd->section_count; i++)
2160 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2164 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2165 const struct arm_exidx_entry *rhs)
2167 return lhs->addr < rhs->addr;
2170 static struct obj_section *
2171 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2173 struct obj_section *osect;
2175 ALL_OBJFILE_OSECTIONS (objfile, osect)
2176 if (bfd_get_section_flags (objfile->obfd,
2177 osect->the_bfd_section) & SEC_ALLOC)
2179 bfd_vma start, size;
2180 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2181 size = bfd_get_section_size (osect->the_bfd_section);
2183 if (start <= vma && vma < start + size)
2190 /* Parse contents of exception table and exception index sections
2191 of OBJFILE, and fill in the exception table entry cache.
2193 For each entry that refers to a standard ARM-defined personality
2194 routine, extract the frame unwinding instructions (from either
2195 the index or the table section). The unwinding instructions
2197 - extracting them from the rest of the table data
2198 - converting to host endianness
2199 - appending the implicit 0xb0 ("Finish") code
2201 The extracted and normalized instructions are stored for later
2202 retrieval by the arm_find_exidx_entry routine. */
2205 arm_exidx_new_objfile (struct objfile *objfile)
2207 struct cleanup *cleanups;
2208 struct arm_exidx_data *data;
2209 asection *exidx, *extab;
2210 bfd_vma exidx_vma = 0, extab_vma = 0;
2211 bfd_size_type exidx_size = 0, extab_size = 0;
2212 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2215 /* If we've already touched this file, do nothing. */
2216 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2218 cleanups = make_cleanup (null_cleanup, NULL);
2220 /* Read contents of exception table and index. */
2221 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2224 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2225 exidx_size = bfd_get_section_size (exidx);
2226 exidx_data = xmalloc (exidx_size);
2227 make_cleanup (xfree, exidx_data);
2229 if (!bfd_get_section_contents (objfile->obfd, exidx,
2230 exidx_data, 0, exidx_size))
2232 do_cleanups (cleanups);
2237 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2240 extab_vma = bfd_section_vma (objfile->obfd, extab);
2241 extab_size = bfd_get_section_size (extab);
2242 extab_data = xmalloc (extab_size);
2243 make_cleanup (xfree, extab_data);
2245 if (!bfd_get_section_contents (objfile->obfd, extab,
2246 extab_data, 0, extab_size))
2248 do_cleanups (cleanups);
2253 /* Allocate exception table data structure. */
2254 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2255 set_objfile_data (objfile, arm_exidx_data_key, data);
2256 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2257 objfile->obfd->section_count,
2258 VEC(arm_exidx_entry_s) *);
2260 /* Fill in exception table. */
2261 for (i = 0; i < exidx_size / 8; i++)
2263 struct arm_exidx_entry new_exidx_entry;
2264 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2265 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2266 bfd_vma addr = 0, word = 0;
2267 int n_bytes = 0, n_words = 0;
2268 struct obj_section *sec;
2269 gdb_byte *entry = NULL;
2271 /* Extract address of start of function. */
2272 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2273 idx += exidx_vma + i * 8;
2275 /* Find section containing function and compute section offset. */
2276 sec = arm_obj_section_from_vma (objfile, idx);
2279 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2281 /* Determine address of exception table entry. */
2284 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2286 else if ((val & 0xff000000) == 0x80000000)
2288 /* Exception table entry embedded in .ARM.exidx
2289 -- must be short form. */
2293 else if (!(val & 0x80000000))
2295 /* Exception table entry in .ARM.extab. */
2296 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2297 addr += exidx_vma + i * 8 + 4;
2299 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2301 word = bfd_h_get_32 (objfile->obfd,
2302 extab_data + addr - extab_vma);
2305 if ((word & 0xff000000) == 0x80000000)
2310 else if ((word & 0xff000000) == 0x81000000
2311 || (word & 0xff000000) == 0x82000000)
2315 n_words = ((word >> 16) & 0xff);
2317 else if (!(word & 0x80000000))
2320 struct obj_section *pers_sec;
2321 int gnu_personality = 0;
2323 /* Custom personality routine. */
2324 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2325 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2327 /* Check whether we've got one of the variants of the
2328 GNU personality routines. */
2329 pers_sec = arm_obj_section_from_vma (objfile, pers);
2332 static const char *personality[] =
2334 "__gcc_personality_v0",
2335 "__gxx_personality_v0",
2336 "__gcj_personality_v0",
2337 "__gnu_objc_personality_v0",
2341 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2344 for (k = 0; personality[k]; k++)
2345 if (lookup_minimal_symbol_by_pc_name
2346 (pc, personality[k], objfile))
2348 gnu_personality = 1;
2353 /* If so, the next word contains a word count in the high
2354 byte, followed by the same unwind instructions as the
2355 pre-defined forms. */
2357 && addr + 4 <= extab_vma + extab_size)
2359 word = bfd_h_get_32 (objfile->obfd,
2360 extab_data + addr - extab_vma);
2363 n_words = ((word >> 24) & 0xff);
2369 /* Sanity check address. */
2371 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2372 n_words = n_bytes = 0;
2374 /* The unwind instructions reside in WORD (only the N_BYTES least
2375 significant bytes are valid), followed by N_WORDS words in the
2376 extab section starting at ADDR. */
2377 if (n_bytes || n_words)
2379 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2380 n_bytes + n_words * 4 + 1);
2383 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2387 word = bfd_h_get_32 (objfile->obfd,
2388 extab_data + addr - extab_vma);
2391 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2392 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2393 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2394 *p++ = (gdb_byte) (word & 0xff);
2397 /* Implied "Finish" to terminate the list. */
2401 /* Push entry onto vector. They are guaranteed to always
2402 appear in order of increasing addresses. */
2403 new_exidx_entry.addr = idx;
2404 new_exidx_entry.entry = entry;
2405 VEC_safe_push (arm_exidx_entry_s,
2406 data->section_maps[sec->the_bfd_section->index],
2410 do_cleanups (cleanups);
2413 /* Search for the exception table entry covering MEMADDR. If one is found,
2414 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2415 set *START to the start of the region covered by this entry. */
2418 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2420 struct obj_section *sec;
2422 sec = find_pc_section (memaddr);
2425 struct arm_exidx_data *data;
2426 VEC(arm_exidx_entry_s) *map;
2427 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2430 data = objfile_data (sec->objfile, arm_exidx_data_key);
2433 map = data->section_maps[sec->the_bfd_section->index];
2434 if (!VEC_empty (arm_exidx_entry_s, map))
2436 struct arm_exidx_entry *map_sym;
2438 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2439 arm_compare_exidx_entries);
2441 /* VEC_lower_bound finds the earliest ordered insertion
2442 point. If the following symbol starts at this exact
2443 address, we use that; otherwise, the preceding
2444 exception table entry covers this address. */
2445 if (idx < VEC_length (arm_exidx_entry_s, map))
2447 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2448 if (map_sym->addr == map_key.addr)
2451 *start = map_sym->addr + obj_section_addr (sec);
2452 return map_sym->entry;
2458 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2460 *start = map_sym->addr + obj_section_addr (sec);
2461 return map_sym->entry;
2470 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2471 instruction list from the ARM exception table entry ENTRY, allocate and
2472 return a prologue cache structure describing how to unwind this frame.
2474 Return NULL if the unwinding instruction list contains a "spare",
2475 "reserved" or "refuse to unwind" instruction as defined in section
2476 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2477 for the ARM Architecture" document. */
2479 static struct arm_prologue_cache *
2480 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2485 struct arm_prologue_cache *cache;
2486 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2487 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2493 /* Whenever we reload SP, we actually have to retrieve its
2494 actual value in the current frame. */
2497 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2499 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2500 vsp = get_frame_register_unsigned (this_frame, reg);
2504 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2505 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2511 /* Decode next unwind instruction. */
2514 if ((insn & 0xc0) == 0)
2516 int offset = insn & 0x3f;
2517 vsp += (offset << 2) + 4;
2519 else if ((insn & 0xc0) == 0x40)
2521 int offset = insn & 0x3f;
2522 vsp -= (offset << 2) + 4;
2524 else if ((insn & 0xf0) == 0x80)
2526 int mask = ((insn & 0xf) << 8) | *entry++;
2529 /* The special case of an all-zero mask identifies
2530 "Refuse to unwind". We return NULL to fall back
2531 to the prologue analyzer. */
2535 /* Pop registers r4..r15 under mask. */
2536 for (i = 0; i < 12; i++)
2537 if (mask & (1 << i))
2539 cache->saved_regs[4 + i].addr = vsp;
2543 /* Special-case popping SP -- we need to reload vsp. */
2544 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2547 else if ((insn & 0xf0) == 0x90)
2549 int reg = insn & 0xf;
2551 /* Reserved cases. */
2552 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2555 /* Set SP from another register and mark VSP for reload. */
2556 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2559 else if ((insn & 0xf0) == 0xa0)
2561 int count = insn & 0x7;
2562 int pop_lr = (insn & 0x8) != 0;
2565 /* Pop r4..r[4+count]. */
2566 for (i = 0; i <= count; i++)
2568 cache->saved_regs[4 + i].addr = vsp;
2572 /* If indicated by flag, pop LR as well. */
2575 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2579 else if (insn == 0xb0)
2581 /* We could only have updated PC by popping into it; if so, it
2582 will show up as address. Otherwise, copy LR into PC. */
2583 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2584 cache->saved_regs[ARM_PC_REGNUM]
2585 = cache->saved_regs[ARM_LR_REGNUM];
2590 else if (insn == 0xb1)
2592 int mask = *entry++;
2595 /* All-zero mask and mask >= 16 is "spare". */
2596 if (mask == 0 || mask >= 16)
2599 /* Pop r0..r3 under mask. */
2600 for (i = 0; i < 4; i++)
2601 if (mask & (1 << i))
2603 cache->saved_regs[i].addr = vsp;
2607 else if (insn == 0xb2)
2609 ULONGEST offset = 0;
2614 offset |= (*entry & 0x7f) << shift;
2617 while (*entry++ & 0x80);
2619 vsp += 0x204 + (offset << 2);
2621 else if (insn == 0xb3)
2623 int start = *entry >> 4;
2624 int count = (*entry++) & 0xf;
2627 /* Only registers D0..D15 are valid here. */
2628 if (start + count >= 16)
2631 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2632 for (i = 0; i <= count; i++)
2634 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2638 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2641 else if ((insn & 0xf8) == 0xb8)
2643 int count = insn & 0x7;
2646 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2647 for (i = 0; i <= count; i++)
2649 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2653 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2656 else if (insn == 0xc6)
2658 int start = *entry >> 4;
2659 int count = (*entry++) & 0xf;
2662 /* Only registers WR0..WR15 are valid. */
2663 if (start + count >= 16)
2666 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2667 for (i = 0; i <= count; i++)
2669 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2673 else if (insn == 0xc7)
2675 int mask = *entry++;
2678 /* All-zero mask and mask >= 16 is "spare". */
2679 if (mask == 0 || mask >= 16)
2682 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2683 for (i = 0; i < 4; i++)
2684 if (mask & (1 << i))
2686 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2690 else if ((insn & 0xf8) == 0xc0)
2692 int count = insn & 0x7;
2695 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2696 for (i = 0; i <= count; i++)
2698 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2702 else if (insn == 0xc8)
2704 int start = *entry >> 4;
2705 int count = (*entry++) & 0xf;
2708 /* Only registers D0..D31 are valid. */
2709 if (start + count >= 16)
2712 /* Pop VFP double-precision registers
2713 D[16+start]..D[16+start+count]. */
2714 for (i = 0; i <= count; i++)
2716 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2720 else if (insn == 0xc9)
2722 int start = *entry >> 4;
2723 int count = (*entry++) & 0xf;
2726 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2727 for (i = 0; i <= count; i++)
2729 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2733 else if ((insn & 0xf8) == 0xd0)
2735 int count = insn & 0x7;
2738 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2739 for (i = 0; i <= count; i++)
2741 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2747 /* Everything else is "spare". */
2752 /* If we restore SP from a register, assume this was the frame register.
2753 Otherwise just fall back to SP as frame register. */
2754 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2755 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2757 cache->framereg = ARM_SP_REGNUM;
2759 /* Determine offset to previous frame. */
2761 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2763 /* We already got the previous SP. */
2764 cache->prev_sp = vsp;
2769 /* Unwinding via ARM exception table entries. Note that the sniffer
2770 already computes a filled-in prologue cache, which is then used
2771 with the same arm_prologue_this_id and arm_prologue_prev_register
2772 routines also used for prologue-parsing based unwinding. */
2775 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2776 struct frame_info *this_frame,
2777 void **this_prologue_cache)
2779 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2780 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2781 CORE_ADDR addr_in_block, exidx_region, func_start;
2782 struct arm_prologue_cache *cache;
2785 /* See if we have an ARM exception table entry covering this address. */
2786 addr_in_block = get_frame_address_in_block (this_frame);
2787 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2791 /* The ARM exception table does not describe unwind information
2792 for arbitrary PC values, but is guaranteed to be correct only
2793 at call sites. We have to decide here whether we want to use
2794 ARM exception table information for this frame, or fall back
2795 to using prologue parsing. (Note that if we have DWARF CFI,
2796 this sniffer isn't even called -- CFI is always preferred.)
2798 Before we make this decision, however, we check whether we
2799 actually have *symbol* information for the current frame.
2800 If not, prologue parsing would not work anyway, so we might
2801 as well use the exception table and hope for the best. */
2802 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2806 /* If the next frame is "normal", we are at a call site in this
2807 frame, so exception information is guaranteed to be valid. */
2808 if (get_next_frame (this_frame)
2809 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2812 /* We also assume exception information is valid if we're currently
2813 blocked in a system call. The system library is supposed to
2814 ensure this, so that e.g. pthread cancellation works. */
2815 if (arm_frame_is_thumb (this_frame))
2819 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2820 byte_order_for_code, &insn)
2821 && (insn & 0xff00) == 0xdf00 /* svc */)
2828 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2829 byte_order_for_code, &insn)
2830 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2834 /* Bail out if we don't know that exception information is valid. */
2838 /* The ARM exception index does not mark the *end* of the region
2839 covered by the entry, and some functions will not have any entry.
2840 To correctly recognize the end of the covered region, the linker
2841 should have inserted dummy records with a CANTUNWIND marker.
2843 Unfortunately, current versions of GNU ld do not reliably do
2844 this, and thus we may have found an incorrect entry above.
2845 As a (temporary) sanity check, we only use the entry if it
2846 lies *within* the bounds of the function. Note that this check
2847 might reject perfectly valid entries that just happen to cover
2848 multiple functions; therefore this check ought to be removed
2849 once the linker is fixed. */
2850 if (func_start > exidx_region)
2854 /* Decode the list of unwinding instructions into a prologue cache.
2855 Note that this may fail due to e.g. a "refuse to unwind" code. */
2856 cache = arm_exidx_fill_cache (this_frame, entry);
2860 *this_prologue_cache = cache;
2864 struct frame_unwind arm_exidx_unwind = {
2866 default_frame_unwind_stop_reason,
2867 arm_prologue_this_id,
2868 arm_prologue_prev_register,
2870 arm_exidx_unwind_sniffer
2873 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2874 trampoline, return the target PC. Otherwise return 0.
2876 void call0a (char c, short s, int i, long l) {}
2880 (*pointer_to_call0a) (c, s, i, l);
2883 Instead of calling a stub library function _call_via_xx (xx is
2884 the register name), GCC may inline the trampoline in the object
2885 file as below (register r2 has the address of call0a).
2888 .type main, %function
2897 The trampoline 'bx r2' doesn't belong to main. */
2900 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2902 /* The heuristics of recognizing such trampoline is that FRAME is
2903 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2904 if (arm_frame_is_thumb (frame))
2908 if (target_read_memory (pc, buf, 2) == 0)
2910 struct gdbarch *gdbarch = get_frame_arch (frame);
2911 enum bfd_endian byte_order_for_code
2912 = gdbarch_byte_order_for_code (gdbarch);
2914 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2916 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2919 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2921 /* Clear the LSB so that gdb core sets step-resume
2922 breakpoint at the right address. */
2923 return UNMAKE_THUMB_ADDR (dest);
2931 static struct arm_prologue_cache *
2932 arm_make_stub_cache (struct frame_info *this_frame)
2934 struct arm_prologue_cache *cache;
2936 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2937 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2939 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2944 /* Our frame ID for a stub frame is the current SP and LR. */
2947 arm_stub_this_id (struct frame_info *this_frame,
2949 struct frame_id *this_id)
2951 struct arm_prologue_cache *cache;
2953 if (*this_cache == NULL)
2954 *this_cache = arm_make_stub_cache (this_frame);
2955 cache = *this_cache;
2957 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2961 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2962 struct frame_info *this_frame,
2963 void **this_prologue_cache)
2965 CORE_ADDR addr_in_block;
2967 CORE_ADDR pc, start_addr;
2970 addr_in_block = get_frame_address_in_block (this_frame);
2971 pc = get_frame_pc (this_frame);
2972 if (in_plt_section (addr_in_block)
2973 /* We also use the stub winder if the target memory is unreadable
2974 to avoid having the prologue unwinder trying to read it. */
2975 || target_read_memory (pc, dummy, 4) != 0)
2978 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2979 && arm_skip_bx_reg (this_frame, pc) != 0)
2985 struct frame_unwind arm_stub_unwind = {
2987 default_frame_unwind_stop_reason,
2989 arm_prologue_prev_register,
2991 arm_stub_unwind_sniffer
2994 /* Put here the code to store, into CACHE->saved_regs, the addresses
2995 of the saved registers of frame described by THIS_FRAME. CACHE is
2998 static struct arm_prologue_cache *
2999 arm_m_exception_cache (struct frame_info *this_frame)
3001 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3002 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3003 struct arm_prologue_cache *cache;
3004 CORE_ADDR unwound_sp;
3007 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3008 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3010 unwound_sp = get_frame_register_unsigned (this_frame,
3013 /* The hardware saves eight 32-bit words, comprising xPSR,
3014 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3015 "B1.5.6 Exception entry behavior" in
3016 "ARMv7-M Architecture Reference Manual". */
3017 cache->saved_regs[0].addr = unwound_sp;
3018 cache->saved_regs[1].addr = unwound_sp + 4;
3019 cache->saved_regs[2].addr = unwound_sp + 8;
3020 cache->saved_regs[3].addr = unwound_sp + 12;
3021 cache->saved_regs[12].addr = unwound_sp + 16;
3022 cache->saved_regs[14].addr = unwound_sp + 20;
3023 cache->saved_regs[15].addr = unwound_sp + 24;
3024 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3026 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3027 aligner between the top of the 32-byte stack frame and the
3028 previous context's stack pointer. */
3029 cache->prev_sp = unwound_sp + 32;
3030 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3031 && (xpsr & (1 << 9)) != 0)
3032 cache->prev_sp += 4;
3037 /* Implementation of function hook 'this_id' in
3038 'struct frame_uwnind'. */
3041 arm_m_exception_this_id (struct frame_info *this_frame,
3043 struct frame_id *this_id)
3045 struct arm_prologue_cache *cache;
3047 if (*this_cache == NULL)
3048 *this_cache = arm_m_exception_cache (this_frame);
3049 cache = *this_cache;
3051 /* Our frame ID for a stub frame is the current SP and LR. */
3052 *this_id = frame_id_build (cache->prev_sp,
3053 get_frame_pc (this_frame));
3056 /* Implementation of function hook 'prev_register' in
3057 'struct frame_uwnind'. */
3059 static struct value *
3060 arm_m_exception_prev_register (struct frame_info *this_frame,
3064 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3065 struct arm_prologue_cache *cache;
3067 if (*this_cache == NULL)
3068 *this_cache = arm_m_exception_cache (this_frame);
3069 cache = *this_cache;
3071 /* The value was already reconstructed into PREV_SP. */
3072 if (prev_regnum == ARM_SP_REGNUM)
3073 return frame_unwind_got_constant (this_frame, prev_regnum,
3076 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3080 /* Implementation of function hook 'sniffer' in
3081 'struct frame_uwnind'. */
3084 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3085 struct frame_info *this_frame,
3086 void **this_prologue_cache)
3088 CORE_ADDR this_pc = get_frame_pc (this_frame);
3090 /* No need to check is_m; this sniffer is only registered for
3091 M-profile architectures. */
3093 /* Exception frames return to one of these magic PCs. Other values
3094 are not defined as of v7-M. See details in "B1.5.8 Exception
3095 return behavior" in "ARMv7-M Architecture Reference Manual". */
3096 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3097 || this_pc == 0xfffffffd)
3103 /* Frame unwinder for M-profile exceptions. */
3105 struct frame_unwind arm_m_exception_unwind =
3108 default_frame_unwind_stop_reason,
3109 arm_m_exception_this_id,
3110 arm_m_exception_prev_register,
3112 arm_m_exception_unwind_sniffer
3116 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3118 struct arm_prologue_cache *cache;
3120 if (*this_cache == NULL)
3121 *this_cache = arm_make_prologue_cache (this_frame);
3122 cache = *this_cache;
3124 return cache->prev_sp - cache->framesize;
3127 struct frame_base arm_normal_base = {
3128 &arm_prologue_unwind,
3129 arm_normal_frame_base,
3130 arm_normal_frame_base,
3131 arm_normal_frame_base
3134 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3135 dummy frame. The frame ID's base needs to match the TOS value
3136 saved by save_dummy_frame_tos() and returned from
3137 arm_push_dummy_call, and the PC needs to match the dummy frame's
3140 static struct frame_id
3141 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3143 return frame_id_build (get_frame_register_unsigned (this_frame,
3145 get_frame_pc (this_frame));
3148 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3149 be used to construct the previous frame's ID, after looking up the
3150 containing function). */
3153 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3156 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3157 return arm_addr_bits_remove (gdbarch, pc);
3161 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3163 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3166 static struct value *
3167 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3170 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3172 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3177 /* The PC is normally copied from the return column, which
3178 describes saves of LR. However, that version may have an
3179 extra bit set to indicate Thumb state. The bit is not
3181 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3182 return frame_unwind_got_constant (this_frame, regnum,
3183 arm_addr_bits_remove (gdbarch, lr));
3186 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3187 cpsr = get_frame_register_unsigned (this_frame, regnum);
3188 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3189 if (IS_THUMB_ADDR (lr))
3193 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3196 internal_error (__FILE__, __LINE__,
3197 _("Unexpected register %d"), regnum);
3202 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3203 struct dwarf2_frame_state_reg *reg,
3204 struct frame_info *this_frame)
3210 reg->how = DWARF2_FRAME_REG_FN;
3211 reg->loc.fn = arm_dwarf2_prev_register;
3214 reg->how = DWARF2_FRAME_REG_CFA;
3219 /* Return true if we are in the function's epilogue, i.e. after the
3220 instruction that destroyed the function's stack frame. */
3223 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3225 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3226 unsigned int insn, insn2;
3227 int found_return = 0, found_stack_adjust = 0;
3228 CORE_ADDR func_start, func_end;
3232 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3235 /* The epilogue is a sequence of instructions along the following lines:
3237 - add stack frame size to SP or FP
3238 - [if frame pointer used] restore SP from FP
3239 - restore registers from SP [may include PC]
3240 - a return-type instruction [if PC wasn't already restored]
3242 In a first pass, we scan forward from the current PC and verify the
3243 instructions we find as compatible with this sequence, ending in a
3246 However, this is not sufficient to distinguish indirect function calls
3247 within a function from indirect tail calls in the epilogue in some cases.
3248 Therefore, if we didn't already find any SP-changing instruction during
3249 forward scan, we add a backward scanning heuristic to ensure we actually
3250 are in the epilogue. */
3253 while (scan_pc < func_end && !found_return)
3255 if (target_read_memory (scan_pc, buf, 2))
3259 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3261 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3263 else if (insn == 0x46f7) /* mov pc, lr */
3265 else if (insn == 0x46bd) /* mov sp, r7 */
3266 found_stack_adjust = 1;
3267 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3268 found_stack_adjust = 1;
3269 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3271 found_stack_adjust = 1;
3272 if (insn & 0x0100) /* <registers> include PC. */
3275 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3277 if (target_read_memory (scan_pc, buf, 2))
3281 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3283 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3285 found_stack_adjust = 1;
3286 if (insn2 & 0x8000) /* <registers> include PC. */
3289 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3290 && (insn2 & 0x0fff) == 0x0b04)
3292 found_stack_adjust = 1;
3293 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3296 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3297 && (insn2 & 0x0e00) == 0x0a00)
3298 found_stack_adjust = 1;
3309 /* Since any instruction in the epilogue sequence, with the possible
3310 exception of return itself, updates the stack pointer, we need to
3311 scan backwards for at most one instruction. Try either a 16-bit or
3312 a 32-bit instruction. This is just a heuristic, so we do not worry
3313 too much about false positives. */
3315 if (!found_stack_adjust)
3317 if (pc - 4 < func_start)
3319 if (target_read_memory (pc - 4, buf, 4))
3322 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3323 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3325 if (insn2 == 0x46bd) /* mov sp, r7 */
3326 found_stack_adjust = 1;
3327 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3328 found_stack_adjust = 1;
3329 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3330 found_stack_adjust = 1;
3331 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3332 found_stack_adjust = 1;
3333 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3334 && (insn2 & 0x0fff) == 0x0b04)
3335 found_stack_adjust = 1;
3336 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3337 && (insn2 & 0x0e00) == 0x0a00)
3338 found_stack_adjust = 1;
3341 return found_stack_adjust;
3344 /* Return true if we are in the function's epilogue, i.e. after the
3345 instruction that destroyed the function's stack frame. */
3348 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3350 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3352 int found_return, found_stack_adjust;
3353 CORE_ADDR func_start, func_end;
3355 if (arm_pc_is_thumb (gdbarch, pc))
3356 return thumb_in_function_epilogue_p (gdbarch, pc);
3358 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3361 /* We are in the epilogue if the previous instruction was a stack
3362 adjustment and the next instruction is a possible return (bx, mov
3363 pc, or pop). We could have to scan backwards to find the stack
3364 adjustment, or forwards to find the return, but this is a decent
3365 approximation. First scan forwards. */
3368 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3369 if (bits (insn, 28, 31) != INST_NV)
3371 if ((insn & 0x0ffffff0) == 0x012fff10)
3374 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3377 else if ((insn & 0x0fff0000) == 0x08bd0000
3378 && (insn & 0x0000c000) != 0)
3379 /* POP (LDMIA), including PC or LR. */
3386 /* Scan backwards. This is just a heuristic, so do not worry about
3387 false positives from mode changes. */
3389 if (pc < func_start + 4)
3392 found_stack_adjust = 0;
3393 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3394 if (bits (insn, 28, 31) != INST_NV)
3396 if ((insn & 0x0df0f000) == 0x0080d000)
3397 /* ADD SP (register or immediate). */
3398 found_stack_adjust = 1;
3399 else if ((insn & 0x0df0f000) == 0x0040d000)
3400 /* SUB SP (register or immediate). */
3401 found_stack_adjust = 1;
3402 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3404 found_stack_adjust = 1;
3405 else if ((insn & 0x0fff0000) == 0x08bd0000)
3407 found_stack_adjust = 1;
3408 else if ((insn & 0x0fff0000) == 0x049d0000)
3409 /* POP of a single register. */
3410 found_stack_adjust = 1;
3413 if (found_stack_adjust)
3420 /* When arguments must be pushed onto the stack, they go on in reverse
3421 order. The code below implements a FILO (stack) to do this. */
3426 struct stack_item *prev;
3430 static struct stack_item *
3431 push_stack_item (struct stack_item *prev, const void *contents, int len)
3433 struct stack_item *si;
3434 si = xmalloc (sizeof (struct stack_item));
3435 si->data = xmalloc (len);
3438 memcpy (si->data, contents, len);
3442 static struct stack_item *
3443 pop_stack_item (struct stack_item *si)
3445 struct stack_item *dead = si;
3453 /* Return the alignment (in bytes) of the given type. */
3456 arm_type_align (struct type *t)
3462 t = check_typedef (t);
3463 switch (TYPE_CODE (t))
3466 /* Should never happen. */
3467 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3471 case TYPE_CODE_ENUM:
3475 case TYPE_CODE_RANGE:
3477 case TYPE_CODE_CHAR:
3478 case TYPE_CODE_BOOL:
3479 return TYPE_LENGTH (t);
3481 case TYPE_CODE_ARRAY:
3482 case TYPE_CODE_COMPLEX:
3483 /* TODO: What about vector types? */
3484 return arm_type_align (TYPE_TARGET_TYPE (t));
3486 case TYPE_CODE_STRUCT:
3487 case TYPE_CODE_UNION:
3489 for (n = 0; n < TYPE_NFIELDS (t); n++)
3491 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3499 /* Possible base types for a candidate for passing and returning in
3502 enum arm_vfp_cprc_base_type
3511 /* The length of one element of base type B. */
3514 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3518 case VFP_CPRC_SINGLE:
3520 case VFP_CPRC_DOUBLE:
3522 case VFP_CPRC_VEC64:
3524 case VFP_CPRC_VEC128:
3527 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3532 /* The character ('s', 'd' or 'q') for the type of VFP register used
3533 for passing base type B. */
3536 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3540 case VFP_CPRC_SINGLE:
3542 case VFP_CPRC_DOUBLE:
3544 case VFP_CPRC_VEC64:
3546 case VFP_CPRC_VEC128:
3549 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3554 /* Determine whether T may be part of a candidate for passing and
3555 returning in VFP registers, ignoring the limit on the total number
3556 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3557 classification of the first valid component found; if it is not
3558 VFP_CPRC_UNKNOWN, all components must have the same classification
3559 as *BASE_TYPE. If it is found that T contains a type not permitted
3560 for passing and returning in VFP registers, a type differently
3561 classified from *BASE_TYPE, or two types differently classified
3562 from each other, return -1, otherwise return the total number of
3563 base-type elements found (possibly 0 in an empty structure or
3564 array). Vectors and complex types are not currently supported,
3565 matching the generic AAPCS support. */
3568 arm_vfp_cprc_sub_candidate (struct type *t,
3569 enum arm_vfp_cprc_base_type *base_type)
3571 t = check_typedef (t);
3572 switch (TYPE_CODE (t))
3575 switch (TYPE_LENGTH (t))
3578 if (*base_type == VFP_CPRC_UNKNOWN)
3579 *base_type = VFP_CPRC_SINGLE;
3580 else if (*base_type != VFP_CPRC_SINGLE)
3585 if (*base_type == VFP_CPRC_UNKNOWN)
3586 *base_type = VFP_CPRC_DOUBLE;
3587 else if (*base_type != VFP_CPRC_DOUBLE)
3596 case TYPE_CODE_ARRAY:
3600 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3603 if (TYPE_LENGTH (t) == 0)
3605 gdb_assert (count == 0);
3608 else if (count == 0)
3610 unitlen = arm_vfp_cprc_unit_length (*base_type);
3611 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3612 return TYPE_LENGTH (t) / unitlen;
3616 case TYPE_CODE_STRUCT:
3621 for (i = 0; i < TYPE_NFIELDS (t); i++)
3623 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3625 if (sub_count == -1)
3629 if (TYPE_LENGTH (t) == 0)
3631 gdb_assert (count == 0);
3634 else if (count == 0)
3636 unitlen = arm_vfp_cprc_unit_length (*base_type);
3637 if (TYPE_LENGTH (t) != unitlen * count)
3642 case TYPE_CODE_UNION:
3647 for (i = 0; i < TYPE_NFIELDS (t); i++)
3649 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3651 if (sub_count == -1)
3653 count = (count > sub_count ? count : sub_count);
3655 if (TYPE_LENGTH (t) == 0)
3657 gdb_assert (count == 0);
3660 else if (count == 0)
3662 unitlen = arm_vfp_cprc_unit_length (*base_type);
3663 if (TYPE_LENGTH (t) != unitlen * count)
3675 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3676 if passed to or returned from a non-variadic function with the VFP
3677 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3678 *BASE_TYPE to the base type for T and *COUNT to the number of
3679 elements of that base type before returning. */
3682 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3685 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3686 int c = arm_vfp_cprc_sub_candidate (t, &b);
3687 if (c <= 0 || c > 4)
3694 /* Return 1 if the VFP ABI should be used for passing arguments to and
3695 returning values from a function of type FUNC_TYPE, 0
3699 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3701 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3702 /* Variadic functions always use the base ABI. Assume that functions
3703 without debug info are not variadic. */
3704 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3706 /* The VFP ABI is only supported as a variant of AAPCS. */
3707 if (tdep->arm_abi != ARM_ABI_AAPCS)
3709 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3712 /* We currently only support passing parameters in integer registers, which
3713 conforms with GCC's default model, and VFP argument passing following
3714 the VFP variant of AAPCS. Several other variants exist and
3715 we should probably support some of them based on the selected ABI. */
3718 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3719 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3720 struct value **args, CORE_ADDR sp, int struct_return,
3721 CORE_ADDR struct_addr)
3723 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3727 struct stack_item *si = NULL;
3730 unsigned vfp_regs_free = (1 << 16) - 1;
3732 /* Determine the type of this function and whether the VFP ABI
3734 ftype = check_typedef (value_type (function));
3735 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3736 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3737 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3739 /* Set the return address. For the ARM, the return breakpoint is
3740 always at BP_ADDR. */
3741 if (arm_pc_is_thumb (gdbarch, bp_addr))
3743 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3745 /* Walk through the list of args and determine how large a temporary
3746 stack is required. Need to take care here as structs may be
3747 passed on the stack, and we have to push them. */
3750 argreg = ARM_A1_REGNUM;
3753 /* The struct_return pointer occupies the first parameter
3754 passing register. */
3758 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3759 gdbarch_register_name (gdbarch, argreg),
3760 paddress (gdbarch, struct_addr));
3761 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3765 for (argnum = 0; argnum < nargs; argnum++)
3768 struct type *arg_type;
3769 struct type *target_type;
3770 enum type_code typecode;
3771 const bfd_byte *val;
3773 enum arm_vfp_cprc_base_type vfp_base_type;
3775 int may_use_core_reg = 1;
3777 arg_type = check_typedef (value_type (args[argnum]));
3778 len = TYPE_LENGTH (arg_type);
3779 target_type = TYPE_TARGET_TYPE (arg_type);
3780 typecode = TYPE_CODE (arg_type);
3781 val = value_contents (args[argnum]);
3783 align = arm_type_align (arg_type);
3784 /* Round alignment up to a whole number of words. */
3785 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3786 /* Different ABIs have different maximum alignments. */
3787 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3789 /* The APCS ABI only requires word alignment. */
3790 align = INT_REGISTER_SIZE;
3794 /* The AAPCS requires at most doubleword alignment. */
3795 if (align > INT_REGISTER_SIZE * 2)
3796 align = INT_REGISTER_SIZE * 2;
3800 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3808 /* Because this is a CPRC it cannot go in a core register or
3809 cause a core register to be skipped for alignment.
3810 Either it goes in VFP registers and the rest of this loop
3811 iteration is skipped for this argument, or it goes on the
3812 stack (and the stack alignment code is correct for this
3814 may_use_core_reg = 0;
3816 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3817 shift = unit_length / 4;
3818 mask = (1 << (shift * vfp_base_count)) - 1;
3819 for (regno = 0; regno < 16; regno += shift)
3820 if (((vfp_regs_free >> regno) & mask) == mask)
3829 vfp_regs_free &= ~(mask << regno);
3830 reg_scaled = regno / shift;
3831 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3832 for (i = 0; i < vfp_base_count; i++)
3836 if (reg_char == 'q')
3837 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3838 val + i * unit_length);
3841 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3842 reg_char, reg_scaled + i);
3843 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3845 regcache_cooked_write (regcache, regnum,
3846 val + i * unit_length);
3853 /* This CPRC could not go in VFP registers, so all VFP
3854 registers are now marked as used. */
3859 /* Push stack padding for dowubleword alignment. */
3860 if (nstack & (align - 1))
3862 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3863 nstack += INT_REGISTER_SIZE;
3866 /* Doubleword aligned quantities must go in even register pairs. */
3867 if (may_use_core_reg
3868 && argreg <= ARM_LAST_ARG_REGNUM
3869 && align > INT_REGISTER_SIZE
3873 /* If the argument is a pointer to a function, and it is a
3874 Thumb function, create a LOCAL copy of the value and set
3875 the THUMB bit in it. */
3876 if (TYPE_CODE_PTR == typecode
3877 && target_type != NULL
3878 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3880 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3881 if (arm_pc_is_thumb (gdbarch, regval))
3883 bfd_byte *copy = alloca (len);
3884 store_unsigned_integer (copy, len, byte_order,
3885 MAKE_THUMB_ADDR (regval));
3890 /* Copy the argument to general registers or the stack in
3891 register-sized pieces. Large arguments are split between
3892 registers and stack. */
3895 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3897 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3899 /* The argument is being passed in a general purpose
3902 = extract_unsigned_integer (val, partial_len, byte_order);
3903 if (byte_order == BFD_ENDIAN_BIG)
3904 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3906 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3908 gdbarch_register_name
3910 phex (regval, INT_REGISTER_SIZE));
3911 regcache_cooked_write_unsigned (regcache, argreg, regval);
3916 /* Push the arguments onto the stack. */
3918 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3920 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3921 nstack += INT_REGISTER_SIZE;
3928 /* If we have an odd number of words to push, then decrement the stack
3929 by one word now, so first stack argument will be dword aligned. */
3936 write_memory (sp, si->data, si->len);
3937 si = pop_stack_item (si);
3940 /* Finally, update teh SP register. */
3941 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3947 /* Always align the frame to an 8-byte boundary. This is required on
3948 some platforms and harmless on the rest. */
3951 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3953 /* Align the stack to eight bytes. */
3954 return sp & ~ (CORE_ADDR) 7;
3958 print_fpu_flags (struct ui_file *file, int flags)
3960 if (flags & (1 << 0))
3961 fputs_filtered ("IVO ", file);
3962 if (flags & (1 << 1))
3963 fputs_filtered ("DVZ ", file);
3964 if (flags & (1 << 2))
3965 fputs_filtered ("OFL ", file);
3966 if (flags & (1 << 3))
3967 fputs_filtered ("UFL ", file);
3968 if (flags & (1 << 4))
3969 fputs_filtered ("INX ", file);
3970 fputc_filtered ('\n', file);
3973 /* Print interesting information about the floating point processor
3974 (if present) or emulator. */
3976 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3977 struct frame_info *frame, const char *args)
3979 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3982 type = (status >> 24) & 127;
3983 if (status & (1 << 31))
3984 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3986 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3987 /* i18n: [floating point unit] mask */
3988 fputs_filtered (_("mask: "), file);
3989 print_fpu_flags (file, status >> 16);
3990 /* i18n: [floating point unit] flags */
3991 fputs_filtered (_("flags: "), file);
3992 print_fpu_flags (file, status);
3995 /* Construct the ARM extended floating point type. */
3996 static struct type *
3997 arm_ext_type (struct gdbarch *gdbarch)
3999 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4001 if (!tdep->arm_ext_type)
4003 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4004 floatformats_arm_ext);
4006 return tdep->arm_ext_type;
4009 static struct type *
4010 arm_neon_double_type (struct gdbarch *gdbarch)
4012 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4014 if (tdep->neon_double_type == NULL)
4016 struct type *t, *elem;
4018 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4020 elem = builtin_type (gdbarch)->builtin_uint8;
4021 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4022 elem = builtin_type (gdbarch)->builtin_uint16;
4023 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4024 elem = builtin_type (gdbarch)->builtin_uint32;
4025 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4026 elem = builtin_type (gdbarch)->builtin_uint64;
4027 append_composite_type_field (t, "u64", elem);
4028 elem = builtin_type (gdbarch)->builtin_float;
4029 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4030 elem = builtin_type (gdbarch)->builtin_double;
4031 append_composite_type_field (t, "f64", elem);
4033 TYPE_VECTOR (t) = 1;
4034 TYPE_NAME (t) = "neon_d";
4035 tdep->neon_double_type = t;
4038 return tdep->neon_double_type;
4041 /* FIXME: The vector types are not correctly ordered on big-endian
4042 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4043 bits of d0 - regardless of what unit size is being held in d0. So
4044 the offset of the first uint8 in d0 is 7, but the offset of the
4045 first float is 4. This code works as-is for little-endian
4048 static struct type *
4049 arm_neon_quad_type (struct gdbarch *gdbarch)
4051 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4053 if (tdep->neon_quad_type == NULL)
4055 struct type *t, *elem;
4057 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4059 elem = builtin_type (gdbarch)->builtin_uint8;
4060 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4061 elem = builtin_type (gdbarch)->builtin_uint16;
4062 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4063 elem = builtin_type (gdbarch)->builtin_uint32;
4064 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4065 elem = builtin_type (gdbarch)->builtin_uint64;
4066 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4067 elem = builtin_type (gdbarch)->builtin_float;
4068 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4069 elem = builtin_type (gdbarch)->builtin_double;
4070 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4072 TYPE_VECTOR (t) = 1;
4073 TYPE_NAME (t) = "neon_q";
4074 tdep->neon_quad_type = t;
4077 return tdep->neon_quad_type;
4080 /* Return the GDB type object for the "standard" data type of data in
4083 static struct type *
4084 arm_register_type (struct gdbarch *gdbarch, int regnum)
4086 int num_regs = gdbarch_num_regs (gdbarch);
4088 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4089 && regnum >= num_regs && regnum < num_regs + 32)
4090 return builtin_type (gdbarch)->builtin_float;
4092 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4093 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4094 return arm_neon_quad_type (gdbarch);
4096 /* If the target description has register information, we are only
4097 in this function so that we can override the types of
4098 double-precision registers for NEON. */
4099 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4101 struct type *t = tdesc_register_type (gdbarch, regnum);
4103 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4104 && TYPE_CODE (t) == TYPE_CODE_FLT
4105 && gdbarch_tdep (gdbarch)->have_neon)
4106 return arm_neon_double_type (gdbarch);
4111 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4113 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4114 return builtin_type (gdbarch)->builtin_void;
4116 return arm_ext_type (gdbarch);
4118 else if (regnum == ARM_SP_REGNUM)
4119 return builtin_type (gdbarch)->builtin_data_ptr;
4120 else if (regnum == ARM_PC_REGNUM)
4121 return builtin_type (gdbarch)->builtin_func_ptr;
4122 else if (regnum >= ARRAY_SIZE (arm_register_names))
4123 /* These registers are only supported on targets which supply
4124 an XML description. */
4125 return builtin_type (gdbarch)->builtin_int0;
4127 return builtin_type (gdbarch)->builtin_uint32;
4130 /* Map a DWARF register REGNUM onto the appropriate GDB register
4134 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4136 /* Core integer regs. */
4137 if (reg >= 0 && reg <= 15)
4140 /* Legacy FPA encoding. These were once used in a way which
4141 overlapped with VFP register numbering, so their use is
4142 discouraged, but GDB doesn't support the ARM toolchain
4143 which used them for VFP. */
4144 if (reg >= 16 && reg <= 23)
4145 return ARM_F0_REGNUM + reg - 16;
4147 /* New assignments for the FPA registers. */
4148 if (reg >= 96 && reg <= 103)
4149 return ARM_F0_REGNUM + reg - 96;
4151 /* WMMX register assignments. */
4152 if (reg >= 104 && reg <= 111)
4153 return ARM_WCGR0_REGNUM + reg - 104;
4155 if (reg >= 112 && reg <= 127)
4156 return ARM_WR0_REGNUM + reg - 112;
4158 if (reg >= 192 && reg <= 199)
4159 return ARM_WC0_REGNUM + reg - 192;
4161 /* VFP v2 registers. A double precision value is actually
4162 in d1 rather than s2, but the ABI only defines numbering
4163 for the single precision registers. This will "just work"
4164 in GDB for little endian targets (we'll read eight bytes,
4165 starting in s0 and then progressing to s1), but will be
4166 reversed on big endian targets with VFP. This won't
4167 be a problem for the new Neon quad registers; you're supposed
4168 to use DW_OP_piece for those. */
4169 if (reg >= 64 && reg <= 95)
4173 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4174 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4178 /* VFP v3 / Neon registers. This range is also used for VFP v2
4179 registers, except that it now describes d0 instead of s0. */
4180 if (reg >= 256 && reg <= 287)
4184 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4185 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4192 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4194 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4197 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4199 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4200 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4202 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4203 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4205 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4206 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4208 if (reg < NUM_GREGS)
4209 return SIM_ARM_R0_REGNUM + reg;
4212 if (reg < NUM_FREGS)
4213 return SIM_ARM_FP0_REGNUM + reg;
4216 if (reg < NUM_SREGS)
4217 return SIM_ARM_FPS_REGNUM + reg;
4220 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4223 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4224 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4225 It is thought that this is is the floating-point register format on
4226 little-endian systems. */
4229 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4230 void *dbl, int endianess)
4234 if (endianess == BFD_ENDIAN_BIG)
4235 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4237 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4239 floatformat_from_doublest (fmt, &d, dbl);
4243 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4248 floatformat_to_doublest (fmt, ptr, &d);
4249 if (endianess == BFD_ENDIAN_BIG)
4250 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4252 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4257 condition_true (unsigned long cond, unsigned long status_reg)
4259 if (cond == INST_AL || cond == INST_NV)
4265 return ((status_reg & FLAG_Z) != 0);
4267 return ((status_reg & FLAG_Z) == 0);
4269 return ((status_reg & FLAG_C) != 0);
4271 return ((status_reg & FLAG_C) == 0);
4273 return ((status_reg & FLAG_N) != 0);
4275 return ((status_reg & FLAG_N) == 0);
4277 return ((status_reg & FLAG_V) != 0);
4279 return ((status_reg & FLAG_V) == 0);
4281 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4283 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4285 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4287 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4289 return (((status_reg & FLAG_Z) == 0)
4290 && (((status_reg & FLAG_N) == 0)
4291 == ((status_reg & FLAG_V) == 0)));
4293 return (((status_reg & FLAG_Z) != 0)
4294 || (((status_reg & FLAG_N) == 0)
4295 != ((status_reg & FLAG_V) == 0)));
4300 static unsigned long
4301 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4302 unsigned long pc_val, unsigned long status_reg)
4304 unsigned long res, shift;
4305 int rm = bits (inst, 0, 3);
4306 unsigned long shifttype = bits (inst, 5, 6);
4310 int rs = bits (inst, 8, 11);
4311 shift = (rs == 15 ? pc_val + 8
4312 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4315 shift = bits (inst, 7, 11);
4317 res = (rm == ARM_PC_REGNUM
4318 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4319 : get_frame_register_unsigned (frame, rm));
4324 res = shift >= 32 ? 0 : res << shift;
4328 res = shift >= 32 ? 0 : res >> shift;
4334 res = ((res & 0x80000000L)
4335 ? ~((~res) >> shift) : res >> shift);
4338 case 3: /* ROR/RRX */
4341 res = (res >> 1) | (carry ? 0x80000000L : 0);
4343 res = (res >> shift) | (res << (32 - shift));
4347 return res & 0xffffffff;
4350 /* Return number of 1-bits in VAL. */
4353 bitcount (unsigned long val)
4356 for (nbits = 0; val != 0; nbits++)
4357 val &= val - 1; /* Delete rightmost 1-bit in val. */
4361 /* Return the size in bytes of the complete Thumb instruction whose
4362 first halfword is INST1. */
4365 thumb_insn_size (unsigned short inst1)
4367 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4374 thumb_advance_itstate (unsigned int itstate)
4376 /* Preserve IT[7:5], the first three bits of the condition. Shift
4377 the upcoming condition flags left by one bit. */
4378 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4380 /* If we have finished the IT block, clear the state. */
4381 if ((itstate & 0x0f) == 0)
4387 /* Find the next PC after the current instruction executes. In some
4388 cases we can not statically determine the answer (see the IT state
4389 handling in this function); in that case, a breakpoint may be
4390 inserted in addition to the returned PC, which will be used to set
4391 another breakpoint by our caller. */
4394 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4396 struct gdbarch *gdbarch = get_frame_arch (frame);
4397 struct address_space *aspace = get_frame_address_space (frame);
4398 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4399 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4400 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4401 unsigned short inst1;
4402 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4403 unsigned long offset;
4404 ULONGEST status, itstate;
4406 nextpc = MAKE_THUMB_ADDR (nextpc);
4407 pc_val = MAKE_THUMB_ADDR (pc_val);
4409 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4411 /* Thumb-2 conditional execution support. There are eight bits in
4412 the CPSR which describe conditional execution state. Once
4413 reconstructed (they're in a funny order), the low five bits
4414 describe the low bit of the condition for each instruction and
4415 how many instructions remain. The high three bits describe the
4416 base condition. One of the low four bits will be set if an IT
4417 block is active. These bits read as zero on earlier
4419 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4420 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4422 /* If-Then handling. On GNU/Linux, where this routine is used, we
4423 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4424 can disable execution of the undefined instruction. So we might
4425 miss the breakpoint if we set it on a skipped conditional
4426 instruction. Because conditional instructions can change the
4427 flags, affecting the execution of further instructions, we may
4428 need to set two breakpoints. */
4430 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4432 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4434 /* An IT instruction. Because this instruction does not
4435 modify the flags, we can accurately predict the next
4436 executed instruction. */
4437 itstate = inst1 & 0x00ff;
4438 pc += thumb_insn_size (inst1);
4440 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4442 inst1 = read_memory_unsigned_integer (pc, 2,
4443 byte_order_for_code);
4444 pc += thumb_insn_size (inst1);
4445 itstate = thumb_advance_itstate (itstate);
4448 return MAKE_THUMB_ADDR (pc);
4450 else if (itstate != 0)
4452 /* We are in a conditional block. Check the condition. */
4453 if (! condition_true (itstate >> 4, status))
4455 /* Advance to the next executed instruction. */
4456 pc += thumb_insn_size (inst1);
4457 itstate = thumb_advance_itstate (itstate);
4459 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4461 inst1 = read_memory_unsigned_integer (pc, 2,
4462 byte_order_for_code);
4463 pc += thumb_insn_size (inst1);
4464 itstate = thumb_advance_itstate (itstate);
4467 return MAKE_THUMB_ADDR (pc);
4469 else if ((itstate & 0x0f) == 0x08)
4471 /* This is the last instruction of the conditional
4472 block, and it is executed. We can handle it normally
4473 because the following instruction is not conditional,
4474 and we must handle it normally because it is
4475 permitted to branch. Fall through. */
4481 /* There are conditional instructions after this one.
4482 If this instruction modifies the flags, then we can
4483 not predict what the next executed instruction will
4484 be. Fortunately, this instruction is architecturally
4485 forbidden to branch; we know it will fall through.
4486 Start by skipping past it. */
4487 pc += thumb_insn_size (inst1);
4488 itstate = thumb_advance_itstate (itstate);
4490 /* Set a breakpoint on the following instruction. */
4491 gdb_assert ((itstate & 0x0f) != 0);
4492 arm_insert_single_step_breakpoint (gdbarch, aspace,
4493 MAKE_THUMB_ADDR (pc));
4494 cond_negated = (itstate >> 4) & 1;
4496 /* Skip all following instructions with the same
4497 condition. If there is a later instruction in the IT
4498 block with the opposite condition, set the other
4499 breakpoint there. If not, then set a breakpoint on
4500 the instruction after the IT block. */
4503 inst1 = read_memory_unsigned_integer (pc, 2,
4504 byte_order_for_code);
4505 pc += thumb_insn_size (inst1);
4506 itstate = thumb_advance_itstate (itstate);
4508 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4510 return MAKE_THUMB_ADDR (pc);
4514 else if (itstate & 0x0f)
4516 /* We are in a conditional block. Check the condition. */
4517 int cond = itstate >> 4;
4519 if (! condition_true (cond, status))
4520 /* Advance to the next instruction. All the 32-bit
4521 instructions share a common prefix. */
4522 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4524 /* Otherwise, handle the instruction normally. */
4527 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4531 /* Fetch the saved PC from the stack. It's stored above
4532 all of the other registers. */
4533 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4534 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4535 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4537 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4539 unsigned long cond = bits (inst1, 8, 11);
4540 if (cond == 0x0f) /* 0x0f = SWI */
4542 struct gdbarch_tdep *tdep;
4543 tdep = gdbarch_tdep (gdbarch);
4545 if (tdep->syscall_next_pc != NULL)
4546 nextpc = tdep->syscall_next_pc (frame);
4549 else if (cond != 0x0f && condition_true (cond, status))
4550 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4552 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4554 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4556 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4558 unsigned short inst2;
4559 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4561 /* Default to the next instruction. */
4563 nextpc = MAKE_THUMB_ADDR (nextpc);
4565 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4567 /* Branches and miscellaneous control instructions. */
4569 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4572 int j1, j2, imm1, imm2;
4574 imm1 = sbits (inst1, 0, 10);
4575 imm2 = bits (inst2, 0, 10);
4576 j1 = bit (inst2, 13);
4577 j2 = bit (inst2, 11);
4579 offset = ((imm1 << 12) + (imm2 << 1));
4580 offset ^= ((!j2) << 22) | ((!j1) << 23);
4582 nextpc = pc_val + offset;
4583 /* For BLX make sure to clear the low bits. */
4584 if (bit (inst2, 12) == 0)
4585 nextpc = nextpc & 0xfffffffc;
4587 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4589 /* SUBS PC, LR, #imm8. */
4590 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4591 nextpc -= inst2 & 0x00ff;
4593 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4595 /* Conditional branch. */
4596 if (condition_true (bits (inst1, 6, 9), status))
4598 int sign, j1, j2, imm1, imm2;
4600 sign = sbits (inst1, 10, 10);
4601 imm1 = bits (inst1, 0, 5);
4602 imm2 = bits (inst2, 0, 10);
4603 j1 = bit (inst2, 13);
4604 j2 = bit (inst2, 11);
4606 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4607 offset += (imm1 << 12) + (imm2 << 1);
4609 nextpc = pc_val + offset;
4613 else if ((inst1 & 0xfe50) == 0xe810)
4615 /* Load multiple or RFE. */
4616 int rn, offset, load_pc = 1;
4618 rn = bits (inst1, 0, 3);
4619 if (bit (inst1, 7) && !bit (inst1, 8))
4622 if (!bit (inst2, 15))
4624 offset = bitcount (inst2) * 4 - 4;
4626 else if (!bit (inst1, 7) && bit (inst1, 8))
4629 if (!bit (inst2, 15))
4633 else if (bit (inst1, 7) && bit (inst1, 8))
4638 else if (!bit (inst1, 7) && !bit (inst1, 8))
4648 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4649 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4652 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4654 /* MOV PC or MOVS PC. */
4655 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4656 nextpc = MAKE_THUMB_ADDR (nextpc);
4658 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4662 int rn, load_pc = 1;
4664 rn = bits (inst1, 0, 3);
4665 base = get_frame_register_unsigned (frame, rn);
4666 if (rn == ARM_PC_REGNUM)
4668 base = (base + 4) & ~(CORE_ADDR) 0x3;
4670 base += bits (inst2, 0, 11);
4672 base -= bits (inst2, 0, 11);
4674 else if (bit (inst1, 7))
4675 base += bits (inst2, 0, 11);
4676 else if (bit (inst2, 11))
4678 if (bit (inst2, 10))
4681 base += bits (inst2, 0, 7);
4683 base -= bits (inst2, 0, 7);
4686 else if ((inst2 & 0x0fc0) == 0x0000)
4688 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4689 base += get_frame_register_unsigned (frame, rm) << shift;
4696 nextpc = get_frame_memory_unsigned (frame, base, 4);
4698 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4701 CORE_ADDR tbl_reg, table, offset, length;
4703 tbl_reg = bits (inst1, 0, 3);
4704 if (tbl_reg == 0x0f)
4705 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4707 table = get_frame_register_unsigned (frame, tbl_reg);
4709 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4710 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4711 nextpc = pc_val + length;
4713 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4716 CORE_ADDR tbl_reg, table, offset, length;
4718 tbl_reg = bits (inst1, 0, 3);
4719 if (tbl_reg == 0x0f)
4720 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4722 table = get_frame_register_unsigned (frame, tbl_reg);
4724 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4725 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4726 nextpc = pc_val + length;
4729 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4731 if (bits (inst1, 3, 6) == 0x0f)
4732 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4734 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4736 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4738 if (bits (inst1, 3, 6) == 0x0f)
4741 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4743 nextpc = MAKE_THUMB_ADDR (nextpc);
4745 else if ((inst1 & 0xf500) == 0xb100)
4748 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4749 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4751 if (bit (inst1, 11) && reg != 0)
4752 nextpc = pc_val + imm;
4753 else if (!bit (inst1, 11) && reg == 0)
4754 nextpc = pc_val + imm;
4759 /* Get the raw next address. PC is the current program counter, in
4760 FRAME, which is assumed to be executing in ARM mode.
4762 The value returned has the execution state of the next instruction
4763 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4764 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4768 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4770 struct gdbarch *gdbarch = get_frame_arch (frame);
4771 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4772 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4773 unsigned long pc_val;
4774 unsigned long this_instr;
4775 unsigned long status;
4778 pc_val = (unsigned long) pc;
4779 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4781 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4782 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4784 if (bits (this_instr, 28, 31) == INST_NV)
4785 switch (bits (this_instr, 24, 27))
4790 /* Branch with Link and change to Thumb. */
4791 nextpc = BranchDest (pc, this_instr);
4792 nextpc |= bit (this_instr, 24) << 1;
4793 nextpc = MAKE_THUMB_ADDR (nextpc);
4799 /* Coprocessor register transfer. */
4800 if (bits (this_instr, 12, 15) == 15)
4801 error (_("Invalid update to pc in instruction"));
4804 else if (condition_true (bits (this_instr, 28, 31), status))
4806 switch (bits (this_instr, 24, 27))
4809 case 0x1: /* data processing */
4813 unsigned long operand1, operand2, result = 0;
4817 if (bits (this_instr, 12, 15) != 15)
4820 if (bits (this_instr, 22, 25) == 0
4821 && bits (this_instr, 4, 7) == 9) /* multiply */
4822 error (_("Invalid update to pc in instruction"));
4824 /* BX <reg>, BLX <reg> */
4825 if (bits (this_instr, 4, 27) == 0x12fff1
4826 || bits (this_instr, 4, 27) == 0x12fff3)
4828 rn = bits (this_instr, 0, 3);
4829 nextpc = ((rn == ARM_PC_REGNUM)
4831 : get_frame_register_unsigned (frame, rn));
4836 /* Multiply into PC. */
4837 c = (status & FLAG_C) ? 1 : 0;
4838 rn = bits (this_instr, 16, 19);
4839 operand1 = ((rn == ARM_PC_REGNUM)
4841 : get_frame_register_unsigned (frame, rn));
4843 if (bit (this_instr, 25))
4845 unsigned long immval = bits (this_instr, 0, 7);
4846 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4847 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4850 else /* operand 2 is a shifted register. */
4851 operand2 = shifted_reg_val (frame, this_instr, c,
4854 switch (bits (this_instr, 21, 24))
4857 result = operand1 & operand2;
4861 result = operand1 ^ operand2;
4865 result = operand1 - operand2;
4869 result = operand2 - operand1;
4873 result = operand1 + operand2;
4877 result = operand1 + operand2 + c;
4881 result = operand1 - operand2 + c;
4885 result = operand2 - operand1 + c;
4891 case 0xb: /* tst, teq, cmp, cmn */
4892 result = (unsigned long) nextpc;
4896 result = operand1 | operand2;
4900 /* Always step into a function. */
4905 result = operand1 & ~operand2;
4913 /* In 26-bit APCS the bottom two bits of the result are
4914 ignored, and we always end up in ARM state. */
4916 nextpc = arm_addr_bits_remove (gdbarch, result);
4924 case 0x5: /* data transfer */
4927 if (bit (this_instr, 20))
4930 if (bits (this_instr, 12, 15) == 15)
4936 if (bit (this_instr, 22))
4937 error (_("Invalid update to pc in instruction"));
4939 /* byte write to PC */
4940 rn = bits (this_instr, 16, 19);
4941 base = ((rn == ARM_PC_REGNUM)
4943 : get_frame_register_unsigned (frame, rn));
4945 if (bit (this_instr, 24))
4948 int c = (status & FLAG_C) ? 1 : 0;
4949 unsigned long offset =
4950 (bit (this_instr, 25)
4951 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4952 : bits (this_instr, 0, 11));
4954 if (bit (this_instr, 23))
4960 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4967 case 0x9: /* block transfer */
4968 if (bit (this_instr, 20))
4971 if (bit (this_instr, 15))
4975 unsigned long rn_val
4976 = get_frame_register_unsigned (frame,
4977 bits (this_instr, 16, 19));
4979 if (bit (this_instr, 23))
4982 unsigned long reglist = bits (this_instr, 0, 14);
4983 offset = bitcount (reglist) * 4;
4984 if (bit (this_instr, 24)) /* pre */
4987 else if (bit (this_instr, 24))
4991 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4998 case 0xb: /* branch & link */
4999 case 0xa: /* branch */
5001 nextpc = BranchDest (pc, this_instr);
5007 case 0xe: /* coproc ops */
5011 struct gdbarch_tdep *tdep;
5012 tdep = gdbarch_tdep (gdbarch);
5014 if (tdep->syscall_next_pc != NULL)
5015 nextpc = tdep->syscall_next_pc (frame);
5021 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5029 /* Determine next PC after current instruction executes. Will call either
5030 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5031 loop is detected. */
5034 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5038 if (arm_frame_is_thumb (frame))
5039 nextpc = thumb_get_next_pc_raw (frame, pc);
5041 nextpc = arm_get_next_pc_raw (frame, pc);
5046 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5047 of the appropriate mode (as encoded in the PC value), even if this
5048 differs from what would be expected according to the symbol tables. */
5051 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5052 struct address_space *aspace,
5055 struct cleanup *old_chain
5056 = make_cleanup_restore_integer (&arm_override_mode);
5058 arm_override_mode = IS_THUMB_ADDR (pc);
5059 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5061 insert_single_step_breakpoint (gdbarch, aspace, pc);
5063 do_cleanups (old_chain);
5066 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5067 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5068 is found, attempt to step through it. A breakpoint is placed at the end of
5072 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5074 struct gdbarch *gdbarch = get_frame_arch (frame);
5075 struct address_space *aspace = get_frame_address_space (frame);
5076 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5077 CORE_ADDR pc = get_frame_pc (frame);
5078 CORE_ADDR breaks[2] = {-1, -1};
5080 unsigned short insn1, insn2;
5083 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5084 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5085 ULONGEST status, itstate;
5087 /* We currently do not support atomic sequences within an IT block. */
5088 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5089 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5093 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5094 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5096 if (thumb_insn_size (insn1) != 4)
5099 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5101 if (!((insn1 & 0xfff0) == 0xe850
5102 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5105 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5107 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5109 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5112 if (thumb_insn_size (insn1) != 4)
5114 /* Assume that there is at most one conditional branch in the
5115 atomic sequence. If a conditional branch is found, put a
5116 breakpoint in its destination address. */
5117 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5119 if (last_breakpoint > 0)
5120 return 0; /* More than one conditional branch found,
5121 fallback to the standard code. */
5123 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5127 /* We do not support atomic sequences that use any *other*
5128 instructions but conditional branches to change the PC.
5129 Fall back to standard code to avoid losing control of
5131 else if (thumb_instruction_changes_pc (insn1))
5136 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5139 /* Assume that there is at most one conditional branch in the
5140 atomic sequence. If a conditional branch is found, put a
5141 breakpoint in its destination address. */
5142 if ((insn1 & 0xf800) == 0xf000
5143 && (insn2 & 0xd000) == 0x8000
5144 && (insn1 & 0x0380) != 0x0380)
5146 int sign, j1, j2, imm1, imm2;
5147 unsigned int offset;
5149 sign = sbits (insn1, 10, 10);
5150 imm1 = bits (insn1, 0, 5);
5151 imm2 = bits (insn2, 0, 10);
5152 j1 = bit (insn2, 13);
5153 j2 = bit (insn2, 11);
5155 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5156 offset += (imm1 << 12) + (imm2 << 1);
5158 if (last_breakpoint > 0)
5159 return 0; /* More than one conditional branch found,
5160 fallback to the standard code. */
5162 breaks[1] = loc + offset;
5166 /* We do not support atomic sequences that use any *other*
5167 instructions but conditional branches to change the PC.
5168 Fall back to standard code to avoid losing control of
5170 else if (thumb2_instruction_changes_pc (insn1, insn2))
5173 /* If we find a strex{,b,h,d}, we're done. */
5174 if ((insn1 & 0xfff0) == 0xe840
5175 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5180 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5181 if (insn_count == atomic_sequence_length)
5184 /* Insert a breakpoint right after the end of the atomic sequence. */
5187 /* Check for duplicated breakpoints. Check also for a breakpoint
5188 placed (branch instruction's destination) anywhere in sequence. */
5190 && (breaks[1] == breaks[0]
5191 || (breaks[1] >= pc && breaks[1] < loc)))
5192 last_breakpoint = 0;
5194 /* Effectively inserts the breakpoints. */
5195 for (index = 0; index <= last_breakpoint; index++)
5196 arm_insert_single_step_breakpoint (gdbarch, aspace,
5197 MAKE_THUMB_ADDR (breaks[index]));
5203 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5205 struct gdbarch *gdbarch = get_frame_arch (frame);
5206 struct address_space *aspace = get_frame_address_space (frame);
5207 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5208 CORE_ADDR pc = get_frame_pc (frame);
5209 CORE_ADDR breaks[2] = {-1, -1};
5214 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5215 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5217 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5218 Note that we do not currently support conditionally executed atomic
5220 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5222 if ((insn & 0xff9000f0) != 0xe1900090)
5225 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5227 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5229 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5232 /* Assume that there is at most one conditional branch in the atomic
5233 sequence. If a conditional branch is found, put a breakpoint in
5234 its destination address. */
5235 if (bits (insn, 24, 27) == 0xa)
5237 if (last_breakpoint > 0)
5238 return 0; /* More than one conditional branch found, fallback
5239 to the standard single-step code. */
5241 breaks[1] = BranchDest (loc - 4, insn);
5245 /* We do not support atomic sequences that use any *other* instructions
5246 but conditional branches to change the PC. Fall back to standard
5247 code to avoid losing control of execution. */
5248 else if (arm_instruction_changes_pc (insn))
5251 /* If we find a strex{,b,h,d}, we're done. */
5252 if ((insn & 0xff9000f0) == 0xe1800090)
5256 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5257 if (insn_count == atomic_sequence_length)
5260 /* Insert a breakpoint right after the end of the atomic sequence. */
5263 /* Check for duplicated breakpoints. Check also for a breakpoint
5264 placed (branch instruction's destination) anywhere in sequence. */
5266 && (breaks[1] == breaks[0]
5267 || (breaks[1] >= pc && breaks[1] < loc)))
5268 last_breakpoint = 0;
5270 /* Effectively inserts the breakpoints. */
5271 for (index = 0; index <= last_breakpoint; index++)
5272 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5278 arm_deal_with_atomic_sequence (struct frame_info *frame)
5280 if (arm_frame_is_thumb (frame))
5281 return thumb_deal_with_atomic_sequence_raw (frame);
5283 return arm_deal_with_atomic_sequence_raw (frame);
5286 /* single_step() is called just before we want to resume the inferior,
5287 if we want to single-step it but there is no hardware or kernel
5288 single-step support. We find the target of the coming instruction
5289 and breakpoint it. */
5292 arm_software_single_step (struct frame_info *frame)
5294 struct gdbarch *gdbarch = get_frame_arch (frame);
5295 struct address_space *aspace = get_frame_address_space (frame);
5298 if (arm_deal_with_atomic_sequence (frame))
5301 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5302 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5307 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5308 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5309 NULL if an error occurs. BUF is freed. */
5312 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5313 int old_len, int new_len)
5316 int bytes_to_read = new_len - old_len;
5318 new_buf = xmalloc (new_len);
5319 memcpy (new_buf + bytes_to_read, buf, old_len);
5321 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5329 /* An IT block is at most the 2-byte IT instruction followed by
5330 four 4-byte instructions. The furthest back we must search to
5331 find an IT block that affects the current instruction is thus
5332 2 + 3 * 4 == 14 bytes. */
5333 #define MAX_IT_BLOCK_PREFIX 14
5335 /* Use a quick scan if there are more than this many bytes of
5337 #define IT_SCAN_THRESHOLD 32
5339 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5340 A breakpoint in an IT block may not be hit, depending on the
5343 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5347 CORE_ADDR boundary, func_start;
5349 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5350 int i, any, last_it, last_it_count;
5352 /* If we are using BKPT breakpoints, none of this is necessary. */
5353 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5356 /* ARM mode does not have this problem. */
5357 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5360 /* We are setting a breakpoint in Thumb code that could potentially
5361 contain an IT block. The first step is to find how much Thumb
5362 code there is; we do not need to read outside of known Thumb
5364 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5366 /* Thumb-2 code must have mapping symbols to have a chance. */
5369 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5371 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5372 && func_start > boundary)
5373 boundary = func_start;
5375 /* Search for a candidate IT instruction. We have to do some fancy
5376 footwork to distinguish a real IT instruction from the second
5377 half of a 32-bit instruction, but there is no need for that if
5378 there's no candidate. */
5379 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5381 /* No room for an IT instruction. */
5384 buf = xmalloc (buf_len);
5385 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5388 for (i = 0; i < buf_len; i += 2)
5390 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5391 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5403 /* OK, the code bytes before this instruction contain at least one
5404 halfword which resembles an IT instruction. We know that it's
5405 Thumb code, but there are still two possibilities. Either the
5406 halfword really is an IT instruction, or it is the second half of
5407 a 32-bit Thumb instruction. The only way we can tell is to
5408 scan forwards from a known instruction boundary. */
5409 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5413 /* There's a lot of code before this instruction. Start with an
5414 optimistic search; it's easy to recognize halfwords that can
5415 not be the start of a 32-bit instruction, and use that to
5416 lock on to the instruction boundaries. */
5417 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5420 buf_len = IT_SCAN_THRESHOLD;
5423 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5425 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5426 if (thumb_insn_size (inst1) == 2)
5433 /* At this point, if DEFINITE, BUF[I] is the first place we
5434 are sure that we know the instruction boundaries, and it is far
5435 enough from BPADDR that we could not miss an IT instruction
5436 affecting BPADDR. If ! DEFINITE, give up - start from a
5440 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5444 buf_len = bpaddr - boundary;
5450 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5453 buf_len = bpaddr - boundary;
5457 /* Scan forwards. Find the last IT instruction before BPADDR. */
5462 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5464 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5469 else if (inst1 & 0x0002)
5471 else if (inst1 & 0x0004)
5476 i += thumb_insn_size (inst1);
5482 /* There wasn't really an IT instruction after all. */
5485 if (last_it_count < 1)
5486 /* It was too far away. */
5489 /* This really is a trouble spot. Move the breakpoint to the IT
5491 return bpaddr - buf_len + last_it;
5494 /* ARM displaced stepping support.
5496 Generally ARM displaced stepping works as follows:
5498 1. When an instruction is to be single-stepped, it is first decoded by
5499 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5500 Depending on the type of instruction, it is then copied to a scratch
5501 location, possibly in a modified form. The copy_* set of functions
5502 performs such modification, as necessary. A breakpoint is placed after
5503 the modified instruction in the scratch space to return control to GDB.
5504 Note in particular that instructions which modify the PC will no longer
5505 do so after modification.
5507 2. The instruction is single-stepped, by setting the PC to the scratch
5508 location address, and resuming. Control returns to GDB when the
5511 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5512 function used for the current instruction. This function's job is to
5513 put the CPU/memory state back to what it would have been if the
5514 instruction had been executed unmodified in its original location. */
5516 /* NOP instruction (mov r0, r0). */
5517 #define ARM_NOP 0xe1a00000
5518 #define THUMB_NOP 0x4600
5520 /* Helper for register reads for displaced stepping. In particular, this
5521 returns the PC as it would be seen by the instruction at its original
5525 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5529 CORE_ADDR from = dsc->insn_addr;
5531 if (regno == ARM_PC_REGNUM)
5533 /* Compute pipeline offset:
5534 - When executing an ARM instruction, PC reads as the address of the
5535 current instruction plus 8.
5536 - When executing a Thumb instruction, PC reads as the address of the
5537 current instruction plus 4. */
5544 if (debug_displaced)
5545 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5546 (unsigned long) from);
5547 return (ULONGEST) from;
5551 regcache_cooked_read_unsigned (regs, regno, &ret);
5552 if (debug_displaced)
5553 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5554 regno, (unsigned long) ret);
5560 displaced_in_arm_mode (struct regcache *regs)
5563 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5565 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5567 return (ps & t_bit) == 0;
5570 /* Write to the PC as from a branch instruction. */
5573 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5577 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5578 architecture versions < 6. */
5579 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5580 val & ~(ULONGEST) 0x3);
5582 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5583 val & ~(ULONGEST) 0x1);
5586 /* Write to the PC as from a branch-exchange instruction. */
5589 bx_write_pc (struct regcache *regs, ULONGEST val)
5592 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5594 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5598 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5601 else if ((val & 2) == 0)
5603 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5604 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5608 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5609 mode, align dest to 4 bytes). */
5610 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5611 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5612 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5616 /* Write to the PC as if from a load instruction. */
5619 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5622 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5623 bx_write_pc (regs, val);
5625 branch_write_pc (regs, dsc, val);
5628 /* Write to the PC as if from an ALU instruction. */
5631 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5634 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5635 bx_write_pc (regs, val);
5637 branch_write_pc (regs, dsc, val);
5640 /* Helper for writing to registers for displaced stepping. Writing to the PC
5641 has a varying effects depending on the instruction which does the write:
5642 this is controlled by the WRITE_PC argument. */
5645 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5646 int regno, ULONGEST val, enum pc_write_style write_pc)
5648 if (regno == ARM_PC_REGNUM)
5650 if (debug_displaced)
5651 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5652 (unsigned long) val);
5655 case BRANCH_WRITE_PC:
5656 branch_write_pc (regs, dsc, val);
5660 bx_write_pc (regs, val);
5664 load_write_pc (regs, dsc, val);
5668 alu_write_pc (regs, dsc, val);
5671 case CANNOT_WRITE_PC:
5672 warning (_("Instruction wrote to PC in an unexpected way when "
5673 "single-stepping"));
5677 internal_error (__FILE__, __LINE__,
5678 _("Invalid argument to displaced_write_reg"));
5681 dsc->wrote_to_pc = 1;
5685 if (debug_displaced)
5686 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5687 regno, (unsigned long) val);
5688 regcache_cooked_write_unsigned (regs, regno, val);
5692 /* This function is used to concisely determine if an instruction INSN
5693 references PC. Register fields of interest in INSN should have the
5694 corresponding fields of BITMASK set to 0b1111. The function
5695 returns return 1 if any of these fields in INSN reference the PC
5696 (also 0b1111, r15), else it returns 0. */
5699 insn_references_pc (uint32_t insn, uint32_t bitmask)
5701 uint32_t lowbit = 1;
5703 while (bitmask != 0)
5707 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5713 mask = lowbit * 0xf;
5715 if ((insn & mask) == mask)
5724 /* The simplest copy function. Many instructions have the same effect no
5725 matter what address they are executed at: in those cases, use this. */
5728 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5729 const char *iname, struct displaced_step_closure *dsc)
5731 if (debug_displaced)
5732 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5733 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5736 dsc->modinsn[0] = insn;
5742 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5743 uint16_t insn2, const char *iname,
5744 struct displaced_step_closure *dsc)
5746 if (debug_displaced)
5747 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5748 "opcode/class '%s' unmodified\n", insn1, insn2,
5751 dsc->modinsn[0] = insn1;
5752 dsc->modinsn[1] = insn2;
5758 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5761 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5763 struct displaced_step_closure *dsc)
5765 if (debug_displaced)
5766 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5767 "opcode/class '%s' unmodified\n", insn,
5770 dsc->modinsn[0] = insn;
5775 /* Preload instructions with immediate offset. */
5778 cleanup_preload (struct gdbarch *gdbarch,
5779 struct regcache *regs, struct displaced_step_closure *dsc)
5781 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5782 if (!dsc->u.preload.immed)
5783 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5787 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5788 struct displaced_step_closure *dsc, unsigned int rn)
5791 /* Preload instructions:
5793 {pli/pld} [rn, #+/-imm]
5795 {pli/pld} [r0, #+/-imm]. */
5797 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5798 rn_val = displaced_read_reg (regs, dsc, rn);
5799 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5800 dsc->u.preload.immed = 1;
5802 dsc->cleanup = &cleanup_preload;
5806 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5807 struct displaced_step_closure *dsc)
5809 unsigned int rn = bits (insn, 16, 19);
5811 if (!insn_references_pc (insn, 0x000f0000ul))
5812 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5814 if (debug_displaced)
5815 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5816 (unsigned long) insn);
5818 dsc->modinsn[0] = insn & 0xfff0ffff;
5820 install_preload (gdbarch, regs, dsc, rn);
5826 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5827 struct regcache *regs, struct displaced_step_closure *dsc)
5829 unsigned int rn = bits (insn1, 0, 3);
5830 unsigned int u_bit = bit (insn1, 7);
5831 int imm12 = bits (insn2, 0, 11);
5834 if (rn != ARM_PC_REGNUM)
5835 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5837 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5838 PLD (literal) Encoding T1. */
5839 if (debug_displaced)
5840 fprintf_unfiltered (gdb_stdlog,
5841 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5842 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5848 /* Rewrite instruction {pli/pld} PC imm12 into:
5849 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5853 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5855 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5856 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5858 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5860 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5861 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5862 dsc->u.preload.immed = 0;
5864 /* {pli/pld} [r0, r1] */
5865 dsc->modinsn[0] = insn1 & 0xfff0;
5866 dsc->modinsn[1] = 0xf001;
5869 dsc->cleanup = &cleanup_preload;
5873 /* Preload instructions with register offset. */
5876 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5877 struct displaced_step_closure *dsc, unsigned int rn,
5880 ULONGEST rn_val, rm_val;
5882 /* Preload register-offset instructions:
5884 {pli/pld} [rn, rm {, shift}]
5886 {pli/pld} [r0, r1 {, shift}]. */
5888 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5889 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5890 rn_val = displaced_read_reg (regs, dsc, rn);
5891 rm_val = displaced_read_reg (regs, dsc, rm);
5892 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5893 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5894 dsc->u.preload.immed = 0;
5896 dsc->cleanup = &cleanup_preload;
5900 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5901 struct regcache *regs,
5902 struct displaced_step_closure *dsc)
5904 unsigned int rn = bits (insn, 16, 19);
5905 unsigned int rm = bits (insn, 0, 3);
5908 if (!insn_references_pc (insn, 0x000f000ful))
5909 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5911 if (debug_displaced)
5912 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5913 (unsigned long) insn);
5915 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5917 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5921 /* Copy/cleanup coprocessor load and store instructions. */
5924 cleanup_copro_load_store (struct gdbarch *gdbarch,
5925 struct regcache *regs,
5926 struct displaced_step_closure *dsc)
5928 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5930 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5932 if (dsc->u.ldst.writeback)
5933 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5937 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5938 struct displaced_step_closure *dsc,
5939 int writeback, unsigned int rn)
5943 /* Coprocessor load/store instructions:
5945 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5947 {stc/stc2} [r0, #+/-imm].
5949 ldc/ldc2 are handled identically. */
5951 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5952 rn_val = displaced_read_reg (regs, dsc, rn);
5953 /* PC should be 4-byte aligned. */
5954 rn_val = rn_val & 0xfffffffc;
5955 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5957 dsc->u.ldst.writeback = writeback;
5958 dsc->u.ldst.rn = rn;
5960 dsc->cleanup = &cleanup_copro_load_store;
5964 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5965 struct regcache *regs,
5966 struct displaced_step_closure *dsc)
5968 unsigned int rn = bits (insn, 16, 19);
5970 if (!insn_references_pc (insn, 0x000f0000ul))
5971 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5973 if (debug_displaced)
5974 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5975 "load/store insn %.8lx\n", (unsigned long) insn);
5977 dsc->modinsn[0] = insn & 0xfff0ffff;
5979 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5985 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5986 uint16_t insn2, struct regcache *regs,
5987 struct displaced_step_closure *dsc)
5989 unsigned int rn = bits (insn1, 0, 3);
5991 if (rn != ARM_PC_REGNUM)
5992 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5993 "copro load/store", dsc);
5995 if (debug_displaced)
5996 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5997 "load/store insn %.4x%.4x\n", insn1, insn2);
5999 dsc->modinsn[0] = insn1 & 0xfff0;
6000 dsc->modinsn[1] = insn2;
6003 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6004 doesn't support writeback, so pass 0. */
6005 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6010 /* Clean up branch instructions (actually perform the branch, by setting
6014 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6015 struct displaced_step_closure *dsc)
6017 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6018 int branch_taken = condition_true (dsc->u.branch.cond, status);
6019 enum pc_write_style write_pc = dsc->u.branch.exchange
6020 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6025 if (dsc->u.branch.link)
6027 /* The value of LR should be the next insn of current one. In order
6028 not to confuse logic hanlding later insn `bx lr', if current insn mode
6029 is Thumb, the bit 0 of LR value should be set to 1. */
6030 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6033 next_insn_addr |= 0x1;
6035 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6039 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6042 /* Copy B/BL/BLX instructions with immediate destinations. */
6045 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6046 struct displaced_step_closure *dsc,
6047 unsigned int cond, int exchange, int link, long offset)
6049 /* Implement "BL<cond> <label>" as:
6051 Preparation: cond <- instruction condition
6052 Insn: mov r0, r0 (nop)
6053 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6055 B<cond> similar, but don't set r14 in cleanup. */
6057 dsc->u.branch.cond = cond;
6058 dsc->u.branch.link = link;
6059 dsc->u.branch.exchange = exchange;
6061 dsc->u.branch.dest = dsc->insn_addr;
6062 if (link && exchange)
6063 /* For BLX, offset is computed from the Align (PC, 4). */
6064 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6067 dsc->u.branch.dest += 4 + offset;
6069 dsc->u.branch.dest += 8 + offset;
6071 dsc->cleanup = &cleanup_branch;
6074 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6075 struct regcache *regs, struct displaced_step_closure *dsc)
6077 unsigned int cond = bits (insn, 28, 31);
6078 int exchange = (cond == 0xf);
6079 int link = exchange || bit (insn, 24);
6082 if (debug_displaced)
6083 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6084 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6085 (unsigned long) insn);
6087 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6088 then arrange the switch into Thumb mode. */
6089 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6091 offset = bits (insn, 0, 23) << 2;
6093 if (bit (offset, 25))
6094 offset = offset | ~0x3ffffff;
6096 dsc->modinsn[0] = ARM_NOP;
6098 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6103 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6104 uint16_t insn2, struct regcache *regs,
6105 struct displaced_step_closure *dsc)
6107 int link = bit (insn2, 14);
6108 int exchange = link && !bit (insn2, 12);
6111 int j1 = bit (insn2, 13);
6112 int j2 = bit (insn2, 11);
6113 int s = sbits (insn1, 10, 10);
6114 int i1 = !(j1 ^ bit (insn1, 10));
6115 int i2 = !(j2 ^ bit (insn1, 10));
6117 if (!link && !exchange) /* B */
6119 offset = (bits (insn2, 0, 10) << 1);
6120 if (bit (insn2, 12)) /* Encoding T4 */
6122 offset |= (bits (insn1, 0, 9) << 12)
6128 else /* Encoding T3 */
6130 offset |= (bits (insn1, 0, 5) << 12)
6134 cond = bits (insn1, 6, 9);
6139 offset = (bits (insn1, 0, 9) << 12);
6140 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6141 offset |= exchange ?
6142 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6145 if (debug_displaced)
6146 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6147 "%.4x %.4x with offset %.8lx\n",
6148 link ? (exchange) ? "blx" : "bl" : "b",
6149 insn1, insn2, offset);
6151 dsc->modinsn[0] = THUMB_NOP;
6153 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6157 /* Copy B Thumb instructions. */
6159 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6160 struct displaced_step_closure *dsc)
6162 unsigned int cond = 0;
6164 unsigned short bit_12_15 = bits (insn, 12, 15);
6165 CORE_ADDR from = dsc->insn_addr;
6167 if (bit_12_15 == 0xd)
6169 /* offset = SignExtend (imm8:0, 32) */
6170 offset = sbits ((insn << 1), 0, 8);
6171 cond = bits (insn, 8, 11);
6173 else if (bit_12_15 == 0xe) /* Encoding T2 */
6175 offset = sbits ((insn << 1), 0, 11);
6179 if (debug_displaced)
6180 fprintf_unfiltered (gdb_stdlog,
6181 "displaced: copying b immediate insn %.4x "
6182 "with offset %d\n", insn, offset);
6184 dsc->u.branch.cond = cond;
6185 dsc->u.branch.link = 0;
6186 dsc->u.branch.exchange = 0;
6187 dsc->u.branch.dest = from + 4 + offset;
6189 dsc->modinsn[0] = THUMB_NOP;
6191 dsc->cleanup = &cleanup_branch;
6196 /* Copy BX/BLX with register-specified destinations. */
6199 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6200 struct displaced_step_closure *dsc, int link,
6201 unsigned int cond, unsigned int rm)
6203 /* Implement {BX,BLX}<cond> <reg>" as:
6205 Preparation: cond <- instruction condition
6206 Insn: mov r0, r0 (nop)
6207 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6209 Don't set r14 in cleanup for BX. */
6211 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6213 dsc->u.branch.cond = cond;
6214 dsc->u.branch.link = link;
6216 dsc->u.branch.exchange = 1;
6218 dsc->cleanup = &cleanup_branch;
6222 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6223 struct regcache *regs, struct displaced_step_closure *dsc)
6225 unsigned int cond = bits (insn, 28, 31);
6228 int link = bit (insn, 5);
6229 unsigned int rm = bits (insn, 0, 3);
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6233 (unsigned long) insn);
6235 dsc->modinsn[0] = ARM_NOP;
6237 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6242 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6243 struct regcache *regs,
6244 struct displaced_step_closure *dsc)
6246 int link = bit (insn, 7);
6247 unsigned int rm = bits (insn, 3, 6);
6249 if (debug_displaced)
6250 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6251 (unsigned short) insn);
6253 dsc->modinsn[0] = THUMB_NOP;
6255 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6261 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6264 cleanup_alu_imm (struct gdbarch *gdbarch,
6265 struct regcache *regs, struct displaced_step_closure *dsc)
6267 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6268 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6269 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6270 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6274 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6275 struct displaced_step_closure *dsc)
6277 unsigned int rn = bits (insn, 16, 19);
6278 unsigned int rd = bits (insn, 12, 15);
6279 unsigned int op = bits (insn, 21, 24);
6280 int is_mov = (op == 0xd);
6281 ULONGEST rd_val, rn_val;
6283 if (!insn_references_pc (insn, 0x000ff000ul))
6284 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6286 if (debug_displaced)
6287 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6288 "%.8lx\n", is_mov ? "move" : "ALU",
6289 (unsigned long) insn);
6291 /* Instruction is of form:
6293 <op><cond> rd, [rn,] #imm
6297 Preparation: tmp1, tmp2 <- r0, r1;
6299 Insn: <op><cond> r0, r1, #imm
6300 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6303 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6304 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6305 rn_val = displaced_read_reg (regs, dsc, rn);
6306 rd_val = displaced_read_reg (regs, dsc, rd);
6307 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6308 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6312 dsc->modinsn[0] = insn & 0xfff00fff;
6314 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6316 dsc->cleanup = &cleanup_alu_imm;
6322 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6323 uint16_t insn2, struct regcache *regs,
6324 struct displaced_step_closure *dsc)
6326 unsigned int op = bits (insn1, 5, 8);
6327 unsigned int rn, rm, rd;
6328 ULONGEST rd_val, rn_val;
6330 rn = bits (insn1, 0, 3); /* Rn */
6331 rm = bits (insn2, 0, 3); /* Rm */
6332 rd = bits (insn2, 8, 11); /* Rd */
6334 /* This routine is only called for instruction MOV. */
6335 gdb_assert (op == 0x2 && rn == 0xf);
6337 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6338 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6340 if (debug_displaced)
6341 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6342 "ALU", insn1, insn2);
6344 /* Instruction is of form:
6346 <op><cond> rd, [rn,] #imm
6350 Preparation: tmp1, tmp2 <- r0, r1;
6352 Insn: <op><cond> r0, r1, #imm
6353 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6356 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6357 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6358 rn_val = displaced_read_reg (regs, dsc, rn);
6359 rd_val = displaced_read_reg (regs, dsc, rd);
6360 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6361 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6364 dsc->modinsn[0] = insn1;
6365 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6368 dsc->cleanup = &cleanup_alu_imm;
6373 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6376 cleanup_alu_reg (struct gdbarch *gdbarch,
6377 struct regcache *regs, struct displaced_step_closure *dsc)
6382 rd_val = displaced_read_reg (regs, dsc, 0);
6384 for (i = 0; i < 3; i++)
6385 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6387 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6391 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6392 struct displaced_step_closure *dsc,
6393 unsigned int rd, unsigned int rn, unsigned int rm)
6395 ULONGEST rd_val, rn_val, rm_val;
6397 /* Instruction is of form:
6399 <op><cond> rd, [rn,] rm [, <shift>]
6403 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6404 r0, r1, r2 <- rd, rn, rm
6405 Insn: <op><cond> r0, r1, r2 [, <shift>]
6406 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6409 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6410 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6411 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6412 rd_val = displaced_read_reg (regs, dsc, rd);
6413 rn_val = displaced_read_reg (regs, dsc, rn);
6414 rm_val = displaced_read_reg (regs, dsc, rm);
6415 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6416 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6417 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6420 dsc->cleanup = &cleanup_alu_reg;
6424 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6425 struct displaced_step_closure *dsc)
6427 unsigned int op = bits (insn, 21, 24);
6428 int is_mov = (op == 0xd);
6430 if (!insn_references_pc (insn, 0x000ff00ful))
6431 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6433 if (debug_displaced)
6434 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6435 is_mov ? "move" : "ALU", (unsigned long) insn);
6438 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6440 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6442 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6448 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6449 struct regcache *regs,
6450 struct displaced_step_closure *dsc)
6452 unsigned rn, rm, rd;
6454 rd = bits (insn, 3, 6);
6455 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6458 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6459 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6461 if (debug_displaced)
6462 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6463 "ALU", (unsigned short) insn);
6465 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6467 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6472 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6475 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6476 struct regcache *regs,
6477 struct displaced_step_closure *dsc)
6479 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6482 for (i = 0; i < 4; i++)
6483 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6485 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6489 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6490 struct displaced_step_closure *dsc,
6491 unsigned int rd, unsigned int rn, unsigned int rm,
6495 ULONGEST rd_val, rn_val, rm_val, rs_val;
6497 /* Instruction is of form:
6499 <op><cond> rd, [rn,] rm, <shift> rs
6503 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6504 r0, r1, r2, r3 <- rd, rn, rm, rs
6505 Insn: <op><cond> r0, r1, r2, <shift> r3
6507 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6511 for (i = 0; i < 4; i++)
6512 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6514 rd_val = displaced_read_reg (regs, dsc, rd);
6515 rn_val = displaced_read_reg (regs, dsc, rn);
6516 rm_val = displaced_read_reg (regs, dsc, rm);
6517 rs_val = displaced_read_reg (regs, dsc, rs);
6518 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6519 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6520 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6521 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6523 dsc->cleanup = &cleanup_alu_shifted_reg;
6527 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6528 struct regcache *regs,
6529 struct displaced_step_closure *dsc)
6531 unsigned int op = bits (insn, 21, 24);
6532 int is_mov = (op == 0xd);
6533 unsigned int rd, rn, rm, rs;
6535 if (!insn_references_pc (insn, 0x000fff0ful))
6536 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6538 if (debug_displaced)
6539 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6540 "%.8lx\n", is_mov ? "move" : "ALU",
6541 (unsigned long) insn);
6543 rn = bits (insn, 16, 19);
6544 rm = bits (insn, 0, 3);
6545 rs = bits (insn, 8, 11);
6546 rd = bits (insn, 12, 15);
6549 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6551 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6553 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6558 /* Clean up load instructions. */
6561 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6562 struct displaced_step_closure *dsc)
6564 ULONGEST rt_val, rt_val2 = 0, rn_val;
6566 rt_val = displaced_read_reg (regs, dsc, 0);
6567 if (dsc->u.ldst.xfersize == 8)
6568 rt_val2 = displaced_read_reg (regs, dsc, 1);
6569 rn_val = displaced_read_reg (regs, dsc, 2);
6571 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6572 if (dsc->u.ldst.xfersize > 4)
6573 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6574 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6575 if (!dsc->u.ldst.immed)
6576 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6578 /* Handle register writeback. */
6579 if (dsc->u.ldst.writeback)
6580 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6581 /* Put result in right place. */
6582 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6583 if (dsc->u.ldst.xfersize == 8)
6584 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6587 /* Clean up store instructions. */
6590 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6591 struct displaced_step_closure *dsc)
6593 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6595 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6596 if (dsc->u.ldst.xfersize > 4)
6597 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6598 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6599 if (!dsc->u.ldst.immed)
6600 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6601 if (!dsc->u.ldst.restore_r4)
6602 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6605 if (dsc->u.ldst.writeback)
6606 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6609 /* Copy "extra" load/store instructions. These are halfword/doubleword
6610 transfers, which have a different encoding to byte/word transfers. */
6613 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6614 struct regcache *regs, struct displaced_step_closure *dsc)
6616 unsigned int op1 = bits (insn, 20, 24);
6617 unsigned int op2 = bits (insn, 5, 6);
6618 unsigned int rt = bits (insn, 12, 15);
6619 unsigned int rn = bits (insn, 16, 19);
6620 unsigned int rm = bits (insn, 0, 3);
6621 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6622 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6623 int immed = (op1 & 0x4) != 0;
6625 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6627 if (!insn_references_pc (insn, 0x000ff00ful))
6628 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6630 if (debug_displaced)
6631 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6632 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6633 (unsigned long) insn);
6635 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6638 internal_error (__FILE__, __LINE__,
6639 _("copy_extra_ld_st: instruction decode error"));
6641 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6642 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6643 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6645 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6647 rt_val = displaced_read_reg (regs, dsc, rt);
6648 if (bytesize[opcode] == 8)
6649 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6650 rn_val = displaced_read_reg (regs, dsc, rn);
6652 rm_val = displaced_read_reg (regs, dsc, rm);
6654 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6655 if (bytesize[opcode] == 8)
6656 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6657 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6659 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6662 dsc->u.ldst.xfersize = bytesize[opcode];
6663 dsc->u.ldst.rn = rn;
6664 dsc->u.ldst.immed = immed;
6665 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6666 dsc->u.ldst.restore_r4 = 0;
6669 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6671 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6672 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6674 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6676 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6677 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6679 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6684 /* Copy byte/half word/word loads and stores. */
6687 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6688 struct displaced_step_closure *dsc, int load,
6689 int immed, int writeback, int size, int usermode,
6690 int rt, int rm, int rn)
6692 ULONGEST rt_val, rn_val, rm_val = 0;
6694 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6695 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6697 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6699 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6701 rt_val = displaced_read_reg (regs, dsc, rt);
6702 rn_val = displaced_read_reg (regs, dsc, rn);
6704 rm_val = displaced_read_reg (regs, dsc, rm);
6706 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6707 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6709 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6711 dsc->u.ldst.xfersize = size;
6712 dsc->u.ldst.rn = rn;
6713 dsc->u.ldst.immed = immed;
6714 dsc->u.ldst.writeback = writeback;
6716 /* To write PC we can do:
6718 Before this sequence of instructions:
6719 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6720 r2 is the Rn value got from dispalced_read_reg.
6722 Insn1: push {pc} Write address of STR instruction + offset on stack
6723 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6724 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6725 = addr(Insn1) + offset - addr(Insn3) - 8
6727 Insn4: add r4, r4, #8 r4 = offset - 8
6728 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6730 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6732 Otherwise we don't know what value to write for PC, since the offset is
6733 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6734 of this can be found in Section "Saving from r15" in
6735 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6737 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6742 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6743 uint16_t insn2, struct regcache *regs,
6744 struct displaced_step_closure *dsc, int size)
6746 unsigned int u_bit = bit (insn1, 7);
6747 unsigned int rt = bits (insn2, 12, 15);
6748 int imm12 = bits (insn2, 0, 11);
6751 if (debug_displaced)
6752 fprintf_unfiltered (gdb_stdlog,
6753 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6754 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6760 /* Rewrite instruction LDR Rt imm12 into:
6762 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6766 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6769 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6770 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6771 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6773 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6775 pc_val = pc_val & 0xfffffffc;
6777 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6778 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6782 dsc->u.ldst.xfersize = size;
6783 dsc->u.ldst.immed = 0;
6784 dsc->u.ldst.writeback = 0;
6785 dsc->u.ldst.restore_r4 = 0;
6787 /* LDR R0, R2, R3 */
6788 dsc->modinsn[0] = 0xf852;
6789 dsc->modinsn[1] = 0x3;
6792 dsc->cleanup = &cleanup_load;
6798 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6799 uint16_t insn2, struct regcache *regs,
6800 struct displaced_step_closure *dsc,
6801 int writeback, int immed)
6803 unsigned int rt = bits (insn2, 12, 15);
6804 unsigned int rn = bits (insn1, 0, 3);
6805 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6806 /* In LDR (register), there is also a register Rm, which is not allowed to
6807 be PC, so we don't have to check it. */
6809 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6813 if (debug_displaced)
6814 fprintf_unfiltered (gdb_stdlog,
6815 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6816 rt, rn, insn1, insn2);
6818 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6821 dsc->u.ldst.restore_r4 = 0;
6824 /* ldr[b]<cond> rt, [rn, #imm], etc.
6826 ldr[b]<cond> r0, [r2, #imm]. */
6828 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6829 dsc->modinsn[1] = insn2 & 0x0fff;
6832 /* ldr[b]<cond> rt, [rn, rm], etc.
6834 ldr[b]<cond> r0, [r2, r3]. */
6836 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6837 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6847 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6848 struct regcache *regs,
6849 struct displaced_step_closure *dsc,
6850 int load, int size, int usermode)
6852 int immed = !bit (insn, 25);
6853 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6854 unsigned int rt = bits (insn, 12, 15);
6855 unsigned int rn = bits (insn, 16, 19);
6856 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6858 if (!insn_references_pc (insn, 0x000ff00ful))
6859 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6861 if (debug_displaced)
6862 fprintf_unfiltered (gdb_stdlog,
6863 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6864 load ? (size == 1 ? "ldrb" : "ldr")
6865 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6867 (unsigned long) insn);
6869 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6870 usermode, rt, rm, rn);
6872 if (load || rt != ARM_PC_REGNUM)
6874 dsc->u.ldst.restore_r4 = 0;
6877 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6879 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6880 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6882 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6884 {ldr,str}[b]<cond> r0, [r2, r3]. */
6885 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6889 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6890 dsc->u.ldst.restore_r4 = 1;
6891 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6892 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6893 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6894 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6895 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6899 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6901 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6906 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6911 /* Cleanup LDM instructions with fully-populated register list. This is an
6912 unfortunate corner case: it's impossible to implement correctly by modifying
6913 the instruction. The issue is as follows: we have an instruction,
6917 which we must rewrite to avoid loading PC. A possible solution would be to
6918 do the load in two halves, something like (with suitable cleanup
6922 ldm[id][ab] r8!, {r0-r7}
6924 ldm[id][ab] r8, {r7-r14}
6927 but at present there's no suitable place for <temp>, since the scratch space
6928 is overwritten before the cleanup routine is called. For now, we simply
6929 emulate the instruction. */
6932 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6933 struct displaced_step_closure *dsc)
6935 int inc = dsc->u.block.increment;
6936 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6937 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6938 uint32_t regmask = dsc->u.block.regmask;
6939 int regno = inc ? 0 : 15;
6940 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6941 int exception_return = dsc->u.block.load && dsc->u.block.user
6942 && (regmask & 0x8000) != 0;
6943 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6944 int do_transfer = condition_true (dsc->u.block.cond, status);
6945 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6950 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6951 sensible we can do here. Complain loudly. */
6952 if (exception_return)
6953 error (_("Cannot single-step exception return"));
6955 /* We don't handle any stores here for now. */
6956 gdb_assert (dsc->u.block.load != 0);
6958 if (debug_displaced)
6959 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6960 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6961 dsc->u.block.increment ? "inc" : "dec",
6962 dsc->u.block.before ? "before" : "after");
6969 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6972 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6975 xfer_addr += bump_before;
6977 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6978 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6980 xfer_addr += bump_after;
6982 regmask &= ~(1 << regno);
6985 if (dsc->u.block.writeback)
6986 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6990 /* Clean up an STM which included the PC in the register list. */
6993 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6994 struct displaced_step_closure *dsc)
6996 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6997 int store_executed = condition_true (dsc->u.block.cond, status);
6998 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6999 CORE_ADDR stm_insn_addr;
7002 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7004 /* If condition code fails, there's nothing else to do. */
7005 if (!store_executed)
7008 if (dsc->u.block.increment)
7010 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7012 if (dsc->u.block.before)
7017 pc_stored_at = dsc->u.block.xfer_addr;
7019 if (dsc->u.block.before)
7023 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7024 stm_insn_addr = dsc->scratch_base;
7025 offset = pc_val - stm_insn_addr;
7027 if (debug_displaced)
7028 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7029 "STM instruction\n", offset);
7031 /* Rewrite the stored PC to the proper value for the non-displaced original
7033 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7034 dsc->insn_addr + offset);
7037 /* Clean up an LDM which includes the PC in the register list. We clumped all
7038 the registers in the transferred list into a contiguous range r0...rX (to
7039 avoid loading PC directly and losing control of the debugged program), so we
7040 must undo that here. */
7043 cleanup_block_load_pc (struct gdbarch *gdbarch,
7044 struct regcache *regs,
7045 struct displaced_step_closure *dsc)
7047 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7048 int load_executed = condition_true (dsc->u.block.cond, status);
7049 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7050 unsigned int regs_loaded = bitcount (mask);
7051 unsigned int num_to_shuffle = regs_loaded, clobbered;
7053 /* The method employed here will fail if the register list is fully populated
7054 (we need to avoid loading PC directly). */
7055 gdb_assert (num_to_shuffle < 16);
7060 clobbered = (1 << num_to_shuffle) - 1;
7062 while (num_to_shuffle > 0)
7064 if ((mask & (1 << write_reg)) != 0)
7066 unsigned int read_reg = num_to_shuffle - 1;
7068 if (read_reg != write_reg)
7070 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7071 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7072 if (debug_displaced)
7073 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7074 "loaded register r%d to r%d\n"), read_reg,
7077 else if (debug_displaced)
7078 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7079 "r%d already in the right place\n"),
7082 clobbered &= ~(1 << write_reg);
7090 /* Restore any registers we scribbled over. */
7091 for (write_reg = 0; clobbered != 0; write_reg++)
7093 if ((clobbered & (1 << write_reg)) != 0)
7095 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7097 if (debug_displaced)
7098 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7099 "clobbered register r%d\n"), write_reg);
7100 clobbered &= ~(1 << write_reg);
7104 /* Perform register writeback manually. */
7105 if (dsc->u.block.writeback)
7107 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7109 if (dsc->u.block.increment)
7110 new_rn_val += regs_loaded * 4;
7112 new_rn_val -= regs_loaded * 4;
7114 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7119 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7120 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7123 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7124 struct regcache *regs,
7125 struct displaced_step_closure *dsc)
7127 int load = bit (insn, 20);
7128 int user = bit (insn, 22);
7129 int increment = bit (insn, 23);
7130 int before = bit (insn, 24);
7131 int writeback = bit (insn, 21);
7132 int rn = bits (insn, 16, 19);
7134 /* Block transfers which don't mention PC can be run directly
7136 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7137 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7139 if (rn == ARM_PC_REGNUM)
7141 warning (_("displaced: Unpredictable LDM or STM with "
7142 "base register r15"));
7143 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7146 if (debug_displaced)
7147 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7148 "%.8lx\n", (unsigned long) insn);
7150 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7151 dsc->u.block.rn = rn;
7153 dsc->u.block.load = load;
7154 dsc->u.block.user = user;
7155 dsc->u.block.increment = increment;
7156 dsc->u.block.before = before;
7157 dsc->u.block.writeback = writeback;
7158 dsc->u.block.cond = bits (insn, 28, 31);
7160 dsc->u.block.regmask = insn & 0xffff;
7164 if ((insn & 0xffff) == 0xffff)
7166 /* LDM with a fully-populated register list. This case is
7167 particularly tricky. Implement for now by fully emulating the
7168 instruction (which might not behave perfectly in all cases, but
7169 these instructions should be rare enough for that not to matter
7171 dsc->modinsn[0] = ARM_NOP;
7173 dsc->cleanup = &cleanup_block_load_all;
7177 /* LDM of a list of registers which includes PC. Implement by
7178 rewriting the list of registers to be transferred into a
7179 contiguous chunk r0...rX before doing the transfer, then shuffling
7180 registers into the correct places in the cleanup routine. */
7181 unsigned int regmask = insn & 0xffff;
7182 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7183 unsigned int to = 0, from = 0, i, new_rn;
7185 for (i = 0; i < num_in_list; i++)
7186 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7188 /* Writeback makes things complicated. We need to avoid clobbering
7189 the base register with one of the registers in our modified
7190 register list, but just using a different register can't work in
7193 ldm r14!, {r0-r13,pc}
7195 which would need to be rewritten as:
7199 but that can't work, because there's no free register for N.
7201 Solve this by turning off the writeback bit, and emulating
7202 writeback manually in the cleanup routine. */
7207 new_regmask = (1 << num_in_list) - 1;
7209 if (debug_displaced)
7210 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7211 "{..., pc}: original reg list %.4x, modified "
7212 "list %.4x\n"), rn, writeback ? "!" : "",
7213 (int) insn & 0xffff, new_regmask);
7215 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7217 dsc->cleanup = &cleanup_block_load_pc;
7222 /* STM of a list of registers which includes PC. Run the instruction
7223 as-is, but out of line: this will store the wrong value for the PC,
7224 so we must manually fix up the memory in the cleanup routine.
7225 Doing things this way has the advantage that we can auto-detect
7226 the offset of the PC write (which is architecture-dependent) in
7227 the cleanup routine. */
7228 dsc->modinsn[0] = insn;
7230 dsc->cleanup = &cleanup_block_store_pc;
7237 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7238 struct regcache *regs,
7239 struct displaced_step_closure *dsc)
7241 int rn = bits (insn1, 0, 3);
7242 int load = bit (insn1, 4);
7243 int writeback = bit (insn1, 5);
7245 /* Block transfers which don't mention PC can be run directly
7247 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7248 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7250 if (rn == ARM_PC_REGNUM)
7252 warning (_("displaced: Unpredictable LDM or STM with "
7253 "base register r15"));
7254 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7255 "unpredictable ldm/stm", dsc);
7258 if (debug_displaced)
7259 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7260 "%.4x%.4x\n", insn1, insn2);
7262 /* Clear bit 13, since it should be always zero. */
7263 dsc->u.block.regmask = (insn2 & 0xdfff);
7264 dsc->u.block.rn = rn;
7266 dsc->u.block.load = load;
7267 dsc->u.block.user = 0;
7268 dsc->u.block.increment = bit (insn1, 7);
7269 dsc->u.block.before = bit (insn1, 8);
7270 dsc->u.block.writeback = writeback;
7271 dsc->u.block.cond = INST_AL;
7272 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7276 if (dsc->u.block.regmask == 0xffff)
7278 /* This branch is impossible to happen. */
7283 unsigned int regmask = dsc->u.block.regmask;
7284 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7285 unsigned int to = 0, from = 0, i, new_rn;
7287 for (i = 0; i < num_in_list; i++)
7288 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7293 new_regmask = (1 << num_in_list) - 1;
7295 if (debug_displaced)
7296 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7297 "{..., pc}: original reg list %.4x, modified "
7298 "list %.4x\n"), rn, writeback ? "!" : "",
7299 (int) dsc->u.block.regmask, new_regmask);
7301 dsc->modinsn[0] = insn1;
7302 dsc->modinsn[1] = (new_regmask & 0xffff);
7305 dsc->cleanup = &cleanup_block_load_pc;
7310 dsc->modinsn[0] = insn1;
7311 dsc->modinsn[1] = insn2;
7313 dsc->cleanup = &cleanup_block_store_pc;
7318 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7319 for Linux, where some SVC instructions must be treated specially. */
7322 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7323 struct displaced_step_closure *dsc)
7325 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7327 if (debug_displaced)
7328 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7329 "%.8lx\n", (unsigned long) resume_addr);
7331 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7335 /* Common copy routine for svc instruciton. */
7338 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7339 struct displaced_step_closure *dsc)
7341 /* Preparation: none.
7342 Insn: unmodified svc.
7343 Cleanup: pc <- insn_addr + insn_size. */
7345 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7347 dsc->wrote_to_pc = 1;
7349 /* Allow OS-specific code to override SVC handling. */
7350 if (dsc->u.svc.copy_svc_os)
7351 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7354 dsc->cleanup = &cleanup_svc;
7360 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7361 struct regcache *regs, struct displaced_step_closure *dsc)
7364 if (debug_displaced)
7365 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7366 (unsigned long) insn);
7368 dsc->modinsn[0] = insn;
7370 return install_svc (gdbarch, regs, dsc);
7374 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7375 struct regcache *regs, struct displaced_step_closure *dsc)
7378 if (debug_displaced)
7379 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7382 dsc->modinsn[0] = insn;
7384 return install_svc (gdbarch, regs, dsc);
7387 /* Copy undefined instructions. */
7390 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7391 struct displaced_step_closure *dsc)
7393 if (debug_displaced)
7394 fprintf_unfiltered (gdb_stdlog,
7395 "displaced: copying undefined insn %.8lx\n",
7396 (unsigned long) insn);
7398 dsc->modinsn[0] = insn;
7404 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7405 struct displaced_step_closure *dsc)
7408 if (debug_displaced)
7409 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7410 "%.4x %.4x\n", (unsigned short) insn1,
7411 (unsigned short) insn2);
7413 dsc->modinsn[0] = insn1;
7414 dsc->modinsn[1] = insn2;
7420 /* Copy unpredictable instructions. */
7423 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7424 struct displaced_step_closure *dsc)
7426 if (debug_displaced)
7427 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7428 "%.8lx\n", (unsigned long) insn);
7430 dsc->modinsn[0] = insn;
7435 /* The decode_* functions are instruction decoding helpers. They mostly follow
7436 the presentation in the ARM ARM. */
7439 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7440 struct regcache *regs,
7441 struct displaced_step_closure *dsc)
7443 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7444 unsigned int rn = bits (insn, 16, 19);
7446 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7447 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7448 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7449 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7450 else if ((op1 & 0x60) == 0x20)
7451 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7452 else if ((op1 & 0x71) == 0x40)
7453 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7455 else if ((op1 & 0x77) == 0x41)
7456 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7457 else if ((op1 & 0x77) == 0x45)
7458 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7459 else if ((op1 & 0x77) == 0x51)
7462 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7464 return arm_copy_unpred (gdbarch, insn, dsc);
7466 else if ((op1 & 0x77) == 0x55)
7467 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7468 else if (op1 == 0x57)
7471 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7472 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7473 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7474 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7475 default: return arm_copy_unpred (gdbarch, insn, dsc);
7477 else if ((op1 & 0x63) == 0x43)
7478 return arm_copy_unpred (gdbarch, insn, dsc);
7479 else if ((op2 & 0x1) == 0x0)
7480 switch (op1 & ~0x80)
7483 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7485 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7486 case 0x71: case 0x75:
7488 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7489 case 0x63: case 0x67: case 0x73: case 0x77:
7490 return arm_copy_unpred (gdbarch, insn, dsc);
7492 return arm_copy_undef (gdbarch, insn, dsc);
7495 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7499 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7500 struct regcache *regs,
7501 struct displaced_step_closure *dsc)
7503 if (bit (insn, 27) == 0)
7504 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7505 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7506 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7509 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7512 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7514 case 0x4: case 0x5: case 0x6: case 0x7:
7515 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7518 switch ((insn & 0xe00000) >> 21)
7520 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7522 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7525 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7528 return arm_copy_undef (gdbarch, insn, dsc);
7533 int rn_f = (bits (insn, 16, 19) == 0xf);
7534 switch ((insn & 0xe00000) >> 21)
7537 /* ldc/ldc2 imm (undefined for rn == pc). */
7538 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7539 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7542 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7544 case 0x4: case 0x5: case 0x6: case 0x7:
7545 /* ldc/ldc2 lit (undefined for rn != pc). */
7546 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7547 : arm_copy_undef (gdbarch, insn, dsc);
7550 return arm_copy_undef (gdbarch, insn, dsc);
7555 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7558 if (bits (insn, 16, 19) == 0xf)
7560 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7562 return arm_copy_undef (gdbarch, insn, dsc);
7566 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7568 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7572 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7574 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7577 return arm_copy_undef (gdbarch, insn, dsc);
7581 /* Decode miscellaneous instructions in dp/misc encoding space. */
7584 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7585 struct regcache *regs,
7586 struct displaced_step_closure *dsc)
7588 unsigned int op2 = bits (insn, 4, 6);
7589 unsigned int op = bits (insn, 21, 22);
7590 unsigned int op1 = bits (insn, 16, 19);
7595 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7598 if (op == 0x1) /* bx. */
7599 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7601 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7603 return arm_copy_undef (gdbarch, insn, dsc);
7607 /* Not really supported. */
7608 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7610 return arm_copy_undef (gdbarch, insn, dsc);
7614 return arm_copy_bx_blx_reg (gdbarch, insn,
7615 regs, dsc); /* blx register. */
7617 return arm_copy_undef (gdbarch, insn, dsc);
7620 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7624 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7626 /* Not really supported. */
7627 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7630 return arm_copy_undef (gdbarch, insn, dsc);
7635 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7636 struct regcache *regs,
7637 struct displaced_step_closure *dsc)
7640 switch (bits (insn, 20, 24))
7643 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7646 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7648 case 0x12: case 0x16:
7649 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7652 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7656 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7658 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7659 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7660 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7661 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7662 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7663 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7664 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7665 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7666 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7667 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7668 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7669 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7670 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7671 /* 2nd arg means "unpriveleged". */
7672 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7676 /* Should be unreachable. */
7681 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7682 struct regcache *regs,
7683 struct displaced_step_closure *dsc)
7685 int a = bit (insn, 25), b = bit (insn, 4);
7686 uint32_t op1 = bits (insn, 20, 24);
7687 int rn_f = bits (insn, 16, 19) == 0xf;
7689 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7690 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7691 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7692 else if ((!a && (op1 & 0x17) == 0x02)
7693 || (a && (op1 & 0x17) == 0x02 && !b))
7694 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7695 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7696 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7697 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7698 else if ((!a && (op1 & 0x17) == 0x03)
7699 || (a && (op1 & 0x17) == 0x03 && !b))
7700 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7701 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7702 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7703 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7704 else if ((!a && (op1 & 0x17) == 0x06)
7705 || (a && (op1 & 0x17) == 0x06 && !b))
7706 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7707 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7708 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7709 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7710 else if ((!a && (op1 & 0x17) == 0x07)
7711 || (a && (op1 & 0x17) == 0x07 && !b))
7712 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7714 /* Should be unreachable. */
7719 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7720 struct displaced_step_closure *dsc)
7722 switch (bits (insn, 20, 24))
7724 case 0x00: case 0x01: case 0x02: case 0x03:
7725 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7727 case 0x04: case 0x05: case 0x06: case 0x07:
7728 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7730 case 0x08: case 0x09: case 0x0a: case 0x0b:
7731 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7732 return arm_copy_unmodified (gdbarch, insn,
7733 "decode/pack/unpack/saturate/reverse", dsc);
7736 if (bits (insn, 5, 7) == 0) /* op2. */
7738 if (bits (insn, 12, 15) == 0xf)
7739 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7741 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7744 return arm_copy_undef (gdbarch, insn, dsc);
7746 case 0x1a: case 0x1b:
7747 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7748 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7750 return arm_copy_undef (gdbarch, insn, dsc);
7752 case 0x1c: case 0x1d:
7753 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7755 if (bits (insn, 0, 3) == 0xf)
7756 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7758 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7761 return arm_copy_undef (gdbarch, insn, dsc);
7763 case 0x1e: case 0x1f:
7764 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7765 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7767 return arm_copy_undef (gdbarch, insn, dsc);
7770 /* Should be unreachable. */
7775 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7776 struct regcache *regs,
7777 struct displaced_step_closure *dsc)
7780 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7782 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7786 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7787 struct regcache *regs,
7788 struct displaced_step_closure *dsc)
7790 unsigned int opcode = bits (insn, 20, 24);
7794 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7795 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7797 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7798 case 0x12: case 0x16:
7799 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7801 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7802 case 0x13: case 0x17:
7803 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7805 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7806 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7807 /* Note: no writeback for these instructions. Bit 25 will always be
7808 zero though (via caller), so the following works OK. */
7809 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7812 /* Should be unreachable. */
7816 /* Decode shifted register instructions. */
7819 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7820 uint16_t insn2, struct regcache *regs,
7821 struct displaced_step_closure *dsc)
7823 /* PC is only allowed to be used in instruction MOV. */
7825 unsigned int op = bits (insn1, 5, 8);
7826 unsigned int rn = bits (insn1, 0, 3);
7828 if (op == 0x2 && rn == 0xf) /* MOV */
7829 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7831 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7832 "dp (shift reg)", dsc);
7836 /* Decode extension register load/store. Exactly the same as
7837 arm_decode_ext_reg_ld_st. */
7840 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7841 uint16_t insn2, struct regcache *regs,
7842 struct displaced_step_closure *dsc)
7844 unsigned int opcode = bits (insn1, 4, 8);
7848 case 0x04: case 0x05:
7849 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7850 "vfp/neon vmov", dsc);
7852 case 0x08: case 0x0c: /* 01x00 */
7853 case 0x0a: case 0x0e: /* 01x10 */
7854 case 0x12: case 0x16: /* 10x10 */
7855 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7856 "vfp/neon vstm/vpush", dsc);
7858 case 0x09: case 0x0d: /* 01x01 */
7859 case 0x0b: case 0x0f: /* 01x11 */
7860 case 0x13: case 0x17: /* 10x11 */
7861 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7862 "vfp/neon vldm/vpop", dsc);
7864 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7865 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7867 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7868 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7871 /* Should be unreachable. */
7876 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7877 struct regcache *regs, struct displaced_step_closure *dsc)
7879 unsigned int op1 = bits (insn, 20, 25);
7880 int op = bit (insn, 4);
7881 unsigned int coproc = bits (insn, 8, 11);
7882 unsigned int rn = bits (insn, 16, 19);
7884 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7885 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7886 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7887 && (coproc & 0xe) != 0xa)
7889 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7890 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7891 && (coproc & 0xe) != 0xa)
7892 /* ldc/ldc2 imm/lit. */
7893 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7894 else if ((op1 & 0x3e) == 0x00)
7895 return arm_copy_undef (gdbarch, insn, dsc);
7896 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7897 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7898 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7899 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7900 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7901 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7902 else if ((op1 & 0x30) == 0x20 && !op)
7904 if ((coproc & 0xe) == 0xa)
7905 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7907 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7909 else if ((op1 & 0x30) == 0x20 && op)
7910 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7911 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7912 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7913 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7914 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7915 else if ((op1 & 0x30) == 0x30)
7916 return arm_copy_svc (gdbarch, insn, regs, dsc);
7918 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7922 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7923 uint16_t insn2, struct regcache *regs,
7924 struct displaced_step_closure *dsc)
7926 unsigned int coproc = bits (insn2, 8, 11);
7927 unsigned int op1 = bits (insn1, 4, 9);
7928 unsigned int bit_5_8 = bits (insn1, 5, 8);
7929 unsigned int bit_9 = bit (insn1, 9);
7930 unsigned int bit_4 = bit (insn1, 4);
7931 unsigned int rn = bits (insn1, 0, 3);
7936 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7937 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7939 else if (bit_5_8 == 0) /* UNDEFINED. */
7940 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7943 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7944 if ((coproc & 0xe) == 0xa)
7945 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7947 else /* coproc is not 101x. */
7949 if (bit_4 == 0) /* STC/STC2. */
7950 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7952 else /* LDC/LDC2 {literal, immeidate}. */
7953 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7959 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7965 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7966 struct displaced_step_closure *dsc, int rd)
7972 Preparation: Rd <- PC
7978 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7979 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7983 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7984 struct displaced_step_closure *dsc,
7985 int rd, unsigned int imm)
7988 /* Encoding T2: ADDS Rd, #imm */
7989 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7991 install_pc_relative (gdbarch, regs, dsc, rd);
7997 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7998 struct regcache *regs,
7999 struct displaced_step_closure *dsc)
8001 unsigned int rd = bits (insn, 8, 10);
8002 unsigned int imm8 = bits (insn, 0, 7);
8004 if (debug_displaced)
8005 fprintf_unfiltered (gdb_stdlog,
8006 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8009 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8013 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8014 uint16_t insn2, struct regcache *regs,
8015 struct displaced_step_closure *dsc)
8017 unsigned int rd = bits (insn2, 8, 11);
8018 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8019 extract raw immediate encoding rather than computing immediate. When
8020 generating ADD or SUB instruction, we can simply perform OR operation to
8021 set immediate into ADD. */
8022 unsigned int imm_3_8 = insn2 & 0x70ff;
8023 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8025 if (debug_displaced)
8026 fprintf_unfiltered (gdb_stdlog,
8027 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8028 rd, imm_i, imm_3_8, insn1, insn2);
8030 if (bit (insn1, 7)) /* Encoding T2 */
8032 /* Encoding T3: SUB Rd, Rd, #imm */
8033 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8034 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8036 else /* Encoding T3 */
8038 /* Encoding T3: ADD Rd, Rd, #imm */
8039 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8040 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8044 install_pc_relative (gdbarch, regs, dsc, rd);
8050 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8051 struct regcache *regs,
8052 struct displaced_step_closure *dsc)
8054 unsigned int rt = bits (insn1, 8, 10);
8056 int imm8 = (bits (insn1, 0, 7) << 2);
8057 CORE_ADDR from = dsc->insn_addr;
8063 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8065 Insn: LDR R0, [R2, R3];
8066 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8068 if (debug_displaced)
8069 fprintf_unfiltered (gdb_stdlog,
8070 "displaced: copying thumb ldr r%d [pc #%d]\n"
8073 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8074 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8075 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8076 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8077 /* The assembler calculates the required value of the offset from the
8078 Align(PC,4) value of this instruction to the label. */
8079 pc = pc & 0xfffffffc;
8081 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8082 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8085 dsc->u.ldst.xfersize = 4;
8087 dsc->u.ldst.immed = 0;
8088 dsc->u.ldst.writeback = 0;
8089 dsc->u.ldst.restore_r4 = 0;
8091 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8093 dsc->cleanup = &cleanup_load;
8098 /* Copy Thumb cbnz/cbz insruction. */
8101 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8102 struct regcache *regs,
8103 struct displaced_step_closure *dsc)
8105 int non_zero = bit (insn1, 11);
8106 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8107 CORE_ADDR from = dsc->insn_addr;
8108 int rn = bits (insn1, 0, 2);
8109 int rn_val = displaced_read_reg (regs, dsc, rn);
8111 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8112 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8113 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8114 condition is false, let it be, cleanup_branch will do nothing. */
8115 if (dsc->u.branch.cond)
8117 dsc->u.branch.cond = INST_AL;
8118 dsc->u.branch.dest = from + 4 + imm5;
8121 dsc->u.branch.dest = from + 2;
8123 dsc->u.branch.link = 0;
8124 dsc->u.branch.exchange = 0;
8126 if (debug_displaced)
8127 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8128 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8129 rn, rn_val, insn1, dsc->u.branch.dest);
8131 dsc->modinsn[0] = THUMB_NOP;
8133 dsc->cleanup = &cleanup_branch;
8137 /* Copy Table Branch Byte/Halfword */
8139 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8140 uint16_t insn2, struct regcache *regs,
8141 struct displaced_step_closure *dsc)
8143 ULONGEST rn_val, rm_val;
8144 int is_tbh = bit (insn2, 4);
8145 CORE_ADDR halfwords = 0;
8146 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8148 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8149 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8155 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8156 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8162 target_read_memory (rn_val + rm_val, buf, 1);
8163 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8166 if (debug_displaced)
8167 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8168 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8169 (unsigned int) rn_val, (unsigned int) rm_val,
8170 (unsigned int) halfwords);
8172 dsc->u.branch.cond = INST_AL;
8173 dsc->u.branch.link = 0;
8174 dsc->u.branch.exchange = 0;
8175 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8177 dsc->cleanup = &cleanup_branch;
8183 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8184 struct displaced_step_closure *dsc)
8187 int val = displaced_read_reg (regs, dsc, 7);
8188 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8191 val = displaced_read_reg (regs, dsc, 8);
8192 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8195 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8200 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8201 struct regcache *regs,
8202 struct displaced_step_closure *dsc)
8204 dsc->u.block.regmask = insn1 & 0x00ff;
8206 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8209 (1) register list is full, that is, r0-r7 are used.
8210 Prepare: tmp[0] <- r8
8212 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8213 MOV r8, r7; Move value of r7 to r8;
8214 POP {r7}; Store PC value into r7.
8216 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8218 (2) register list is not full, supposing there are N registers in
8219 register list (except PC, 0 <= N <= 7).
8220 Prepare: for each i, 0 - N, tmp[i] <- ri.
8222 POP {r0, r1, ...., rN};
8224 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8225 from tmp[] properly.
8227 if (debug_displaced)
8228 fprintf_unfiltered (gdb_stdlog,
8229 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8230 dsc->u.block.regmask, insn1);
8232 if (dsc->u.block.regmask == 0xff)
8234 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8236 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8237 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8238 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8241 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8245 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8246 unsigned int new_regmask, bit = 1;
8247 unsigned int to = 0, from = 0, i, new_rn;
8249 for (i = 0; i < num_in_list + 1; i++)
8250 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8252 new_regmask = (1 << (num_in_list + 1)) - 1;
8254 if (debug_displaced)
8255 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8256 "{..., pc}: original reg list %.4x,"
8257 " modified list %.4x\n"),
8258 (int) dsc->u.block.regmask, new_regmask);
8260 dsc->u.block.regmask |= 0x8000;
8261 dsc->u.block.writeback = 0;
8262 dsc->u.block.cond = INST_AL;
8264 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8266 dsc->cleanup = &cleanup_block_load_pc;
8273 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8274 struct regcache *regs,
8275 struct displaced_step_closure *dsc)
8277 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8278 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8281 /* 16-bit thumb instructions. */
8282 switch (op_bit_12_15)
8284 /* Shift (imme), add, subtract, move and compare. */
8285 case 0: case 1: case 2: case 3:
8286 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8287 "shift/add/sub/mov/cmp",
8291 switch (op_bit_10_11)
8293 case 0: /* Data-processing */
8294 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8298 case 1: /* Special data instructions and branch and exchange. */
8300 unsigned short op = bits (insn1, 7, 9);
8301 if (op == 6 || op == 7) /* BX or BLX */
8302 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8303 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8304 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8306 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8310 default: /* LDR (literal) */
8311 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8314 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8315 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8318 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8319 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8320 else /* Generate SP-relative address */
8321 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8323 case 11: /* Misc 16-bit instructions */
8325 switch (bits (insn1, 8, 11))
8327 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8328 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8330 case 12: case 13: /* POP */
8331 if (bit (insn1, 8)) /* PC is in register list. */
8332 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8334 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8336 case 15: /* If-Then, and hints */
8337 if (bits (insn1, 0, 3))
8338 /* If-Then makes up to four following instructions conditional.
8339 IT instruction itself is not conditional, so handle it as a
8340 common unmodified instruction. */
8341 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8344 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8347 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8352 if (op_bit_10_11 < 2) /* Store multiple registers */
8353 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8354 else /* Load multiple registers */
8355 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8357 case 13: /* Conditional branch and supervisor call */
8358 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8359 err = thumb_copy_b (gdbarch, insn1, dsc);
8361 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8363 case 14: /* Unconditional branch */
8364 err = thumb_copy_b (gdbarch, insn1, dsc);
8371 internal_error (__FILE__, __LINE__,
8372 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8376 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8377 uint16_t insn1, uint16_t insn2,
8378 struct regcache *regs,
8379 struct displaced_step_closure *dsc)
8381 int rt = bits (insn2, 12, 15);
8382 int rn = bits (insn1, 0, 3);
8383 int op1 = bits (insn1, 7, 8);
8386 switch (bits (insn1, 5, 6))
8388 case 0: /* Load byte and memory hints */
8389 if (rt == 0xf) /* PLD/PLI */
8392 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8393 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8395 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8400 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8401 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8404 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8405 "ldrb{reg, immediate}/ldrbt",
8410 case 1: /* Load halfword and memory hints. */
8411 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8412 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8413 "pld/unalloc memhint", dsc);
8417 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8420 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8424 case 2: /* Load word */
8426 int insn2_bit_8_11 = bits (insn2, 8, 11);
8429 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8430 else if (op1 == 0x1) /* Encoding T3 */
8431 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8433 else /* op1 == 0x0 */
8435 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8436 /* LDR (immediate) */
8437 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8438 dsc, bit (insn2, 8), 1);
8439 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8440 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8443 /* LDR (register) */
8444 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8450 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8457 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8458 uint16_t insn2, struct regcache *regs,
8459 struct displaced_step_closure *dsc)
8462 unsigned short op = bit (insn2, 15);
8463 unsigned int op1 = bits (insn1, 11, 12);
8469 switch (bits (insn1, 9, 10))
8474 /* Load/store {dual, execlusive}, table branch. */
8475 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8476 && bits (insn2, 5, 7) == 0)
8477 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8480 /* PC is not allowed to use in load/store {dual, exclusive}
8482 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8483 "load/store dual/ex", dsc);
8485 else /* load/store multiple */
8487 switch (bits (insn1, 7, 8))
8489 case 0: case 3: /* SRS, RFE */
8490 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8493 case 1: case 2: /* LDM/STM/PUSH/POP */
8494 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8501 /* Data-processing (shift register). */
8502 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8505 default: /* Coprocessor instructions. */
8506 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8511 case 2: /* op1 = 2 */
8512 if (op) /* Branch and misc control. */
8514 if (bit (insn2, 14) /* BLX/BL */
8515 || bit (insn2, 12) /* Unconditional branch */
8516 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8517 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8519 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8524 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8526 int op = bits (insn1, 4, 8);
8527 int rn = bits (insn1, 0, 3);
8528 if ((op == 0 || op == 0xa) && rn == 0xf)
8529 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8532 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8535 else /* Data processing (modified immeidate) */
8536 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8540 case 3: /* op1 = 3 */
8541 switch (bits (insn1, 9, 10))
8545 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8547 else /* NEON Load/Store and Store single data item */
8548 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8549 "neon elt/struct load/store",
8552 case 1: /* op1 = 3, bits (9, 10) == 1 */
8553 switch (bits (insn1, 7, 8))
8555 case 0: case 1: /* Data processing (register) */
8556 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8559 case 2: /* Multiply and absolute difference */
8560 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8561 "mul/mua/diff", dsc);
8563 case 3: /* Long multiply and divide */
8564 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8569 default: /* Coprocessor instructions */
8570 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8579 internal_error (__FILE__, __LINE__,
8580 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8585 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8586 CORE_ADDR to, struct regcache *regs,
8587 struct displaced_step_closure *dsc)
8589 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8591 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8593 if (debug_displaced)
8594 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8595 "at %.8lx\n", insn1, (unsigned long) from);
8598 dsc->insn_size = thumb_insn_size (insn1);
8599 if (thumb_insn_size (insn1) == 4)
8602 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8603 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8606 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8610 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8611 CORE_ADDR to, struct regcache *regs,
8612 struct displaced_step_closure *dsc)
8615 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8618 /* Most displaced instructions use a 1-instruction scratch space, so set this
8619 here and override below if/when necessary. */
8621 dsc->insn_addr = from;
8622 dsc->scratch_base = to;
8623 dsc->cleanup = NULL;
8624 dsc->wrote_to_pc = 0;
8626 if (!displaced_in_arm_mode (regs))
8627 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8631 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8632 if (debug_displaced)
8633 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8634 "at %.8lx\n", (unsigned long) insn,
8635 (unsigned long) from);
8637 if ((insn & 0xf0000000) == 0xf0000000)
8638 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8639 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8641 case 0x0: case 0x1: case 0x2: case 0x3:
8642 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8645 case 0x4: case 0x5: case 0x6:
8646 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8650 err = arm_decode_media (gdbarch, insn, dsc);
8653 case 0x8: case 0x9: case 0xa: case 0xb:
8654 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8657 case 0xc: case 0xd: case 0xe: case 0xf:
8658 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8663 internal_error (__FILE__, __LINE__,
8664 _("arm_process_displaced_insn: Instruction decode error"));
8667 /* Actually set up the scratch space for a displaced instruction. */
8670 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8671 CORE_ADDR to, struct displaced_step_closure *dsc)
8673 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8674 unsigned int i, len, offset;
8675 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8676 int size = dsc->is_thumb? 2 : 4;
8677 const gdb_byte *bkp_insn;
8680 /* Poke modified instruction(s). */
8681 for (i = 0; i < dsc->numinsns; i++)
8683 if (debug_displaced)
8685 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8687 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8690 fprintf_unfiltered (gdb_stdlog, "%.4x",
8691 (unsigned short)dsc->modinsn[i]);
8693 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8694 (unsigned long) to + offset);
8697 write_memory_unsigned_integer (to + offset, size,
8698 byte_order_for_code,
8703 /* Choose the correct breakpoint instruction. */
8706 bkp_insn = tdep->thumb_breakpoint;
8707 len = tdep->thumb_breakpoint_size;
8711 bkp_insn = tdep->arm_breakpoint;
8712 len = tdep->arm_breakpoint_size;
8715 /* Put breakpoint afterwards. */
8716 write_memory (to + offset, bkp_insn, len);
8718 if (debug_displaced)
8719 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8720 paddress (gdbarch, from), paddress (gdbarch, to));
8723 /* Entry point for copying an instruction into scratch space for displaced
8726 struct displaced_step_closure *
8727 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8728 CORE_ADDR from, CORE_ADDR to,
8729 struct regcache *regs)
8731 struct displaced_step_closure *dsc
8732 = xmalloc (sizeof (struct displaced_step_closure));
8733 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8734 arm_displaced_init_closure (gdbarch, from, to, dsc);
8739 /* Entry point for cleaning things up after a displaced instruction has been
8743 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8744 struct displaced_step_closure *dsc,
8745 CORE_ADDR from, CORE_ADDR to,
8746 struct regcache *regs)
8749 dsc->cleanup (gdbarch, regs, dsc);
8751 if (!dsc->wrote_to_pc)
8752 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8753 dsc->insn_addr + dsc->insn_size);
8757 #include "bfd-in2.h"
8758 #include "libcoff.h"
8761 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8763 struct gdbarch *gdbarch = info->application_data;
8765 if (arm_pc_is_thumb (gdbarch, memaddr))
8767 static asymbol *asym;
8768 static combined_entry_type ce;
8769 static struct coff_symbol_struct csym;
8770 static struct bfd fake_bfd;
8771 static bfd_target fake_target;
8773 if (csym.native == NULL)
8775 /* Create a fake symbol vector containing a Thumb symbol.
8776 This is solely so that the code in print_insn_little_arm()
8777 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8778 the presence of a Thumb symbol and switch to decoding
8779 Thumb instructions. */
8781 fake_target.flavour = bfd_target_coff_flavour;
8782 fake_bfd.xvec = &fake_target;
8783 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8785 csym.symbol.the_bfd = &fake_bfd;
8786 csym.symbol.name = "fake";
8787 asym = (asymbol *) & csym;
8790 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8791 info->symbols = &asym;
8794 info->symbols = NULL;
8796 if (info->endian == BFD_ENDIAN_BIG)
8797 return print_insn_big_arm (memaddr, info);
8799 return print_insn_little_arm (memaddr, info);
8802 /* The following define instruction sequences that will cause ARM
8803 cpu's to take an undefined instruction trap. These are used to
8804 signal a breakpoint to GDB.
8806 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8807 modes. A different instruction is required for each mode. The ARM
8808 cpu's can also be big or little endian. Thus four different
8809 instructions are needed to support all cases.
8811 Note: ARMv4 defines several new instructions that will take the
8812 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8813 not in fact add the new instructions. The new undefined
8814 instructions in ARMv4 are all instructions that had no defined
8815 behaviour in earlier chips. There is no guarantee that they will
8816 raise an exception, but may be treated as NOP's. In practice, it
8817 may only safe to rely on instructions matching:
8819 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8820 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8821 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8823 Even this may only true if the condition predicate is true. The
8824 following use a condition predicate of ALWAYS so it is always TRUE.
8826 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8827 and NetBSD all use a software interrupt rather than an undefined
8828 instruction to force a trap. This can be handled by by the
8829 abi-specific code during establishment of the gdbarch vector. */
8831 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8832 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8833 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8834 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8836 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8837 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8838 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8839 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8841 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8842 the program counter value to determine whether a 16-bit or 32-bit
8843 breakpoint should be used. It returns a pointer to a string of
8844 bytes that encode a breakpoint instruction, stores the length of
8845 the string to *lenptr, and adjusts the program counter (if
8846 necessary) to point to the actual memory location where the
8847 breakpoint should be inserted. */
8849 static const unsigned char *
8850 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8852 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8853 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8855 if (arm_pc_is_thumb (gdbarch, *pcptr))
8857 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8859 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8860 check whether we are replacing a 32-bit instruction. */
8861 if (tdep->thumb2_breakpoint != NULL)
8864 if (target_read_memory (*pcptr, buf, 2) == 0)
8866 unsigned short inst1;
8867 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8868 if (thumb_insn_size (inst1) == 4)
8870 *lenptr = tdep->thumb2_breakpoint_size;
8871 return tdep->thumb2_breakpoint;
8876 *lenptr = tdep->thumb_breakpoint_size;
8877 return tdep->thumb_breakpoint;
8881 *lenptr = tdep->arm_breakpoint_size;
8882 return tdep->arm_breakpoint;
8887 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8890 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8892 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8893 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8894 that this is not confused with a 32-bit ARM breakpoint. */
8898 /* Extract from an array REGBUF containing the (raw) register state a
8899 function return value of type TYPE, and copy that, in virtual
8900 format, into VALBUF. */
8903 arm_extract_return_value (struct type *type, struct regcache *regs,
8906 struct gdbarch *gdbarch = get_regcache_arch (regs);
8907 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8909 if (TYPE_CODE_FLT == TYPE_CODE (type))
8911 switch (gdbarch_tdep (gdbarch)->fp_model)
8915 /* The value is in register F0 in internal format. We need to
8916 extract the raw value and then convert it to the desired
8918 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8920 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8921 convert_from_extended (floatformat_from_type (type), tmpbuf,
8922 valbuf, gdbarch_byte_order (gdbarch));
8926 case ARM_FLOAT_SOFT_FPA:
8927 case ARM_FLOAT_SOFT_VFP:
8928 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8929 not using the VFP ABI code. */
8931 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8932 if (TYPE_LENGTH (type) > 4)
8933 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8934 valbuf + INT_REGISTER_SIZE);
8938 internal_error (__FILE__, __LINE__,
8939 _("arm_extract_return_value: "
8940 "Floating point model not supported"));
8944 else if (TYPE_CODE (type) == TYPE_CODE_INT
8945 || TYPE_CODE (type) == TYPE_CODE_CHAR
8946 || TYPE_CODE (type) == TYPE_CODE_BOOL
8947 || TYPE_CODE (type) == TYPE_CODE_PTR
8948 || TYPE_CODE (type) == TYPE_CODE_REF
8949 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8951 /* If the type is a plain integer, then the access is
8952 straight-forward. Otherwise we have to play around a bit
8954 int len = TYPE_LENGTH (type);
8955 int regno = ARM_A1_REGNUM;
8960 /* By using store_unsigned_integer we avoid having to do
8961 anything special for small big-endian values. */
8962 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8963 store_unsigned_integer (valbuf,
8964 (len > INT_REGISTER_SIZE
8965 ? INT_REGISTER_SIZE : len),
8967 len -= INT_REGISTER_SIZE;
8968 valbuf += INT_REGISTER_SIZE;
8973 /* For a structure or union the behaviour is as if the value had
8974 been stored to word-aligned memory and then loaded into
8975 registers with 32-bit load instruction(s). */
8976 int len = TYPE_LENGTH (type);
8977 int regno = ARM_A1_REGNUM;
8978 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8982 regcache_cooked_read (regs, regno++, tmpbuf);
8983 memcpy (valbuf, tmpbuf,
8984 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8985 len -= INT_REGISTER_SIZE;
8986 valbuf += INT_REGISTER_SIZE;
8992 /* Will a function return an aggregate type in memory or in a
8993 register? Return 0 if an aggregate type can be returned in a
8994 register, 1 if it must be returned in memory. */
8997 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9000 enum type_code code;
9002 CHECK_TYPEDEF (type);
9004 /* In the ARM ABI, "integer" like aggregate types are returned in
9005 registers. For an aggregate type to be integer like, its size
9006 must be less than or equal to INT_REGISTER_SIZE and the
9007 offset of each addressable subfield must be zero. Note that bit
9008 fields are not addressable, and all addressable subfields of
9009 unions always start at offset zero.
9011 This function is based on the behaviour of GCC 2.95.1.
9012 See: gcc/arm.c: arm_return_in_memory() for details.
9014 Note: All versions of GCC before GCC 2.95.2 do not set up the
9015 parameters correctly for a function returning the following
9016 structure: struct { float f;}; This should be returned in memory,
9017 not a register. Richard Earnshaw sent me a patch, but I do not
9018 know of any way to detect if a function like the above has been
9019 compiled with the correct calling convention. */
9021 /* All aggregate types that won't fit in a register must be returned
9023 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9028 /* The AAPCS says all aggregates not larger than a word are returned
9030 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9033 /* The only aggregate types that can be returned in a register are
9034 structs and unions. Arrays must be returned in memory. */
9035 code = TYPE_CODE (type);
9036 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9041 /* Assume all other aggregate types can be returned in a register.
9042 Run a check for structures, unions and arrays. */
9045 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9048 /* Need to check if this struct/union is "integer" like. For
9049 this to be true, its size must be less than or equal to
9050 INT_REGISTER_SIZE and the offset of each addressable
9051 subfield must be zero. Note that bit fields are not
9052 addressable, and unions always start at offset zero. If any
9053 of the subfields is a floating point type, the struct/union
9054 cannot be an integer type. */
9056 /* For each field in the object, check:
9057 1) Is it FP? --> yes, nRc = 1;
9058 2) Is it addressable (bitpos != 0) and
9059 not packed (bitsize == 0)?
9063 for (i = 0; i < TYPE_NFIELDS (type); i++)
9065 enum type_code field_type_code;
9066 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9069 /* Is it a floating point type field? */
9070 if (field_type_code == TYPE_CODE_FLT)
9076 /* If bitpos != 0, then we have to care about it. */
9077 if (TYPE_FIELD_BITPOS (type, i) != 0)
9079 /* Bitfields are not addressable. If the field bitsize is
9080 zero, then the field is not packed. Hence it cannot be
9081 a bitfield or any other packed type. */
9082 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9094 /* Write into appropriate registers a function return value of type
9095 TYPE, given in virtual format. */
9098 arm_store_return_value (struct type *type, struct regcache *regs,
9099 const gdb_byte *valbuf)
9101 struct gdbarch *gdbarch = get_regcache_arch (regs);
9102 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9104 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9106 gdb_byte buf[MAX_REGISTER_SIZE];
9108 switch (gdbarch_tdep (gdbarch)->fp_model)
9112 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9113 gdbarch_byte_order (gdbarch));
9114 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9117 case ARM_FLOAT_SOFT_FPA:
9118 case ARM_FLOAT_SOFT_VFP:
9119 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9120 not using the VFP ABI code. */
9122 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9123 if (TYPE_LENGTH (type) > 4)
9124 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9125 valbuf + INT_REGISTER_SIZE);
9129 internal_error (__FILE__, __LINE__,
9130 _("arm_store_return_value: Floating "
9131 "point model not supported"));
9135 else if (TYPE_CODE (type) == TYPE_CODE_INT
9136 || TYPE_CODE (type) == TYPE_CODE_CHAR
9137 || TYPE_CODE (type) == TYPE_CODE_BOOL
9138 || TYPE_CODE (type) == TYPE_CODE_PTR
9139 || TYPE_CODE (type) == TYPE_CODE_REF
9140 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9142 if (TYPE_LENGTH (type) <= 4)
9144 /* Values of one word or less are zero/sign-extended and
9146 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9147 LONGEST val = unpack_long (type, valbuf);
9149 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9150 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9154 /* Integral values greater than one word are stored in consecutive
9155 registers starting with r0. This will always be a multiple of
9156 the regiser size. */
9157 int len = TYPE_LENGTH (type);
9158 int regno = ARM_A1_REGNUM;
9162 regcache_cooked_write (regs, regno++, valbuf);
9163 len -= INT_REGISTER_SIZE;
9164 valbuf += INT_REGISTER_SIZE;
9170 /* For a structure or union the behaviour is as if the value had
9171 been stored to word-aligned memory and then loaded into
9172 registers with 32-bit load instruction(s). */
9173 int len = TYPE_LENGTH (type);
9174 int regno = ARM_A1_REGNUM;
9175 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9179 memcpy (tmpbuf, valbuf,
9180 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9181 regcache_cooked_write (regs, regno++, tmpbuf);
9182 len -= INT_REGISTER_SIZE;
9183 valbuf += INT_REGISTER_SIZE;
9189 /* Handle function return values. */
9191 static enum return_value_convention
9192 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9193 struct type *valtype, struct regcache *regcache,
9194 gdb_byte *readbuf, const gdb_byte *writebuf)
9196 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9197 struct type *func_type = function ? value_type (function) : NULL;
9198 enum arm_vfp_cprc_base_type vfp_base_type;
9201 if (arm_vfp_abi_for_function (gdbarch, func_type)
9202 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9204 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9205 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9207 for (i = 0; i < vfp_base_count; i++)
9209 if (reg_char == 'q')
9212 arm_neon_quad_write (gdbarch, regcache, i,
9213 writebuf + i * unit_length);
9216 arm_neon_quad_read (gdbarch, regcache, i,
9217 readbuf + i * unit_length);
9224 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9225 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9228 regcache_cooked_write (regcache, regnum,
9229 writebuf + i * unit_length);
9231 regcache_cooked_read (regcache, regnum,
9232 readbuf + i * unit_length);
9235 return RETURN_VALUE_REGISTER_CONVENTION;
9238 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9239 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9240 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9242 if (tdep->struct_return == pcc_struct_return
9243 || arm_return_in_memory (gdbarch, valtype))
9244 return RETURN_VALUE_STRUCT_CONVENTION;
9247 /* AAPCS returns complex types longer than a register in memory. */
9248 if (tdep->arm_abi != ARM_ABI_APCS
9249 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9250 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9251 return RETURN_VALUE_STRUCT_CONVENTION;
9254 arm_store_return_value (valtype, regcache, writebuf);
9257 arm_extract_return_value (valtype, regcache, readbuf);
9259 return RETURN_VALUE_REGISTER_CONVENTION;
9264 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9266 struct gdbarch *gdbarch = get_frame_arch (frame);
9267 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9268 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9270 gdb_byte buf[INT_REGISTER_SIZE];
9272 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9274 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9278 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9282 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9283 return the target PC. Otherwise return 0. */
9286 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9290 CORE_ADDR start_addr;
9292 /* Find the starting address and name of the function containing the PC. */
9293 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9295 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9297 start_addr = arm_skip_bx_reg (frame, pc);
9298 if (start_addr != 0)
9304 /* If PC is in a Thumb call or return stub, return the address of the
9305 target PC, which is in a register. The thunk functions are called
9306 _call_via_xx, where x is the register name. The possible names
9307 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9308 functions, named __ARM_call_via_r[0-7]. */
9309 if (strncmp (name, "_call_via_", 10) == 0
9310 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9312 /* Use the name suffix to determine which register contains the
9314 static char *table[15] =
9315 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9316 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9319 int offset = strlen (name) - 2;
9321 for (regno = 0; regno <= 14; regno++)
9322 if (strcmp (&name[offset], table[regno]) == 0)
9323 return get_frame_register_unsigned (frame, regno);
9326 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9327 non-interworking calls to foo. We could decode the stubs
9328 to find the target but it's easier to use the symbol table. */
9329 namelen = strlen (name);
9330 if (name[0] == '_' && name[1] == '_'
9331 && ((namelen > 2 + strlen ("_from_thumb")
9332 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9333 strlen ("_from_thumb")) == 0)
9334 || (namelen > 2 + strlen ("_from_arm")
9335 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9336 strlen ("_from_arm")) == 0)))
9339 int target_len = namelen - 2;
9340 struct bound_minimal_symbol minsym;
9341 struct objfile *objfile;
9342 struct obj_section *sec;
9344 if (name[namelen - 1] == 'b')
9345 target_len -= strlen ("_from_thumb");
9347 target_len -= strlen ("_from_arm");
9349 target_name = alloca (target_len + 1);
9350 memcpy (target_name, name + 2, target_len);
9351 target_name[target_len] = '\0';
9353 sec = find_pc_section (pc);
9354 objfile = (sec == NULL) ? NULL : sec->objfile;
9355 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9356 if (minsym.minsym != NULL)
9357 return BMSYMBOL_VALUE_ADDRESS (minsym);
9362 return 0; /* not a stub */
9366 set_arm_command (char *args, int from_tty)
9368 printf_unfiltered (_("\
9369 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9370 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9374 show_arm_command (char *args, int from_tty)
9376 cmd_show_list (showarmcmdlist, from_tty, "");
9380 arm_update_current_architecture (void)
9382 struct gdbarch_info info;
9384 /* If the current architecture is not ARM, we have nothing to do. */
9385 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9388 /* Update the architecture. */
9389 gdbarch_info_init (&info);
9391 if (!gdbarch_update_p (info))
9392 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9396 set_fp_model_sfunc (char *args, int from_tty,
9397 struct cmd_list_element *c)
9399 enum arm_float_model fp_model;
9401 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9402 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9404 arm_fp_model = fp_model;
9408 if (fp_model == ARM_FLOAT_LAST)
9409 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9412 arm_update_current_architecture ();
9416 show_fp_model (struct ui_file *file, int from_tty,
9417 struct cmd_list_element *c, const char *value)
9419 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9421 if (arm_fp_model == ARM_FLOAT_AUTO
9422 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9423 fprintf_filtered (file, _("\
9424 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9425 fp_model_strings[tdep->fp_model]);
9427 fprintf_filtered (file, _("\
9428 The current ARM floating point model is \"%s\".\n"),
9429 fp_model_strings[arm_fp_model]);
9433 arm_set_abi (char *args, int from_tty,
9434 struct cmd_list_element *c)
9436 enum arm_abi_kind arm_abi;
9438 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9439 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9441 arm_abi_global = arm_abi;
9445 if (arm_abi == ARM_ABI_LAST)
9446 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9449 arm_update_current_architecture ();
9453 arm_show_abi (struct ui_file *file, int from_tty,
9454 struct cmd_list_element *c, const char *value)
9456 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9458 if (arm_abi_global == ARM_ABI_AUTO
9459 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9460 fprintf_filtered (file, _("\
9461 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9462 arm_abi_strings[tdep->arm_abi]);
9464 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9469 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9470 struct cmd_list_element *c, const char *value)
9472 fprintf_filtered (file,
9473 _("The current execution mode assumed "
9474 "(when symbols are unavailable) is \"%s\".\n"),
9475 arm_fallback_mode_string);
9479 arm_show_force_mode (struct ui_file *file, int from_tty,
9480 struct cmd_list_element *c, const char *value)
9482 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9484 fprintf_filtered (file,
9485 _("The current execution mode assumed "
9486 "(even when symbols are available) is \"%s\".\n"),
9487 arm_force_mode_string);
9490 /* If the user changes the register disassembly style used for info
9491 register and other commands, we have to also switch the style used
9492 in opcodes for disassembly output. This function is run in the "set
9493 arm disassembly" command, and does that. */
9496 set_disassembly_style_sfunc (char *args, int from_tty,
9497 struct cmd_list_element *c)
9499 set_disassembly_style ();
9502 /* Return the ARM register name corresponding to register I. */
9504 arm_register_name (struct gdbarch *gdbarch, int i)
9506 const int num_regs = gdbarch_num_regs (gdbarch);
9508 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9509 && i >= num_regs && i < num_regs + 32)
9511 static const char *const vfp_pseudo_names[] = {
9512 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9513 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9514 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9515 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9518 return vfp_pseudo_names[i - num_regs];
9521 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9522 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9524 static const char *const neon_pseudo_names[] = {
9525 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9526 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9529 return neon_pseudo_names[i - num_regs - 32];
9532 if (i >= ARRAY_SIZE (arm_register_names))
9533 /* These registers are only supported on targets which supply
9534 an XML description. */
9537 return arm_register_names[i];
9541 set_disassembly_style (void)
9545 /* Find the style that the user wants. */
9546 for (current = 0; current < num_disassembly_options; current++)
9547 if (disassembly_style == valid_disassembly_styles[current])
9549 gdb_assert (current < num_disassembly_options);
9551 /* Synchronize the disassembler. */
9552 set_arm_regname_option (current);
9555 /* Test whether the coff symbol specific value corresponds to a Thumb
9559 coff_sym_is_thumb (int val)
9561 return (val == C_THUMBEXT
9562 || val == C_THUMBSTAT
9563 || val == C_THUMBEXTFUNC
9564 || val == C_THUMBSTATFUNC
9565 || val == C_THUMBLABEL);
9568 /* arm_coff_make_msymbol_special()
9569 arm_elf_make_msymbol_special()
9571 These functions test whether the COFF or ELF symbol corresponds to
9572 an address in thumb code, and set a "special" bit in a minimal
9573 symbol to indicate that it does. */
9576 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9578 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9579 == ST_BRANCH_TO_THUMB)
9580 MSYMBOL_SET_SPECIAL (msym);
9584 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9586 if (coff_sym_is_thumb (val))
9587 MSYMBOL_SET_SPECIAL (msym);
9591 arm_objfile_data_free (struct objfile *objfile, void *arg)
9593 struct arm_per_objfile *data = arg;
9596 for (i = 0; i < objfile->obfd->section_count; i++)
9597 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9601 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9604 const char *name = bfd_asymbol_name (sym);
9605 struct arm_per_objfile *data;
9606 VEC(arm_mapping_symbol_s) **map_p;
9607 struct arm_mapping_symbol new_map_sym;
9609 gdb_assert (name[0] == '$');
9610 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9613 data = objfile_data (objfile, arm_objfile_data_key);
9616 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9617 struct arm_per_objfile);
9618 set_objfile_data (objfile, arm_objfile_data_key, data);
9619 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9620 objfile->obfd->section_count,
9621 VEC(arm_mapping_symbol_s) *);
9623 map_p = &data->section_maps[bfd_get_section (sym)->index];
9625 new_map_sym.value = sym->value;
9626 new_map_sym.type = name[1];
9628 /* Assume that most mapping symbols appear in order of increasing
9629 value. If they were randomly distributed, it would be faster to
9630 always push here and then sort at first use. */
9631 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9633 struct arm_mapping_symbol *prev_map_sym;
9635 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9636 if (prev_map_sym->value >= sym->value)
9639 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9640 arm_compare_mapping_symbols);
9641 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9646 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9650 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9652 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9653 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9655 /* If necessary, set the T bit. */
9658 ULONGEST val, t_bit;
9659 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9660 t_bit = arm_psr_thumb_bit (gdbarch);
9661 if (arm_pc_is_thumb (gdbarch, pc))
9662 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9665 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9670 /* Read the contents of a NEON quad register, by reading from two
9671 double registers. This is used to implement the quad pseudo
9672 registers, and for argument passing in case the quad registers are
9673 missing; vectors are passed in quad registers when using the VFP
9674 ABI, even if a NEON unit is not present. REGNUM is the index of
9675 the quad register, in [0, 15]. */
9677 static enum register_status
9678 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9679 int regnum, gdb_byte *buf)
9682 gdb_byte reg_buf[8];
9683 int offset, double_regnum;
9684 enum register_status status;
9686 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9687 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9690 /* d0 is always the least significant half of q0. */
9691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9696 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9697 if (status != REG_VALID)
9699 memcpy (buf + offset, reg_buf, 8);
9701 offset = 8 - offset;
9702 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9703 if (status != REG_VALID)
9705 memcpy (buf + offset, reg_buf, 8);
9710 static enum register_status
9711 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9712 int regnum, gdb_byte *buf)
9714 const int num_regs = gdbarch_num_regs (gdbarch);
9716 gdb_byte reg_buf[8];
9717 int offset, double_regnum;
9719 gdb_assert (regnum >= num_regs);
9722 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9723 /* Quad-precision register. */
9724 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9727 enum register_status status;
9729 /* Single-precision register. */
9730 gdb_assert (regnum < 32);
9732 /* s0 is always the least significant half of d0. */
9733 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9734 offset = (regnum & 1) ? 0 : 4;
9736 offset = (regnum & 1) ? 4 : 0;
9738 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9739 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9742 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9743 if (status == REG_VALID)
9744 memcpy (buf, reg_buf + offset, 4);
9749 /* Store the contents of BUF to a NEON quad register, by writing to
9750 two double registers. This is used to implement the quad pseudo
9751 registers, and for argument passing in case the quad registers are
9752 missing; vectors are passed in quad registers when using the VFP
9753 ABI, even if a NEON unit is not present. REGNUM is the index
9754 of the quad register, in [0, 15]. */
9757 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9758 int regnum, const gdb_byte *buf)
9761 int offset, double_regnum;
9763 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9764 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9767 /* d0 is always the least significant half of q0. */
9768 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9773 regcache_raw_write (regcache, double_regnum, buf + offset);
9774 offset = 8 - offset;
9775 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9779 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9780 int regnum, const gdb_byte *buf)
9782 const int num_regs = gdbarch_num_regs (gdbarch);
9784 gdb_byte reg_buf[8];
9785 int offset, double_regnum;
9787 gdb_assert (regnum >= num_regs);
9790 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9791 /* Quad-precision register. */
9792 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9795 /* Single-precision register. */
9796 gdb_assert (regnum < 32);
9798 /* s0 is always the least significant half of d0. */
9799 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9800 offset = (regnum & 1) ? 0 : 4;
9802 offset = (regnum & 1) ? 4 : 0;
9804 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9805 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9808 regcache_raw_read (regcache, double_regnum, reg_buf);
9809 memcpy (reg_buf + offset, buf, 4);
9810 regcache_raw_write (regcache, double_regnum, reg_buf);
9814 static struct value *
9815 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9817 const int *reg_p = baton;
9818 return value_of_register (*reg_p, frame);
9821 static enum gdb_osabi
9822 arm_elf_osabi_sniffer (bfd *abfd)
9824 unsigned int elfosabi;
9825 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9827 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9829 if (elfosabi == ELFOSABI_ARM)
9830 /* GNU tools use this value. Check note sections in this case,
9832 bfd_map_over_sections (abfd,
9833 generic_elf_osabi_sniff_abi_tag_sections,
9836 /* Anything else will be handled by the generic ELF sniffer. */
9841 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9842 struct reggroup *group)
9844 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9845 this, FPS register belongs to save_regroup, restore_reggroup, and
9846 all_reggroup, of course. */
9847 if (regnum == ARM_FPS_REGNUM)
9848 return (group == float_reggroup
9849 || group == save_reggroup
9850 || group == restore_reggroup
9851 || group == all_reggroup);
9853 return default_register_reggroup_p (gdbarch, regnum, group);
9857 /* For backward-compatibility we allow two 'g' packet lengths with
9858 the remote protocol depending on whether FPA registers are
9859 supplied. M-profile targets do not have FPA registers, but some
9860 stubs already exist in the wild which use a 'g' packet which
9861 supplies them albeit with dummy values. The packet format which
9862 includes FPA registers should be considered deprecated for
9863 M-profile targets. */
9866 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9868 if (gdbarch_tdep (gdbarch)->is_m)
9870 /* If we know from the executable this is an M-profile target,
9871 cater for remote targets whose register set layout is the
9872 same as the FPA layout. */
9873 register_remote_g_packet_guess (gdbarch,
9874 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9875 (16 * INT_REGISTER_SIZE)
9876 + (8 * FP_REGISTER_SIZE)
9877 + (2 * INT_REGISTER_SIZE),
9878 tdesc_arm_with_m_fpa_layout);
9880 /* The regular M-profile layout. */
9881 register_remote_g_packet_guess (gdbarch,
9882 /* r0-r12,sp,lr,pc; xpsr */
9883 (16 * INT_REGISTER_SIZE)
9884 + INT_REGISTER_SIZE,
9887 /* M-profile plus M4F VFP. */
9888 register_remote_g_packet_guess (gdbarch,
9889 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9890 (16 * INT_REGISTER_SIZE)
9891 + (16 * VFP_REGISTER_SIZE)
9892 + (2 * INT_REGISTER_SIZE),
9893 tdesc_arm_with_m_vfp_d16);
9896 /* Otherwise we don't have a useful guess. */
9900 /* Initialize the current architecture based on INFO. If possible,
9901 re-use an architecture from ARCHES, which is a list of
9902 architectures already created during this debugging session.
9904 Called e.g. at program startup, when reading a core file, and when
9905 reading a binary file. */
9907 static struct gdbarch *
9908 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9910 struct gdbarch_tdep *tdep;
9911 struct gdbarch *gdbarch;
9912 struct gdbarch_list *best_arch;
9913 enum arm_abi_kind arm_abi = arm_abi_global;
9914 enum arm_float_model fp_model = arm_fp_model;
9915 struct tdesc_arch_data *tdesc_data = NULL;
9917 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9919 int have_fpa_registers = 1;
9920 const struct target_desc *tdesc = info.target_desc;
9922 /* If we have an object to base this architecture on, try to determine
9925 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9927 int ei_osabi, e_flags;
9929 switch (bfd_get_flavour (info.abfd))
9931 case bfd_target_aout_flavour:
9932 /* Assume it's an old APCS-style ABI. */
9933 arm_abi = ARM_ABI_APCS;
9936 case bfd_target_coff_flavour:
9937 /* Assume it's an old APCS-style ABI. */
9939 arm_abi = ARM_ABI_APCS;
9942 case bfd_target_elf_flavour:
9943 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9944 e_flags = elf_elfheader (info.abfd)->e_flags;
9946 if (ei_osabi == ELFOSABI_ARM)
9948 /* GNU tools used to use this value, but do not for EABI
9949 objects. There's nowhere to tag an EABI version
9950 anyway, so assume APCS. */
9951 arm_abi = ARM_ABI_APCS;
9953 else if (ei_osabi == ELFOSABI_NONE)
9955 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9956 int attr_arch, attr_profile;
9960 case EF_ARM_EABI_UNKNOWN:
9961 /* Assume GNU tools. */
9962 arm_abi = ARM_ABI_APCS;
9965 case EF_ARM_EABI_VER4:
9966 case EF_ARM_EABI_VER5:
9967 arm_abi = ARM_ABI_AAPCS;
9968 /* EABI binaries default to VFP float ordering.
9969 They may also contain build attributes that can
9970 be used to identify if the VFP argument-passing
9972 if (fp_model == ARM_FLOAT_AUTO)
9975 switch (bfd_elf_get_obj_attr_int (info.abfd,
9980 /* "The user intended FP parameter/result
9981 passing to conform to AAPCS, base
9983 fp_model = ARM_FLOAT_SOFT_VFP;
9986 /* "The user intended FP parameter/result
9987 passing to conform to AAPCS, VFP
9989 fp_model = ARM_FLOAT_VFP;
9992 /* "The user intended FP parameter/result
9993 passing to conform to tool chain-specific
9994 conventions" - we don't know any such
9995 conventions, so leave it as "auto". */
9998 /* Attribute value not mentioned in the
9999 October 2008 ABI, so leave it as
10004 fp_model = ARM_FLOAT_SOFT_VFP;
10010 /* Leave it as "auto". */
10011 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10016 /* Detect M-profile programs. This only works if the
10017 executable file includes build attributes; GCC does
10018 copy them to the executable, but e.g. RealView does
10020 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10022 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10024 Tag_CPU_arch_profile);
10025 /* GCC specifies the profile for v6-M; RealView only
10026 specifies the profile for architectures starting with
10027 V7 (as opposed to architectures with a tag
10028 numerically greater than TAG_CPU_ARCH_V7). */
10029 if (!tdesc_has_registers (tdesc)
10030 && (attr_arch == TAG_CPU_ARCH_V6_M
10031 || attr_arch == TAG_CPU_ARCH_V6S_M
10032 || attr_profile == 'M'))
10037 if (fp_model == ARM_FLOAT_AUTO)
10039 int e_flags = elf_elfheader (info.abfd)->e_flags;
10041 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10044 /* Leave it as "auto". Strictly speaking this case
10045 means FPA, but almost nobody uses that now, and
10046 many toolchains fail to set the appropriate bits
10047 for the floating-point model they use. */
10049 case EF_ARM_SOFT_FLOAT:
10050 fp_model = ARM_FLOAT_SOFT_FPA;
10052 case EF_ARM_VFP_FLOAT:
10053 fp_model = ARM_FLOAT_VFP;
10055 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10056 fp_model = ARM_FLOAT_SOFT_VFP;
10061 if (e_flags & EF_ARM_BE8)
10062 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10067 /* Leave it as "auto". */
10072 /* Check any target description for validity. */
10073 if (tdesc_has_registers (tdesc))
10075 /* For most registers we require GDB's default names; but also allow
10076 the numeric names for sp / lr / pc, as a convenience. */
10077 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10078 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10079 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10081 const struct tdesc_feature *feature;
10084 feature = tdesc_find_feature (tdesc,
10085 "org.gnu.gdb.arm.core");
10086 if (feature == NULL)
10088 feature = tdesc_find_feature (tdesc,
10089 "org.gnu.gdb.arm.m-profile");
10090 if (feature == NULL)
10096 tdesc_data = tdesc_data_alloc ();
10099 for (i = 0; i < ARM_SP_REGNUM; i++)
10100 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10101 arm_register_names[i]);
10102 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10105 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10108 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10112 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10113 ARM_PS_REGNUM, "xpsr");
10115 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10116 ARM_PS_REGNUM, "cpsr");
10120 tdesc_data_cleanup (tdesc_data);
10124 feature = tdesc_find_feature (tdesc,
10125 "org.gnu.gdb.arm.fpa");
10126 if (feature != NULL)
10129 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10130 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10131 arm_register_names[i]);
10134 tdesc_data_cleanup (tdesc_data);
10139 have_fpa_registers = 0;
10141 feature = tdesc_find_feature (tdesc,
10142 "org.gnu.gdb.xscale.iwmmxt");
10143 if (feature != NULL)
10145 static const char *const iwmmxt_names[] = {
10146 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10147 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10148 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10149 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10153 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10155 &= tdesc_numbered_register (feature, tdesc_data, i,
10156 iwmmxt_names[i - ARM_WR0_REGNUM]);
10158 /* Check for the control registers, but do not fail if they
10160 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10161 tdesc_numbered_register (feature, tdesc_data, i,
10162 iwmmxt_names[i - ARM_WR0_REGNUM]);
10164 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10166 &= tdesc_numbered_register (feature, tdesc_data, i,
10167 iwmmxt_names[i - ARM_WR0_REGNUM]);
10171 tdesc_data_cleanup (tdesc_data);
10176 /* If we have a VFP unit, check whether the single precision registers
10177 are present. If not, then we will synthesize them as pseudo
10179 feature = tdesc_find_feature (tdesc,
10180 "org.gnu.gdb.arm.vfp");
10181 if (feature != NULL)
10183 static const char *const vfp_double_names[] = {
10184 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10185 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10186 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10187 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10190 /* Require the double precision registers. There must be either
10193 for (i = 0; i < 32; i++)
10195 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10197 vfp_double_names[i]);
10201 if (!valid_p && i == 16)
10204 /* Also require FPSCR. */
10205 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10206 ARM_FPSCR_REGNUM, "fpscr");
10209 tdesc_data_cleanup (tdesc_data);
10213 if (tdesc_unnumbered_register (feature, "s0") == 0)
10214 have_vfp_pseudos = 1;
10216 have_vfp_registers = 1;
10218 /* If we have VFP, also check for NEON. The architecture allows
10219 NEON without VFP (integer vector operations only), but GDB
10220 does not support that. */
10221 feature = tdesc_find_feature (tdesc,
10222 "org.gnu.gdb.arm.neon");
10223 if (feature != NULL)
10225 /* NEON requires 32 double-precision registers. */
10228 tdesc_data_cleanup (tdesc_data);
10232 /* If there are quad registers defined by the stub, use
10233 their type; otherwise (normally) provide them with
10234 the default type. */
10235 if (tdesc_unnumbered_register (feature, "q0") == 0)
10236 have_neon_pseudos = 1;
10243 /* If there is already a candidate, use it. */
10244 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10246 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10248 if (arm_abi != ARM_ABI_AUTO
10249 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10252 if (fp_model != ARM_FLOAT_AUTO
10253 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10256 /* There are various other properties in tdep that we do not
10257 need to check here: those derived from a target description,
10258 since gdbarches with a different target description are
10259 automatically disqualified. */
10261 /* Do check is_m, though, since it might come from the binary. */
10262 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10265 /* Found a match. */
10269 if (best_arch != NULL)
10271 if (tdesc_data != NULL)
10272 tdesc_data_cleanup (tdesc_data);
10273 return best_arch->gdbarch;
10276 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10277 gdbarch = gdbarch_alloc (&info, tdep);
10279 /* Record additional information about the architecture we are defining.
10280 These are gdbarch discriminators, like the OSABI. */
10281 tdep->arm_abi = arm_abi;
10282 tdep->fp_model = fp_model;
10284 tdep->have_fpa_registers = have_fpa_registers;
10285 tdep->have_vfp_registers = have_vfp_registers;
10286 tdep->have_vfp_pseudos = have_vfp_pseudos;
10287 tdep->have_neon_pseudos = have_neon_pseudos;
10288 tdep->have_neon = have_neon;
10290 arm_register_g_packet_guesses (gdbarch);
10293 switch (info.byte_order_for_code)
10295 case BFD_ENDIAN_BIG:
10296 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10297 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10298 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10299 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10303 case BFD_ENDIAN_LITTLE:
10304 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10305 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10306 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10307 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10312 internal_error (__FILE__, __LINE__,
10313 _("arm_gdbarch_init: bad byte order for float format"));
10316 /* On ARM targets char defaults to unsigned. */
10317 set_gdbarch_char_signed (gdbarch, 0);
10319 /* Note: for displaced stepping, this includes the breakpoint, and one word
10320 of additional scratch space. This setting isn't used for anything beside
10321 displaced stepping at present. */
10322 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10324 /* This should be low enough for everything. */
10325 tdep->lowest_pc = 0x20;
10326 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10328 /* The default, for both APCS and AAPCS, is to return small
10329 structures in registers. */
10330 tdep->struct_return = reg_struct_return;
10332 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10333 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10335 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10337 /* Frame handling. */
10338 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10339 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10340 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10342 frame_base_set_default (gdbarch, &arm_normal_base);
10344 /* Address manipulation. */
10345 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10347 /* Advance PC across function entry code. */
10348 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10350 /* Detect whether PC is in function epilogue. */
10351 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10353 /* Skip trampolines. */
10354 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10356 /* The stack grows downward. */
10357 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10359 /* Breakpoint manipulation. */
10360 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10361 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10362 arm_remote_breakpoint_from_pc);
10364 /* Information about registers, etc. */
10365 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10366 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10367 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10368 set_gdbarch_register_type (gdbarch, arm_register_type);
10369 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10371 /* This "info float" is FPA-specific. Use the generic version if we
10372 do not have FPA. */
10373 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10374 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10376 /* Internal <-> external register number maps. */
10377 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10378 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10380 set_gdbarch_register_name (gdbarch, arm_register_name);
10382 /* Returning results. */
10383 set_gdbarch_return_value (gdbarch, arm_return_value);
10386 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10388 /* Minsymbol frobbing. */
10389 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10390 set_gdbarch_coff_make_msymbol_special (gdbarch,
10391 arm_coff_make_msymbol_special);
10392 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10394 /* Thumb-2 IT block support. */
10395 set_gdbarch_adjust_breakpoint_address (gdbarch,
10396 arm_adjust_breakpoint_address);
10398 /* Virtual tables. */
10399 set_gdbarch_vbit_in_delta (gdbarch, 1);
10401 /* Hook in the ABI-specific overrides, if they have been registered. */
10402 gdbarch_init_osabi (info, gdbarch);
10404 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10406 /* Add some default predicates. */
10408 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10409 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10410 dwarf2_append_unwinders (gdbarch);
10411 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10412 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10414 /* Now we have tuned the configuration, set a few final things,
10415 based on what the OS ABI has told us. */
10417 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10418 binaries are always marked. */
10419 if (tdep->arm_abi == ARM_ABI_AUTO)
10420 tdep->arm_abi = ARM_ABI_APCS;
10422 /* Watchpoints are not steppable. */
10423 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10425 /* We used to default to FPA for generic ARM, but almost nobody
10426 uses that now, and we now provide a way for the user to force
10427 the model. So default to the most useful variant. */
10428 if (tdep->fp_model == ARM_FLOAT_AUTO)
10429 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10431 if (tdep->jb_pc >= 0)
10432 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10434 /* Floating point sizes and format. */
10435 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10436 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10438 set_gdbarch_double_format
10439 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10440 set_gdbarch_long_double_format
10441 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10445 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10446 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10449 if (have_vfp_pseudos)
10451 /* NOTE: These are the only pseudo registers used by
10452 the ARM target at the moment. If more are added, a
10453 little more care in numbering will be needed. */
10455 int num_pseudos = 32;
10456 if (have_neon_pseudos)
10458 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10459 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10460 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10465 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10467 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10469 /* Override tdesc_register_type to adjust the types of VFP
10470 registers for NEON. */
10471 set_gdbarch_register_type (gdbarch, arm_register_type);
10474 /* Add standard register aliases. We add aliases even for those
10475 nanes which are used by the current architecture - it's simpler,
10476 and does no harm, since nothing ever lists user registers. */
10477 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10478 user_reg_add (gdbarch, arm_register_aliases[i].name,
10479 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10485 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10487 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10492 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10493 (unsigned long) tdep->lowest_pc);
10496 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10499 _initialize_arm_tdep (void)
10501 struct ui_file *stb;
10503 struct cmd_list_element *new_set, *new_show;
10504 const char *setname;
10505 const char *setdesc;
10506 const char *const *regnames;
10508 static char *helptext;
10509 char regdesc[1024], *rdptr = regdesc;
10510 size_t rest = sizeof (regdesc);
10512 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10514 arm_objfile_data_key
10515 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10517 /* Add ourselves to objfile event chain. */
10518 observer_attach_new_objfile (arm_exidx_new_objfile);
10520 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10522 /* Register an ELF OS ABI sniffer for ARM binaries. */
10523 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10524 bfd_target_elf_flavour,
10525 arm_elf_osabi_sniffer);
10527 /* Initialize the standard target descriptions. */
10528 initialize_tdesc_arm_with_m ();
10529 initialize_tdesc_arm_with_m_fpa_layout ();
10530 initialize_tdesc_arm_with_m_vfp_d16 ();
10531 initialize_tdesc_arm_with_iwmmxt ();
10532 initialize_tdesc_arm_with_vfpv2 ();
10533 initialize_tdesc_arm_with_vfpv3 ();
10534 initialize_tdesc_arm_with_neon ();
10536 /* Get the number of possible sets of register names defined in opcodes. */
10537 num_disassembly_options = get_arm_regname_num_options ();
10539 /* Add root prefix command for all "set arm"/"show arm" commands. */
10540 add_prefix_cmd ("arm", no_class, set_arm_command,
10541 _("Various ARM-specific commands."),
10542 &setarmcmdlist, "set arm ", 0, &setlist);
10544 add_prefix_cmd ("arm", no_class, show_arm_command,
10545 _("Various ARM-specific commands."),
10546 &showarmcmdlist, "show arm ", 0, &showlist);
10548 /* Sync the opcode insn printer with our register viewer. */
10549 parse_arm_disassembler_option ("reg-names-std");
10551 /* Initialize the array that will be passed to
10552 add_setshow_enum_cmd(). */
10553 valid_disassembly_styles
10554 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10555 for (i = 0; i < num_disassembly_options; i++)
10557 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10558 valid_disassembly_styles[i] = setname;
10559 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10562 /* When we find the default names, tell the disassembler to use
10564 if (!strcmp (setname, "std"))
10566 disassembly_style = setname;
10567 set_arm_regname_option (i);
10570 /* Mark the end of valid options. */
10571 valid_disassembly_styles[num_disassembly_options] = NULL;
10573 /* Create the help text. */
10574 stb = mem_fileopen ();
10575 fprintf_unfiltered (stb, "%s%s%s",
10576 _("The valid values are:\n"),
10578 _("The default is \"std\"."));
10579 helptext = ui_file_xstrdup (stb, NULL);
10580 ui_file_delete (stb);
10582 add_setshow_enum_cmd("disassembler", no_class,
10583 valid_disassembly_styles, &disassembly_style,
10584 _("Set the disassembly style."),
10585 _("Show the disassembly style."),
10587 set_disassembly_style_sfunc,
10588 NULL, /* FIXME: i18n: The disassembly style is
10590 &setarmcmdlist, &showarmcmdlist);
10592 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10593 _("Set usage of ARM 32-bit mode."),
10594 _("Show usage of ARM 32-bit mode."),
10595 _("When off, a 26-bit PC will be used."),
10597 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10599 &setarmcmdlist, &showarmcmdlist);
10601 /* Add a command to allow the user to force the FPU model. */
10602 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10603 _("Set the floating point type."),
10604 _("Show the floating point type."),
10605 _("auto - Determine the FP typefrom the OS-ABI.\n\
10606 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10607 fpa - FPA co-processor (GCC compiled).\n\
10608 softvfp - Software FP with pure-endian doubles.\n\
10609 vfp - VFP co-processor."),
10610 set_fp_model_sfunc, show_fp_model,
10611 &setarmcmdlist, &showarmcmdlist);
10613 /* Add a command to allow the user to force the ABI. */
10614 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10616 _("Show the ABI."),
10617 NULL, arm_set_abi, arm_show_abi,
10618 &setarmcmdlist, &showarmcmdlist);
10620 /* Add two commands to allow the user to force the assumed
10622 add_setshow_enum_cmd ("fallback-mode", class_support,
10623 arm_mode_strings, &arm_fallback_mode_string,
10624 _("Set the mode assumed when symbols are unavailable."),
10625 _("Show the mode assumed when symbols are unavailable."),
10626 NULL, NULL, arm_show_fallback_mode,
10627 &setarmcmdlist, &showarmcmdlist);
10628 add_setshow_enum_cmd ("force-mode", class_support,
10629 arm_mode_strings, &arm_force_mode_string,
10630 _("Set the mode assumed even when symbols are available."),
10631 _("Show the mode assumed even when symbols are available."),
10632 NULL, NULL, arm_show_force_mode,
10633 &setarmcmdlist, &showarmcmdlist);
10635 /* Debugging flag. */
10636 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10637 _("Set ARM debugging."),
10638 _("Show ARM debugging."),
10639 _("When on, arm-specific debugging is enabled."),
10641 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10642 &setdebuglist, &showdebuglist);
10645 /* ARM-reversible process record data structures. */
10647 #define ARM_INSN_SIZE_BYTES 4
10648 #define THUMB_INSN_SIZE_BYTES 2
10649 #define THUMB2_INSN_SIZE_BYTES 4
10652 #define INSN_S_L_BIT_NUM 20
10654 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10657 unsigned int reg_len = LENGTH; \
10660 REGS = XNEWVEC (uint32_t, reg_len); \
10661 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10666 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10669 unsigned int mem_len = LENGTH; \
10672 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10673 memcpy(&MEMS->len, &RECORD_BUF[0], \
10674 sizeof(struct arm_mem_r) * LENGTH); \
10679 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10680 #define INSN_RECORDED(ARM_RECORD) \
10681 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10683 /* ARM memory record structure. */
10686 uint32_t len; /* Record length. */
10687 uint32_t addr; /* Memory address. */
10690 /* ARM instruction record contains opcode of current insn
10691 and execution state (before entry to decode_insn()),
10692 contains list of to-be-modified registers and
10693 memory blocks (on return from decode_insn()). */
10695 typedef struct insn_decode_record_t
10697 struct gdbarch *gdbarch;
10698 struct regcache *regcache;
10699 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10700 uint32_t arm_insn; /* Should accommodate thumb. */
10701 uint32_t cond; /* Condition code. */
10702 uint32_t opcode; /* Insn opcode. */
10703 uint32_t decode; /* Insn decode bits. */
10704 uint32_t mem_rec_count; /* No of mem records. */
10705 uint32_t reg_rec_count; /* No of reg records. */
10706 uint32_t *arm_regs; /* Registers to be saved for this record. */
10707 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10708 } insn_decode_record;
10711 /* Checks ARM SBZ and SBO mandatory fields. */
10714 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10716 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10735 enum arm_record_result
10737 ARM_RECORD_SUCCESS = 0,
10738 ARM_RECORD_FAILURE = 1
10745 } arm_record_strx_t;
10756 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10757 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10760 struct regcache *reg_cache = arm_insn_r->regcache;
10761 ULONGEST u_regval[2]= {0};
10763 uint32_t reg_src1 = 0, reg_src2 = 0;
10764 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10765 uint32_t opcode1 = 0;
10767 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10768 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10769 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10772 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10774 /* 1) Handle misc store, immediate offset. */
10775 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10776 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10777 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10778 regcache_raw_read_unsigned (reg_cache, reg_src1,
10780 if (ARM_PC_REGNUM == reg_src1)
10782 /* If R15 was used as Rn, hence current PC+8. */
10783 u_regval[0] = u_regval[0] + 8;
10785 offset_8 = (immed_high << 4) | immed_low;
10786 /* Calculate target store address. */
10787 if (14 == arm_insn_r->opcode)
10789 tgt_mem_addr = u_regval[0] + offset_8;
10793 tgt_mem_addr = u_regval[0] - offset_8;
10795 if (ARM_RECORD_STRH == str_type)
10797 record_buf_mem[0] = 2;
10798 record_buf_mem[1] = tgt_mem_addr;
10799 arm_insn_r->mem_rec_count = 1;
10801 else if (ARM_RECORD_STRD == str_type)
10803 record_buf_mem[0] = 4;
10804 record_buf_mem[1] = tgt_mem_addr;
10805 record_buf_mem[2] = 4;
10806 record_buf_mem[3] = tgt_mem_addr + 4;
10807 arm_insn_r->mem_rec_count = 2;
10810 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10812 /* 2) Store, register offset. */
10814 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10816 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10817 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10818 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10819 if (15 == reg_src2)
10821 /* If R15 was used as Rn, hence current PC+8. */
10822 u_regval[0] = u_regval[0] + 8;
10824 /* Calculate target store address, Rn +/- Rm, register offset. */
10825 if (12 == arm_insn_r->opcode)
10827 tgt_mem_addr = u_regval[0] + u_regval[1];
10831 tgt_mem_addr = u_regval[1] - u_regval[0];
10833 if (ARM_RECORD_STRH == str_type)
10835 record_buf_mem[0] = 2;
10836 record_buf_mem[1] = tgt_mem_addr;
10837 arm_insn_r->mem_rec_count = 1;
10839 else if (ARM_RECORD_STRD == str_type)
10841 record_buf_mem[0] = 4;
10842 record_buf_mem[1] = tgt_mem_addr;
10843 record_buf_mem[2] = 4;
10844 record_buf_mem[3] = tgt_mem_addr + 4;
10845 arm_insn_r->mem_rec_count = 2;
10848 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10849 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10851 /* 3) Store, immediate pre-indexed. */
10852 /* 5) Store, immediate post-indexed. */
10853 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10854 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10855 offset_8 = (immed_high << 4) | immed_low;
10856 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10857 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10858 /* Calculate target store address, Rn +/- Rm, register offset. */
10859 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10861 tgt_mem_addr = u_regval[0] + offset_8;
10865 tgt_mem_addr = u_regval[0] - offset_8;
10867 if (ARM_RECORD_STRH == str_type)
10869 record_buf_mem[0] = 2;
10870 record_buf_mem[1] = tgt_mem_addr;
10871 arm_insn_r->mem_rec_count = 1;
10873 else if (ARM_RECORD_STRD == str_type)
10875 record_buf_mem[0] = 4;
10876 record_buf_mem[1] = tgt_mem_addr;
10877 record_buf_mem[2] = 4;
10878 record_buf_mem[3] = tgt_mem_addr + 4;
10879 arm_insn_r->mem_rec_count = 2;
10881 /* Record Rn also as it changes. */
10882 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10883 arm_insn_r->reg_rec_count = 1;
10885 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10886 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10888 /* 4) Store, register pre-indexed. */
10889 /* 6) Store, register post -indexed. */
10890 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10891 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10892 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10893 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10894 /* Calculate target store address, Rn +/- Rm, register offset. */
10895 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10897 tgt_mem_addr = u_regval[0] + u_regval[1];
10901 tgt_mem_addr = u_regval[1] - u_regval[0];
10903 if (ARM_RECORD_STRH == str_type)
10905 record_buf_mem[0] = 2;
10906 record_buf_mem[1] = tgt_mem_addr;
10907 arm_insn_r->mem_rec_count = 1;
10909 else if (ARM_RECORD_STRD == str_type)
10911 record_buf_mem[0] = 4;
10912 record_buf_mem[1] = tgt_mem_addr;
10913 record_buf_mem[2] = 4;
10914 record_buf_mem[3] = tgt_mem_addr + 4;
10915 arm_insn_r->mem_rec_count = 2;
10917 /* Record Rn also as it changes. */
10918 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10919 arm_insn_r->reg_rec_count = 1;
10924 /* Handling ARM extension space insns. */
10927 arm_record_extension_space (insn_decode_record *arm_insn_r)
10929 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10930 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10931 uint32_t record_buf[8], record_buf_mem[8];
10932 uint32_t reg_src1 = 0;
10933 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10934 struct regcache *reg_cache = arm_insn_r->regcache;
10935 ULONGEST u_regval = 0;
10937 gdb_assert (!INSN_RECORDED(arm_insn_r));
10938 /* Handle unconditional insn extension space. */
10940 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10941 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10942 if (arm_insn_r->cond)
10944 /* PLD has no affect on architectural state, it just affects
10946 if (5 == ((opcode1 & 0xE0) >> 5))
10949 record_buf[0] = ARM_PS_REGNUM;
10950 record_buf[1] = ARM_LR_REGNUM;
10951 arm_insn_r->reg_rec_count = 2;
10953 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10957 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10958 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10961 /* Undefined instruction on ARM V5; need to handle if later
10962 versions define it. */
10965 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10966 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10967 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10969 /* Handle arithmetic insn extension space. */
10970 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10971 && !INSN_RECORDED(arm_insn_r))
10973 /* Handle MLA(S) and MUL(S). */
10974 if (0 <= insn_op1 && 3 >= insn_op1)
10976 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10977 record_buf[1] = ARM_PS_REGNUM;
10978 arm_insn_r->reg_rec_count = 2;
10980 else if (4 <= insn_op1 && 15 >= insn_op1)
10982 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10983 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10984 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10985 record_buf[2] = ARM_PS_REGNUM;
10986 arm_insn_r->reg_rec_count = 3;
10990 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10991 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10992 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10994 /* Handle control insn extension space. */
10996 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10997 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10999 if (!bit (arm_insn_r->arm_insn,25))
11001 if (!bits (arm_insn_r->arm_insn, 4, 7))
11003 if ((0 == insn_op1) || (2 == insn_op1))
11006 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11007 arm_insn_r->reg_rec_count = 1;
11009 else if (1 == insn_op1)
11011 /* CSPR is going to be changed. */
11012 record_buf[0] = ARM_PS_REGNUM;
11013 arm_insn_r->reg_rec_count = 1;
11015 else if (3 == insn_op1)
11017 /* SPSR is going to be changed. */
11018 /* We need to get SPSR value, which is yet to be done. */
11019 printf_unfiltered (_("Process record does not support "
11020 "instruction 0x%0x at address %s.\n"),
11021 arm_insn_r->arm_insn,
11022 paddress (arm_insn_r->gdbarch,
11023 arm_insn_r->this_addr));
11027 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11032 record_buf[0] = ARM_PS_REGNUM;
11033 arm_insn_r->reg_rec_count = 1;
11035 else if (3 == insn_op1)
11038 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11039 arm_insn_r->reg_rec_count = 1;
11042 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11045 record_buf[0] = ARM_PS_REGNUM;
11046 record_buf[1] = ARM_LR_REGNUM;
11047 arm_insn_r->reg_rec_count = 2;
11049 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11051 /* QADD, QSUB, QDADD, QDSUB */
11052 record_buf[0] = ARM_PS_REGNUM;
11053 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11054 arm_insn_r->reg_rec_count = 2;
11056 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11059 record_buf[0] = ARM_PS_REGNUM;
11060 record_buf[1] = ARM_LR_REGNUM;
11061 arm_insn_r->reg_rec_count = 2;
11063 /* Save SPSR also;how? */
11064 printf_unfiltered (_("Process record does not support "
11065 "instruction 0x%0x at address %s.\n"),
11066 arm_insn_r->arm_insn,
11067 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11070 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11071 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11072 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11073 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11076 if (0 == insn_op1 || 1 == insn_op1)
11078 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11079 /* We dont do optimization for SMULW<y> where we
11081 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11082 record_buf[1] = ARM_PS_REGNUM;
11083 arm_insn_r->reg_rec_count = 2;
11085 else if (2 == insn_op1)
11088 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11089 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11090 arm_insn_r->reg_rec_count = 2;
11092 else if (3 == insn_op1)
11095 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11096 arm_insn_r->reg_rec_count = 1;
11102 /* MSR : immediate form. */
11105 /* CSPR is going to be changed. */
11106 record_buf[0] = ARM_PS_REGNUM;
11107 arm_insn_r->reg_rec_count = 1;
11109 else if (3 == insn_op1)
11111 /* SPSR is going to be changed. */
11112 /* we need to get SPSR value, which is yet to be done */
11113 printf_unfiltered (_("Process record does not support "
11114 "instruction 0x%0x at address %s.\n"),
11115 arm_insn_r->arm_insn,
11116 paddress (arm_insn_r->gdbarch,
11117 arm_insn_r->this_addr));
11123 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11124 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11125 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11127 /* Handle load/store insn extension space. */
11129 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11130 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11131 && !INSN_RECORDED(arm_insn_r))
11136 /* These insn, changes register and memory as well. */
11137 /* SWP or SWPB insn. */
11138 /* Get memory address given by Rn. */
11139 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11140 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11141 /* SWP insn ?, swaps word. */
11142 if (8 == arm_insn_r->opcode)
11144 record_buf_mem[0] = 4;
11148 /* SWPB insn, swaps only byte. */
11149 record_buf_mem[0] = 1;
11151 record_buf_mem[1] = u_regval;
11152 arm_insn_r->mem_rec_count = 1;
11153 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11154 arm_insn_r->reg_rec_count = 1;
11156 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11159 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11162 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11166 record_buf[1] = record_buf[0] + 1;
11167 arm_insn_r->reg_rec_count = 2;
11169 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11172 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11175 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11177 /* LDRH, LDRSB, LDRSH. */
11178 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11179 arm_insn_r->reg_rec_count = 1;
11184 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11185 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11186 && !INSN_RECORDED(arm_insn_r))
11189 /* Handle coprocessor insn extension space. */
11192 /* To be done for ARMv5 and later; as of now we return -1. */
11194 printf_unfiltered (_("Process record does not support instruction x%0x "
11195 "at address %s.\n"),arm_insn_r->arm_insn,
11196 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11199 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11200 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11205 /* Handling opcode 000 insns. */
11208 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11210 struct regcache *reg_cache = arm_insn_r->regcache;
11211 uint32_t record_buf[8], record_buf_mem[8];
11212 ULONGEST u_regval[2] = {0};
11214 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11215 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11216 uint32_t opcode1 = 0;
11218 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11219 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11220 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11222 /* Data processing insn /multiply insn. */
11223 if (9 == arm_insn_r->decode
11224 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11225 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11227 /* Handle multiply instructions. */
11228 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11229 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11231 /* Handle MLA and MUL. */
11232 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11233 record_buf[1] = ARM_PS_REGNUM;
11234 arm_insn_r->reg_rec_count = 2;
11236 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11238 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11239 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11240 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11241 record_buf[2] = ARM_PS_REGNUM;
11242 arm_insn_r->reg_rec_count = 3;
11245 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11246 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11248 /* Handle misc load insns, as 20th bit (L = 1). */
11249 /* LDR insn has a capability to do branching, if
11250 MOV LR, PC is precceded by LDR insn having Rn as R15
11251 in that case, it emulates branch and link insn, and hence we
11252 need to save CSPR and PC as well. I am not sure this is right
11253 place; as opcode = 010 LDR insn make this happen, if R15 was
11255 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11256 if (15 != reg_dest)
11258 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11259 arm_insn_r->reg_rec_count = 1;
11263 record_buf[0] = reg_dest;
11264 record_buf[1] = ARM_PS_REGNUM;
11265 arm_insn_r->reg_rec_count = 2;
11268 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11269 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11270 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11271 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11273 /* Handle MSR insn. */
11274 if (9 == arm_insn_r->opcode)
11276 /* CSPR is going to be changed. */
11277 record_buf[0] = ARM_PS_REGNUM;
11278 arm_insn_r->reg_rec_count = 1;
11282 /* SPSR is going to be changed. */
11283 /* How to read SPSR value? */
11284 printf_unfiltered (_("Process record does not support instruction "
11285 "0x%0x at address %s.\n"),
11286 arm_insn_r->arm_insn,
11287 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11291 else if (9 == arm_insn_r->decode
11292 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11293 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11295 /* Handling SWP, SWPB. */
11296 /* These insn, changes register and memory as well. */
11297 /* SWP or SWPB insn. */
11299 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11300 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11301 /* SWP insn ?, swaps word. */
11302 if (8 == arm_insn_r->opcode)
11304 record_buf_mem[0] = 4;
11308 /* SWPB insn, swaps only byte. */
11309 record_buf_mem[0] = 1;
11311 record_buf_mem[1] = u_regval[0];
11312 arm_insn_r->mem_rec_count = 1;
11313 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11314 arm_insn_r->reg_rec_count = 1;
11316 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11317 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11319 /* Handle BLX, branch and link/exchange. */
11320 if (9 == arm_insn_r->opcode)
11322 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11323 and R14 stores the return address. */
11324 record_buf[0] = ARM_PS_REGNUM;
11325 record_buf[1] = ARM_LR_REGNUM;
11326 arm_insn_r->reg_rec_count = 2;
11329 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11331 /* Handle enhanced software breakpoint insn, BKPT. */
11332 /* CPSR is changed to be executed in ARM state, disabling normal
11333 interrupts, entering abort mode. */
11334 /* According to high vector configuration PC is set. */
11335 /* user hit breakpoint and type reverse, in
11336 that case, we need to go back with previous CPSR and
11337 Program Counter. */
11338 record_buf[0] = ARM_PS_REGNUM;
11339 record_buf[1] = ARM_LR_REGNUM;
11340 arm_insn_r->reg_rec_count = 2;
11342 /* Save SPSR also; how? */
11343 printf_unfiltered (_("Process record does not support instruction "
11344 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11345 paddress (arm_insn_r->gdbarch,
11346 arm_insn_r->this_addr));
11349 else if (11 == arm_insn_r->decode
11350 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11352 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11354 /* Handle str(x) insn */
11355 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11358 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11359 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11361 /* Handle BX, branch and link/exchange. */
11362 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11363 record_buf[0] = ARM_PS_REGNUM;
11364 arm_insn_r->reg_rec_count = 1;
11366 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11367 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11368 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11370 /* Count leading zeros: CLZ. */
11371 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11372 arm_insn_r->reg_rec_count = 1;
11374 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11375 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11376 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11377 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11380 /* Handle MRS insn. */
11381 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11382 arm_insn_r->reg_rec_count = 1;
11384 else if (arm_insn_r->opcode <= 15)
11386 /* Normal data processing insns. */
11387 /* Out of 11 shifter operands mode, all the insn modifies destination
11388 register, which is specified by 13-16 decode. */
11389 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11390 record_buf[1] = ARM_PS_REGNUM;
11391 arm_insn_r->reg_rec_count = 2;
11398 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11399 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11403 /* Handling opcode 001 insns. */
11406 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11408 uint32_t record_buf[8], record_buf_mem[8];
11410 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11411 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11413 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11414 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11415 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11418 /* Handle MSR insn. */
11419 if (9 == arm_insn_r->opcode)
11421 /* CSPR is going to be changed. */
11422 record_buf[0] = ARM_PS_REGNUM;
11423 arm_insn_r->reg_rec_count = 1;
11427 /* SPSR is going to be changed. */
11430 else if (arm_insn_r->opcode <= 15)
11432 /* Normal data processing insns. */
11433 /* Out of 11 shifter operands mode, all the insn modifies destination
11434 register, which is specified by 13-16 decode. */
11435 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11436 record_buf[1] = ARM_PS_REGNUM;
11437 arm_insn_r->reg_rec_count = 2;
11444 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11445 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11449 /* Handling opcode 010 insns. */
11452 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11454 struct regcache *reg_cache = arm_insn_r->regcache;
11456 uint32_t reg_src1 = 0 , reg_dest = 0;
11457 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11458 uint32_t record_buf[8], record_buf_mem[8];
11460 ULONGEST u_regval = 0;
11462 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11463 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11465 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11467 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11468 /* LDR insn has a capability to do branching, if
11469 MOV LR, PC is precedded by LDR insn having Rn as R15
11470 in that case, it emulates branch and link insn, and hence we
11471 need to save CSPR and PC as well. */
11472 if (ARM_PC_REGNUM != reg_dest)
11474 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11475 arm_insn_r->reg_rec_count = 1;
11479 record_buf[0] = reg_dest;
11480 record_buf[1] = ARM_PS_REGNUM;
11481 arm_insn_r->reg_rec_count = 2;
11486 /* Store, immediate offset, immediate pre-indexed,
11487 immediate post-indexed. */
11488 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11489 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11490 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11492 if (bit (arm_insn_r->arm_insn, 23))
11494 tgt_mem_addr = u_regval + offset_12;
11498 tgt_mem_addr = u_regval - offset_12;
11501 switch (arm_insn_r->opcode)
11515 record_buf_mem[0] = 4;
11530 record_buf_mem[0] = 1;
11534 gdb_assert_not_reached ("no decoding pattern found");
11537 record_buf_mem[1] = tgt_mem_addr;
11538 arm_insn_r->mem_rec_count = 1;
11540 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11541 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11542 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11543 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11544 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11545 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11548 /* We are handling pre-indexed mode; post-indexed mode;
11549 where Rn is going to be changed. */
11550 record_buf[0] = reg_src1;
11551 arm_insn_r->reg_rec_count = 1;
11555 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11556 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11560 /* Handling opcode 011 insns. */
11563 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11565 struct regcache *reg_cache = arm_insn_r->regcache;
11567 uint32_t shift_imm = 0;
11568 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11569 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11570 uint32_t record_buf[8], record_buf_mem[8];
11573 ULONGEST u_regval[2];
11575 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11576 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11578 /* Handle enhanced store insns and LDRD DSP insn,
11579 order begins according to addressing modes for store insns
11583 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11585 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11586 /* LDR insn has a capability to do branching, if
11587 MOV LR, PC is precedded by LDR insn having Rn as R15
11588 in that case, it emulates branch and link insn, and hence we
11589 need to save CSPR and PC as well. */
11590 if (15 != reg_dest)
11592 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11593 arm_insn_r->reg_rec_count = 1;
11597 record_buf[0] = reg_dest;
11598 record_buf[1] = ARM_PS_REGNUM;
11599 arm_insn_r->reg_rec_count = 2;
11604 if (! bits (arm_insn_r->arm_insn, 4, 11))
11606 /* Store insn, register offset and register pre-indexed,
11607 register post-indexed. */
11609 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11611 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11612 regcache_raw_read_unsigned (reg_cache, reg_src1
11614 regcache_raw_read_unsigned (reg_cache, reg_src2
11616 if (15 == reg_src2)
11618 /* If R15 was used as Rn, hence current PC+8. */
11619 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11620 u_regval[0] = u_regval[0] + 8;
11622 /* Calculate target store address, Rn +/- Rm, register offset. */
11624 if (bit (arm_insn_r->arm_insn, 23))
11626 tgt_mem_addr = u_regval[0] + u_regval[1];
11630 tgt_mem_addr = u_regval[1] - u_regval[0];
11633 switch (arm_insn_r->opcode)
11647 record_buf_mem[0] = 4;
11662 record_buf_mem[0] = 1;
11666 gdb_assert_not_reached ("no decoding pattern found");
11669 record_buf_mem[1] = tgt_mem_addr;
11670 arm_insn_r->mem_rec_count = 1;
11672 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11673 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11674 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11675 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11676 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11677 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11680 /* Rn is going to be changed in pre-indexed mode and
11681 post-indexed mode as well. */
11682 record_buf[0] = reg_src2;
11683 arm_insn_r->reg_rec_count = 1;
11688 /* Store insn, scaled register offset; scaled pre-indexed. */
11689 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11691 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11693 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11694 /* Get shift_imm. */
11695 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11696 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11697 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11698 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11699 /* Offset_12 used as shift. */
11703 /* Offset_12 used as index. */
11704 offset_12 = u_regval[0] << shift_imm;
11708 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11714 if (bit (u_regval[0], 31))
11716 offset_12 = 0xFFFFFFFF;
11725 /* This is arithmetic shift. */
11726 offset_12 = s_word >> shift_imm;
11733 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11735 /* Get C flag value and shift it by 31. */
11736 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11737 | (u_regval[0]) >> 1);
11741 offset_12 = (u_regval[0] >> shift_imm) \
11743 (sizeof(uint32_t) - shift_imm));
11748 gdb_assert_not_reached ("no decoding pattern found");
11752 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11754 if (bit (arm_insn_r->arm_insn, 23))
11756 tgt_mem_addr = u_regval[1] + offset_12;
11760 tgt_mem_addr = u_regval[1] - offset_12;
11763 switch (arm_insn_r->opcode)
11777 record_buf_mem[0] = 4;
11792 record_buf_mem[0] = 1;
11796 gdb_assert_not_reached ("no decoding pattern found");
11799 record_buf_mem[1] = tgt_mem_addr;
11800 arm_insn_r->mem_rec_count = 1;
11802 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11803 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11804 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11805 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11806 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11807 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11810 /* Rn is going to be changed in register scaled pre-indexed
11811 mode,and scaled post indexed mode. */
11812 record_buf[0] = reg_src2;
11813 arm_insn_r->reg_rec_count = 1;
11818 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11819 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11823 /* Handling opcode 100 insns. */
11826 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11828 struct regcache *reg_cache = arm_insn_r->regcache;
11830 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11831 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11832 uint32_t start_address = 0, index = 0;
11833 uint32_t record_buf[24], record_buf_mem[48];
11835 ULONGEST u_regval[2] = {0};
11837 /* This mode is exclusively for load and store multiple. */
11838 /* Handle incremenrt after/before and decrment after.before mode;
11839 Rn is changing depending on W bit, but as of now we store Rn too
11840 without optimization. */
11842 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11844 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11846 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11848 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11853 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11857 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11858 while (register_bits)
11860 if (register_bits & 0x00000001)
11861 record_buf[index++] = register_count;
11862 register_bits = register_bits >> 1;
11866 /* Extra space for Base Register and CPSR; wihtout optimization. */
11867 record_buf[index++] = reg_src1;
11868 record_buf[index++] = ARM_PS_REGNUM;
11869 arm_insn_r->reg_rec_count = index;
11873 /* It handles both STM(1) and STM(2). */
11874 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11876 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11878 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11879 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11880 while (register_bits)
11882 if (register_bits & 0x00000001)
11884 register_bits = register_bits >> 1;
11889 /* Decrement after. */
11891 start_address = (u_regval[0]) - (register_count * 4) + 4;
11892 arm_insn_r->mem_rec_count = register_count;
11893 while (register_count)
11895 record_buf_mem[(register_count * 2) - 1] = start_address;
11896 record_buf_mem[(register_count * 2) - 2] = 4;
11897 start_address = start_address + 4;
11902 /* Increment after. */
11904 start_address = u_regval[0];
11905 arm_insn_r->mem_rec_count = register_count;
11906 while (register_count)
11908 record_buf_mem[(register_count * 2) - 1] = start_address;
11909 record_buf_mem[(register_count * 2) - 2] = 4;
11910 start_address = start_address + 4;
11915 /* Decrement before. */
11918 start_address = (u_regval[0]) - (register_count * 4);
11919 arm_insn_r->mem_rec_count = register_count;
11920 while (register_count)
11922 record_buf_mem[(register_count * 2) - 1] = start_address;
11923 record_buf_mem[(register_count * 2) - 2] = 4;
11924 start_address = start_address + 4;
11929 /* Increment before. */
11931 start_address = u_regval[0] + 4;
11932 arm_insn_r->mem_rec_count = register_count;
11933 while (register_count)
11935 record_buf_mem[(register_count * 2) - 1] = start_address;
11936 record_buf_mem[(register_count * 2) - 2] = 4;
11937 start_address = start_address + 4;
11943 gdb_assert_not_reached ("no decoding pattern found");
11947 /* Base register also changes; based on condition and W bit. */
11948 /* We save it anyway without optimization. */
11949 record_buf[0] = reg_src1;
11950 arm_insn_r->reg_rec_count = 1;
11953 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11954 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11958 /* Handling opcode 101 insns. */
11961 arm_record_b_bl (insn_decode_record *arm_insn_r)
11963 uint32_t record_buf[8];
11965 /* Handle B, BL, BLX(1) insns. */
11966 /* B simply branches so we do nothing here. */
11967 /* Note: BLX(1) doesnt fall here but instead it falls into
11968 extension space. */
11969 if (bit (arm_insn_r->arm_insn, 24))
11971 record_buf[0] = ARM_LR_REGNUM;
11972 arm_insn_r->reg_rec_count = 1;
11975 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11980 /* Handling opcode 110 insns. */
11983 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11985 printf_unfiltered (_("Process record does not support instruction "
11986 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11987 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11992 /* Handling opcode 111 insns. */
11995 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11997 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11998 struct regcache *reg_cache = arm_insn_r->regcache;
11999 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12000 ULONGEST u_regval = 0;
12002 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12004 /* Handle arm SWI/SVC system call instructions. */
12005 if (15 == arm_insn_r->opcode)
12007 if (tdep->arm_syscall_record != NULL)
12009 ULONGEST svc_operand, svc_number;
12011 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12013 if (svc_operand) /* OABI. */
12014 svc_number = svc_operand - 0x900000;
12016 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12018 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12022 printf_unfiltered (_("no syscall record support\n"));
12028 arm_record_unsupported_insn (arm_insn_r);
12035 /* Handling opcode 000 insns. */
12038 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12040 uint32_t record_buf[8];
12041 uint32_t reg_src1 = 0;
12043 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12045 record_buf[0] = ARM_PS_REGNUM;
12046 record_buf[1] = reg_src1;
12047 thumb_insn_r->reg_rec_count = 2;
12049 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12055 /* Handling opcode 001 insns. */
12058 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12060 uint32_t record_buf[8];
12061 uint32_t reg_src1 = 0;
12063 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12065 record_buf[0] = ARM_PS_REGNUM;
12066 record_buf[1] = reg_src1;
12067 thumb_insn_r->reg_rec_count = 2;
12069 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12074 /* Handling opcode 010 insns. */
12077 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12079 struct regcache *reg_cache = thumb_insn_r->regcache;
12080 uint32_t record_buf[8], record_buf_mem[8];
12082 uint32_t reg_src1 = 0, reg_src2 = 0;
12083 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12085 ULONGEST u_regval[2] = {0};
12087 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12089 if (bit (thumb_insn_r->arm_insn, 12))
12091 /* Handle load/store register offset. */
12092 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12093 if (opcode2 >= 12 && opcode2 <= 15)
12095 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12096 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12097 record_buf[0] = reg_src1;
12098 thumb_insn_r->reg_rec_count = 1;
12100 else if (opcode2 >= 8 && opcode2 <= 10)
12102 /* STR(2), STRB(2), STRH(2) . */
12103 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12104 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12105 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12106 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12108 record_buf_mem[0] = 4; /* STR (2). */
12109 else if (10 == opcode2)
12110 record_buf_mem[0] = 1; /* STRB (2). */
12111 else if (9 == opcode2)
12112 record_buf_mem[0] = 2; /* STRH (2). */
12113 record_buf_mem[1] = u_regval[0] + u_regval[1];
12114 thumb_insn_r->mem_rec_count = 1;
12117 else if (bit (thumb_insn_r->arm_insn, 11))
12119 /* Handle load from literal pool. */
12121 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12122 record_buf[0] = reg_src1;
12123 thumb_insn_r->reg_rec_count = 1;
12127 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12128 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12129 if ((3 == opcode2) && (!opcode3))
12131 /* Branch with exchange. */
12132 record_buf[0] = ARM_PS_REGNUM;
12133 thumb_insn_r->reg_rec_count = 1;
12137 /* Format 8; special data processing insns. */
12138 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12139 record_buf[0] = ARM_PS_REGNUM;
12140 record_buf[1] = reg_src1;
12141 thumb_insn_r->reg_rec_count = 2;
12146 /* Format 5; data processing insns. */
12147 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12148 if (bit (thumb_insn_r->arm_insn, 7))
12150 reg_src1 = reg_src1 + 8;
12152 record_buf[0] = ARM_PS_REGNUM;
12153 record_buf[1] = reg_src1;
12154 thumb_insn_r->reg_rec_count = 2;
12157 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12158 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12164 /* Handling opcode 001 insns. */
12167 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12169 struct regcache *reg_cache = thumb_insn_r->regcache;
12170 uint32_t record_buf[8], record_buf_mem[8];
12172 uint32_t reg_src1 = 0;
12173 uint32_t opcode = 0, immed_5 = 0;
12175 ULONGEST u_regval = 0;
12177 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12182 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12183 record_buf[0] = reg_src1;
12184 thumb_insn_r->reg_rec_count = 1;
12189 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12190 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12191 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12192 record_buf_mem[0] = 4;
12193 record_buf_mem[1] = u_regval + (immed_5 * 4);
12194 thumb_insn_r->mem_rec_count = 1;
12197 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12198 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12204 /* Handling opcode 100 insns. */
12207 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12209 struct regcache *reg_cache = thumb_insn_r->regcache;
12210 uint32_t record_buf[8], record_buf_mem[8];
12212 uint32_t reg_src1 = 0;
12213 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12215 ULONGEST u_regval = 0;
12217 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12222 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12223 record_buf[0] = reg_src1;
12224 thumb_insn_r->reg_rec_count = 1;
12226 else if (1 == opcode)
12229 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12230 record_buf[0] = reg_src1;
12231 thumb_insn_r->reg_rec_count = 1;
12233 else if (2 == opcode)
12236 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12237 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12238 record_buf_mem[0] = 4;
12239 record_buf_mem[1] = u_regval + (immed_8 * 4);
12240 thumb_insn_r->mem_rec_count = 1;
12242 else if (0 == opcode)
12245 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12246 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12247 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12248 record_buf_mem[0] = 2;
12249 record_buf_mem[1] = u_regval + (immed_5 * 2);
12250 thumb_insn_r->mem_rec_count = 1;
12253 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12254 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12260 /* Handling opcode 101 insns. */
12263 thumb_record_misc (insn_decode_record *thumb_insn_r)
12265 struct regcache *reg_cache = thumb_insn_r->regcache;
12267 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12268 uint32_t register_bits = 0, register_count = 0;
12269 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12270 uint32_t record_buf[24], record_buf_mem[48];
12273 ULONGEST u_regval = 0;
12275 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12276 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12277 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12282 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12283 while (register_bits)
12285 if (register_bits & 0x00000001)
12286 record_buf[index++] = register_count;
12287 register_bits = register_bits >> 1;
12290 record_buf[index++] = ARM_PS_REGNUM;
12291 record_buf[index++] = ARM_SP_REGNUM;
12292 thumb_insn_r->reg_rec_count = index;
12294 else if (10 == opcode2)
12297 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12298 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12299 while (register_bits)
12301 if (register_bits & 0x00000001)
12303 register_bits = register_bits >> 1;
12305 start_address = u_regval - \
12306 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12307 thumb_insn_r->mem_rec_count = register_count;
12308 while (register_count)
12310 record_buf_mem[(register_count * 2) - 1] = start_address;
12311 record_buf_mem[(register_count * 2) - 2] = 4;
12312 start_address = start_address + 4;
12315 record_buf[0] = ARM_SP_REGNUM;
12316 thumb_insn_r->reg_rec_count = 1;
12318 else if (0x1E == opcode1)
12321 /* Handle enhanced software breakpoint insn, BKPT. */
12322 /* CPSR is changed to be executed in ARM state, disabling normal
12323 interrupts, entering abort mode. */
12324 /* According to high vector configuration PC is set. */
12325 /* User hits breakpoint and type reverse, in that case, we need to go back with
12326 previous CPSR and Program Counter. */
12327 record_buf[0] = ARM_PS_REGNUM;
12328 record_buf[1] = ARM_LR_REGNUM;
12329 thumb_insn_r->reg_rec_count = 2;
12330 /* We need to save SPSR value, which is not yet done. */
12331 printf_unfiltered (_("Process record does not support instruction "
12332 "0x%0x at address %s.\n"),
12333 thumb_insn_r->arm_insn,
12334 paddress (thumb_insn_r->gdbarch,
12335 thumb_insn_r->this_addr));
12338 else if ((0 == opcode) || (1 == opcode))
12340 /* ADD(5), ADD(6). */
12341 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12342 record_buf[0] = reg_src1;
12343 thumb_insn_r->reg_rec_count = 1;
12345 else if (2 == opcode)
12347 /* ADD(7), SUB(4). */
12348 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12349 record_buf[0] = ARM_SP_REGNUM;
12350 thumb_insn_r->reg_rec_count = 1;
12353 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12354 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12360 /* Handling opcode 110 insns. */
12363 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12365 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12366 struct regcache *reg_cache = thumb_insn_r->regcache;
12368 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12369 uint32_t reg_src1 = 0;
12370 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12371 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12372 uint32_t record_buf[24], record_buf_mem[48];
12374 ULONGEST u_regval = 0;
12376 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12377 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12383 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12385 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12386 while (register_bits)
12388 if (register_bits & 0x00000001)
12389 record_buf[index++] = register_count;
12390 register_bits = register_bits >> 1;
12393 record_buf[index++] = reg_src1;
12394 thumb_insn_r->reg_rec_count = index;
12396 else if (0 == opcode2)
12398 /* It handles both STMIA. */
12399 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12401 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12402 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12403 while (register_bits)
12405 if (register_bits & 0x00000001)
12407 register_bits = register_bits >> 1;
12409 start_address = u_regval;
12410 thumb_insn_r->mem_rec_count = register_count;
12411 while (register_count)
12413 record_buf_mem[(register_count * 2) - 1] = start_address;
12414 record_buf_mem[(register_count * 2) - 2] = 4;
12415 start_address = start_address + 4;
12419 else if (0x1F == opcode1)
12421 /* Handle arm syscall insn. */
12422 if (tdep->arm_syscall_record != NULL)
12424 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12425 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12429 printf_unfiltered (_("no syscall record support\n"));
12434 /* B (1), conditional branch is automatically taken care in process_record,
12435 as PC is saved there. */
12437 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12438 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12444 /* Handling opcode 111 insns. */
12447 thumb_record_branch (insn_decode_record *thumb_insn_r)
12449 uint32_t record_buf[8];
12450 uint32_t bits_h = 0;
12452 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12454 if (2 == bits_h || 3 == bits_h)
12457 record_buf[0] = ARM_LR_REGNUM;
12458 thumb_insn_r->reg_rec_count = 1;
12460 else if (1 == bits_h)
12463 record_buf[0] = ARM_PS_REGNUM;
12464 record_buf[1] = ARM_LR_REGNUM;
12465 thumb_insn_r->reg_rec_count = 2;
12468 /* B(2) is automatically taken care in process_record, as PC is
12471 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12476 /* Handler for thumb2 load/store multiple instructions. */
12479 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12481 struct regcache *reg_cache = thumb2_insn_r->regcache;
12483 uint32_t reg_rn, op;
12484 uint32_t register_bits = 0, register_count = 0;
12485 uint32_t index = 0, start_address = 0;
12486 uint32_t record_buf[24], record_buf_mem[48];
12488 ULONGEST u_regval = 0;
12490 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12491 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12493 if (0 == op || 3 == op)
12495 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12497 /* Handle RFE instruction. */
12498 record_buf[0] = ARM_PS_REGNUM;
12499 thumb2_insn_r->reg_rec_count = 1;
12503 /* Handle SRS instruction after reading banked SP. */
12504 return arm_record_unsupported_insn (thumb2_insn_r);
12507 else if (1 == op || 2 == op)
12509 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12511 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12512 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12513 while (register_bits)
12515 if (register_bits & 0x00000001)
12516 record_buf[index++] = register_count;
12519 register_bits = register_bits >> 1;
12521 record_buf[index++] = reg_rn;
12522 record_buf[index++] = ARM_PS_REGNUM;
12523 thumb2_insn_r->reg_rec_count = index;
12527 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12528 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12529 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12530 while (register_bits)
12532 if (register_bits & 0x00000001)
12535 register_bits = register_bits >> 1;
12540 /* Start address calculation for LDMDB/LDMEA. */
12541 start_address = u_regval;
12545 /* Start address calculation for LDMDB/LDMEA. */
12546 start_address = u_regval - register_count * 4;
12549 thumb2_insn_r->mem_rec_count = register_count;
12550 while (register_count)
12552 record_buf_mem[register_count * 2 - 1] = start_address;
12553 record_buf_mem[register_count * 2 - 2] = 4;
12554 start_address = start_address + 4;
12557 record_buf[0] = reg_rn;
12558 record_buf[1] = ARM_PS_REGNUM;
12559 thumb2_insn_r->reg_rec_count = 2;
12563 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12565 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12567 return ARM_RECORD_SUCCESS;
12570 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12574 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12576 struct regcache *reg_cache = thumb2_insn_r->regcache;
12578 uint32_t reg_rd, reg_rn, offset_imm;
12579 uint32_t reg_dest1, reg_dest2;
12580 uint32_t address, offset_addr;
12581 uint32_t record_buf[8], record_buf_mem[8];
12582 uint32_t op1, op2, op3;
12585 ULONGEST u_regval[2];
12587 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12588 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12589 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12591 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12593 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12595 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12596 record_buf[0] = reg_dest1;
12597 record_buf[1] = ARM_PS_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 2;
12601 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12603 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12604 record_buf[2] = reg_dest2;
12605 thumb2_insn_r->reg_rec_count = 3;
12610 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12611 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12613 if (0 == op1 && 0 == op2)
12615 /* Handle STREX. */
12616 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12617 address = u_regval[0] + (offset_imm * 4);
12618 record_buf_mem[0] = 4;
12619 record_buf_mem[1] = address;
12620 thumb2_insn_r->mem_rec_count = 1;
12621 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12622 record_buf[0] = reg_rd;
12623 thumb2_insn_r->reg_rec_count = 1;
12625 else if (1 == op1 && 0 == op2)
12627 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12628 record_buf[0] = reg_rd;
12629 thumb2_insn_r->reg_rec_count = 1;
12630 address = u_regval[0];
12631 record_buf_mem[1] = address;
12635 /* Handle STREXB. */
12636 record_buf_mem[0] = 1;
12637 thumb2_insn_r->mem_rec_count = 1;
12641 /* Handle STREXH. */
12642 record_buf_mem[0] = 2 ;
12643 thumb2_insn_r->mem_rec_count = 1;
12647 /* Handle STREXD. */
12648 address = u_regval[0];
12649 record_buf_mem[0] = 4;
12650 record_buf_mem[2] = 4;
12651 record_buf_mem[3] = address + 4;
12652 thumb2_insn_r->mem_rec_count = 2;
12657 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12659 if (bit (thumb2_insn_r->arm_insn, 24))
12661 if (bit (thumb2_insn_r->arm_insn, 23))
12662 offset_addr = u_regval[0] + (offset_imm * 4);
12664 offset_addr = u_regval[0] - (offset_imm * 4);
12666 address = offset_addr;
12669 address = u_regval[0];
12671 record_buf_mem[0] = 4;
12672 record_buf_mem[1] = address;
12673 record_buf_mem[2] = 4;
12674 record_buf_mem[3] = address + 4;
12675 thumb2_insn_r->mem_rec_count = 2;
12676 record_buf[0] = reg_rn;
12677 thumb2_insn_r->reg_rec_count = 1;
12681 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12683 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12685 return ARM_RECORD_SUCCESS;
12688 /* Handler for thumb2 data processing (shift register and modified immediate)
12692 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12694 uint32_t reg_rd, op;
12695 uint32_t record_buf[8];
12697 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12698 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12700 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12702 record_buf[0] = ARM_PS_REGNUM;
12703 thumb2_insn_r->reg_rec_count = 1;
12707 record_buf[0] = reg_rd;
12708 record_buf[1] = ARM_PS_REGNUM;
12709 thumb2_insn_r->reg_rec_count = 2;
12712 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12714 return ARM_RECORD_SUCCESS;
12717 /* Generic handler for thumb2 instructions which effect destination and PS
12721 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12724 uint32_t record_buf[8];
12726 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12728 record_buf[0] = reg_rd;
12729 record_buf[1] = ARM_PS_REGNUM;
12730 thumb2_insn_r->reg_rec_count = 2;
12732 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12734 return ARM_RECORD_SUCCESS;
12737 /* Handler for thumb2 branch and miscellaneous control instructions. */
12740 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12742 uint32_t op, op1, op2;
12743 uint32_t record_buf[8];
12745 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12746 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12747 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12749 /* Handle MSR insn. */
12750 if (!(op1 & 0x2) && 0x38 == op)
12754 /* CPSR is going to be changed. */
12755 record_buf[0] = ARM_PS_REGNUM;
12756 thumb2_insn_r->reg_rec_count = 1;
12760 arm_record_unsupported_insn(thumb2_insn_r);
12764 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12767 record_buf[0] = ARM_PS_REGNUM;
12768 record_buf[1] = ARM_LR_REGNUM;
12769 thumb2_insn_r->reg_rec_count = 2;
12772 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12774 return ARM_RECORD_SUCCESS;
12777 /* Handler for thumb2 store single data item instructions. */
12780 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12782 struct regcache *reg_cache = thumb2_insn_r->regcache;
12784 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12785 uint32_t address, offset_addr;
12786 uint32_t record_buf[8], record_buf_mem[8];
12789 ULONGEST u_regval[2];
12791 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12792 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12793 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12794 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12796 if (bit (thumb2_insn_r->arm_insn, 23))
12799 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12800 offset_addr = u_regval[0] + offset_imm;
12801 address = offset_addr;
12806 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12808 /* Handle STRB (register). */
12809 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12810 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12811 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12812 offset_addr = u_regval[1] << shift_imm;
12813 address = u_regval[0] + offset_addr;
12817 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12818 if (bit (thumb2_insn_r->arm_insn, 10))
12820 if (bit (thumb2_insn_r->arm_insn, 9))
12821 offset_addr = u_regval[0] + offset_imm;
12823 offset_addr = u_regval[0] - offset_imm;
12825 address = offset_addr;
12828 address = u_regval[0];
12834 /* Store byte instructions. */
12837 record_buf_mem[0] = 1;
12839 /* Store half word instructions. */
12842 record_buf_mem[0] = 2;
12844 /* Store word instructions. */
12847 record_buf_mem[0] = 4;
12851 gdb_assert_not_reached ("no decoding pattern found");
12855 record_buf_mem[1] = address;
12856 thumb2_insn_r->mem_rec_count = 1;
12857 record_buf[0] = reg_rn;
12858 thumb2_insn_r->reg_rec_count = 1;
12860 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12862 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12864 return ARM_RECORD_SUCCESS;
12867 /* Handler for thumb2 load memory hints instructions. */
12870 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12872 uint32_t record_buf[8];
12873 uint32_t reg_rt, reg_rn;
12875 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12876 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12878 if (ARM_PC_REGNUM != reg_rt)
12880 record_buf[0] = reg_rt;
12881 record_buf[1] = reg_rn;
12882 record_buf[2] = ARM_PS_REGNUM;
12883 thumb2_insn_r->reg_rec_count = 3;
12885 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12887 return ARM_RECORD_SUCCESS;
12890 return ARM_RECORD_FAILURE;
12893 /* Handler for thumb2 load word instructions. */
12896 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12898 uint32_t opcode1 = 0, opcode2 = 0;
12899 uint32_t record_buf[8];
12901 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12902 record_buf[1] = ARM_PS_REGNUM;
12903 thumb2_insn_r->reg_rec_count = 2;
12905 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12907 return ARM_RECORD_SUCCESS;
12910 /* Handler for thumb2 long multiply, long multiply accumulate, and
12911 divide instructions. */
12914 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12916 uint32_t opcode1 = 0, opcode2 = 0;
12917 uint32_t record_buf[8];
12918 uint32_t reg_src1 = 0;
12920 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12921 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12923 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12925 /* Handle SMULL, UMULL, SMULAL. */
12926 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12927 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12928 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12929 record_buf[2] = ARM_PS_REGNUM;
12930 thumb2_insn_r->reg_rec_count = 3;
12932 else if (1 == opcode1 || 3 == opcode2)
12934 /* Handle SDIV and UDIV. */
12935 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12936 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12937 record_buf[2] = ARM_PS_REGNUM;
12938 thumb2_insn_r->reg_rec_count = 3;
12941 return ARM_RECORD_FAILURE;
12943 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12945 return ARM_RECORD_SUCCESS;
12948 /* Decodes thumb2 instruction type and invokes its record handler. */
12950 static unsigned int
12951 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12953 uint32_t op, op1, op2;
12955 op = bit (thumb2_insn_r->arm_insn, 15);
12956 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12957 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12961 if (!(op2 & 0x64 ))
12963 /* Load/store multiple instruction. */
12964 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12966 else if (!((op2 & 0x64) ^ 0x04))
12968 /* Load/store (dual/exclusive) and table branch instruction. */
12969 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12971 else if (!((op2 & 0x20) ^ 0x20))
12973 /* Data-processing (shifted register). */
12974 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12976 else if (op2 & 0x40)
12978 /* Co-processor instructions. */
12979 arm_record_unsupported_insn (thumb2_insn_r);
12982 else if (op1 == 0x02)
12986 /* Branches and miscellaneous control instructions. */
12987 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12989 else if (op2 & 0x20)
12991 /* Data-processing (plain binary immediate) instruction. */
12992 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12996 /* Data-processing (modified immediate). */
12997 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13000 else if (op1 == 0x03)
13002 if (!(op2 & 0x71 ))
13004 /* Store single data item. */
13005 return thumb2_record_str_single_data (thumb2_insn_r);
13007 else if (!((op2 & 0x71) ^ 0x10))
13009 /* Advanced SIMD or structure load/store instructions. */
13010 return arm_record_unsupported_insn (thumb2_insn_r);
13012 else if (!((op2 & 0x67) ^ 0x01))
13014 /* Load byte, memory hints instruction. */
13015 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13017 else if (!((op2 & 0x67) ^ 0x03))
13019 /* Load halfword, memory hints instruction. */
13020 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13022 else if (!((op2 & 0x67) ^ 0x05))
13024 /* Load word instruction. */
13025 return thumb2_record_ld_word (thumb2_insn_r);
13027 else if (!((op2 & 0x70) ^ 0x20))
13029 /* Data-processing (register) instruction. */
13030 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13032 else if (!((op2 & 0x78) ^ 0x30))
13034 /* Multiply, multiply accumulate, abs diff instruction. */
13035 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13037 else if (!((op2 & 0x78) ^ 0x38))
13039 /* Long multiply, long multiply accumulate, and divide. */
13040 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13042 else if (op2 & 0x40)
13044 /* Co-processor instructions. */
13045 return arm_record_unsupported_insn (thumb2_insn_r);
13052 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13053 and positive val on fauilure. */
13056 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13058 gdb_byte buf[insn_size];
13060 memset (&buf[0], 0, insn_size);
13062 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13064 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13066 gdbarch_byte_order (insn_record->gdbarch));
13070 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13072 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13076 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13077 uint32_t insn_size)
13080 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13081 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13083 arm_record_data_proc_misc_ld_str, /* 000. */
13084 arm_record_data_proc_imm, /* 001. */
13085 arm_record_ld_st_imm_offset, /* 010. */
13086 arm_record_ld_st_reg_offset, /* 011. */
13087 arm_record_ld_st_multiple, /* 100. */
13088 arm_record_b_bl, /* 101. */
13089 arm_record_unsupported_insn, /* 110. */
13090 arm_record_coproc_data_proc /* 111. */
13093 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13094 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13096 thumb_record_shift_add_sub, /* 000. */
13097 thumb_record_add_sub_cmp_mov, /* 001. */
13098 thumb_record_ld_st_reg_offset, /* 010. */
13099 thumb_record_ld_st_imm_offset, /* 011. */
13100 thumb_record_ld_st_stack, /* 100. */
13101 thumb_record_misc, /* 101. */
13102 thumb_record_ldm_stm_swi, /* 110. */
13103 thumb_record_branch /* 111. */
13106 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13107 uint32_t insn_id = 0;
13109 if (extract_arm_insn (arm_record, insn_size))
13113 printf_unfiltered (_("Process record: error reading memory at "
13114 "addr %s len = %d.\n"),
13115 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13119 else if (ARM_RECORD == record_type)
13121 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13122 insn_id = bits (arm_record->arm_insn, 25, 27);
13123 ret = arm_record_extension_space (arm_record);
13124 /* If this insn has fallen into extension space
13125 then we need not decode it anymore. */
13126 if (ret != -1 && !INSN_RECORDED(arm_record))
13128 ret = arm_handle_insn[insn_id] (arm_record);
13131 else if (THUMB_RECORD == record_type)
13133 /* As thumb does not have condition codes, we set negative. */
13134 arm_record->cond = -1;
13135 insn_id = bits (arm_record->arm_insn, 13, 15);
13136 ret = thumb_handle_insn[insn_id] (arm_record);
13138 else if (THUMB2_RECORD == record_type)
13140 /* As thumb does not have condition codes, we set negative. */
13141 arm_record->cond = -1;
13143 /* Swap first half of 32bit thumb instruction with second half. */
13144 arm_record->arm_insn
13145 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13147 insn_id = thumb2_record_decode_insn_handler (arm_record);
13149 if (insn_id != ARM_RECORD_SUCCESS)
13151 arm_record_unsupported_insn (arm_record);
13157 /* Throw assertion. */
13158 gdb_assert_not_reached ("not a valid instruction, could not decode");
13165 /* Cleans up local record registers and memory allocations. */
13168 deallocate_reg_mem (insn_decode_record *record)
13170 xfree (record->arm_regs);
13171 xfree (record->arm_mems);
13175 /* Parse the current instruction and record the values of the registers and
13176 memory that will be changed in current instruction to record_arch_list".
13177 Return -1 if something is wrong. */
13180 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13181 CORE_ADDR insn_addr)
13184 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13185 uint32_t no_of_rec = 0;
13186 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13187 ULONGEST t_bit = 0, insn_id = 0;
13189 ULONGEST u_regval = 0;
13191 insn_decode_record arm_record;
13193 memset (&arm_record, 0, sizeof (insn_decode_record));
13194 arm_record.regcache = regcache;
13195 arm_record.this_addr = insn_addr;
13196 arm_record.gdbarch = gdbarch;
13199 if (record_debug > 1)
13201 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13203 paddress (gdbarch, arm_record.this_addr));
13206 if (extract_arm_insn (&arm_record, 2))
13210 printf_unfiltered (_("Process record: error reading memory at "
13211 "addr %s len = %d.\n"),
13212 paddress (arm_record.gdbarch,
13213 arm_record.this_addr), 2);
13218 /* Check the insn, whether it is thumb or arm one. */
13220 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13221 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13224 if (!(u_regval & t_bit))
13226 /* We are decoding arm insn. */
13227 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13231 insn_id = bits (arm_record.arm_insn, 11, 15);
13232 /* is it thumb2 insn? */
13233 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13235 ret = decode_insn (&arm_record, THUMB2_RECORD,
13236 THUMB2_INSN_SIZE_BYTES);
13240 /* We are decoding thumb insn. */
13241 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13247 /* Record registers. */
13248 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13249 if (arm_record.arm_regs)
13251 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13253 if (record_full_arch_list_add_reg
13254 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13258 /* Record memories. */
13259 if (arm_record.arm_mems)
13261 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13263 if (record_full_arch_list_add_mem
13264 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13265 arm_record.arm_mems[no_of_rec].len))
13270 if (record_full_arch_list_add_end ())
13275 deallocate_reg_mem (&arm_record);