1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "gdb/sim-arm.h"
53 #include "coff/internal.h"
56 #include "gdb_assert.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
240 static int thumb_insn_size (unsigned short inst1);
242 struct arm_prologue_cache
244 /* The stack pointer at the time this frame was created; i.e. the
245 caller's stack pointer when this function was called. It is used
246 to identify this frame. */
249 /* The frame base for this frame is just prev_sp - frame size.
250 FRAMESIZE is the distance from the frame pointer to the
251 initial stack pointer. */
255 /* The register used to hold the frame pointer for this frame. */
258 /* Saved register offsets. */
259 struct trad_frame_saved_reg *saved_regs;
262 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
263 CORE_ADDR prologue_start,
264 CORE_ADDR prologue_end,
265 struct arm_prologue_cache *cache);
267 /* Architecture version for displaced stepping. This effects the behaviour of
268 certain instructions, and really should not be hard-wired. */
270 #define DISPLACED_STEPPING_ARCH_VERSION 5
272 /* Addresses for calling Thumb functions have the bit 0 set.
273 Here are some macros to test, set, or clear bit 0 of addresses. */
274 #define IS_THUMB_ADDR(addr) ((addr) & 1)
275 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
276 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
278 /* Set to true if the 32-bit mode is in use. */
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
285 arm_psr_thumb_bit (struct gdbarch *gdbarch)
287 if (gdbarch_tdep (gdbarch)->is_m)
293 /* Determine if FRAME is executing in Thumb mode. */
296 arm_frame_is_thumb (struct frame_info *frame)
299 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
301 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
302 directly (from a signal frame or dummy frame) or by interpreting
303 the saved LR (from a prologue or DWARF frame). So consult it and
304 trust the unwinders. */
305 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
307 return (cpsr & t_bit) != 0;
310 /* Callback for VEC_lower_bound. */
313 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
314 const struct arm_mapping_symbol *rhs)
316 return lhs->value < rhs->value;
319 /* Search for the mapping symbol covering MEMADDR. If one is found,
320 return its type. Otherwise, return 0. If START is non-NULL,
321 set *START to the location of the mapping symbol. */
324 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
326 struct obj_section *sec;
328 /* If there are mapping symbols, consult them. */
329 sec = find_pc_section (memaddr);
332 struct arm_per_objfile *data;
333 VEC(arm_mapping_symbol_s) *map;
334 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
338 data = objfile_data (sec->objfile, arm_objfile_data_key);
341 map = data->section_maps[sec->the_bfd_section->index];
342 if (!VEC_empty (arm_mapping_symbol_s, map))
344 struct arm_mapping_symbol *map_sym;
346 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
347 arm_compare_mapping_symbols);
349 /* VEC_lower_bound finds the earliest ordered insertion
350 point. If the following symbol starts at this exact
351 address, we use that; otherwise, the preceding
352 mapping symbol covers this address. */
353 if (idx < VEC_length (arm_mapping_symbol_s, map))
355 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
356 if (map_sym->value == map_key.value)
359 *start = map_sym->value + obj_section_addr (sec);
360 return map_sym->type;
366 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
368 *start = map_sym->value + obj_section_addr (sec);
369 return map_sym->type;
378 /* Determine if the program counter specified in MEMADDR is in a Thumb
379 function. This function should be called for addresses unrelated to
380 any executing frame; otherwise, prefer arm_frame_is_thumb. */
383 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
385 struct bound_minimal_symbol sym;
387 struct displaced_step_closure* dsc
388 = get_displaced_step_closure_by_addr(memaddr);
390 /* If checking the mode of displaced instruction in copy area, the mode
391 should be determined by instruction on the original address. */
395 fprintf_unfiltered (gdb_stdlog,
396 "displaced: check mode of %.8lx instead of %.8lx\n",
397 (unsigned long) dsc->insn_addr,
398 (unsigned long) memaddr);
399 memaddr = dsc->insn_addr;
402 /* If bit 0 of the address is set, assume this is a Thumb address. */
403 if (IS_THUMB_ADDR (memaddr))
406 /* Respect internal mode override if active. */
407 if (arm_override_mode != -1)
408 return arm_override_mode;
410 /* If the user wants to override the symbol table, let him. */
411 if (strcmp (arm_force_mode_string, "arm") == 0)
413 if (strcmp (arm_force_mode_string, "thumb") == 0)
416 /* ARM v6-M and v7-M are always in Thumb mode. */
417 if (gdbarch_tdep (gdbarch)->is_m)
420 /* If there are mapping symbols, consult them. */
421 type = arm_find_mapping_symbol (memaddr, NULL);
425 /* Thumb functions have a "special" bit set in minimal symbols. */
426 sym = lookup_minimal_symbol_by_pc (memaddr);
428 return (MSYMBOL_IS_SPECIAL (sym.minsym));
430 /* If the user wants to override the fallback mode, let them. */
431 if (strcmp (arm_fallback_mode_string, "arm") == 0)
433 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
436 /* If we couldn't find any symbol, but we're talking to a running
437 target, then trust the current value of $cpsr. This lets
438 "display/i $pc" always show the correct mode (though if there is
439 a symbol table we will not reach here, so it still may not be
440 displayed in the mode it will be executed). */
441 if (target_has_registers)
442 return arm_frame_is_thumb (get_current_frame ());
444 /* Otherwise we're out of luck; we assume ARM. */
448 /* Remove useless bits from addresses in a running program. */
450 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
452 /* On M-profile devices, do not strip the low bit from EXC_RETURN
453 (the magic exception return address). */
454 if (gdbarch_tdep (gdbarch)->is_m
455 && (val & 0xfffffff0) == 0xfffffff0)
459 return UNMAKE_THUMB_ADDR (val);
461 return (val & 0x03fffffc);
464 /* Return 1 if PC is the start of a compiler helper function which
465 can be safely ignored during prologue skipping. IS_THUMB is true
466 if the function is known to be a Thumb function due to the way it
469 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
471 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
472 struct bound_minimal_symbol msym;
474 msym = lookup_minimal_symbol_by_pc (pc);
475 if (msym.minsym != NULL
476 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
477 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
479 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
481 /* The GNU linker's Thumb call stub to foo is named
483 if (strstr (name, "_from_thumb") != NULL)
486 /* On soft-float targets, __truncdfsf2 is called to convert promoted
487 arguments to their argument types in non-prototyped
489 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
491 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
494 /* Internal functions related to thread-local storage. */
495 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
497 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
502 /* If we run against a stripped glibc, we may be unable to identify
503 special functions by name. Check for one important case,
504 __aeabi_read_tp, by comparing the *code* against the default
505 implementation (this is hand-written ARM assembler in glibc). */
508 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
509 == 0xe3e00a0f /* mov r0, #0xffff0fff */
510 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
511 == 0xe240f01f) /* sub pc, r0, #31 */
518 /* Support routines for instruction parsing. */
519 #define submask(x) ((1L << ((x) + 1)) - 1)
520 #define bit(obj,st) (((obj) >> (st)) & 1)
521 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
522 #define sbits(obj,st,fn) \
523 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
524 #define BranchDest(addr,instr) \
525 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
527 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
528 the first 16-bit of instruction, and INSN2 is the second 16-bit of
530 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
531 ((bits ((insn1), 0, 3) << 12) \
532 | (bits ((insn1), 10, 10) << 11) \
533 | (bits ((insn2), 12, 14) << 8) \
534 | bits ((insn2), 0, 7))
536 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
537 the 32-bit instruction. */
538 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
539 ((bits ((insn), 16, 19) << 12) \
540 | bits ((insn), 0, 11))
542 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
545 thumb_expand_immediate (unsigned int imm)
547 unsigned int count = imm >> 7;
555 return (imm & 0xff) | ((imm & 0xff) << 16);
557 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
559 return (imm & 0xff) | ((imm & 0xff) << 8)
560 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
563 return (0x80 | (imm & 0x7f)) << (32 - count);
566 /* Return 1 if the 16-bit Thumb instruction INST might change
567 control flow, 0 otherwise. */
570 thumb_instruction_changes_pc (unsigned short inst)
572 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
575 if ((inst & 0xf000) == 0xd000) /* conditional branch */
578 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
581 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
584 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
587 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
593 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
594 might change control flow, 0 otherwise. */
597 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
599 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
601 /* Branches and miscellaneous control instructions. */
603 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
608 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
610 /* SUBS PC, LR, #imm8. */
613 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
615 /* Conditional branch. */
622 if ((inst1 & 0xfe50) == 0xe810)
624 /* Load multiple or RFE. */
626 if (bit (inst1, 7) && !bit (inst1, 8))
632 else if (!bit (inst1, 7) && bit (inst1, 8))
638 else if (bit (inst1, 7) && bit (inst1, 8))
643 else if (!bit (inst1, 7) && !bit (inst1, 8))
652 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
654 /* MOV PC or MOVS PC. */
658 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
661 if (bits (inst1, 0, 3) == 15)
667 if ((inst2 & 0x0fc0) == 0x0000)
673 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
679 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
688 /* Analyze a Thumb prologue, looking for a recognizable stack frame
689 and frame pointer. Scan until we encounter a store that could
690 clobber the stack frame unexpectedly, or an unknown instruction.
691 Return the last address which is definitely safe to skip for an
692 initial breakpoint. */
695 thumb_analyze_prologue (struct gdbarch *gdbarch,
696 CORE_ADDR start, CORE_ADDR limit,
697 struct arm_prologue_cache *cache)
699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
703 struct pv_area *stack;
704 struct cleanup *back_to;
706 CORE_ADDR unrecognized_pc = 0;
708 for (i = 0; i < 16; i++)
709 regs[i] = pv_register (i, 0);
710 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
711 back_to = make_cleanup_free_pv_area (stack);
713 while (start < limit)
717 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
724 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
737 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
740 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
742 offset = (insn & 0x7f) << 2; /* get scaled offset */
743 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
746 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
747 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
749 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
750 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
751 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
753 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
754 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
755 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
757 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
758 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
759 && pv_is_constant (regs[bits (insn, 3, 5)]))
760 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
761 regs[bits (insn, 6, 8)]);
762 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
763 && pv_is_constant (regs[bits (insn, 3, 6)]))
765 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
766 int rm = bits (insn, 3, 6);
767 regs[rd] = pv_add (regs[rd], regs[rm]);
769 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
771 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
772 int src_reg = (insn & 0x78) >> 3;
773 regs[dst_reg] = regs[src_reg];
775 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
777 /* Handle stores to the stack. Normally pushes are used,
778 but with GCC -mtpcs-frame, there may be other stores
779 in the prologue to create the frame. */
780 int regno = (insn >> 8) & 0x7;
783 offset = (insn & 0xff) << 2;
784 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
786 if (pv_area_store_would_trash (stack, addr))
789 pv_area_store (stack, addr, 4, regs[regno]);
791 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
793 int rd = bits (insn, 0, 2);
794 int rn = bits (insn, 3, 5);
797 offset = bits (insn, 6, 10) << 2;
798 addr = pv_add_constant (regs[rn], offset);
800 if (pv_area_store_would_trash (stack, addr))
803 pv_area_store (stack, addr, 4, regs[rd]);
805 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
806 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
807 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
808 /* Ignore stores of argument registers to the stack. */
810 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
811 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
812 /* Ignore block loads from the stack, potentially copying
813 parameters from memory. */
815 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
816 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
817 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
818 /* Similarly ignore single loads from the stack. */
820 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
821 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
822 /* Skip register copies, i.e. saves to another register
823 instead of the stack. */
825 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
826 /* Recognize constant loads; even with small stacks these are necessary
828 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
829 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
831 /* Constant pool loads, for the same reason. */
832 unsigned int constant;
835 loc = start + 4 + bits (insn, 0, 7) * 4;
836 constant = read_memory_unsigned_integer (loc, 4, byte_order);
837 regs[bits (insn, 8, 10)] = pv_constant (constant);
839 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
841 unsigned short inst2;
843 inst2 = read_memory_unsigned_integer (start + 2, 2,
844 byte_order_for_code);
846 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
848 /* BL, BLX. Allow some special function calls when
849 skipping the prologue; GCC generates these before
850 storing arguments to the stack. */
852 int j1, j2, imm1, imm2;
854 imm1 = sbits (insn, 0, 10);
855 imm2 = bits (inst2, 0, 10);
856 j1 = bit (inst2, 13);
857 j2 = bit (inst2, 11);
859 offset = ((imm1 << 12) + (imm2 << 1));
860 offset ^= ((!j2) << 22) | ((!j1) << 23);
862 nextpc = start + 4 + offset;
863 /* For BLX make sure to clear the low bits. */
864 if (bit (inst2, 12) == 0)
865 nextpc = nextpc & 0xfffffffc;
867 if (!skip_prologue_function (gdbarch, nextpc,
868 bit (inst2, 12) != 0))
872 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
874 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
876 pv_t addr = regs[bits (insn, 0, 3)];
879 if (pv_area_store_would_trash (stack, addr))
882 /* Calculate offsets of saved registers. */
883 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
884 if (inst2 & (1 << regno))
886 addr = pv_add_constant (addr, -4);
887 pv_area_store (stack, addr, 4, regs[regno]);
891 regs[bits (insn, 0, 3)] = addr;
894 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
896 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
898 int regno1 = bits (inst2, 12, 15);
899 int regno2 = bits (inst2, 8, 11);
900 pv_t addr = regs[bits (insn, 0, 3)];
902 offset = inst2 & 0xff;
904 addr = pv_add_constant (addr, offset);
906 addr = pv_add_constant (addr, -offset);
908 if (pv_area_store_would_trash (stack, addr))
911 pv_area_store (stack, addr, 4, regs[regno1]);
912 pv_area_store (stack, pv_add_constant (addr, 4),
916 regs[bits (insn, 0, 3)] = addr;
919 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
920 && (inst2 & 0x0c00) == 0x0c00
921 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
923 int regno = bits (inst2, 12, 15);
924 pv_t addr = regs[bits (insn, 0, 3)];
926 offset = inst2 & 0xff;
928 addr = pv_add_constant (addr, offset);
930 addr = pv_add_constant (addr, -offset);
932 if (pv_area_store_would_trash (stack, addr))
935 pv_area_store (stack, addr, 4, regs[regno]);
938 regs[bits (insn, 0, 3)] = addr;
941 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
942 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 int regno = bits (inst2, 12, 15);
947 offset = inst2 & 0xfff;
948 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
950 if (pv_area_store_would_trash (stack, addr))
953 pv_area_store (stack, addr, 4, regs[regno]);
956 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
957 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 /* Ignore stores of argument registers to the stack. */
961 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
962 && (inst2 & 0x0d00) == 0x0c00
963 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
964 /* Ignore stores of argument registers to the stack. */
967 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
969 && (inst2 & 0x8000) == 0x0000
970 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
971 /* Ignore block loads from the stack, potentially copying
972 parameters from memory. */
975 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Similarly ignore dual loads from the stack. */
981 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
982 && (inst2 & 0x0d00) == 0x0c00
983 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
984 /* Similarly ignore single loads from the stack. */
987 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
992 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
993 && (inst2 & 0x8000) == 0x0000)
995 unsigned int imm = ((bits (insn, 10, 10) << 11)
996 | (bits (inst2, 12, 14) << 8)
997 | bits (inst2, 0, 7));
999 regs[bits (inst2, 8, 11)]
1000 = pv_add_constant (regs[bits (insn, 0, 3)],
1001 thumb_expand_immediate (imm));
1004 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1005 && (inst2 & 0x8000) == 0x0000)
1007 unsigned int imm = ((bits (insn, 10, 10) << 11)
1008 | (bits (inst2, 12, 14) << 8)
1009 | bits (inst2, 0, 7));
1011 regs[bits (inst2, 8, 11)]
1012 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1015 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1016 && (inst2 & 0x8000) == 0x0000)
1018 unsigned int imm = ((bits (insn, 10, 10) << 11)
1019 | (bits (inst2, 12, 14) << 8)
1020 | bits (inst2, 0, 7));
1022 regs[bits (inst2, 8, 11)]
1023 = pv_add_constant (regs[bits (insn, 0, 3)],
1024 - (CORE_ADDR) thumb_expand_immediate (imm));
1027 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1028 && (inst2 & 0x8000) == 0x0000)
1030 unsigned int imm = ((bits (insn, 10, 10) << 11)
1031 | (bits (inst2, 12, 14) << 8)
1032 | bits (inst2, 0, 7));
1034 regs[bits (inst2, 8, 11)]
1035 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1038 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1040 unsigned int imm = ((bits (insn, 10, 10) << 11)
1041 | (bits (inst2, 12, 14) << 8)
1042 | bits (inst2, 0, 7));
1044 regs[bits (inst2, 8, 11)]
1045 = pv_constant (thumb_expand_immediate (imm));
1048 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1051 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1053 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1056 else if (insn == 0xea5f /* mov.w Rd,Rm */
1057 && (inst2 & 0xf0f0) == 0)
1059 int dst_reg = (inst2 & 0x0f00) >> 8;
1060 int src_reg = inst2 & 0xf;
1061 regs[dst_reg] = regs[src_reg];
1064 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1066 /* Constant pool loads. */
1067 unsigned int constant;
1070 offset = bits (inst2, 0, 11);
1072 loc = start + 4 + offset;
1074 loc = start + 4 - offset;
1076 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1077 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1080 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1082 /* Constant pool loads. */
1083 unsigned int constant;
1086 offset = bits (inst2, 0, 7) << 2;
1088 loc = start + 4 + offset;
1090 loc = start + 4 - offset;
1092 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1093 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1095 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1096 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1099 else if (thumb2_instruction_changes_pc (insn, inst2))
1101 /* Don't scan past anything that might change control flow. */
1106 /* The optimizer might shove anything into the prologue,
1107 so we just skip what we don't recognize. */
1108 unrecognized_pc = start;
1113 else if (thumb_instruction_changes_pc (insn))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1129 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1130 paddress (gdbarch, start));
1132 if (unrecognized_pc == 0)
1133 unrecognized_pc = start;
1137 do_cleanups (back_to);
1138 return unrecognized_pc;
1141 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1143 /* Frame pointer is fp. Frame size is constant. */
1144 cache->framereg = ARM_FP_REGNUM;
1145 cache->framesize = -regs[ARM_FP_REGNUM].k;
1147 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1149 /* Frame pointer is r7. Frame size is constant. */
1150 cache->framereg = THUMB_FP_REGNUM;
1151 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1155 /* Try the stack pointer... this is a bit desperate. */
1156 cache->framereg = ARM_SP_REGNUM;
1157 cache->framesize = -regs[ARM_SP_REGNUM].k;
1160 for (i = 0; i < 16; i++)
1161 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1162 cache->saved_regs[i].addr = offset;
1164 do_cleanups (back_to);
1165 return unrecognized_pc;
1169 /* Try to analyze the instructions starting from PC, which load symbol
1170 __stack_chk_guard. Return the address of instruction after loading this
1171 symbol, set the dest register number to *BASEREG, and set the size of
1172 instructions for loading symbol in OFFSET. Return 0 if instructions are
1176 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1177 unsigned int *destreg, int *offset)
1179 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1180 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1181 unsigned int low, high, address;
1186 unsigned short insn1
1187 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1189 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1191 *destreg = bits (insn1, 8, 10);
1193 address = bits (insn1, 0, 7);
1195 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1197 unsigned short insn2
1198 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1200 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1203 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1205 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1207 /* movt Rd, #const */
1208 if ((insn1 & 0xfbc0) == 0xf2c0)
1210 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1211 *destreg = bits (insn2, 8, 11);
1213 address = (high << 16 | low);
1220 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1222 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1224 address = bits (insn, 0, 11);
1225 *destreg = bits (insn, 12, 15);
1228 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1230 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1233 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1235 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1237 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1238 *destreg = bits (insn, 12, 15);
1240 address = (high << 16 | low);
1248 /* Try to skip a sequence of instructions used for stack protector. If PC
1249 points to the first instruction of this sequence, return the address of
1250 first instruction after this sequence, otherwise, return original PC.
1252 On arm, this sequence of instructions is composed of mainly three steps,
1253 Step 1: load symbol __stack_chk_guard,
1254 Step 2: load from address of __stack_chk_guard,
1255 Step 3: store it to somewhere else.
1257 Usually, instructions on step 2 and step 3 are the same on various ARM
1258 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1259 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1260 instructions in step 1 vary from different ARM architectures. On ARMv7,
1263 movw Rn, #:lower16:__stack_chk_guard
1264 movt Rn, #:upper16:__stack_chk_guard
1271 .word __stack_chk_guard
1273 Since ldr/str is a very popular instruction, we can't use them as
1274 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1275 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1276 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1279 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1281 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1282 unsigned int basereg;
1283 struct bound_minimal_symbol stack_chk_guard;
1285 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1288 /* Try to parse the instructions in Step 1. */
1289 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1294 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1295 /* If name of symbol doesn't start with '__stack_chk_guard', this
1296 instruction sequence is not for stack protector. If symbol is
1297 removed, we conservatively think this sequence is for stack protector. */
1298 if (stack_chk_guard.minsym
1299 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1300 "__stack_chk_guard",
1301 strlen ("__stack_chk_guard")) != 0)
1306 unsigned int destreg;
1308 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1310 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1311 if ((insn & 0xf800) != 0x6800)
1313 if (bits (insn, 3, 5) != basereg)
1315 destreg = bits (insn, 0, 2);
1317 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1318 byte_order_for_code);
1319 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1320 if ((insn & 0xf800) != 0x6000)
1322 if (destreg != bits (insn, 0, 2))
1327 unsigned int destreg;
1329 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1331 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1332 if ((insn & 0x0e500000) != 0x04100000)
1334 if (bits (insn, 16, 19) != basereg)
1336 destreg = bits (insn, 12, 15);
1337 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1338 insn = read_memory_unsigned_integer (pc + offset + 4,
1339 4, byte_order_for_code);
1340 if ((insn & 0x0e500000) != 0x04000000)
1342 if (bits (insn, 12, 15) != destreg)
1345 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1348 return pc + offset + 4;
1350 return pc + offset + 8;
1353 /* Advance the PC across any function entry prologue instructions to
1354 reach some "real" code.
1356 The APCS (ARM Procedure Call Standard) defines the following
1360 [stmfd sp!, {a1,a2,a3,a4}]
1361 stmfd sp!, {...,fp,ip,lr,pc}
1362 [stfe f7, [sp, #-12]!]
1363 [stfe f6, [sp, #-12]!]
1364 [stfe f5, [sp, #-12]!]
1365 [stfe f4, [sp, #-12]!]
1366 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1369 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1371 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1374 CORE_ADDR func_addr, limit_pc;
1376 /* See if we can determine the end of the prologue via the symbol table.
1377 If so, then return either PC, or the PC after the prologue, whichever
1379 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1381 CORE_ADDR post_prologue_pc
1382 = skip_prologue_using_sal (gdbarch, func_addr);
1383 struct symtab *s = find_pc_symtab (func_addr);
1385 if (post_prologue_pc)
1387 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1390 /* GCC always emits a line note before the prologue and another
1391 one after, even if the two are at the same address or on the
1392 same line. Take advantage of this so that we do not need to
1393 know every instruction that might appear in the prologue. We
1394 will have producer information for most binaries; if it is
1395 missing (e.g. for -gstabs), assuming the GNU tools. */
1396 if (post_prologue_pc
1398 || s->producer == NULL
1399 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1400 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1401 return post_prologue_pc;
1403 if (post_prologue_pc != 0)
1405 CORE_ADDR analyzed_limit;
1407 /* For non-GCC compilers, make sure the entire line is an
1408 acceptable prologue; GDB will round this function's
1409 return value up to the end of the following line so we
1410 can not skip just part of a line (and we do not want to).
1412 RealView does not treat the prologue specially, but does
1413 associate prologue code with the opening brace; so this
1414 lets us skip the first line if we think it is the opening
1416 if (arm_pc_is_thumb (gdbarch, func_addr))
1417 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1418 post_prologue_pc, NULL);
1420 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1421 post_prologue_pc, NULL);
1423 if (analyzed_limit != post_prologue_pc)
1426 return post_prologue_pc;
1430 /* Can't determine prologue from the symbol table, need to examine
1433 /* Find an upper limit on the function prologue using the debug
1434 information. If the debug information could not be used to provide
1435 that bound, then use an arbitrary large number as the upper bound. */
1436 /* Like arm_scan_prologue, stop no later than pc + 64. */
1437 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1439 limit_pc = pc + 64; /* Magic. */
1442 /* Check if this is Thumb code. */
1443 if (arm_pc_is_thumb (gdbarch, pc))
1444 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1446 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1448 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1450 /* "mov ip, sp" is no longer a required part of the prologue. */
1451 if (inst == 0xe1a0c00d) /* mov ip, sp */
1454 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1457 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1460 /* Some prologues begin with "str lr, [sp, #-4]!". */
1461 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1464 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1467 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1470 /* Any insns after this point may float into the code, if it makes
1471 for better instruction scheduling, so we skip them only if we
1472 find them, but still consider the function to be frame-ful. */
1474 /* We may have either one sfmfd instruction here, or several stfe
1475 insns, depending on the version of floating point code we
1477 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1480 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1483 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1486 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1489 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1490 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1491 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1494 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1495 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1496 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1499 /* Un-recognized instruction; stop scanning. */
1503 return skip_pc; /* End of prologue. */
1507 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1508 This function decodes a Thumb function prologue to determine:
1509 1) the size of the stack frame
1510 2) which registers are saved on it
1511 3) the offsets of saved regs
1512 4) the offset from the stack pointer to the frame pointer
1514 A typical Thumb function prologue would create this stack frame
1515 (offsets relative to FP)
1516 old SP -> 24 stack parameters
1519 R7 -> 0 local variables (16 bytes)
1520 SP -> -12 additional stack space (12 bytes)
1521 The frame size would thus be 36 bytes, and the frame offset would be
1522 12 bytes. The frame register is R7.
1524 The comments for thumb_skip_prolog() describe the algorithm we use
1525 to detect the end of the prolog. */
1529 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1530 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1532 CORE_ADDR prologue_start;
1533 CORE_ADDR prologue_end;
1535 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1538 /* See comment in arm_scan_prologue for an explanation of
1540 if (prologue_end > prologue_start + 64)
1542 prologue_end = prologue_start + 64;
1546 /* We're in the boondocks: we have no idea where the start of the
1550 prologue_end = min (prologue_end, prev_pc);
1552 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1555 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1558 arm_instruction_changes_pc (uint32_t this_instr)
1560 if (bits (this_instr, 28, 31) == INST_NV)
1561 /* Unconditional instructions. */
1562 switch (bits (this_instr, 24, 27))
1566 /* Branch with Link and change to Thumb. */
1571 /* Coprocessor register transfer. */
1572 if (bits (this_instr, 12, 15) == 15)
1573 error (_("Invalid update to pc in instruction"));
1579 switch (bits (this_instr, 25, 27))
1582 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1584 /* Multiplies and extra load/stores. */
1585 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1586 /* Neither multiplies nor extension load/stores are allowed
1590 /* Otherwise, miscellaneous instructions. */
1592 /* BX <reg>, BXJ <reg>, BLX <reg> */
1593 if (bits (this_instr, 4, 27) == 0x12fff1
1594 || bits (this_instr, 4, 27) == 0x12fff2
1595 || bits (this_instr, 4, 27) == 0x12fff3)
1598 /* Other miscellaneous instructions are unpredictable if they
1602 /* Data processing instruction. Fall through. */
1605 if (bits (this_instr, 12, 15) == 15)
1612 /* Media instructions and architecturally undefined instructions. */
1613 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1617 if (bit (this_instr, 20) == 0)
1621 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1627 /* Load/store multiple. */
1628 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1634 /* Branch and branch with link. */
1639 /* Coprocessor transfers or SWIs can not affect PC. */
1643 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1647 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1648 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1649 fill it in. Return the first address not recognized as a prologue
1652 We recognize all the instructions typically found in ARM prologues,
1653 plus harmless instructions which can be skipped (either for analysis
1654 purposes, or a more restrictive set that can be skipped when finding
1655 the end of the prologue). */
1658 arm_analyze_prologue (struct gdbarch *gdbarch,
1659 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1660 struct arm_prologue_cache *cache)
1662 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1663 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1665 CORE_ADDR offset, current_pc;
1666 pv_t regs[ARM_FPS_REGNUM];
1667 struct pv_area *stack;
1668 struct cleanup *back_to;
1669 int framereg, framesize;
1670 CORE_ADDR unrecognized_pc = 0;
1672 /* Search the prologue looking for instructions that set up the
1673 frame pointer, adjust the stack pointer, and save registers.
1675 Be careful, however, and if it doesn't look like a prologue,
1676 don't try to scan it. If, for instance, a frameless function
1677 begins with stmfd sp!, then we will tell ourselves there is
1678 a frame, which will confuse stack traceback, as well as "finish"
1679 and other operations that rely on a knowledge of the stack
1682 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1683 regs[regno] = pv_register (regno, 0);
1684 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1685 back_to = make_cleanup_free_pv_area (stack);
1687 for (current_pc = prologue_start;
1688 current_pc < prologue_end;
1692 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1694 if (insn == 0xe1a0c00d) /* mov ip, sp */
1696 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1699 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1700 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1702 unsigned imm = insn & 0xff; /* immediate value */
1703 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1704 int rd = bits (insn, 12, 15);
1705 imm = (imm >> rot) | (imm << (32 - rot));
1706 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1709 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1710 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1712 unsigned imm = insn & 0xff; /* immediate value */
1713 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1714 int rd = bits (insn, 12, 15);
1715 imm = (imm >> rot) | (imm << (32 - rot));
1716 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1719 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1722 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1724 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1725 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1726 regs[bits (insn, 12, 15)]);
1729 else if ((insn & 0xffff0000) == 0xe92d0000)
1730 /* stmfd sp!, {..., fp, ip, lr, pc}
1732 stmfd sp!, {a1, a2, a3, a4} */
1734 int mask = insn & 0xffff;
1736 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1739 /* Calculate offsets of saved registers. */
1740 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1741 if (mask & (1 << regno))
1744 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1745 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1748 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1749 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1750 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1752 /* No need to add this to saved_regs -- it's just an arg reg. */
1755 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1756 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1757 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1759 /* No need to add this to saved_regs -- it's just an arg reg. */
1762 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1764 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1766 /* No need to add this to saved_regs -- it's just arg regs. */
1769 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1771 unsigned imm = insn & 0xff; /* immediate value */
1772 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1773 imm = (imm >> rot) | (imm << (32 - rot));
1774 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1776 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1778 unsigned imm = insn & 0xff; /* immediate value */
1779 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1780 imm = (imm >> rot) | (imm << (32 - rot));
1781 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1783 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1785 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1787 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1790 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1791 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1792 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1794 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1796 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1798 int n_saved_fp_regs;
1799 unsigned int fp_start_reg, fp_bound_reg;
1801 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1804 if ((insn & 0x800) == 0x800) /* N0 is set */
1806 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1807 n_saved_fp_regs = 3;
1809 n_saved_fp_regs = 1;
1813 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1814 n_saved_fp_regs = 2;
1816 n_saved_fp_regs = 4;
1819 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1820 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1821 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1823 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1824 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1825 regs[fp_start_reg++]);
1828 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1830 /* Allow some special function calls when skipping the
1831 prologue; GCC generates these before storing arguments to
1833 CORE_ADDR dest = BranchDest (current_pc, insn);
1835 if (skip_prologue_function (gdbarch, dest, 0))
1840 else if ((insn & 0xf0000000) != 0xe0000000)
1841 break; /* Condition not true, exit early. */
1842 else if (arm_instruction_changes_pc (insn))
1843 /* Don't scan past anything that might change control flow. */
1845 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1846 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1847 /* Ignore block loads from the stack, potentially copying
1848 parameters from memory. */
1850 else if ((insn & 0xfc500000) == 0xe4100000
1851 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1852 /* Similarly ignore single loads from the stack. */
1854 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1855 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1856 register instead of the stack. */
1860 /* The optimizer might shove anything into the prologue,
1861 so we just skip what we don't recognize. */
1862 unrecognized_pc = current_pc;
1867 if (unrecognized_pc == 0)
1868 unrecognized_pc = current_pc;
1870 /* The frame size is just the distance from the frame register
1871 to the original stack pointer. */
1872 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1874 /* Frame pointer is fp. */
1875 framereg = ARM_FP_REGNUM;
1876 framesize = -regs[ARM_FP_REGNUM].k;
1880 /* Try the stack pointer... this is a bit desperate. */
1881 framereg = ARM_SP_REGNUM;
1882 framesize = -regs[ARM_SP_REGNUM].k;
1887 cache->framereg = framereg;
1888 cache->framesize = framesize;
1890 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1891 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1892 cache->saved_regs[regno].addr = offset;
1896 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1897 paddress (gdbarch, unrecognized_pc));
1899 do_cleanups (back_to);
1900 return unrecognized_pc;
1904 arm_scan_prologue (struct frame_info *this_frame,
1905 struct arm_prologue_cache *cache)
1907 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1908 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1910 CORE_ADDR prologue_start, prologue_end, current_pc;
1911 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1912 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1913 pv_t regs[ARM_FPS_REGNUM];
1914 struct pv_area *stack;
1915 struct cleanup *back_to;
1918 /* Assume there is no frame until proven otherwise. */
1919 cache->framereg = ARM_SP_REGNUM;
1920 cache->framesize = 0;
1922 /* Check for Thumb prologue. */
1923 if (arm_frame_is_thumb (this_frame))
1925 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1929 /* Find the function prologue. If we can't find the function in
1930 the symbol table, peek in the stack frame to find the PC. */
1931 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1934 /* One way to find the end of the prologue (which works well
1935 for unoptimized code) is to do the following:
1937 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1940 prologue_end = prev_pc;
1941 else if (sal.end < prologue_end)
1942 prologue_end = sal.end;
1944 This mechanism is very accurate so long as the optimizer
1945 doesn't move any instructions from the function body into the
1946 prologue. If this happens, sal.end will be the last
1947 instruction in the first hunk of prologue code just before
1948 the first instruction that the scheduler has moved from
1949 the body to the prologue.
1951 In order to make sure that we scan all of the prologue
1952 instructions, we use a slightly less accurate mechanism which
1953 may scan more than necessary. To help compensate for this
1954 lack of accuracy, the prologue scanning loop below contains
1955 several clauses which'll cause the loop to terminate early if
1956 an implausible prologue instruction is encountered.
1962 is a suitable endpoint since it accounts for the largest
1963 possible prologue plus up to five instructions inserted by
1966 if (prologue_end > prologue_start + 64)
1968 prologue_end = prologue_start + 64; /* See above. */
1973 /* We have no symbol information. Our only option is to assume this
1974 function has a standard stack frame and the normal frame register.
1975 Then, we can find the value of our frame pointer on entrance to
1976 the callee (or at the present moment if this is the innermost frame).
1977 The value stored there should be the address of the stmfd + 8. */
1978 CORE_ADDR frame_loc;
1979 LONGEST return_value;
1981 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1982 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1986 prologue_start = gdbarch_addr_bits_remove
1987 (gdbarch, return_value) - 8;
1988 prologue_end = prologue_start + 64; /* See above. */
1992 if (prev_pc < prologue_end)
1993 prologue_end = prev_pc;
1995 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1998 static struct arm_prologue_cache *
1999 arm_make_prologue_cache (struct frame_info *this_frame)
2002 struct arm_prologue_cache *cache;
2003 CORE_ADDR unwound_fp;
2005 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2006 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2008 arm_scan_prologue (this_frame, cache);
2010 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2011 if (unwound_fp == 0)
2014 cache->prev_sp = unwound_fp + cache->framesize;
2016 /* Calculate actual addresses of saved registers using offsets
2017 determined by arm_scan_prologue. */
2018 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2019 if (trad_frame_addr_p (cache->saved_regs, reg))
2020 cache->saved_regs[reg].addr += cache->prev_sp;
2025 /* Our frame ID for a normal frame is the current function's starting PC
2026 and the caller's SP when we were called. */
2029 arm_prologue_this_id (struct frame_info *this_frame,
2031 struct frame_id *this_id)
2033 struct arm_prologue_cache *cache;
2037 if (*this_cache == NULL)
2038 *this_cache = arm_make_prologue_cache (this_frame);
2039 cache = *this_cache;
2041 /* This is meant to halt the backtrace at "_start". */
2042 pc = get_frame_pc (this_frame);
2043 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2046 /* If we've hit a wall, stop. */
2047 if (cache->prev_sp == 0)
2050 /* Use function start address as part of the frame ID. If we cannot
2051 identify the start address (due to missing symbol information),
2052 fall back to just using the current PC. */
2053 func = get_frame_func (this_frame);
2057 id = frame_id_build (cache->prev_sp, func);
2061 static struct value *
2062 arm_prologue_prev_register (struct frame_info *this_frame,
2066 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2067 struct arm_prologue_cache *cache;
2069 if (*this_cache == NULL)
2070 *this_cache = arm_make_prologue_cache (this_frame);
2071 cache = *this_cache;
2073 /* If we are asked to unwind the PC, then we need to return the LR
2074 instead. The prologue may save PC, but it will point into this
2075 frame's prologue, not the next frame's resume location. Also
2076 strip the saved T bit. A valid LR may have the low bit set, but
2077 a valid PC never does. */
2078 if (prev_regnum == ARM_PC_REGNUM)
2082 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2083 return frame_unwind_got_constant (this_frame, prev_regnum,
2084 arm_addr_bits_remove (gdbarch, lr));
2087 /* SP is generally not saved to the stack, but this frame is
2088 identified by the next frame's stack pointer at the time of the call.
2089 The value was already reconstructed into PREV_SP. */
2090 if (prev_regnum == ARM_SP_REGNUM)
2091 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2093 /* The CPSR may have been changed by the call instruction and by the
2094 called function. The only bit we can reconstruct is the T bit,
2095 by checking the low bit of LR as of the call. This is a reliable
2096 indicator of Thumb-ness except for some ARM v4T pre-interworking
2097 Thumb code, which could get away with a clear low bit as long as
2098 the called function did not use bx. Guess that all other
2099 bits are unchanged; the condition flags are presumably lost,
2100 but the processor status is likely valid. */
2101 if (prev_regnum == ARM_PS_REGNUM)
2104 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2106 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2107 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2108 if (IS_THUMB_ADDR (lr))
2112 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2115 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2119 struct frame_unwind arm_prologue_unwind = {
2121 default_frame_unwind_stop_reason,
2122 arm_prologue_this_id,
2123 arm_prologue_prev_register,
2125 default_frame_sniffer
2128 /* Maintain a list of ARM exception table entries per objfile, similar to the
2129 list of mapping symbols. We only cache entries for standard ARM-defined
2130 personality routines; the cache will contain only the frame unwinding
2131 instructions associated with the entry (not the descriptors). */
2133 static const struct objfile_data *arm_exidx_data_key;
2135 struct arm_exidx_entry
2140 typedef struct arm_exidx_entry arm_exidx_entry_s;
2141 DEF_VEC_O(arm_exidx_entry_s);
2143 struct arm_exidx_data
2145 VEC(arm_exidx_entry_s) **section_maps;
2149 arm_exidx_data_free (struct objfile *objfile, void *arg)
2151 struct arm_exidx_data *data = arg;
2154 for (i = 0; i < objfile->obfd->section_count; i++)
2155 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2159 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2160 const struct arm_exidx_entry *rhs)
2162 return lhs->addr < rhs->addr;
2165 static struct obj_section *
2166 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2168 struct obj_section *osect;
2170 ALL_OBJFILE_OSECTIONS (objfile, osect)
2171 if (bfd_get_section_flags (objfile->obfd,
2172 osect->the_bfd_section) & SEC_ALLOC)
2174 bfd_vma start, size;
2175 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2176 size = bfd_get_section_size (osect->the_bfd_section);
2178 if (start <= vma && vma < start + size)
2185 /* Parse contents of exception table and exception index sections
2186 of OBJFILE, and fill in the exception table entry cache.
2188 For each entry that refers to a standard ARM-defined personality
2189 routine, extract the frame unwinding instructions (from either
2190 the index or the table section). The unwinding instructions
2192 - extracting them from the rest of the table data
2193 - converting to host endianness
2194 - appending the implicit 0xb0 ("Finish") code
2196 The extracted and normalized instructions are stored for later
2197 retrieval by the arm_find_exidx_entry routine. */
2200 arm_exidx_new_objfile (struct objfile *objfile)
2202 struct cleanup *cleanups;
2203 struct arm_exidx_data *data;
2204 asection *exidx, *extab;
2205 bfd_vma exidx_vma = 0, extab_vma = 0;
2206 bfd_size_type exidx_size = 0, extab_size = 0;
2207 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2210 /* If we've already touched this file, do nothing. */
2211 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2213 cleanups = make_cleanup (null_cleanup, NULL);
2215 /* Read contents of exception table and index. */
2216 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2219 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2220 exidx_size = bfd_get_section_size (exidx);
2221 exidx_data = xmalloc (exidx_size);
2222 make_cleanup (xfree, exidx_data);
2224 if (!bfd_get_section_contents (objfile->obfd, exidx,
2225 exidx_data, 0, exidx_size))
2227 do_cleanups (cleanups);
2232 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2235 extab_vma = bfd_section_vma (objfile->obfd, extab);
2236 extab_size = bfd_get_section_size (extab);
2237 extab_data = xmalloc (extab_size);
2238 make_cleanup (xfree, extab_data);
2240 if (!bfd_get_section_contents (objfile->obfd, extab,
2241 extab_data, 0, extab_size))
2243 do_cleanups (cleanups);
2248 /* Allocate exception table data structure. */
2249 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2250 set_objfile_data (objfile, arm_exidx_data_key, data);
2251 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2252 objfile->obfd->section_count,
2253 VEC(arm_exidx_entry_s) *);
2255 /* Fill in exception table. */
2256 for (i = 0; i < exidx_size / 8; i++)
2258 struct arm_exidx_entry new_exidx_entry;
2259 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2260 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2261 bfd_vma addr = 0, word = 0;
2262 int n_bytes = 0, n_words = 0;
2263 struct obj_section *sec;
2264 gdb_byte *entry = NULL;
2266 /* Extract address of start of function. */
2267 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2268 idx += exidx_vma + i * 8;
2270 /* Find section containing function and compute section offset. */
2271 sec = arm_obj_section_from_vma (objfile, idx);
2274 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2276 /* Determine address of exception table entry. */
2279 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2281 else if ((val & 0xff000000) == 0x80000000)
2283 /* Exception table entry embedded in .ARM.exidx
2284 -- must be short form. */
2288 else if (!(val & 0x80000000))
2290 /* Exception table entry in .ARM.extab. */
2291 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2292 addr += exidx_vma + i * 8 + 4;
2294 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2296 word = bfd_h_get_32 (objfile->obfd,
2297 extab_data + addr - extab_vma);
2300 if ((word & 0xff000000) == 0x80000000)
2305 else if ((word & 0xff000000) == 0x81000000
2306 || (word & 0xff000000) == 0x82000000)
2310 n_words = ((word >> 16) & 0xff);
2312 else if (!(word & 0x80000000))
2315 struct obj_section *pers_sec;
2316 int gnu_personality = 0;
2318 /* Custom personality routine. */
2319 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2320 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2322 /* Check whether we've got one of the variants of the
2323 GNU personality routines. */
2324 pers_sec = arm_obj_section_from_vma (objfile, pers);
2327 static const char *personality[] =
2329 "__gcc_personality_v0",
2330 "__gxx_personality_v0",
2331 "__gcj_personality_v0",
2332 "__gnu_objc_personality_v0",
2336 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2339 for (k = 0; personality[k]; k++)
2340 if (lookup_minimal_symbol_by_pc_name
2341 (pc, personality[k], objfile))
2343 gnu_personality = 1;
2348 /* If so, the next word contains a word count in the high
2349 byte, followed by the same unwind instructions as the
2350 pre-defined forms. */
2352 && addr + 4 <= extab_vma + extab_size)
2354 word = bfd_h_get_32 (objfile->obfd,
2355 extab_data + addr - extab_vma);
2358 n_words = ((word >> 24) & 0xff);
2364 /* Sanity check address. */
2366 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2367 n_words = n_bytes = 0;
2369 /* The unwind instructions reside in WORD (only the N_BYTES least
2370 significant bytes are valid), followed by N_WORDS words in the
2371 extab section starting at ADDR. */
2372 if (n_bytes || n_words)
2374 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2375 n_bytes + n_words * 4 + 1);
2378 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2382 word = bfd_h_get_32 (objfile->obfd,
2383 extab_data + addr - extab_vma);
2386 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2387 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2388 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2389 *p++ = (gdb_byte) (word & 0xff);
2392 /* Implied "Finish" to terminate the list. */
2396 /* Push entry onto vector. They are guaranteed to always
2397 appear in order of increasing addresses. */
2398 new_exidx_entry.addr = idx;
2399 new_exidx_entry.entry = entry;
2400 VEC_safe_push (arm_exidx_entry_s,
2401 data->section_maps[sec->the_bfd_section->index],
2405 do_cleanups (cleanups);
2408 /* Search for the exception table entry covering MEMADDR. If one is found,
2409 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2410 set *START to the start of the region covered by this entry. */
2413 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2415 struct obj_section *sec;
2417 sec = find_pc_section (memaddr);
2420 struct arm_exidx_data *data;
2421 VEC(arm_exidx_entry_s) *map;
2422 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2425 data = objfile_data (sec->objfile, arm_exidx_data_key);
2428 map = data->section_maps[sec->the_bfd_section->index];
2429 if (!VEC_empty (arm_exidx_entry_s, map))
2431 struct arm_exidx_entry *map_sym;
2433 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2434 arm_compare_exidx_entries);
2436 /* VEC_lower_bound finds the earliest ordered insertion
2437 point. If the following symbol starts at this exact
2438 address, we use that; otherwise, the preceding
2439 exception table entry covers this address. */
2440 if (idx < VEC_length (arm_exidx_entry_s, map))
2442 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2443 if (map_sym->addr == map_key.addr)
2446 *start = map_sym->addr + obj_section_addr (sec);
2447 return map_sym->entry;
2453 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2455 *start = map_sym->addr + obj_section_addr (sec);
2456 return map_sym->entry;
2465 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2466 instruction list from the ARM exception table entry ENTRY, allocate and
2467 return a prologue cache structure describing how to unwind this frame.
2469 Return NULL if the unwinding instruction list contains a "spare",
2470 "reserved" or "refuse to unwind" instruction as defined in section
2471 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2472 for the ARM Architecture" document. */
2474 static struct arm_prologue_cache *
2475 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2480 struct arm_prologue_cache *cache;
2481 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2482 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2488 /* Whenever we reload SP, we actually have to retrieve its
2489 actual value in the current frame. */
2492 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2494 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2495 vsp = get_frame_register_unsigned (this_frame, reg);
2499 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2500 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2506 /* Decode next unwind instruction. */
2509 if ((insn & 0xc0) == 0)
2511 int offset = insn & 0x3f;
2512 vsp += (offset << 2) + 4;
2514 else if ((insn & 0xc0) == 0x40)
2516 int offset = insn & 0x3f;
2517 vsp -= (offset << 2) + 4;
2519 else if ((insn & 0xf0) == 0x80)
2521 int mask = ((insn & 0xf) << 8) | *entry++;
2524 /* The special case of an all-zero mask identifies
2525 "Refuse to unwind". We return NULL to fall back
2526 to the prologue analyzer. */
2530 /* Pop registers r4..r15 under mask. */
2531 for (i = 0; i < 12; i++)
2532 if (mask & (1 << i))
2534 cache->saved_regs[4 + i].addr = vsp;
2538 /* Special-case popping SP -- we need to reload vsp. */
2539 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2542 else if ((insn & 0xf0) == 0x90)
2544 int reg = insn & 0xf;
2546 /* Reserved cases. */
2547 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2550 /* Set SP from another register and mark VSP for reload. */
2551 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2554 else if ((insn & 0xf0) == 0xa0)
2556 int count = insn & 0x7;
2557 int pop_lr = (insn & 0x8) != 0;
2560 /* Pop r4..r[4+count]. */
2561 for (i = 0; i <= count; i++)
2563 cache->saved_regs[4 + i].addr = vsp;
2567 /* If indicated by flag, pop LR as well. */
2570 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2574 else if (insn == 0xb0)
2576 /* We could only have updated PC by popping into it; if so, it
2577 will show up as address. Otherwise, copy LR into PC. */
2578 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2579 cache->saved_regs[ARM_PC_REGNUM]
2580 = cache->saved_regs[ARM_LR_REGNUM];
2585 else if (insn == 0xb1)
2587 int mask = *entry++;
2590 /* All-zero mask and mask >= 16 is "spare". */
2591 if (mask == 0 || mask >= 16)
2594 /* Pop r0..r3 under mask. */
2595 for (i = 0; i < 4; i++)
2596 if (mask & (1 << i))
2598 cache->saved_regs[i].addr = vsp;
2602 else if (insn == 0xb2)
2604 ULONGEST offset = 0;
2609 offset |= (*entry & 0x7f) << shift;
2612 while (*entry++ & 0x80);
2614 vsp += 0x204 + (offset << 2);
2616 else if (insn == 0xb3)
2618 int start = *entry >> 4;
2619 int count = (*entry++) & 0xf;
2622 /* Only registers D0..D15 are valid here. */
2623 if (start + count >= 16)
2626 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2627 for (i = 0; i <= count; i++)
2629 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2633 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2636 else if ((insn & 0xf8) == 0xb8)
2638 int count = insn & 0x7;
2641 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2642 for (i = 0; i <= count; i++)
2644 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2648 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2651 else if (insn == 0xc6)
2653 int start = *entry >> 4;
2654 int count = (*entry++) & 0xf;
2657 /* Only registers WR0..WR15 are valid. */
2658 if (start + count >= 16)
2661 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2662 for (i = 0; i <= count; i++)
2664 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2668 else if (insn == 0xc7)
2670 int mask = *entry++;
2673 /* All-zero mask and mask >= 16 is "spare". */
2674 if (mask == 0 || mask >= 16)
2677 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2678 for (i = 0; i < 4; i++)
2679 if (mask & (1 << i))
2681 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2685 else if ((insn & 0xf8) == 0xc0)
2687 int count = insn & 0x7;
2690 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2691 for (i = 0; i <= count; i++)
2693 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2697 else if (insn == 0xc8)
2699 int start = *entry >> 4;
2700 int count = (*entry++) & 0xf;
2703 /* Only registers D0..D31 are valid. */
2704 if (start + count >= 16)
2707 /* Pop VFP double-precision registers
2708 D[16+start]..D[16+start+count]. */
2709 for (i = 0; i <= count; i++)
2711 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2715 else if (insn == 0xc9)
2717 int start = *entry >> 4;
2718 int count = (*entry++) & 0xf;
2721 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2722 for (i = 0; i <= count; i++)
2724 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2728 else if ((insn & 0xf8) == 0xd0)
2730 int count = insn & 0x7;
2733 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2734 for (i = 0; i <= count; i++)
2736 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2742 /* Everything else is "spare". */
2747 /* If we restore SP from a register, assume this was the frame register.
2748 Otherwise just fall back to SP as frame register. */
2749 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2750 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2752 cache->framereg = ARM_SP_REGNUM;
2754 /* Determine offset to previous frame. */
2756 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2758 /* We already got the previous SP. */
2759 cache->prev_sp = vsp;
2764 /* Unwinding via ARM exception table entries. Note that the sniffer
2765 already computes a filled-in prologue cache, which is then used
2766 with the same arm_prologue_this_id and arm_prologue_prev_register
2767 routines also used for prologue-parsing based unwinding. */
2770 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2771 struct frame_info *this_frame,
2772 void **this_prologue_cache)
2774 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2775 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2776 CORE_ADDR addr_in_block, exidx_region, func_start;
2777 struct arm_prologue_cache *cache;
2780 /* See if we have an ARM exception table entry covering this address. */
2781 addr_in_block = get_frame_address_in_block (this_frame);
2782 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2786 /* The ARM exception table does not describe unwind information
2787 for arbitrary PC values, but is guaranteed to be correct only
2788 at call sites. We have to decide here whether we want to use
2789 ARM exception table information for this frame, or fall back
2790 to using prologue parsing. (Note that if we have DWARF CFI,
2791 this sniffer isn't even called -- CFI is always preferred.)
2793 Before we make this decision, however, we check whether we
2794 actually have *symbol* information for the current frame.
2795 If not, prologue parsing would not work anyway, so we might
2796 as well use the exception table and hope for the best. */
2797 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2801 /* If the next frame is "normal", we are at a call site in this
2802 frame, so exception information is guaranteed to be valid. */
2803 if (get_next_frame (this_frame)
2804 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2807 /* We also assume exception information is valid if we're currently
2808 blocked in a system call. The system library is supposed to
2809 ensure this, so that e.g. pthread cancellation works. */
2810 if (arm_frame_is_thumb (this_frame))
2814 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2815 byte_order_for_code, &insn)
2816 && (insn & 0xff00) == 0xdf00 /* svc */)
2823 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2824 byte_order_for_code, &insn)
2825 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2829 /* Bail out if we don't know that exception information is valid. */
2833 /* The ARM exception index does not mark the *end* of the region
2834 covered by the entry, and some functions will not have any entry.
2835 To correctly recognize the end of the covered region, the linker
2836 should have inserted dummy records with a CANTUNWIND marker.
2838 Unfortunately, current versions of GNU ld do not reliably do
2839 this, and thus we may have found an incorrect entry above.
2840 As a (temporary) sanity check, we only use the entry if it
2841 lies *within* the bounds of the function. Note that this check
2842 might reject perfectly valid entries that just happen to cover
2843 multiple functions; therefore this check ought to be removed
2844 once the linker is fixed. */
2845 if (func_start > exidx_region)
2849 /* Decode the list of unwinding instructions into a prologue cache.
2850 Note that this may fail due to e.g. a "refuse to unwind" code. */
2851 cache = arm_exidx_fill_cache (this_frame, entry);
2855 *this_prologue_cache = cache;
2859 struct frame_unwind arm_exidx_unwind = {
2861 default_frame_unwind_stop_reason,
2862 arm_prologue_this_id,
2863 arm_prologue_prev_register,
2865 arm_exidx_unwind_sniffer
2868 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2869 trampoline, return the target PC. Otherwise return 0.
2871 void call0a (char c, short s, int i, long l) {}
2875 (*pointer_to_call0a) (c, s, i, l);
2878 Instead of calling a stub library function _call_via_xx (xx is
2879 the register name), GCC may inline the trampoline in the object
2880 file as below (register r2 has the address of call0a).
2883 .type main, %function
2892 The trampoline 'bx r2' doesn't belong to main. */
2895 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2897 /* The heuristics of recognizing such trampoline is that FRAME is
2898 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2899 if (arm_frame_is_thumb (frame))
2903 if (target_read_memory (pc, buf, 2) == 0)
2905 struct gdbarch *gdbarch = get_frame_arch (frame);
2906 enum bfd_endian byte_order_for_code
2907 = gdbarch_byte_order_for_code (gdbarch);
2909 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2911 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2914 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2916 /* Clear the LSB so that gdb core sets step-resume
2917 breakpoint at the right address. */
2918 return UNMAKE_THUMB_ADDR (dest);
2926 static struct arm_prologue_cache *
2927 arm_make_stub_cache (struct frame_info *this_frame)
2929 struct arm_prologue_cache *cache;
2931 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2932 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2934 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2939 /* Our frame ID for a stub frame is the current SP and LR. */
2942 arm_stub_this_id (struct frame_info *this_frame,
2944 struct frame_id *this_id)
2946 struct arm_prologue_cache *cache;
2948 if (*this_cache == NULL)
2949 *this_cache = arm_make_stub_cache (this_frame);
2950 cache = *this_cache;
2952 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2956 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2957 struct frame_info *this_frame,
2958 void **this_prologue_cache)
2960 CORE_ADDR addr_in_block;
2962 CORE_ADDR pc, start_addr;
2965 addr_in_block = get_frame_address_in_block (this_frame);
2966 pc = get_frame_pc (this_frame);
2967 if (in_plt_section (addr_in_block)
2968 /* We also use the stub winder if the target memory is unreadable
2969 to avoid having the prologue unwinder trying to read it. */
2970 || target_read_memory (pc, dummy, 4) != 0)
2973 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2974 && arm_skip_bx_reg (this_frame, pc) != 0)
2980 struct frame_unwind arm_stub_unwind = {
2982 default_frame_unwind_stop_reason,
2984 arm_prologue_prev_register,
2986 arm_stub_unwind_sniffer
2989 /* Put here the code to store, into CACHE->saved_regs, the addresses
2990 of the saved registers of frame described by THIS_FRAME. CACHE is
2993 static struct arm_prologue_cache *
2994 arm_m_exception_cache (struct frame_info *this_frame)
2996 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2997 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2998 struct arm_prologue_cache *cache;
2999 CORE_ADDR unwound_sp;
3002 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3003 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3005 unwound_sp = get_frame_register_unsigned (this_frame,
3008 /* The hardware saves eight 32-bit words, comprising xPSR,
3009 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3010 "B1.5.6 Exception entry behavior" in
3011 "ARMv7-M Architecture Reference Manual". */
3012 cache->saved_regs[0].addr = unwound_sp;
3013 cache->saved_regs[1].addr = unwound_sp + 4;
3014 cache->saved_regs[2].addr = unwound_sp + 8;
3015 cache->saved_regs[3].addr = unwound_sp + 12;
3016 cache->saved_regs[12].addr = unwound_sp + 16;
3017 cache->saved_regs[14].addr = unwound_sp + 20;
3018 cache->saved_regs[15].addr = unwound_sp + 24;
3019 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3021 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3022 aligner between the top of the 32-byte stack frame and the
3023 previous context's stack pointer. */
3024 cache->prev_sp = unwound_sp + 32;
3025 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3026 && (xpsr & (1 << 9)) != 0)
3027 cache->prev_sp += 4;
3032 /* Implementation of function hook 'this_id' in
3033 'struct frame_uwnind'. */
3036 arm_m_exception_this_id (struct frame_info *this_frame,
3038 struct frame_id *this_id)
3040 struct arm_prologue_cache *cache;
3042 if (*this_cache == NULL)
3043 *this_cache = arm_m_exception_cache (this_frame);
3044 cache = *this_cache;
3046 /* Our frame ID for a stub frame is the current SP and LR. */
3047 *this_id = frame_id_build (cache->prev_sp,
3048 get_frame_pc (this_frame));
3051 /* Implementation of function hook 'prev_register' in
3052 'struct frame_uwnind'. */
3054 static struct value *
3055 arm_m_exception_prev_register (struct frame_info *this_frame,
3059 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3060 struct arm_prologue_cache *cache;
3062 if (*this_cache == NULL)
3063 *this_cache = arm_m_exception_cache (this_frame);
3064 cache = *this_cache;
3066 /* The value was already reconstructed into PREV_SP. */
3067 if (prev_regnum == ARM_SP_REGNUM)
3068 return frame_unwind_got_constant (this_frame, prev_regnum,
3071 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3075 /* Implementation of function hook 'sniffer' in
3076 'struct frame_uwnind'. */
3079 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3080 struct frame_info *this_frame,
3081 void **this_prologue_cache)
3083 CORE_ADDR this_pc = get_frame_pc (this_frame);
3085 /* No need to check is_m; this sniffer is only registered for
3086 M-profile architectures. */
3088 /* Exception frames return to one of these magic PCs. Other values
3089 are not defined as of v7-M. See details in "B1.5.8 Exception
3090 return behavior" in "ARMv7-M Architecture Reference Manual". */
3091 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3092 || this_pc == 0xfffffffd)
3098 /* Frame unwinder for M-profile exceptions. */
3100 struct frame_unwind arm_m_exception_unwind =
3103 default_frame_unwind_stop_reason,
3104 arm_m_exception_this_id,
3105 arm_m_exception_prev_register,
3107 arm_m_exception_unwind_sniffer
3111 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3113 struct arm_prologue_cache *cache;
3115 if (*this_cache == NULL)
3116 *this_cache = arm_make_prologue_cache (this_frame);
3117 cache = *this_cache;
3119 return cache->prev_sp - cache->framesize;
3122 struct frame_base arm_normal_base = {
3123 &arm_prologue_unwind,
3124 arm_normal_frame_base,
3125 arm_normal_frame_base,
3126 arm_normal_frame_base
3129 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3130 dummy frame. The frame ID's base needs to match the TOS value
3131 saved by save_dummy_frame_tos() and returned from
3132 arm_push_dummy_call, and the PC needs to match the dummy frame's
3135 static struct frame_id
3136 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3138 return frame_id_build (get_frame_register_unsigned (this_frame,
3140 get_frame_pc (this_frame));
3143 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3144 be used to construct the previous frame's ID, after looking up the
3145 containing function). */
3148 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3151 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3152 return arm_addr_bits_remove (gdbarch, pc);
3156 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3158 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3161 static struct value *
3162 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3165 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3167 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3172 /* The PC is normally copied from the return column, which
3173 describes saves of LR. However, that version may have an
3174 extra bit set to indicate Thumb state. The bit is not
3176 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3177 return frame_unwind_got_constant (this_frame, regnum,
3178 arm_addr_bits_remove (gdbarch, lr));
3181 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3182 cpsr = get_frame_register_unsigned (this_frame, regnum);
3183 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3184 if (IS_THUMB_ADDR (lr))
3188 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3191 internal_error (__FILE__, __LINE__,
3192 _("Unexpected register %d"), regnum);
3197 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3198 struct dwarf2_frame_state_reg *reg,
3199 struct frame_info *this_frame)
3205 reg->how = DWARF2_FRAME_REG_FN;
3206 reg->loc.fn = arm_dwarf2_prev_register;
3209 reg->how = DWARF2_FRAME_REG_CFA;
3214 /* Return true if we are in the function's epilogue, i.e. after the
3215 instruction that destroyed the function's stack frame. */
3218 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3220 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3221 unsigned int insn, insn2;
3222 int found_return = 0, found_stack_adjust = 0;
3223 CORE_ADDR func_start, func_end;
3227 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3230 /* The epilogue is a sequence of instructions along the following lines:
3232 - add stack frame size to SP or FP
3233 - [if frame pointer used] restore SP from FP
3234 - restore registers from SP [may include PC]
3235 - a return-type instruction [if PC wasn't already restored]
3237 In a first pass, we scan forward from the current PC and verify the
3238 instructions we find as compatible with this sequence, ending in a
3241 However, this is not sufficient to distinguish indirect function calls
3242 within a function from indirect tail calls in the epilogue in some cases.
3243 Therefore, if we didn't already find any SP-changing instruction during
3244 forward scan, we add a backward scanning heuristic to ensure we actually
3245 are in the epilogue. */
3248 while (scan_pc < func_end && !found_return)
3250 if (target_read_memory (scan_pc, buf, 2))
3254 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3256 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3258 else if (insn == 0x46f7) /* mov pc, lr */
3260 else if (insn == 0x46bd) /* mov sp, r7 */
3261 found_stack_adjust = 1;
3262 else if ((insn & 0xff80) == 0xb000) /* add sp, imm */
3263 found_stack_adjust = 1;
3264 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3266 found_stack_adjust = 1;
3267 if (insn & 0x0100) /* <registers> include PC. */
3270 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3272 if (target_read_memory (scan_pc, buf, 2))
3276 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3278 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3280 found_stack_adjust = 1;
3281 if (insn2 & 0x8000) /* <registers> include PC. */
3284 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3285 && (insn2 & 0x0fff) == 0x0b04)
3287 found_stack_adjust = 1;
3288 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3291 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3292 && (insn2 & 0x0e00) == 0x0a00)
3293 found_stack_adjust = 1;
3304 /* Since any instruction in the epilogue sequence, with the possible
3305 exception of return itself, updates the stack pointer, we need to
3306 scan backwards for at most one instruction. Try either a 16-bit or
3307 a 32-bit instruction. This is just a heuristic, so we do not worry
3308 too much about false positives. */
3310 if (!found_stack_adjust)
3312 if (pc - 4 < func_start)
3314 if (target_read_memory (pc - 4, buf, 4))
3317 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3318 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3320 if (insn2 == 0x46bd) /* mov sp, r7 */
3321 found_stack_adjust = 1;
3322 else if ((insn2 & 0xff80) == 0xb000) /* add sp, imm */
3323 found_stack_adjust = 1;
3324 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3325 found_stack_adjust = 1;
3326 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3327 found_stack_adjust = 1;
3328 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3329 && (insn2 & 0x0fff) == 0x0b04)
3330 found_stack_adjust = 1;
3331 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3332 && (insn2 & 0x0e00) == 0x0a00)
3333 found_stack_adjust = 1;
3336 return found_stack_adjust;
3339 /* Return true if we are in the function's epilogue, i.e. after the
3340 instruction that destroyed the function's stack frame. */
3343 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3345 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3347 int found_return, found_stack_adjust;
3348 CORE_ADDR func_start, func_end;
3350 if (arm_pc_is_thumb (gdbarch, pc))
3351 return thumb_in_function_epilogue_p (gdbarch, pc);
3353 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3356 /* We are in the epilogue if the previous instruction was a stack
3357 adjustment and the next instruction is a possible return (bx, mov
3358 pc, or pop). We could have to scan backwards to find the stack
3359 adjustment, or forwards to find the return, but this is a decent
3360 approximation. First scan forwards. */
3363 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3364 if (bits (insn, 28, 31) != INST_NV)
3366 if ((insn & 0x0ffffff0) == 0x012fff10)
3369 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3372 else if ((insn & 0x0fff0000) == 0x08bd0000
3373 && (insn & 0x0000c000) != 0)
3374 /* POP (LDMIA), including PC or LR. */
3381 /* Scan backwards. This is just a heuristic, so do not worry about
3382 false positives from mode changes. */
3384 if (pc < func_start + 4)
3387 found_stack_adjust = 0;
3388 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3389 if (bits (insn, 28, 31) != INST_NV)
3391 if ((insn & 0x0df0f000) == 0x0080d000)
3392 /* ADD SP (register or immediate). */
3393 found_stack_adjust = 1;
3394 else if ((insn & 0x0df0f000) == 0x0040d000)
3395 /* SUB SP (register or immediate). */
3396 found_stack_adjust = 1;
3397 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3399 found_stack_adjust = 1;
3400 else if ((insn & 0x0fff0000) == 0x08bd0000)
3402 found_stack_adjust = 1;
3403 else if ((insn & 0x0fff0000) == 0x049d0000)
3404 /* POP of a single register. */
3405 found_stack_adjust = 1;
3408 if (found_stack_adjust)
3415 /* When arguments must be pushed onto the stack, they go on in reverse
3416 order. The code below implements a FILO (stack) to do this. */
3421 struct stack_item *prev;
3425 static struct stack_item *
3426 push_stack_item (struct stack_item *prev, const void *contents, int len)
3428 struct stack_item *si;
3429 si = xmalloc (sizeof (struct stack_item));
3430 si->data = xmalloc (len);
3433 memcpy (si->data, contents, len);
3437 static struct stack_item *
3438 pop_stack_item (struct stack_item *si)
3440 struct stack_item *dead = si;
3448 /* Return the alignment (in bytes) of the given type. */
3451 arm_type_align (struct type *t)
3457 t = check_typedef (t);
3458 switch (TYPE_CODE (t))
3461 /* Should never happen. */
3462 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3466 case TYPE_CODE_ENUM:
3470 case TYPE_CODE_RANGE:
3472 case TYPE_CODE_CHAR:
3473 case TYPE_CODE_BOOL:
3474 return TYPE_LENGTH (t);
3476 case TYPE_CODE_ARRAY:
3477 case TYPE_CODE_COMPLEX:
3478 /* TODO: What about vector types? */
3479 return arm_type_align (TYPE_TARGET_TYPE (t));
3481 case TYPE_CODE_STRUCT:
3482 case TYPE_CODE_UNION:
3484 for (n = 0; n < TYPE_NFIELDS (t); n++)
3486 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3494 /* Possible base types for a candidate for passing and returning in
3497 enum arm_vfp_cprc_base_type
3506 /* The length of one element of base type B. */
3509 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3513 case VFP_CPRC_SINGLE:
3515 case VFP_CPRC_DOUBLE:
3517 case VFP_CPRC_VEC64:
3519 case VFP_CPRC_VEC128:
3522 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3527 /* The character ('s', 'd' or 'q') for the type of VFP register used
3528 for passing base type B. */
3531 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3535 case VFP_CPRC_SINGLE:
3537 case VFP_CPRC_DOUBLE:
3539 case VFP_CPRC_VEC64:
3541 case VFP_CPRC_VEC128:
3544 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3549 /* Determine whether T may be part of a candidate for passing and
3550 returning in VFP registers, ignoring the limit on the total number
3551 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3552 classification of the first valid component found; if it is not
3553 VFP_CPRC_UNKNOWN, all components must have the same classification
3554 as *BASE_TYPE. If it is found that T contains a type not permitted
3555 for passing and returning in VFP registers, a type differently
3556 classified from *BASE_TYPE, or two types differently classified
3557 from each other, return -1, otherwise return the total number of
3558 base-type elements found (possibly 0 in an empty structure or
3559 array). Vectors and complex types are not currently supported,
3560 matching the generic AAPCS support. */
3563 arm_vfp_cprc_sub_candidate (struct type *t,
3564 enum arm_vfp_cprc_base_type *base_type)
3566 t = check_typedef (t);
3567 switch (TYPE_CODE (t))
3570 switch (TYPE_LENGTH (t))
3573 if (*base_type == VFP_CPRC_UNKNOWN)
3574 *base_type = VFP_CPRC_SINGLE;
3575 else if (*base_type != VFP_CPRC_SINGLE)
3580 if (*base_type == VFP_CPRC_UNKNOWN)
3581 *base_type = VFP_CPRC_DOUBLE;
3582 else if (*base_type != VFP_CPRC_DOUBLE)
3591 case TYPE_CODE_ARRAY:
3595 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3598 if (TYPE_LENGTH (t) == 0)
3600 gdb_assert (count == 0);
3603 else if (count == 0)
3605 unitlen = arm_vfp_cprc_unit_length (*base_type);
3606 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3607 return TYPE_LENGTH (t) / unitlen;
3611 case TYPE_CODE_STRUCT:
3616 for (i = 0; i < TYPE_NFIELDS (t); i++)
3618 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3620 if (sub_count == -1)
3624 if (TYPE_LENGTH (t) == 0)
3626 gdb_assert (count == 0);
3629 else if (count == 0)
3631 unitlen = arm_vfp_cprc_unit_length (*base_type);
3632 if (TYPE_LENGTH (t) != unitlen * count)
3637 case TYPE_CODE_UNION:
3642 for (i = 0; i < TYPE_NFIELDS (t); i++)
3644 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3646 if (sub_count == -1)
3648 count = (count > sub_count ? count : sub_count);
3650 if (TYPE_LENGTH (t) == 0)
3652 gdb_assert (count == 0);
3655 else if (count == 0)
3657 unitlen = arm_vfp_cprc_unit_length (*base_type);
3658 if (TYPE_LENGTH (t) != unitlen * count)
3670 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3671 if passed to or returned from a non-variadic function with the VFP
3672 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3673 *BASE_TYPE to the base type for T and *COUNT to the number of
3674 elements of that base type before returning. */
3677 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3680 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3681 int c = arm_vfp_cprc_sub_candidate (t, &b);
3682 if (c <= 0 || c > 4)
3689 /* Return 1 if the VFP ABI should be used for passing arguments to and
3690 returning values from a function of type FUNC_TYPE, 0
3694 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3696 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3697 /* Variadic functions always use the base ABI. Assume that functions
3698 without debug info are not variadic. */
3699 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3701 /* The VFP ABI is only supported as a variant of AAPCS. */
3702 if (tdep->arm_abi != ARM_ABI_AAPCS)
3704 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3707 /* We currently only support passing parameters in integer registers, which
3708 conforms with GCC's default model, and VFP argument passing following
3709 the VFP variant of AAPCS. Several other variants exist and
3710 we should probably support some of them based on the selected ABI. */
3713 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3714 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3715 struct value **args, CORE_ADDR sp, int struct_return,
3716 CORE_ADDR struct_addr)
3718 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3722 struct stack_item *si = NULL;
3725 unsigned vfp_regs_free = (1 << 16) - 1;
3727 /* Determine the type of this function and whether the VFP ABI
3729 ftype = check_typedef (value_type (function));
3730 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3731 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3732 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3734 /* Set the return address. For the ARM, the return breakpoint is
3735 always at BP_ADDR. */
3736 if (arm_pc_is_thumb (gdbarch, bp_addr))
3738 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3740 /* Walk through the list of args and determine how large a temporary
3741 stack is required. Need to take care here as structs may be
3742 passed on the stack, and we have to push them. */
3745 argreg = ARM_A1_REGNUM;
3748 /* The struct_return pointer occupies the first parameter
3749 passing register. */
3753 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3754 gdbarch_register_name (gdbarch, argreg),
3755 paddress (gdbarch, struct_addr));
3756 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3760 for (argnum = 0; argnum < nargs; argnum++)
3763 struct type *arg_type;
3764 struct type *target_type;
3765 enum type_code typecode;
3766 const bfd_byte *val;
3768 enum arm_vfp_cprc_base_type vfp_base_type;
3770 int may_use_core_reg = 1;
3772 arg_type = check_typedef (value_type (args[argnum]));
3773 len = TYPE_LENGTH (arg_type);
3774 target_type = TYPE_TARGET_TYPE (arg_type);
3775 typecode = TYPE_CODE (arg_type);
3776 val = value_contents (args[argnum]);
3778 align = arm_type_align (arg_type);
3779 /* Round alignment up to a whole number of words. */
3780 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3781 /* Different ABIs have different maximum alignments. */
3782 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3784 /* The APCS ABI only requires word alignment. */
3785 align = INT_REGISTER_SIZE;
3789 /* The AAPCS requires at most doubleword alignment. */
3790 if (align > INT_REGISTER_SIZE * 2)
3791 align = INT_REGISTER_SIZE * 2;
3795 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3803 /* Because this is a CPRC it cannot go in a core register or
3804 cause a core register to be skipped for alignment.
3805 Either it goes in VFP registers and the rest of this loop
3806 iteration is skipped for this argument, or it goes on the
3807 stack (and the stack alignment code is correct for this
3809 may_use_core_reg = 0;
3811 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3812 shift = unit_length / 4;
3813 mask = (1 << (shift * vfp_base_count)) - 1;
3814 for (regno = 0; regno < 16; regno += shift)
3815 if (((vfp_regs_free >> regno) & mask) == mask)
3824 vfp_regs_free &= ~(mask << regno);
3825 reg_scaled = regno / shift;
3826 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3827 for (i = 0; i < vfp_base_count; i++)
3831 if (reg_char == 'q')
3832 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3833 val + i * unit_length);
3836 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3837 reg_char, reg_scaled + i);
3838 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3840 regcache_cooked_write (regcache, regnum,
3841 val + i * unit_length);
3848 /* This CPRC could not go in VFP registers, so all VFP
3849 registers are now marked as used. */
3854 /* Push stack padding for dowubleword alignment. */
3855 if (nstack & (align - 1))
3857 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3858 nstack += INT_REGISTER_SIZE;
3861 /* Doubleword aligned quantities must go in even register pairs. */
3862 if (may_use_core_reg
3863 && argreg <= ARM_LAST_ARG_REGNUM
3864 && align > INT_REGISTER_SIZE
3868 /* If the argument is a pointer to a function, and it is a
3869 Thumb function, create a LOCAL copy of the value and set
3870 the THUMB bit in it. */
3871 if (TYPE_CODE_PTR == typecode
3872 && target_type != NULL
3873 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3875 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3876 if (arm_pc_is_thumb (gdbarch, regval))
3878 bfd_byte *copy = alloca (len);
3879 store_unsigned_integer (copy, len, byte_order,
3880 MAKE_THUMB_ADDR (regval));
3885 /* Copy the argument to general registers or the stack in
3886 register-sized pieces. Large arguments are split between
3887 registers and stack. */
3890 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3892 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3894 /* The argument is being passed in a general purpose
3897 = extract_unsigned_integer (val, partial_len, byte_order);
3898 if (byte_order == BFD_ENDIAN_BIG)
3899 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3901 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3903 gdbarch_register_name
3905 phex (regval, INT_REGISTER_SIZE));
3906 regcache_cooked_write_unsigned (regcache, argreg, regval);
3911 /* Push the arguments onto the stack. */
3913 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3915 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3916 nstack += INT_REGISTER_SIZE;
3923 /* If we have an odd number of words to push, then decrement the stack
3924 by one word now, so first stack argument will be dword aligned. */
3931 write_memory (sp, si->data, si->len);
3932 si = pop_stack_item (si);
3935 /* Finally, update teh SP register. */
3936 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3942 /* Always align the frame to an 8-byte boundary. This is required on
3943 some platforms and harmless on the rest. */
3946 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3948 /* Align the stack to eight bytes. */
3949 return sp & ~ (CORE_ADDR) 7;
3953 print_fpu_flags (struct ui_file *file, int flags)
3955 if (flags & (1 << 0))
3956 fputs_filtered ("IVO ", file);
3957 if (flags & (1 << 1))
3958 fputs_filtered ("DVZ ", file);
3959 if (flags & (1 << 2))
3960 fputs_filtered ("OFL ", file);
3961 if (flags & (1 << 3))
3962 fputs_filtered ("UFL ", file);
3963 if (flags & (1 << 4))
3964 fputs_filtered ("INX ", file);
3965 fputc_filtered ('\n', file);
3968 /* Print interesting information about the floating point processor
3969 (if present) or emulator. */
3971 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3972 struct frame_info *frame, const char *args)
3974 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3977 type = (status >> 24) & 127;
3978 if (status & (1 << 31))
3979 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3981 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3982 /* i18n: [floating point unit] mask */
3983 fputs_filtered (_("mask: "), file);
3984 print_fpu_flags (file, status >> 16);
3985 /* i18n: [floating point unit] flags */
3986 fputs_filtered (_("flags: "), file);
3987 print_fpu_flags (file, status);
3990 /* Construct the ARM extended floating point type. */
3991 static struct type *
3992 arm_ext_type (struct gdbarch *gdbarch)
3994 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3996 if (!tdep->arm_ext_type)
3998 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3999 floatformats_arm_ext);
4001 return tdep->arm_ext_type;
4004 static struct type *
4005 arm_neon_double_type (struct gdbarch *gdbarch)
4007 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4009 if (tdep->neon_double_type == NULL)
4011 struct type *t, *elem;
4013 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4015 elem = builtin_type (gdbarch)->builtin_uint8;
4016 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4017 elem = builtin_type (gdbarch)->builtin_uint16;
4018 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4019 elem = builtin_type (gdbarch)->builtin_uint32;
4020 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4021 elem = builtin_type (gdbarch)->builtin_uint64;
4022 append_composite_type_field (t, "u64", elem);
4023 elem = builtin_type (gdbarch)->builtin_float;
4024 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4025 elem = builtin_type (gdbarch)->builtin_double;
4026 append_composite_type_field (t, "f64", elem);
4028 TYPE_VECTOR (t) = 1;
4029 TYPE_NAME (t) = "neon_d";
4030 tdep->neon_double_type = t;
4033 return tdep->neon_double_type;
4036 /* FIXME: The vector types are not correctly ordered on big-endian
4037 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4038 bits of d0 - regardless of what unit size is being held in d0. So
4039 the offset of the first uint8 in d0 is 7, but the offset of the
4040 first float is 4. This code works as-is for little-endian
4043 static struct type *
4044 arm_neon_quad_type (struct gdbarch *gdbarch)
4046 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4048 if (tdep->neon_quad_type == NULL)
4050 struct type *t, *elem;
4052 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4054 elem = builtin_type (gdbarch)->builtin_uint8;
4055 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4056 elem = builtin_type (gdbarch)->builtin_uint16;
4057 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4058 elem = builtin_type (gdbarch)->builtin_uint32;
4059 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4060 elem = builtin_type (gdbarch)->builtin_uint64;
4061 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4062 elem = builtin_type (gdbarch)->builtin_float;
4063 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4064 elem = builtin_type (gdbarch)->builtin_double;
4065 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4067 TYPE_VECTOR (t) = 1;
4068 TYPE_NAME (t) = "neon_q";
4069 tdep->neon_quad_type = t;
4072 return tdep->neon_quad_type;
4075 /* Return the GDB type object for the "standard" data type of data in
4078 static struct type *
4079 arm_register_type (struct gdbarch *gdbarch, int regnum)
4081 int num_regs = gdbarch_num_regs (gdbarch);
4083 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4084 && regnum >= num_regs && regnum < num_regs + 32)
4085 return builtin_type (gdbarch)->builtin_float;
4087 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4088 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4089 return arm_neon_quad_type (gdbarch);
4091 /* If the target description has register information, we are only
4092 in this function so that we can override the types of
4093 double-precision registers for NEON. */
4094 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4096 struct type *t = tdesc_register_type (gdbarch, regnum);
4098 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4099 && TYPE_CODE (t) == TYPE_CODE_FLT
4100 && gdbarch_tdep (gdbarch)->have_neon)
4101 return arm_neon_double_type (gdbarch);
4106 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4108 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4109 return builtin_type (gdbarch)->builtin_void;
4111 return arm_ext_type (gdbarch);
4113 else if (regnum == ARM_SP_REGNUM)
4114 return builtin_type (gdbarch)->builtin_data_ptr;
4115 else if (regnum == ARM_PC_REGNUM)
4116 return builtin_type (gdbarch)->builtin_func_ptr;
4117 else if (regnum >= ARRAY_SIZE (arm_register_names))
4118 /* These registers are only supported on targets which supply
4119 an XML description. */
4120 return builtin_type (gdbarch)->builtin_int0;
4122 return builtin_type (gdbarch)->builtin_uint32;
4125 /* Map a DWARF register REGNUM onto the appropriate GDB register
4129 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4131 /* Core integer regs. */
4132 if (reg >= 0 && reg <= 15)
4135 /* Legacy FPA encoding. These were once used in a way which
4136 overlapped with VFP register numbering, so their use is
4137 discouraged, but GDB doesn't support the ARM toolchain
4138 which used them for VFP. */
4139 if (reg >= 16 && reg <= 23)
4140 return ARM_F0_REGNUM + reg - 16;
4142 /* New assignments for the FPA registers. */
4143 if (reg >= 96 && reg <= 103)
4144 return ARM_F0_REGNUM + reg - 96;
4146 /* WMMX register assignments. */
4147 if (reg >= 104 && reg <= 111)
4148 return ARM_WCGR0_REGNUM + reg - 104;
4150 if (reg >= 112 && reg <= 127)
4151 return ARM_WR0_REGNUM + reg - 112;
4153 if (reg >= 192 && reg <= 199)
4154 return ARM_WC0_REGNUM + reg - 192;
4156 /* VFP v2 registers. A double precision value is actually
4157 in d1 rather than s2, but the ABI only defines numbering
4158 for the single precision registers. This will "just work"
4159 in GDB for little endian targets (we'll read eight bytes,
4160 starting in s0 and then progressing to s1), but will be
4161 reversed on big endian targets with VFP. This won't
4162 be a problem for the new Neon quad registers; you're supposed
4163 to use DW_OP_piece for those. */
4164 if (reg >= 64 && reg <= 95)
4168 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4169 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4173 /* VFP v3 / Neon registers. This range is also used for VFP v2
4174 registers, except that it now describes d0 instead of s0. */
4175 if (reg >= 256 && reg <= 287)
4179 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4180 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4187 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4189 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4192 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4194 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4195 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4197 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4198 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4200 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4201 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4203 if (reg < NUM_GREGS)
4204 return SIM_ARM_R0_REGNUM + reg;
4207 if (reg < NUM_FREGS)
4208 return SIM_ARM_FP0_REGNUM + reg;
4211 if (reg < NUM_SREGS)
4212 return SIM_ARM_FPS_REGNUM + reg;
4215 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4218 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4219 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4220 It is thought that this is is the floating-point register format on
4221 little-endian systems. */
4224 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4225 void *dbl, int endianess)
4229 if (endianess == BFD_ENDIAN_BIG)
4230 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4232 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4234 floatformat_from_doublest (fmt, &d, dbl);
4238 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4243 floatformat_to_doublest (fmt, ptr, &d);
4244 if (endianess == BFD_ENDIAN_BIG)
4245 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4247 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4252 condition_true (unsigned long cond, unsigned long status_reg)
4254 if (cond == INST_AL || cond == INST_NV)
4260 return ((status_reg & FLAG_Z) != 0);
4262 return ((status_reg & FLAG_Z) == 0);
4264 return ((status_reg & FLAG_C) != 0);
4266 return ((status_reg & FLAG_C) == 0);
4268 return ((status_reg & FLAG_N) != 0);
4270 return ((status_reg & FLAG_N) == 0);
4272 return ((status_reg & FLAG_V) != 0);
4274 return ((status_reg & FLAG_V) == 0);
4276 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4278 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4280 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4282 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4284 return (((status_reg & FLAG_Z) == 0)
4285 && (((status_reg & FLAG_N) == 0)
4286 == ((status_reg & FLAG_V) == 0)));
4288 return (((status_reg & FLAG_Z) != 0)
4289 || (((status_reg & FLAG_N) == 0)
4290 != ((status_reg & FLAG_V) == 0)));
4295 static unsigned long
4296 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4297 unsigned long pc_val, unsigned long status_reg)
4299 unsigned long res, shift;
4300 int rm = bits (inst, 0, 3);
4301 unsigned long shifttype = bits (inst, 5, 6);
4305 int rs = bits (inst, 8, 11);
4306 shift = (rs == 15 ? pc_val + 8
4307 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4310 shift = bits (inst, 7, 11);
4312 res = (rm == ARM_PC_REGNUM
4313 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4314 : get_frame_register_unsigned (frame, rm));
4319 res = shift >= 32 ? 0 : res << shift;
4323 res = shift >= 32 ? 0 : res >> shift;
4329 res = ((res & 0x80000000L)
4330 ? ~((~res) >> shift) : res >> shift);
4333 case 3: /* ROR/RRX */
4336 res = (res >> 1) | (carry ? 0x80000000L : 0);
4338 res = (res >> shift) | (res << (32 - shift));
4342 return res & 0xffffffff;
4345 /* Return number of 1-bits in VAL. */
4348 bitcount (unsigned long val)
4351 for (nbits = 0; val != 0; nbits++)
4352 val &= val - 1; /* Delete rightmost 1-bit in val. */
4356 /* Return the size in bytes of the complete Thumb instruction whose
4357 first halfword is INST1. */
4360 thumb_insn_size (unsigned short inst1)
4362 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4369 thumb_advance_itstate (unsigned int itstate)
4371 /* Preserve IT[7:5], the first three bits of the condition. Shift
4372 the upcoming condition flags left by one bit. */
4373 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4375 /* If we have finished the IT block, clear the state. */
4376 if ((itstate & 0x0f) == 0)
4382 /* Find the next PC after the current instruction executes. In some
4383 cases we can not statically determine the answer (see the IT state
4384 handling in this function); in that case, a breakpoint may be
4385 inserted in addition to the returned PC, which will be used to set
4386 another breakpoint by our caller. */
4389 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4391 struct gdbarch *gdbarch = get_frame_arch (frame);
4392 struct address_space *aspace = get_frame_address_space (frame);
4393 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4394 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4395 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4396 unsigned short inst1;
4397 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4398 unsigned long offset;
4399 ULONGEST status, itstate;
4401 nextpc = MAKE_THUMB_ADDR (nextpc);
4402 pc_val = MAKE_THUMB_ADDR (pc_val);
4404 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4406 /* Thumb-2 conditional execution support. There are eight bits in
4407 the CPSR which describe conditional execution state. Once
4408 reconstructed (they're in a funny order), the low five bits
4409 describe the low bit of the condition for each instruction and
4410 how many instructions remain. The high three bits describe the
4411 base condition. One of the low four bits will be set if an IT
4412 block is active. These bits read as zero on earlier
4414 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4415 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4417 /* If-Then handling. On GNU/Linux, where this routine is used, we
4418 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4419 can disable execution of the undefined instruction. So we might
4420 miss the breakpoint if we set it on a skipped conditional
4421 instruction. Because conditional instructions can change the
4422 flags, affecting the execution of further instructions, we may
4423 need to set two breakpoints. */
4425 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4427 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4429 /* An IT instruction. Because this instruction does not
4430 modify the flags, we can accurately predict the next
4431 executed instruction. */
4432 itstate = inst1 & 0x00ff;
4433 pc += thumb_insn_size (inst1);
4435 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4437 inst1 = read_memory_unsigned_integer (pc, 2,
4438 byte_order_for_code);
4439 pc += thumb_insn_size (inst1);
4440 itstate = thumb_advance_itstate (itstate);
4443 return MAKE_THUMB_ADDR (pc);
4445 else if (itstate != 0)
4447 /* We are in a conditional block. Check the condition. */
4448 if (! condition_true (itstate >> 4, status))
4450 /* Advance to the next executed instruction. */
4451 pc += thumb_insn_size (inst1);
4452 itstate = thumb_advance_itstate (itstate);
4454 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4456 inst1 = read_memory_unsigned_integer (pc, 2,
4457 byte_order_for_code);
4458 pc += thumb_insn_size (inst1);
4459 itstate = thumb_advance_itstate (itstate);
4462 return MAKE_THUMB_ADDR (pc);
4464 else if ((itstate & 0x0f) == 0x08)
4466 /* This is the last instruction of the conditional
4467 block, and it is executed. We can handle it normally
4468 because the following instruction is not conditional,
4469 and we must handle it normally because it is
4470 permitted to branch. Fall through. */
4476 /* There are conditional instructions after this one.
4477 If this instruction modifies the flags, then we can
4478 not predict what the next executed instruction will
4479 be. Fortunately, this instruction is architecturally
4480 forbidden to branch; we know it will fall through.
4481 Start by skipping past it. */
4482 pc += thumb_insn_size (inst1);
4483 itstate = thumb_advance_itstate (itstate);
4485 /* Set a breakpoint on the following instruction. */
4486 gdb_assert ((itstate & 0x0f) != 0);
4487 arm_insert_single_step_breakpoint (gdbarch, aspace,
4488 MAKE_THUMB_ADDR (pc));
4489 cond_negated = (itstate >> 4) & 1;
4491 /* Skip all following instructions with the same
4492 condition. If there is a later instruction in the IT
4493 block with the opposite condition, set the other
4494 breakpoint there. If not, then set a breakpoint on
4495 the instruction after the IT block. */
4498 inst1 = read_memory_unsigned_integer (pc, 2,
4499 byte_order_for_code);
4500 pc += thumb_insn_size (inst1);
4501 itstate = thumb_advance_itstate (itstate);
4503 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4505 return MAKE_THUMB_ADDR (pc);
4509 else if (itstate & 0x0f)
4511 /* We are in a conditional block. Check the condition. */
4512 int cond = itstate >> 4;
4514 if (! condition_true (cond, status))
4515 /* Advance to the next instruction. All the 32-bit
4516 instructions share a common prefix. */
4517 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4519 /* Otherwise, handle the instruction normally. */
4522 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4526 /* Fetch the saved PC from the stack. It's stored above
4527 all of the other registers. */
4528 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4529 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4530 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4532 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4534 unsigned long cond = bits (inst1, 8, 11);
4535 if (cond == 0x0f) /* 0x0f = SWI */
4537 struct gdbarch_tdep *tdep;
4538 tdep = gdbarch_tdep (gdbarch);
4540 if (tdep->syscall_next_pc != NULL)
4541 nextpc = tdep->syscall_next_pc (frame);
4544 else if (cond != 0x0f && condition_true (cond, status))
4545 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4547 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4549 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4551 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4553 unsigned short inst2;
4554 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4556 /* Default to the next instruction. */
4558 nextpc = MAKE_THUMB_ADDR (nextpc);
4560 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4562 /* Branches and miscellaneous control instructions. */
4564 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4567 int j1, j2, imm1, imm2;
4569 imm1 = sbits (inst1, 0, 10);
4570 imm2 = bits (inst2, 0, 10);
4571 j1 = bit (inst2, 13);
4572 j2 = bit (inst2, 11);
4574 offset = ((imm1 << 12) + (imm2 << 1));
4575 offset ^= ((!j2) << 22) | ((!j1) << 23);
4577 nextpc = pc_val + offset;
4578 /* For BLX make sure to clear the low bits. */
4579 if (bit (inst2, 12) == 0)
4580 nextpc = nextpc & 0xfffffffc;
4582 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4584 /* SUBS PC, LR, #imm8. */
4585 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4586 nextpc -= inst2 & 0x00ff;
4588 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4590 /* Conditional branch. */
4591 if (condition_true (bits (inst1, 6, 9), status))
4593 int sign, j1, j2, imm1, imm2;
4595 sign = sbits (inst1, 10, 10);
4596 imm1 = bits (inst1, 0, 5);
4597 imm2 = bits (inst2, 0, 10);
4598 j1 = bit (inst2, 13);
4599 j2 = bit (inst2, 11);
4601 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4602 offset += (imm1 << 12) + (imm2 << 1);
4604 nextpc = pc_val + offset;
4608 else if ((inst1 & 0xfe50) == 0xe810)
4610 /* Load multiple or RFE. */
4611 int rn, offset, load_pc = 1;
4613 rn = bits (inst1, 0, 3);
4614 if (bit (inst1, 7) && !bit (inst1, 8))
4617 if (!bit (inst2, 15))
4619 offset = bitcount (inst2) * 4 - 4;
4621 else if (!bit (inst1, 7) && bit (inst1, 8))
4624 if (!bit (inst2, 15))
4628 else if (bit (inst1, 7) && bit (inst1, 8))
4633 else if (!bit (inst1, 7) && !bit (inst1, 8))
4643 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4644 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4647 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4649 /* MOV PC or MOVS PC. */
4650 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4651 nextpc = MAKE_THUMB_ADDR (nextpc);
4653 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4657 int rn, load_pc = 1;
4659 rn = bits (inst1, 0, 3);
4660 base = get_frame_register_unsigned (frame, rn);
4661 if (rn == ARM_PC_REGNUM)
4663 base = (base + 4) & ~(CORE_ADDR) 0x3;
4665 base += bits (inst2, 0, 11);
4667 base -= bits (inst2, 0, 11);
4669 else if (bit (inst1, 7))
4670 base += bits (inst2, 0, 11);
4671 else if (bit (inst2, 11))
4673 if (bit (inst2, 10))
4676 base += bits (inst2, 0, 7);
4678 base -= bits (inst2, 0, 7);
4681 else if ((inst2 & 0x0fc0) == 0x0000)
4683 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4684 base += get_frame_register_unsigned (frame, rm) << shift;
4691 nextpc = get_frame_memory_unsigned (frame, base, 4);
4693 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4696 CORE_ADDR tbl_reg, table, offset, length;
4698 tbl_reg = bits (inst1, 0, 3);
4699 if (tbl_reg == 0x0f)
4700 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4702 table = get_frame_register_unsigned (frame, tbl_reg);
4704 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4705 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4706 nextpc = pc_val + length;
4708 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4711 CORE_ADDR tbl_reg, table, offset, length;
4713 tbl_reg = bits (inst1, 0, 3);
4714 if (tbl_reg == 0x0f)
4715 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4717 table = get_frame_register_unsigned (frame, tbl_reg);
4719 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4720 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4721 nextpc = pc_val + length;
4724 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4726 if (bits (inst1, 3, 6) == 0x0f)
4727 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4729 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4731 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4733 if (bits (inst1, 3, 6) == 0x0f)
4736 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4738 nextpc = MAKE_THUMB_ADDR (nextpc);
4740 else if ((inst1 & 0xf500) == 0xb100)
4743 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4744 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4746 if (bit (inst1, 11) && reg != 0)
4747 nextpc = pc_val + imm;
4748 else if (!bit (inst1, 11) && reg == 0)
4749 nextpc = pc_val + imm;
4754 /* Get the raw next address. PC is the current program counter, in
4755 FRAME, which is assumed to be executing in ARM mode.
4757 The value returned has the execution state of the next instruction
4758 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4759 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4763 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4765 struct gdbarch *gdbarch = get_frame_arch (frame);
4766 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4767 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4768 unsigned long pc_val;
4769 unsigned long this_instr;
4770 unsigned long status;
4773 pc_val = (unsigned long) pc;
4774 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4776 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4777 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4779 if (bits (this_instr, 28, 31) == INST_NV)
4780 switch (bits (this_instr, 24, 27))
4785 /* Branch with Link and change to Thumb. */
4786 nextpc = BranchDest (pc, this_instr);
4787 nextpc |= bit (this_instr, 24) << 1;
4788 nextpc = MAKE_THUMB_ADDR (nextpc);
4794 /* Coprocessor register transfer. */
4795 if (bits (this_instr, 12, 15) == 15)
4796 error (_("Invalid update to pc in instruction"));
4799 else if (condition_true (bits (this_instr, 28, 31), status))
4801 switch (bits (this_instr, 24, 27))
4804 case 0x1: /* data processing */
4808 unsigned long operand1, operand2, result = 0;
4812 if (bits (this_instr, 12, 15) != 15)
4815 if (bits (this_instr, 22, 25) == 0
4816 && bits (this_instr, 4, 7) == 9) /* multiply */
4817 error (_("Invalid update to pc in instruction"));
4819 /* BX <reg>, BLX <reg> */
4820 if (bits (this_instr, 4, 27) == 0x12fff1
4821 || bits (this_instr, 4, 27) == 0x12fff3)
4823 rn = bits (this_instr, 0, 3);
4824 nextpc = ((rn == ARM_PC_REGNUM)
4826 : get_frame_register_unsigned (frame, rn));
4831 /* Multiply into PC. */
4832 c = (status & FLAG_C) ? 1 : 0;
4833 rn = bits (this_instr, 16, 19);
4834 operand1 = ((rn == ARM_PC_REGNUM)
4836 : get_frame_register_unsigned (frame, rn));
4838 if (bit (this_instr, 25))
4840 unsigned long immval = bits (this_instr, 0, 7);
4841 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4842 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4845 else /* operand 2 is a shifted register. */
4846 operand2 = shifted_reg_val (frame, this_instr, c,
4849 switch (bits (this_instr, 21, 24))
4852 result = operand1 & operand2;
4856 result = operand1 ^ operand2;
4860 result = operand1 - operand2;
4864 result = operand2 - operand1;
4868 result = operand1 + operand2;
4872 result = operand1 + operand2 + c;
4876 result = operand1 - operand2 + c;
4880 result = operand2 - operand1 + c;
4886 case 0xb: /* tst, teq, cmp, cmn */
4887 result = (unsigned long) nextpc;
4891 result = operand1 | operand2;
4895 /* Always step into a function. */
4900 result = operand1 & ~operand2;
4908 /* In 26-bit APCS the bottom two bits of the result are
4909 ignored, and we always end up in ARM state. */
4911 nextpc = arm_addr_bits_remove (gdbarch, result);
4919 case 0x5: /* data transfer */
4922 if (bit (this_instr, 20))
4925 if (bits (this_instr, 12, 15) == 15)
4931 if (bit (this_instr, 22))
4932 error (_("Invalid update to pc in instruction"));
4934 /* byte write to PC */
4935 rn = bits (this_instr, 16, 19);
4936 base = ((rn == ARM_PC_REGNUM)
4938 : get_frame_register_unsigned (frame, rn));
4940 if (bit (this_instr, 24))
4943 int c = (status & FLAG_C) ? 1 : 0;
4944 unsigned long offset =
4945 (bit (this_instr, 25)
4946 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4947 : bits (this_instr, 0, 11));
4949 if (bit (this_instr, 23))
4955 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4962 case 0x9: /* block transfer */
4963 if (bit (this_instr, 20))
4966 if (bit (this_instr, 15))
4970 unsigned long rn_val
4971 = get_frame_register_unsigned (frame,
4972 bits (this_instr, 16, 19));
4974 if (bit (this_instr, 23))
4977 unsigned long reglist = bits (this_instr, 0, 14);
4978 offset = bitcount (reglist) * 4;
4979 if (bit (this_instr, 24)) /* pre */
4982 else if (bit (this_instr, 24))
4986 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4993 case 0xb: /* branch & link */
4994 case 0xa: /* branch */
4996 nextpc = BranchDest (pc, this_instr);
5002 case 0xe: /* coproc ops */
5006 struct gdbarch_tdep *tdep;
5007 tdep = gdbarch_tdep (gdbarch);
5009 if (tdep->syscall_next_pc != NULL)
5010 nextpc = tdep->syscall_next_pc (frame);
5016 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5024 /* Determine next PC after current instruction executes. Will call either
5025 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5026 loop is detected. */
5029 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5033 if (arm_frame_is_thumb (frame))
5034 nextpc = thumb_get_next_pc_raw (frame, pc);
5036 nextpc = arm_get_next_pc_raw (frame, pc);
5041 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5042 of the appropriate mode (as encoded in the PC value), even if this
5043 differs from what would be expected according to the symbol tables. */
5046 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5047 struct address_space *aspace,
5050 struct cleanup *old_chain
5051 = make_cleanup_restore_integer (&arm_override_mode);
5053 arm_override_mode = IS_THUMB_ADDR (pc);
5054 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5056 insert_single_step_breakpoint (gdbarch, aspace, pc);
5058 do_cleanups (old_chain);
5061 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5062 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5063 is found, attempt to step through it. A breakpoint is placed at the end of
5067 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5069 struct gdbarch *gdbarch = get_frame_arch (frame);
5070 struct address_space *aspace = get_frame_address_space (frame);
5071 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5072 CORE_ADDR pc = get_frame_pc (frame);
5073 CORE_ADDR breaks[2] = {-1, -1};
5075 unsigned short insn1, insn2;
5078 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5079 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5080 ULONGEST status, itstate;
5082 /* We currently do not support atomic sequences within an IT block. */
5083 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5084 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5088 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5089 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5091 if (thumb_insn_size (insn1) != 4)
5094 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5096 if (!((insn1 & 0xfff0) == 0xe850
5097 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5100 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5102 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5104 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5107 if (thumb_insn_size (insn1) != 4)
5109 /* Assume that there is at most one conditional branch in the
5110 atomic sequence. If a conditional branch is found, put a
5111 breakpoint in its destination address. */
5112 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5114 if (last_breakpoint > 0)
5115 return 0; /* More than one conditional branch found,
5116 fallback to the standard code. */
5118 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5122 /* We do not support atomic sequences that use any *other*
5123 instructions but conditional branches to change the PC.
5124 Fall back to standard code to avoid losing control of
5126 else if (thumb_instruction_changes_pc (insn1))
5131 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5134 /* Assume that there is at most one conditional branch in the
5135 atomic sequence. If a conditional branch is found, put a
5136 breakpoint in its destination address. */
5137 if ((insn1 & 0xf800) == 0xf000
5138 && (insn2 & 0xd000) == 0x8000
5139 && (insn1 & 0x0380) != 0x0380)
5141 int sign, j1, j2, imm1, imm2;
5142 unsigned int offset;
5144 sign = sbits (insn1, 10, 10);
5145 imm1 = bits (insn1, 0, 5);
5146 imm2 = bits (insn2, 0, 10);
5147 j1 = bit (insn2, 13);
5148 j2 = bit (insn2, 11);
5150 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5151 offset += (imm1 << 12) + (imm2 << 1);
5153 if (last_breakpoint > 0)
5154 return 0; /* More than one conditional branch found,
5155 fallback to the standard code. */
5157 breaks[1] = loc + offset;
5161 /* We do not support atomic sequences that use any *other*
5162 instructions but conditional branches to change the PC.
5163 Fall back to standard code to avoid losing control of
5165 else if (thumb2_instruction_changes_pc (insn1, insn2))
5168 /* If we find a strex{,b,h,d}, we're done. */
5169 if ((insn1 & 0xfff0) == 0xe840
5170 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5175 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5176 if (insn_count == atomic_sequence_length)
5179 /* Insert a breakpoint right after the end of the atomic sequence. */
5182 /* Check for duplicated breakpoints. Check also for a breakpoint
5183 placed (branch instruction's destination) anywhere in sequence. */
5185 && (breaks[1] == breaks[0]
5186 || (breaks[1] >= pc && breaks[1] < loc)))
5187 last_breakpoint = 0;
5189 /* Effectively inserts the breakpoints. */
5190 for (index = 0; index <= last_breakpoint; index++)
5191 arm_insert_single_step_breakpoint (gdbarch, aspace,
5192 MAKE_THUMB_ADDR (breaks[index]));
5198 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5200 struct gdbarch *gdbarch = get_frame_arch (frame);
5201 struct address_space *aspace = get_frame_address_space (frame);
5202 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5203 CORE_ADDR pc = get_frame_pc (frame);
5204 CORE_ADDR breaks[2] = {-1, -1};
5209 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5210 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5212 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5213 Note that we do not currently support conditionally executed atomic
5215 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5217 if ((insn & 0xff9000f0) != 0xe1900090)
5220 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5222 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5224 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5227 /* Assume that there is at most one conditional branch in the atomic
5228 sequence. If a conditional branch is found, put a breakpoint in
5229 its destination address. */
5230 if (bits (insn, 24, 27) == 0xa)
5232 if (last_breakpoint > 0)
5233 return 0; /* More than one conditional branch found, fallback
5234 to the standard single-step code. */
5236 breaks[1] = BranchDest (loc - 4, insn);
5240 /* We do not support atomic sequences that use any *other* instructions
5241 but conditional branches to change the PC. Fall back to standard
5242 code to avoid losing control of execution. */
5243 else if (arm_instruction_changes_pc (insn))
5246 /* If we find a strex{,b,h,d}, we're done. */
5247 if ((insn & 0xff9000f0) == 0xe1800090)
5251 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5252 if (insn_count == atomic_sequence_length)
5255 /* Insert a breakpoint right after the end of the atomic sequence. */
5258 /* Check for duplicated breakpoints. Check also for a breakpoint
5259 placed (branch instruction's destination) anywhere in sequence. */
5261 && (breaks[1] == breaks[0]
5262 || (breaks[1] >= pc && breaks[1] < loc)))
5263 last_breakpoint = 0;
5265 /* Effectively inserts the breakpoints. */
5266 for (index = 0; index <= last_breakpoint; index++)
5267 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5273 arm_deal_with_atomic_sequence (struct frame_info *frame)
5275 if (arm_frame_is_thumb (frame))
5276 return thumb_deal_with_atomic_sequence_raw (frame);
5278 return arm_deal_with_atomic_sequence_raw (frame);
5281 /* single_step() is called just before we want to resume the inferior,
5282 if we want to single-step it but there is no hardware or kernel
5283 single-step support. We find the target of the coming instruction
5284 and breakpoint it. */
5287 arm_software_single_step (struct frame_info *frame)
5289 struct gdbarch *gdbarch = get_frame_arch (frame);
5290 struct address_space *aspace = get_frame_address_space (frame);
5293 if (arm_deal_with_atomic_sequence (frame))
5296 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5297 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5302 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5303 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5304 NULL if an error occurs. BUF is freed. */
5307 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5308 int old_len, int new_len)
5311 int bytes_to_read = new_len - old_len;
5313 new_buf = xmalloc (new_len);
5314 memcpy (new_buf + bytes_to_read, buf, old_len);
5316 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5324 /* An IT block is at most the 2-byte IT instruction followed by
5325 four 4-byte instructions. The furthest back we must search to
5326 find an IT block that affects the current instruction is thus
5327 2 + 3 * 4 == 14 bytes. */
5328 #define MAX_IT_BLOCK_PREFIX 14
5330 /* Use a quick scan if there are more than this many bytes of
5332 #define IT_SCAN_THRESHOLD 32
5334 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5335 A breakpoint in an IT block may not be hit, depending on the
5338 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5342 CORE_ADDR boundary, func_start;
5344 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5345 int i, any, last_it, last_it_count;
5347 /* If we are using BKPT breakpoints, none of this is necessary. */
5348 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5351 /* ARM mode does not have this problem. */
5352 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5355 /* We are setting a breakpoint in Thumb code that could potentially
5356 contain an IT block. The first step is to find how much Thumb
5357 code there is; we do not need to read outside of known Thumb
5359 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5361 /* Thumb-2 code must have mapping symbols to have a chance. */
5364 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5366 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5367 && func_start > boundary)
5368 boundary = func_start;
5370 /* Search for a candidate IT instruction. We have to do some fancy
5371 footwork to distinguish a real IT instruction from the second
5372 half of a 32-bit instruction, but there is no need for that if
5373 there's no candidate. */
5374 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5376 /* No room for an IT instruction. */
5379 buf = xmalloc (buf_len);
5380 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5383 for (i = 0; i < buf_len; i += 2)
5385 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5386 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5398 /* OK, the code bytes before this instruction contain at least one
5399 halfword which resembles an IT instruction. We know that it's
5400 Thumb code, but there are still two possibilities. Either the
5401 halfword really is an IT instruction, or it is the second half of
5402 a 32-bit Thumb instruction. The only way we can tell is to
5403 scan forwards from a known instruction boundary. */
5404 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5408 /* There's a lot of code before this instruction. Start with an
5409 optimistic search; it's easy to recognize halfwords that can
5410 not be the start of a 32-bit instruction, and use that to
5411 lock on to the instruction boundaries. */
5412 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5415 buf_len = IT_SCAN_THRESHOLD;
5418 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5420 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5421 if (thumb_insn_size (inst1) == 2)
5428 /* At this point, if DEFINITE, BUF[I] is the first place we
5429 are sure that we know the instruction boundaries, and it is far
5430 enough from BPADDR that we could not miss an IT instruction
5431 affecting BPADDR. If ! DEFINITE, give up - start from a
5435 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5439 buf_len = bpaddr - boundary;
5445 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5448 buf_len = bpaddr - boundary;
5452 /* Scan forwards. Find the last IT instruction before BPADDR. */
5457 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5459 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5464 else if (inst1 & 0x0002)
5466 else if (inst1 & 0x0004)
5471 i += thumb_insn_size (inst1);
5477 /* There wasn't really an IT instruction after all. */
5480 if (last_it_count < 1)
5481 /* It was too far away. */
5484 /* This really is a trouble spot. Move the breakpoint to the IT
5486 return bpaddr - buf_len + last_it;
5489 /* ARM displaced stepping support.
5491 Generally ARM displaced stepping works as follows:
5493 1. When an instruction is to be single-stepped, it is first decoded by
5494 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5495 Depending on the type of instruction, it is then copied to a scratch
5496 location, possibly in a modified form. The copy_* set of functions
5497 performs such modification, as necessary. A breakpoint is placed after
5498 the modified instruction in the scratch space to return control to GDB.
5499 Note in particular that instructions which modify the PC will no longer
5500 do so after modification.
5502 2. The instruction is single-stepped, by setting the PC to the scratch
5503 location address, and resuming. Control returns to GDB when the
5506 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5507 function used for the current instruction. This function's job is to
5508 put the CPU/memory state back to what it would have been if the
5509 instruction had been executed unmodified in its original location. */
5511 /* NOP instruction (mov r0, r0). */
5512 #define ARM_NOP 0xe1a00000
5513 #define THUMB_NOP 0x4600
5515 /* Helper for register reads for displaced stepping. In particular, this
5516 returns the PC as it would be seen by the instruction at its original
5520 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5524 CORE_ADDR from = dsc->insn_addr;
5526 if (regno == ARM_PC_REGNUM)
5528 /* Compute pipeline offset:
5529 - When executing an ARM instruction, PC reads as the address of the
5530 current instruction plus 8.
5531 - When executing a Thumb instruction, PC reads as the address of the
5532 current instruction plus 4. */
5539 if (debug_displaced)
5540 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5541 (unsigned long) from);
5542 return (ULONGEST) from;
5546 regcache_cooked_read_unsigned (regs, regno, &ret);
5547 if (debug_displaced)
5548 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5549 regno, (unsigned long) ret);
5555 displaced_in_arm_mode (struct regcache *regs)
5558 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5560 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5562 return (ps & t_bit) == 0;
5565 /* Write to the PC as from a branch instruction. */
5568 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5572 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5573 architecture versions < 6. */
5574 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5575 val & ~(ULONGEST) 0x3);
5577 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5578 val & ~(ULONGEST) 0x1);
5581 /* Write to the PC as from a branch-exchange instruction. */
5584 bx_write_pc (struct regcache *regs, ULONGEST val)
5587 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5589 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5593 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5594 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5596 else if ((val & 2) == 0)
5598 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5603 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5604 mode, align dest to 4 bytes). */
5605 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5606 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5607 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5611 /* Write to the PC as if from a load instruction. */
5614 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5617 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5618 bx_write_pc (regs, val);
5620 branch_write_pc (regs, dsc, val);
5623 /* Write to the PC as if from an ALU instruction. */
5626 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5629 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5630 bx_write_pc (regs, val);
5632 branch_write_pc (regs, dsc, val);
5635 /* Helper for writing to registers for displaced stepping. Writing to the PC
5636 has a varying effects depending on the instruction which does the write:
5637 this is controlled by the WRITE_PC argument. */
5640 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5641 int regno, ULONGEST val, enum pc_write_style write_pc)
5643 if (regno == ARM_PC_REGNUM)
5645 if (debug_displaced)
5646 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5647 (unsigned long) val);
5650 case BRANCH_WRITE_PC:
5651 branch_write_pc (regs, dsc, val);
5655 bx_write_pc (regs, val);
5659 load_write_pc (regs, dsc, val);
5663 alu_write_pc (regs, dsc, val);
5666 case CANNOT_WRITE_PC:
5667 warning (_("Instruction wrote to PC in an unexpected way when "
5668 "single-stepping"));
5672 internal_error (__FILE__, __LINE__,
5673 _("Invalid argument to displaced_write_reg"));
5676 dsc->wrote_to_pc = 1;
5680 if (debug_displaced)
5681 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5682 regno, (unsigned long) val);
5683 regcache_cooked_write_unsigned (regs, regno, val);
5687 /* This function is used to concisely determine if an instruction INSN
5688 references PC. Register fields of interest in INSN should have the
5689 corresponding fields of BITMASK set to 0b1111. The function
5690 returns return 1 if any of these fields in INSN reference the PC
5691 (also 0b1111, r15), else it returns 0. */
5694 insn_references_pc (uint32_t insn, uint32_t bitmask)
5696 uint32_t lowbit = 1;
5698 while (bitmask != 0)
5702 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5708 mask = lowbit * 0xf;
5710 if ((insn & mask) == mask)
5719 /* The simplest copy function. Many instructions have the same effect no
5720 matter what address they are executed at: in those cases, use this. */
5723 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5724 const char *iname, struct displaced_step_closure *dsc)
5726 if (debug_displaced)
5727 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5728 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5731 dsc->modinsn[0] = insn;
5737 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5738 uint16_t insn2, const char *iname,
5739 struct displaced_step_closure *dsc)
5741 if (debug_displaced)
5742 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5743 "opcode/class '%s' unmodified\n", insn1, insn2,
5746 dsc->modinsn[0] = insn1;
5747 dsc->modinsn[1] = insn2;
5753 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5756 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5758 struct displaced_step_closure *dsc)
5760 if (debug_displaced)
5761 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5762 "opcode/class '%s' unmodified\n", insn,
5765 dsc->modinsn[0] = insn;
5770 /* Preload instructions with immediate offset. */
5773 cleanup_preload (struct gdbarch *gdbarch,
5774 struct regcache *regs, struct displaced_step_closure *dsc)
5776 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5777 if (!dsc->u.preload.immed)
5778 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5782 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5783 struct displaced_step_closure *dsc, unsigned int rn)
5786 /* Preload instructions:
5788 {pli/pld} [rn, #+/-imm]
5790 {pli/pld} [r0, #+/-imm]. */
5792 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5793 rn_val = displaced_read_reg (regs, dsc, rn);
5794 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5795 dsc->u.preload.immed = 1;
5797 dsc->cleanup = &cleanup_preload;
5801 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5802 struct displaced_step_closure *dsc)
5804 unsigned int rn = bits (insn, 16, 19);
5806 if (!insn_references_pc (insn, 0x000f0000ul))
5807 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5809 if (debug_displaced)
5810 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5811 (unsigned long) insn);
5813 dsc->modinsn[0] = insn & 0xfff0ffff;
5815 install_preload (gdbarch, regs, dsc, rn);
5821 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5822 struct regcache *regs, struct displaced_step_closure *dsc)
5824 unsigned int rn = bits (insn1, 0, 3);
5825 unsigned int u_bit = bit (insn1, 7);
5826 int imm12 = bits (insn2, 0, 11);
5829 if (rn != ARM_PC_REGNUM)
5830 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5832 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5833 PLD (literal) Encoding T1. */
5834 if (debug_displaced)
5835 fprintf_unfiltered (gdb_stdlog,
5836 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5837 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5843 /* Rewrite instruction {pli/pld} PC imm12 into:
5844 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5848 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5850 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5851 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5853 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5855 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5856 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5857 dsc->u.preload.immed = 0;
5859 /* {pli/pld} [r0, r1] */
5860 dsc->modinsn[0] = insn1 & 0xfff0;
5861 dsc->modinsn[1] = 0xf001;
5864 dsc->cleanup = &cleanup_preload;
5868 /* Preload instructions with register offset. */
5871 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5872 struct displaced_step_closure *dsc, unsigned int rn,
5875 ULONGEST rn_val, rm_val;
5877 /* Preload register-offset instructions:
5879 {pli/pld} [rn, rm {, shift}]
5881 {pli/pld} [r0, r1 {, shift}]. */
5883 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5884 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5885 rn_val = displaced_read_reg (regs, dsc, rn);
5886 rm_val = displaced_read_reg (regs, dsc, rm);
5887 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5888 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5889 dsc->u.preload.immed = 0;
5891 dsc->cleanup = &cleanup_preload;
5895 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5896 struct regcache *regs,
5897 struct displaced_step_closure *dsc)
5899 unsigned int rn = bits (insn, 16, 19);
5900 unsigned int rm = bits (insn, 0, 3);
5903 if (!insn_references_pc (insn, 0x000f000ful))
5904 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5906 if (debug_displaced)
5907 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5908 (unsigned long) insn);
5910 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5912 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5916 /* Copy/cleanup coprocessor load and store instructions. */
5919 cleanup_copro_load_store (struct gdbarch *gdbarch,
5920 struct regcache *regs,
5921 struct displaced_step_closure *dsc)
5923 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5925 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5927 if (dsc->u.ldst.writeback)
5928 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5932 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5933 struct displaced_step_closure *dsc,
5934 int writeback, unsigned int rn)
5938 /* Coprocessor load/store instructions:
5940 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5942 {stc/stc2} [r0, #+/-imm].
5944 ldc/ldc2 are handled identically. */
5946 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5947 rn_val = displaced_read_reg (regs, dsc, rn);
5948 /* PC should be 4-byte aligned. */
5949 rn_val = rn_val & 0xfffffffc;
5950 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5952 dsc->u.ldst.writeback = writeback;
5953 dsc->u.ldst.rn = rn;
5955 dsc->cleanup = &cleanup_copro_load_store;
5959 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5960 struct regcache *regs,
5961 struct displaced_step_closure *dsc)
5963 unsigned int rn = bits (insn, 16, 19);
5965 if (!insn_references_pc (insn, 0x000f0000ul))
5966 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5968 if (debug_displaced)
5969 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5970 "load/store insn %.8lx\n", (unsigned long) insn);
5972 dsc->modinsn[0] = insn & 0xfff0ffff;
5974 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5980 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5981 uint16_t insn2, struct regcache *regs,
5982 struct displaced_step_closure *dsc)
5984 unsigned int rn = bits (insn1, 0, 3);
5986 if (rn != ARM_PC_REGNUM)
5987 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5988 "copro load/store", dsc);
5990 if (debug_displaced)
5991 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5992 "load/store insn %.4x%.4x\n", insn1, insn2);
5994 dsc->modinsn[0] = insn1 & 0xfff0;
5995 dsc->modinsn[1] = insn2;
5998 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5999 doesn't support writeback, so pass 0. */
6000 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6005 /* Clean up branch instructions (actually perform the branch, by setting
6009 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6010 struct displaced_step_closure *dsc)
6012 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6013 int branch_taken = condition_true (dsc->u.branch.cond, status);
6014 enum pc_write_style write_pc = dsc->u.branch.exchange
6015 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6020 if (dsc->u.branch.link)
6022 /* The value of LR should be the next insn of current one. In order
6023 not to confuse logic hanlding later insn `bx lr', if current insn mode
6024 is Thumb, the bit 0 of LR value should be set to 1. */
6025 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6028 next_insn_addr |= 0x1;
6030 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6034 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6037 /* Copy B/BL/BLX instructions with immediate destinations. */
6040 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6041 struct displaced_step_closure *dsc,
6042 unsigned int cond, int exchange, int link, long offset)
6044 /* Implement "BL<cond> <label>" as:
6046 Preparation: cond <- instruction condition
6047 Insn: mov r0, r0 (nop)
6048 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6050 B<cond> similar, but don't set r14 in cleanup. */
6052 dsc->u.branch.cond = cond;
6053 dsc->u.branch.link = link;
6054 dsc->u.branch.exchange = exchange;
6056 dsc->u.branch.dest = dsc->insn_addr;
6057 if (link && exchange)
6058 /* For BLX, offset is computed from the Align (PC, 4). */
6059 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6062 dsc->u.branch.dest += 4 + offset;
6064 dsc->u.branch.dest += 8 + offset;
6066 dsc->cleanup = &cleanup_branch;
6069 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6070 struct regcache *regs, struct displaced_step_closure *dsc)
6072 unsigned int cond = bits (insn, 28, 31);
6073 int exchange = (cond == 0xf);
6074 int link = exchange || bit (insn, 24);
6077 if (debug_displaced)
6078 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6079 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6080 (unsigned long) insn);
6082 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6083 then arrange the switch into Thumb mode. */
6084 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6086 offset = bits (insn, 0, 23) << 2;
6088 if (bit (offset, 25))
6089 offset = offset | ~0x3ffffff;
6091 dsc->modinsn[0] = ARM_NOP;
6093 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6098 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6099 uint16_t insn2, struct regcache *regs,
6100 struct displaced_step_closure *dsc)
6102 int link = bit (insn2, 14);
6103 int exchange = link && !bit (insn2, 12);
6106 int j1 = bit (insn2, 13);
6107 int j2 = bit (insn2, 11);
6108 int s = sbits (insn1, 10, 10);
6109 int i1 = !(j1 ^ bit (insn1, 10));
6110 int i2 = !(j2 ^ bit (insn1, 10));
6112 if (!link && !exchange) /* B */
6114 offset = (bits (insn2, 0, 10) << 1);
6115 if (bit (insn2, 12)) /* Encoding T4 */
6117 offset |= (bits (insn1, 0, 9) << 12)
6123 else /* Encoding T3 */
6125 offset |= (bits (insn1, 0, 5) << 12)
6129 cond = bits (insn1, 6, 9);
6134 offset = (bits (insn1, 0, 9) << 12);
6135 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6136 offset |= exchange ?
6137 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6140 if (debug_displaced)
6141 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6142 "%.4x %.4x with offset %.8lx\n",
6143 link ? (exchange) ? "blx" : "bl" : "b",
6144 insn1, insn2, offset);
6146 dsc->modinsn[0] = THUMB_NOP;
6148 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6152 /* Copy B Thumb instructions. */
6154 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6155 struct displaced_step_closure *dsc)
6157 unsigned int cond = 0;
6159 unsigned short bit_12_15 = bits (insn, 12, 15);
6160 CORE_ADDR from = dsc->insn_addr;
6162 if (bit_12_15 == 0xd)
6164 /* offset = SignExtend (imm8:0, 32) */
6165 offset = sbits ((insn << 1), 0, 8);
6166 cond = bits (insn, 8, 11);
6168 else if (bit_12_15 == 0xe) /* Encoding T2 */
6170 offset = sbits ((insn << 1), 0, 11);
6174 if (debug_displaced)
6175 fprintf_unfiltered (gdb_stdlog,
6176 "displaced: copying b immediate insn %.4x "
6177 "with offset %d\n", insn, offset);
6179 dsc->u.branch.cond = cond;
6180 dsc->u.branch.link = 0;
6181 dsc->u.branch.exchange = 0;
6182 dsc->u.branch.dest = from + 4 + offset;
6184 dsc->modinsn[0] = THUMB_NOP;
6186 dsc->cleanup = &cleanup_branch;
6191 /* Copy BX/BLX with register-specified destinations. */
6194 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6195 struct displaced_step_closure *dsc, int link,
6196 unsigned int cond, unsigned int rm)
6198 /* Implement {BX,BLX}<cond> <reg>" as:
6200 Preparation: cond <- instruction condition
6201 Insn: mov r0, r0 (nop)
6202 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6204 Don't set r14 in cleanup for BX. */
6206 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6208 dsc->u.branch.cond = cond;
6209 dsc->u.branch.link = link;
6211 dsc->u.branch.exchange = 1;
6213 dsc->cleanup = &cleanup_branch;
6217 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6218 struct regcache *regs, struct displaced_step_closure *dsc)
6220 unsigned int cond = bits (insn, 28, 31);
6223 int link = bit (insn, 5);
6224 unsigned int rm = bits (insn, 0, 3);
6226 if (debug_displaced)
6227 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6228 (unsigned long) insn);
6230 dsc->modinsn[0] = ARM_NOP;
6232 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6237 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6238 struct regcache *regs,
6239 struct displaced_step_closure *dsc)
6241 int link = bit (insn, 7);
6242 unsigned int rm = bits (insn, 3, 6);
6244 if (debug_displaced)
6245 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6246 (unsigned short) insn);
6248 dsc->modinsn[0] = THUMB_NOP;
6250 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6256 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6259 cleanup_alu_imm (struct gdbarch *gdbarch,
6260 struct regcache *regs, struct displaced_step_closure *dsc)
6262 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6263 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6264 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6265 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6269 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6270 struct displaced_step_closure *dsc)
6272 unsigned int rn = bits (insn, 16, 19);
6273 unsigned int rd = bits (insn, 12, 15);
6274 unsigned int op = bits (insn, 21, 24);
6275 int is_mov = (op == 0xd);
6276 ULONGEST rd_val, rn_val;
6278 if (!insn_references_pc (insn, 0x000ff000ul))
6279 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6281 if (debug_displaced)
6282 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6283 "%.8lx\n", is_mov ? "move" : "ALU",
6284 (unsigned long) insn);
6286 /* Instruction is of form:
6288 <op><cond> rd, [rn,] #imm
6292 Preparation: tmp1, tmp2 <- r0, r1;
6294 Insn: <op><cond> r0, r1, #imm
6295 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6298 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6299 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6300 rn_val = displaced_read_reg (regs, dsc, rn);
6301 rd_val = displaced_read_reg (regs, dsc, rd);
6302 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6303 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6307 dsc->modinsn[0] = insn & 0xfff00fff;
6309 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6311 dsc->cleanup = &cleanup_alu_imm;
6317 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6318 uint16_t insn2, struct regcache *regs,
6319 struct displaced_step_closure *dsc)
6321 unsigned int op = bits (insn1, 5, 8);
6322 unsigned int rn, rm, rd;
6323 ULONGEST rd_val, rn_val;
6325 rn = bits (insn1, 0, 3); /* Rn */
6326 rm = bits (insn2, 0, 3); /* Rm */
6327 rd = bits (insn2, 8, 11); /* Rd */
6329 /* This routine is only called for instruction MOV. */
6330 gdb_assert (op == 0x2 && rn == 0xf);
6332 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6333 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6335 if (debug_displaced)
6336 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6337 "ALU", insn1, insn2);
6339 /* Instruction is of form:
6341 <op><cond> rd, [rn,] #imm
6345 Preparation: tmp1, tmp2 <- r0, r1;
6347 Insn: <op><cond> r0, r1, #imm
6348 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6351 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6352 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6353 rn_val = displaced_read_reg (regs, dsc, rn);
6354 rd_val = displaced_read_reg (regs, dsc, rd);
6355 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6356 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6359 dsc->modinsn[0] = insn1;
6360 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6363 dsc->cleanup = &cleanup_alu_imm;
6368 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6371 cleanup_alu_reg (struct gdbarch *gdbarch,
6372 struct regcache *regs, struct displaced_step_closure *dsc)
6377 rd_val = displaced_read_reg (regs, dsc, 0);
6379 for (i = 0; i < 3; i++)
6380 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6382 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6386 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6387 struct displaced_step_closure *dsc,
6388 unsigned int rd, unsigned int rn, unsigned int rm)
6390 ULONGEST rd_val, rn_val, rm_val;
6392 /* Instruction is of form:
6394 <op><cond> rd, [rn,] rm [, <shift>]
6398 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6399 r0, r1, r2 <- rd, rn, rm
6400 Insn: <op><cond> r0, r1, r2 [, <shift>]
6401 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6404 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6405 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6406 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6407 rd_val = displaced_read_reg (regs, dsc, rd);
6408 rn_val = displaced_read_reg (regs, dsc, rn);
6409 rm_val = displaced_read_reg (regs, dsc, rm);
6410 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6411 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6412 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6415 dsc->cleanup = &cleanup_alu_reg;
6419 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6420 struct displaced_step_closure *dsc)
6422 unsigned int op = bits (insn, 21, 24);
6423 int is_mov = (op == 0xd);
6425 if (!insn_references_pc (insn, 0x000ff00ful))
6426 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6428 if (debug_displaced)
6429 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6430 is_mov ? "move" : "ALU", (unsigned long) insn);
6433 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6435 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6437 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6443 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6444 struct regcache *regs,
6445 struct displaced_step_closure *dsc)
6447 unsigned rn, rm, rd;
6449 rd = bits (insn, 3, 6);
6450 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6453 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6454 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6456 if (debug_displaced)
6457 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6458 "ALU", (unsigned short) insn);
6460 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6462 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6467 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6470 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6471 struct regcache *regs,
6472 struct displaced_step_closure *dsc)
6474 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6477 for (i = 0; i < 4; i++)
6478 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6480 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6484 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6485 struct displaced_step_closure *dsc,
6486 unsigned int rd, unsigned int rn, unsigned int rm,
6490 ULONGEST rd_val, rn_val, rm_val, rs_val;
6492 /* Instruction is of form:
6494 <op><cond> rd, [rn,] rm, <shift> rs
6498 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6499 r0, r1, r2, r3 <- rd, rn, rm, rs
6500 Insn: <op><cond> r0, r1, r2, <shift> r3
6502 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6506 for (i = 0; i < 4; i++)
6507 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6509 rd_val = displaced_read_reg (regs, dsc, rd);
6510 rn_val = displaced_read_reg (regs, dsc, rn);
6511 rm_val = displaced_read_reg (regs, dsc, rm);
6512 rs_val = displaced_read_reg (regs, dsc, rs);
6513 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6514 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6515 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6516 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6518 dsc->cleanup = &cleanup_alu_shifted_reg;
6522 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6523 struct regcache *regs,
6524 struct displaced_step_closure *dsc)
6526 unsigned int op = bits (insn, 21, 24);
6527 int is_mov = (op == 0xd);
6528 unsigned int rd, rn, rm, rs;
6530 if (!insn_references_pc (insn, 0x000fff0ful))
6531 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6533 if (debug_displaced)
6534 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6535 "%.8lx\n", is_mov ? "move" : "ALU",
6536 (unsigned long) insn);
6538 rn = bits (insn, 16, 19);
6539 rm = bits (insn, 0, 3);
6540 rs = bits (insn, 8, 11);
6541 rd = bits (insn, 12, 15);
6544 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6546 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6548 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6553 /* Clean up load instructions. */
6556 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6557 struct displaced_step_closure *dsc)
6559 ULONGEST rt_val, rt_val2 = 0, rn_val;
6561 rt_val = displaced_read_reg (regs, dsc, 0);
6562 if (dsc->u.ldst.xfersize == 8)
6563 rt_val2 = displaced_read_reg (regs, dsc, 1);
6564 rn_val = displaced_read_reg (regs, dsc, 2);
6566 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6567 if (dsc->u.ldst.xfersize > 4)
6568 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6569 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6570 if (!dsc->u.ldst.immed)
6571 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6573 /* Handle register writeback. */
6574 if (dsc->u.ldst.writeback)
6575 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6576 /* Put result in right place. */
6577 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6578 if (dsc->u.ldst.xfersize == 8)
6579 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6582 /* Clean up store instructions. */
6585 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6586 struct displaced_step_closure *dsc)
6588 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6590 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6591 if (dsc->u.ldst.xfersize > 4)
6592 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6593 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6594 if (!dsc->u.ldst.immed)
6595 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6596 if (!dsc->u.ldst.restore_r4)
6597 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6600 if (dsc->u.ldst.writeback)
6601 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6604 /* Copy "extra" load/store instructions. These are halfword/doubleword
6605 transfers, which have a different encoding to byte/word transfers. */
6608 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6609 struct regcache *regs, struct displaced_step_closure *dsc)
6611 unsigned int op1 = bits (insn, 20, 24);
6612 unsigned int op2 = bits (insn, 5, 6);
6613 unsigned int rt = bits (insn, 12, 15);
6614 unsigned int rn = bits (insn, 16, 19);
6615 unsigned int rm = bits (insn, 0, 3);
6616 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6617 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6618 int immed = (op1 & 0x4) != 0;
6620 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6622 if (!insn_references_pc (insn, 0x000ff00ful))
6623 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6625 if (debug_displaced)
6626 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6627 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6628 (unsigned long) insn);
6630 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6633 internal_error (__FILE__, __LINE__,
6634 _("copy_extra_ld_st: instruction decode error"));
6636 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6637 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6638 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6640 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6642 rt_val = displaced_read_reg (regs, dsc, rt);
6643 if (bytesize[opcode] == 8)
6644 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6645 rn_val = displaced_read_reg (regs, dsc, rn);
6647 rm_val = displaced_read_reg (regs, dsc, rm);
6649 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6650 if (bytesize[opcode] == 8)
6651 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6652 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6654 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6657 dsc->u.ldst.xfersize = bytesize[opcode];
6658 dsc->u.ldst.rn = rn;
6659 dsc->u.ldst.immed = immed;
6660 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6661 dsc->u.ldst.restore_r4 = 0;
6664 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6666 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6667 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6669 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6671 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6672 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6674 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6679 /* Copy byte/half word/word loads and stores. */
6682 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6683 struct displaced_step_closure *dsc, int load,
6684 int immed, int writeback, int size, int usermode,
6685 int rt, int rm, int rn)
6687 ULONGEST rt_val, rn_val, rm_val = 0;
6689 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6690 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6692 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6694 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6696 rt_val = displaced_read_reg (regs, dsc, rt);
6697 rn_val = displaced_read_reg (regs, dsc, rn);
6699 rm_val = displaced_read_reg (regs, dsc, rm);
6701 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6702 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6704 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6706 dsc->u.ldst.xfersize = size;
6707 dsc->u.ldst.rn = rn;
6708 dsc->u.ldst.immed = immed;
6709 dsc->u.ldst.writeback = writeback;
6711 /* To write PC we can do:
6713 Before this sequence of instructions:
6714 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6715 r2 is the Rn value got from dispalced_read_reg.
6717 Insn1: push {pc} Write address of STR instruction + offset on stack
6718 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6719 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6720 = addr(Insn1) + offset - addr(Insn3) - 8
6722 Insn4: add r4, r4, #8 r4 = offset - 8
6723 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6725 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6727 Otherwise we don't know what value to write for PC, since the offset is
6728 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6729 of this can be found in Section "Saving from r15" in
6730 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6732 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6737 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6738 uint16_t insn2, struct regcache *regs,
6739 struct displaced_step_closure *dsc, int size)
6741 unsigned int u_bit = bit (insn1, 7);
6742 unsigned int rt = bits (insn2, 12, 15);
6743 int imm12 = bits (insn2, 0, 11);
6746 if (debug_displaced)
6747 fprintf_unfiltered (gdb_stdlog,
6748 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6749 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6755 /* Rewrite instruction LDR Rt imm12 into:
6757 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6761 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6764 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6765 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6766 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6768 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6770 pc_val = pc_val & 0xfffffffc;
6772 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6773 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6777 dsc->u.ldst.xfersize = size;
6778 dsc->u.ldst.immed = 0;
6779 dsc->u.ldst.writeback = 0;
6780 dsc->u.ldst.restore_r4 = 0;
6782 /* LDR R0, R2, R3 */
6783 dsc->modinsn[0] = 0xf852;
6784 dsc->modinsn[1] = 0x3;
6787 dsc->cleanup = &cleanup_load;
6793 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6794 uint16_t insn2, struct regcache *regs,
6795 struct displaced_step_closure *dsc,
6796 int writeback, int immed)
6798 unsigned int rt = bits (insn2, 12, 15);
6799 unsigned int rn = bits (insn1, 0, 3);
6800 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6801 /* In LDR (register), there is also a register Rm, which is not allowed to
6802 be PC, so we don't have to check it. */
6804 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6805 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6808 if (debug_displaced)
6809 fprintf_unfiltered (gdb_stdlog,
6810 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6811 rt, rn, insn1, insn2);
6813 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6816 dsc->u.ldst.restore_r4 = 0;
6819 /* ldr[b]<cond> rt, [rn, #imm], etc.
6821 ldr[b]<cond> r0, [r2, #imm]. */
6823 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6824 dsc->modinsn[1] = insn2 & 0x0fff;
6827 /* ldr[b]<cond> rt, [rn, rm], etc.
6829 ldr[b]<cond> r0, [r2, r3]. */
6831 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6832 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6842 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6843 struct regcache *regs,
6844 struct displaced_step_closure *dsc,
6845 int load, int size, int usermode)
6847 int immed = !bit (insn, 25);
6848 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6849 unsigned int rt = bits (insn, 12, 15);
6850 unsigned int rn = bits (insn, 16, 19);
6851 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6853 if (!insn_references_pc (insn, 0x000ff00ful))
6854 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6856 if (debug_displaced)
6857 fprintf_unfiltered (gdb_stdlog,
6858 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6859 load ? (size == 1 ? "ldrb" : "ldr")
6860 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6862 (unsigned long) insn);
6864 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6865 usermode, rt, rm, rn);
6867 if (load || rt != ARM_PC_REGNUM)
6869 dsc->u.ldst.restore_r4 = 0;
6872 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6874 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6875 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6877 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6879 {ldr,str}[b]<cond> r0, [r2, r3]. */
6880 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6884 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6885 dsc->u.ldst.restore_r4 = 1;
6886 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6887 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6888 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6889 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6890 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6894 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6896 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6901 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6906 /* Cleanup LDM instructions with fully-populated register list. This is an
6907 unfortunate corner case: it's impossible to implement correctly by modifying
6908 the instruction. The issue is as follows: we have an instruction,
6912 which we must rewrite to avoid loading PC. A possible solution would be to
6913 do the load in two halves, something like (with suitable cleanup
6917 ldm[id][ab] r8!, {r0-r7}
6919 ldm[id][ab] r8, {r7-r14}
6922 but at present there's no suitable place for <temp>, since the scratch space
6923 is overwritten before the cleanup routine is called. For now, we simply
6924 emulate the instruction. */
6927 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6928 struct displaced_step_closure *dsc)
6930 int inc = dsc->u.block.increment;
6931 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6932 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6933 uint32_t regmask = dsc->u.block.regmask;
6934 int regno = inc ? 0 : 15;
6935 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6936 int exception_return = dsc->u.block.load && dsc->u.block.user
6937 && (regmask & 0x8000) != 0;
6938 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6939 int do_transfer = condition_true (dsc->u.block.cond, status);
6940 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6945 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6946 sensible we can do here. Complain loudly. */
6947 if (exception_return)
6948 error (_("Cannot single-step exception return"));
6950 /* We don't handle any stores here for now. */
6951 gdb_assert (dsc->u.block.load != 0);
6953 if (debug_displaced)
6954 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6955 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6956 dsc->u.block.increment ? "inc" : "dec",
6957 dsc->u.block.before ? "before" : "after");
6964 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6967 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6970 xfer_addr += bump_before;
6972 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6973 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6975 xfer_addr += bump_after;
6977 regmask &= ~(1 << regno);
6980 if (dsc->u.block.writeback)
6981 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6985 /* Clean up an STM which included the PC in the register list. */
6988 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6989 struct displaced_step_closure *dsc)
6991 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6992 int store_executed = condition_true (dsc->u.block.cond, status);
6993 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6994 CORE_ADDR stm_insn_addr;
6997 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6999 /* If condition code fails, there's nothing else to do. */
7000 if (!store_executed)
7003 if (dsc->u.block.increment)
7005 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7007 if (dsc->u.block.before)
7012 pc_stored_at = dsc->u.block.xfer_addr;
7014 if (dsc->u.block.before)
7018 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7019 stm_insn_addr = dsc->scratch_base;
7020 offset = pc_val - stm_insn_addr;
7022 if (debug_displaced)
7023 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7024 "STM instruction\n", offset);
7026 /* Rewrite the stored PC to the proper value for the non-displaced original
7028 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7029 dsc->insn_addr + offset);
7032 /* Clean up an LDM which includes the PC in the register list. We clumped all
7033 the registers in the transferred list into a contiguous range r0...rX (to
7034 avoid loading PC directly and losing control of the debugged program), so we
7035 must undo that here. */
7038 cleanup_block_load_pc (struct gdbarch *gdbarch,
7039 struct regcache *regs,
7040 struct displaced_step_closure *dsc)
7042 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7043 int load_executed = condition_true (dsc->u.block.cond, status);
7044 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7045 unsigned int regs_loaded = bitcount (mask);
7046 unsigned int num_to_shuffle = regs_loaded, clobbered;
7048 /* The method employed here will fail if the register list is fully populated
7049 (we need to avoid loading PC directly). */
7050 gdb_assert (num_to_shuffle < 16);
7055 clobbered = (1 << num_to_shuffle) - 1;
7057 while (num_to_shuffle > 0)
7059 if ((mask & (1 << write_reg)) != 0)
7061 unsigned int read_reg = num_to_shuffle - 1;
7063 if (read_reg != write_reg)
7065 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7066 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7067 if (debug_displaced)
7068 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7069 "loaded register r%d to r%d\n"), read_reg,
7072 else if (debug_displaced)
7073 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7074 "r%d already in the right place\n"),
7077 clobbered &= ~(1 << write_reg);
7085 /* Restore any registers we scribbled over. */
7086 for (write_reg = 0; clobbered != 0; write_reg++)
7088 if ((clobbered & (1 << write_reg)) != 0)
7090 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7092 if (debug_displaced)
7093 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7094 "clobbered register r%d\n"), write_reg);
7095 clobbered &= ~(1 << write_reg);
7099 /* Perform register writeback manually. */
7100 if (dsc->u.block.writeback)
7102 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7104 if (dsc->u.block.increment)
7105 new_rn_val += regs_loaded * 4;
7107 new_rn_val -= regs_loaded * 4;
7109 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7114 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7115 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7118 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7119 struct regcache *regs,
7120 struct displaced_step_closure *dsc)
7122 int load = bit (insn, 20);
7123 int user = bit (insn, 22);
7124 int increment = bit (insn, 23);
7125 int before = bit (insn, 24);
7126 int writeback = bit (insn, 21);
7127 int rn = bits (insn, 16, 19);
7129 /* Block transfers which don't mention PC can be run directly
7131 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7132 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7134 if (rn == ARM_PC_REGNUM)
7136 warning (_("displaced: Unpredictable LDM or STM with "
7137 "base register r15"));
7138 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7141 if (debug_displaced)
7142 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7143 "%.8lx\n", (unsigned long) insn);
7145 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7146 dsc->u.block.rn = rn;
7148 dsc->u.block.load = load;
7149 dsc->u.block.user = user;
7150 dsc->u.block.increment = increment;
7151 dsc->u.block.before = before;
7152 dsc->u.block.writeback = writeback;
7153 dsc->u.block.cond = bits (insn, 28, 31);
7155 dsc->u.block.regmask = insn & 0xffff;
7159 if ((insn & 0xffff) == 0xffff)
7161 /* LDM with a fully-populated register list. This case is
7162 particularly tricky. Implement for now by fully emulating the
7163 instruction (which might not behave perfectly in all cases, but
7164 these instructions should be rare enough for that not to matter
7166 dsc->modinsn[0] = ARM_NOP;
7168 dsc->cleanup = &cleanup_block_load_all;
7172 /* LDM of a list of registers which includes PC. Implement by
7173 rewriting the list of registers to be transferred into a
7174 contiguous chunk r0...rX before doing the transfer, then shuffling
7175 registers into the correct places in the cleanup routine. */
7176 unsigned int regmask = insn & 0xffff;
7177 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7178 unsigned int to = 0, from = 0, i, new_rn;
7180 for (i = 0; i < num_in_list; i++)
7181 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7183 /* Writeback makes things complicated. We need to avoid clobbering
7184 the base register with one of the registers in our modified
7185 register list, but just using a different register can't work in
7188 ldm r14!, {r0-r13,pc}
7190 which would need to be rewritten as:
7194 but that can't work, because there's no free register for N.
7196 Solve this by turning off the writeback bit, and emulating
7197 writeback manually in the cleanup routine. */
7202 new_regmask = (1 << num_in_list) - 1;
7204 if (debug_displaced)
7205 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7206 "{..., pc}: original reg list %.4x, modified "
7207 "list %.4x\n"), rn, writeback ? "!" : "",
7208 (int) insn & 0xffff, new_regmask);
7210 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7212 dsc->cleanup = &cleanup_block_load_pc;
7217 /* STM of a list of registers which includes PC. Run the instruction
7218 as-is, but out of line: this will store the wrong value for the PC,
7219 so we must manually fix up the memory in the cleanup routine.
7220 Doing things this way has the advantage that we can auto-detect
7221 the offset of the PC write (which is architecture-dependent) in
7222 the cleanup routine. */
7223 dsc->modinsn[0] = insn;
7225 dsc->cleanup = &cleanup_block_store_pc;
7232 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7233 struct regcache *regs,
7234 struct displaced_step_closure *dsc)
7236 int rn = bits (insn1, 0, 3);
7237 int load = bit (insn1, 4);
7238 int writeback = bit (insn1, 5);
7240 /* Block transfers which don't mention PC can be run directly
7242 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7243 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7245 if (rn == ARM_PC_REGNUM)
7247 warning (_("displaced: Unpredictable LDM or STM with "
7248 "base register r15"));
7249 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7250 "unpredictable ldm/stm", dsc);
7253 if (debug_displaced)
7254 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7255 "%.4x%.4x\n", insn1, insn2);
7257 /* Clear bit 13, since it should be always zero. */
7258 dsc->u.block.regmask = (insn2 & 0xdfff);
7259 dsc->u.block.rn = rn;
7261 dsc->u.block.load = load;
7262 dsc->u.block.user = 0;
7263 dsc->u.block.increment = bit (insn1, 7);
7264 dsc->u.block.before = bit (insn1, 8);
7265 dsc->u.block.writeback = writeback;
7266 dsc->u.block.cond = INST_AL;
7267 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7271 if (dsc->u.block.regmask == 0xffff)
7273 /* This branch is impossible to happen. */
7278 unsigned int regmask = dsc->u.block.regmask;
7279 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7280 unsigned int to = 0, from = 0, i, new_rn;
7282 for (i = 0; i < num_in_list; i++)
7283 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7288 new_regmask = (1 << num_in_list) - 1;
7290 if (debug_displaced)
7291 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7292 "{..., pc}: original reg list %.4x, modified "
7293 "list %.4x\n"), rn, writeback ? "!" : "",
7294 (int) dsc->u.block.regmask, new_regmask);
7296 dsc->modinsn[0] = insn1;
7297 dsc->modinsn[1] = (new_regmask & 0xffff);
7300 dsc->cleanup = &cleanup_block_load_pc;
7305 dsc->modinsn[0] = insn1;
7306 dsc->modinsn[1] = insn2;
7308 dsc->cleanup = &cleanup_block_store_pc;
7313 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7314 for Linux, where some SVC instructions must be treated specially. */
7317 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7318 struct displaced_step_closure *dsc)
7320 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7322 if (debug_displaced)
7323 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7324 "%.8lx\n", (unsigned long) resume_addr);
7326 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7330 /* Common copy routine for svc instruciton. */
7333 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7334 struct displaced_step_closure *dsc)
7336 /* Preparation: none.
7337 Insn: unmodified svc.
7338 Cleanup: pc <- insn_addr + insn_size. */
7340 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7342 dsc->wrote_to_pc = 1;
7344 /* Allow OS-specific code to override SVC handling. */
7345 if (dsc->u.svc.copy_svc_os)
7346 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7349 dsc->cleanup = &cleanup_svc;
7355 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7356 struct regcache *regs, struct displaced_step_closure *dsc)
7359 if (debug_displaced)
7360 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7361 (unsigned long) insn);
7363 dsc->modinsn[0] = insn;
7365 return install_svc (gdbarch, regs, dsc);
7369 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7370 struct regcache *regs, struct displaced_step_closure *dsc)
7373 if (debug_displaced)
7374 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7377 dsc->modinsn[0] = insn;
7379 return install_svc (gdbarch, regs, dsc);
7382 /* Copy undefined instructions. */
7385 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7386 struct displaced_step_closure *dsc)
7388 if (debug_displaced)
7389 fprintf_unfiltered (gdb_stdlog,
7390 "displaced: copying undefined insn %.8lx\n",
7391 (unsigned long) insn);
7393 dsc->modinsn[0] = insn;
7399 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7400 struct displaced_step_closure *dsc)
7403 if (debug_displaced)
7404 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7405 "%.4x %.4x\n", (unsigned short) insn1,
7406 (unsigned short) insn2);
7408 dsc->modinsn[0] = insn1;
7409 dsc->modinsn[1] = insn2;
7415 /* Copy unpredictable instructions. */
7418 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7419 struct displaced_step_closure *dsc)
7421 if (debug_displaced)
7422 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7423 "%.8lx\n", (unsigned long) insn);
7425 dsc->modinsn[0] = insn;
7430 /* The decode_* functions are instruction decoding helpers. They mostly follow
7431 the presentation in the ARM ARM. */
7434 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7435 struct regcache *regs,
7436 struct displaced_step_closure *dsc)
7438 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7439 unsigned int rn = bits (insn, 16, 19);
7441 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7442 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7443 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7444 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7445 else if ((op1 & 0x60) == 0x20)
7446 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7447 else if ((op1 & 0x71) == 0x40)
7448 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7450 else if ((op1 & 0x77) == 0x41)
7451 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7452 else if ((op1 & 0x77) == 0x45)
7453 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7454 else if ((op1 & 0x77) == 0x51)
7457 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7459 return arm_copy_unpred (gdbarch, insn, dsc);
7461 else if ((op1 & 0x77) == 0x55)
7462 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7463 else if (op1 == 0x57)
7466 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7467 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7468 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7469 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7470 default: return arm_copy_unpred (gdbarch, insn, dsc);
7472 else if ((op1 & 0x63) == 0x43)
7473 return arm_copy_unpred (gdbarch, insn, dsc);
7474 else if ((op2 & 0x1) == 0x0)
7475 switch (op1 & ~0x80)
7478 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7480 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7481 case 0x71: case 0x75:
7483 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7484 case 0x63: case 0x67: case 0x73: case 0x77:
7485 return arm_copy_unpred (gdbarch, insn, dsc);
7487 return arm_copy_undef (gdbarch, insn, dsc);
7490 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7494 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7495 struct regcache *regs,
7496 struct displaced_step_closure *dsc)
7498 if (bit (insn, 27) == 0)
7499 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7500 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7501 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7504 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7507 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7509 case 0x4: case 0x5: case 0x6: case 0x7:
7510 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7513 switch ((insn & 0xe00000) >> 21)
7515 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7517 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7520 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7523 return arm_copy_undef (gdbarch, insn, dsc);
7528 int rn_f = (bits (insn, 16, 19) == 0xf);
7529 switch ((insn & 0xe00000) >> 21)
7532 /* ldc/ldc2 imm (undefined for rn == pc). */
7533 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7534 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7537 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7539 case 0x4: case 0x5: case 0x6: case 0x7:
7540 /* ldc/ldc2 lit (undefined for rn != pc). */
7541 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7542 : arm_copy_undef (gdbarch, insn, dsc);
7545 return arm_copy_undef (gdbarch, insn, dsc);
7550 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7553 if (bits (insn, 16, 19) == 0xf)
7555 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7557 return arm_copy_undef (gdbarch, insn, dsc);
7561 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7563 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7567 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7569 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7572 return arm_copy_undef (gdbarch, insn, dsc);
7576 /* Decode miscellaneous instructions in dp/misc encoding space. */
7579 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7580 struct regcache *regs,
7581 struct displaced_step_closure *dsc)
7583 unsigned int op2 = bits (insn, 4, 6);
7584 unsigned int op = bits (insn, 21, 22);
7585 unsigned int op1 = bits (insn, 16, 19);
7590 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7593 if (op == 0x1) /* bx. */
7594 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7596 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7598 return arm_copy_undef (gdbarch, insn, dsc);
7602 /* Not really supported. */
7603 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7605 return arm_copy_undef (gdbarch, insn, dsc);
7609 return arm_copy_bx_blx_reg (gdbarch, insn,
7610 regs, dsc); /* blx register. */
7612 return arm_copy_undef (gdbarch, insn, dsc);
7615 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7619 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7621 /* Not really supported. */
7622 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7625 return arm_copy_undef (gdbarch, insn, dsc);
7630 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7631 struct regcache *regs,
7632 struct displaced_step_closure *dsc)
7635 switch (bits (insn, 20, 24))
7638 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7641 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7643 case 0x12: case 0x16:
7644 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7647 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7651 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7653 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7654 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7655 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7656 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7657 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7658 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7659 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7660 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7661 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7662 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7663 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7664 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7665 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7666 /* 2nd arg means "unpriveleged". */
7667 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7671 /* Should be unreachable. */
7676 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7677 struct regcache *regs,
7678 struct displaced_step_closure *dsc)
7680 int a = bit (insn, 25), b = bit (insn, 4);
7681 uint32_t op1 = bits (insn, 20, 24);
7682 int rn_f = bits (insn, 16, 19) == 0xf;
7684 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7685 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7686 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7687 else if ((!a && (op1 & 0x17) == 0x02)
7688 || (a && (op1 & 0x17) == 0x02 && !b))
7689 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7690 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7691 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7692 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7693 else if ((!a && (op1 & 0x17) == 0x03)
7694 || (a && (op1 & 0x17) == 0x03 && !b))
7695 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7696 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7697 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7698 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7699 else if ((!a && (op1 & 0x17) == 0x06)
7700 || (a && (op1 & 0x17) == 0x06 && !b))
7701 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7702 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7703 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7704 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7705 else if ((!a && (op1 & 0x17) == 0x07)
7706 || (a && (op1 & 0x17) == 0x07 && !b))
7707 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7709 /* Should be unreachable. */
7714 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7715 struct displaced_step_closure *dsc)
7717 switch (bits (insn, 20, 24))
7719 case 0x00: case 0x01: case 0x02: case 0x03:
7720 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7722 case 0x04: case 0x05: case 0x06: case 0x07:
7723 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7725 case 0x08: case 0x09: case 0x0a: case 0x0b:
7726 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7727 return arm_copy_unmodified (gdbarch, insn,
7728 "decode/pack/unpack/saturate/reverse", dsc);
7731 if (bits (insn, 5, 7) == 0) /* op2. */
7733 if (bits (insn, 12, 15) == 0xf)
7734 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7736 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7739 return arm_copy_undef (gdbarch, insn, dsc);
7741 case 0x1a: case 0x1b:
7742 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7743 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7745 return arm_copy_undef (gdbarch, insn, dsc);
7747 case 0x1c: case 0x1d:
7748 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7750 if (bits (insn, 0, 3) == 0xf)
7751 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7753 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7756 return arm_copy_undef (gdbarch, insn, dsc);
7758 case 0x1e: case 0x1f:
7759 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7760 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7762 return arm_copy_undef (gdbarch, insn, dsc);
7765 /* Should be unreachable. */
7770 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7771 struct regcache *regs,
7772 struct displaced_step_closure *dsc)
7775 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7777 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7781 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7782 struct regcache *regs,
7783 struct displaced_step_closure *dsc)
7785 unsigned int opcode = bits (insn, 20, 24);
7789 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7790 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7792 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7793 case 0x12: case 0x16:
7794 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7796 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7797 case 0x13: case 0x17:
7798 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7800 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7801 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7802 /* Note: no writeback for these instructions. Bit 25 will always be
7803 zero though (via caller), so the following works OK. */
7804 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7807 /* Should be unreachable. */
7811 /* Decode shifted register instructions. */
7814 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7815 uint16_t insn2, struct regcache *regs,
7816 struct displaced_step_closure *dsc)
7818 /* PC is only allowed to be used in instruction MOV. */
7820 unsigned int op = bits (insn1, 5, 8);
7821 unsigned int rn = bits (insn1, 0, 3);
7823 if (op == 0x2 && rn == 0xf) /* MOV */
7824 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7827 "dp (shift reg)", dsc);
7831 /* Decode extension register load/store. Exactly the same as
7832 arm_decode_ext_reg_ld_st. */
7835 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7836 uint16_t insn2, struct regcache *regs,
7837 struct displaced_step_closure *dsc)
7839 unsigned int opcode = bits (insn1, 4, 8);
7843 case 0x04: case 0x05:
7844 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7845 "vfp/neon vmov", dsc);
7847 case 0x08: case 0x0c: /* 01x00 */
7848 case 0x0a: case 0x0e: /* 01x10 */
7849 case 0x12: case 0x16: /* 10x10 */
7850 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7851 "vfp/neon vstm/vpush", dsc);
7853 case 0x09: case 0x0d: /* 01x01 */
7854 case 0x0b: case 0x0f: /* 01x11 */
7855 case 0x13: case 0x17: /* 10x11 */
7856 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7857 "vfp/neon vldm/vpop", dsc);
7859 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7860 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7862 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7863 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7866 /* Should be unreachable. */
7871 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7872 struct regcache *regs, struct displaced_step_closure *dsc)
7874 unsigned int op1 = bits (insn, 20, 25);
7875 int op = bit (insn, 4);
7876 unsigned int coproc = bits (insn, 8, 11);
7877 unsigned int rn = bits (insn, 16, 19);
7879 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7880 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7881 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7882 && (coproc & 0xe) != 0xa)
7884 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7885 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7886 && (coproc & 0xe) != 0xa)
7887 /* ldc/ldc2 imm/lit. */
7888 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7889 else if ((op1 & 0x3e) == 0x00)
7890 return arm_copy_undef (gdbarch, insn, dsc);
7891 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7892 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7893 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7894 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7895 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7896 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7897 else if ((op1 & 0x30) == 0x20 && !op)
7899 if ((coproc & 0xe) == 0xa)
7900 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7902 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7904 else if ((op1 & 0x30) == 0x20 && op)
7905 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7906 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7907 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7908 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7909 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7910 else if ((op1 & 0x30) == 0x30)
7911 return arm_copy_svc (gdbarch, insn, regs, dsc);
7913 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7917 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7918 uint16_t insn2, struct regcache *regs,
7919 struct displaced_step_closure *dsc)
7921 unsigned int coproc = bits (insn2, 8, 11);
7922 unsigned int op1 = bits (insn1, 4, 9);
7923 unsigned int bit_5_8 = bits (insn1, 5, 8);
7924 unsigned int bit_9 = bit (insn1, 9);
7925 unsigned int bit_4 = bit (insn1, 4);
7926 unsigned int rn = bits (insn1, 0, 3);
7931 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7932 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7934 else if (bit_5_8 == 0) /* UNDEFINED. */
7935 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7938 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7939 if ((coproc & 0xe) == 0xa)
7940 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7942 else /* coproc is not 101x. */
7944 if (bit_4 == 0) /* STC/STC2. */
7945 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7947 else /* LDC/LDC2 {literal, immeidate}. */
7948 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7954 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7960 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7961 struct displaced_step_closure *dsc, int rd)
7967 Preparation: Rd <- PC
7973 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7974 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7978 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7979 struct displaced_step_closure *dsc,
7980 int rd, unsigned int imm)
7983 /* Encoding T2: ADDS Rd, #imm */
7984 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7986 install_pc_relative (gdbarch, regs, dsc, rd);
7992 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7993 struct regcache *regs,
7994 struct displaced_step_closure *dsc)
7996 unsigned int rd = bits (insn, 8, 10);
7997 unsigned int imm8 = bits (insn, 0, 7);
7999 if (debug_displaced)
8000 fprintf_unfiltered (gdb_stdlog,
8001 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8004 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8008 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8009 uint16_t insn2, struct regcache *regs,
8010 struct displaced_step_closure *dsc)
8012 unsigned int rd = bits (insn2, 8, 11);
8013 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8014 extract raw immediate encoding rather than computing immediate. When
8015 generating ADD or SUB instruction, we can simply perform OR operation to
8016 set immediate into ADD. */
8017 unsigned int imm_3_8 = insn2 & 0x70ff;
8018 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8020 if (debug_displaced)
8021 fprintf_unfiltered (gdb_stdlog,
8022 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8023 rd, imm_i, imm_3_8, insn1, insn2);
8025 if (bit (insn1, 7)) /* Encoding T2 */
8027 /* Encoding T3: SUB Rd, Rd, #imm */
8028 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8029 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8031 else /* Encoding T3 */
8033 /* Encoding T3: ADD Rd, Rd, #imm */
8034 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8035 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8039 install_pc_relative (gdbarch, regs, dsc, rd);
8045 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8046 struct regcache *regs,
8047 struct displaced_step_closure *dsc)
8049 unsigned int rt = bits (insn1, 8, 10);
8051 int imm8 = (bits (insn1, 0, 7) << 2);
8052 CORE_ADDR from = dsc->insn_addr;
8058 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8060 Insn: LDR R0, [R2, R3];
8061 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8063 if (debug_displaced)
8064 fprintf_unfiltered (gdb_stdlog,
8065 "displaced: copying thumb ldr r%d [pc #%d]\n"
8068 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8069 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8070 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8071 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8072 /* The assembler calculates the required value of the offset from the
8073 Align(PC,4) value of this instruction to the label. */
8074 pc = pc & 0xfffffffc;
8076 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8077 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8080 dsc->u.ldst.xfersize = 4;
8082 dsc->u.ldst.immed = 0;
8083 dsc->u.ldst.writeback = 0;
8084 dsc->u.ldst.restore_r4 = 0;
8086 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8088 dsc->cleanup = &cleanup_load;
8093 /* Copy Thumb cbnz/cbz insruction. */
8096 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8097 struct regcache *regs,
8098 struct displaced_step_closure *dsc)
8100 int non_zero = bit (insn1, 11);
8101 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8102 CORE_ADDR from = dsc->insn_addr;
8103 int rn = bits (insn1, 0, 2);
8104 int rn_val = displaced_read_reg (regs, dsc, rn);
8106 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8107 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8108 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8109 condition is false, let it be, cleanup_branch will do nothing. */
8110 if (dsc->u.branch.cond)
8112 dsc->u.branch.cond = INST_AL;
8113 dsc->u.branch.dest = from + 4 + imm5;
8116 dsc->u.branch.dest = from + 2;
8118 dsc->u.branch.link = 0;
8119 dsc->u.branch.exchange = 0;
8121 if (debug_displaced)
8122 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8123 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8124 rn, rn_val, insn1, dsc->u.branch.dest);
8126 dsc->modinsn[0] = THUMB_NOP;
8128 dsc->cleanup = &cleanup_branch;
8132 /* Copy Table Branch Byte/Halfword */
8134 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8135 uint16_t insn2, struct regcache *regs,
8136 struct displaced_step_closure *dsc)
8138 ULONGEST rn_val, rm_val;
8139 int is_tbh = bit (insn2, 4);
8140 CORE_ADDR halfwords = 0;
8141 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8143 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8144 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8150 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8151 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8157 target_read_memory (rn_val + rm_val, buf, 1);
8158 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8161 if (debug_displaced)
8162 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8163 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8164 (unsigned int) rn_val, (unsigned int) rm_val,
8165 (unsigned int) halfwords);
8167 dsc->u.branch.cond = INST_AL;
8168 dsc->u.branch.link = 0;
8169 dsc->u.branch.exchange = 0;
8170 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8172 dsc->cleanup = &cleanup_branch;
8178 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8179 struct displaced_step_closure *dsc)
8182 int val = displaced_read_reg (regs, dsc, 7);
8183 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8186 val = displaced_read_reg (regs, dsc, 8);
8187 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8190 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8195 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8196 struct regcache *regs,
8197 struct displaced_step_closure *dsc)
8199 dsc->u.block.regmask = insn1 & 0x00ff;
8201 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8204 (1) register list is full, that is, r0-r7 are used.
8205 Prepare: tmp[0] <- r8
8207 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8208 MOV r8, r7; Move value of r7 to r8;
8209 POP {r7}; Store PC value into r7.
8211 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8213 (2) register list is not full, supposing there are N registers in
8214 register list (except PC, 0 <= N <= 7).
8215 Prepare: for each i, 0 - N, tmp[i] <- ri.
8217 POP {r0, r1, ...., rN};
8219 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8220 from tmp[] properly.
8222 if (debug_displaced)
8223 fprintf_unfiltered (gdb_stdlog,
8224 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8225 dsc->u.block.regmask, insn1);
8227 if (dsc->u.block.regmask == 0xff)
8229 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8231 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8232 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8233 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8236 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8240 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8241 unsigned int new_regmask, bit = 1;
8242 unsigned int to = 0, from = 0, i, new_rn;
8244 for (i = 0; i < num_in_list + 1; i++)
8245 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8247 new_regmask = (1 << (num_in_list + 1)) - 1;
8249 if (debug_displaced)
8250 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8251 "{..., pc}: original reg list %.4x,"
8252 " modified list %.4x\n"),
8253 (int) dsc->u.block.regmask, new_regmask);
8255 dsc->u.block.regmask |= 0x8000;
8256 dsc->u.block.writeback = 0;
8257 dsc->u.block.cond = INST_AL;
8259 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8261 dsc->cleanup = &cleanup_block_load_pc;
8268 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8269 struct regcache *regs,
8270 struct displaced_step_closure *dsc)
8272 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8273 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8276 /* 16-bit thumb instructions. */
8277 switch (op_bit_12_15)
8279 /* Shift (imme), add, subtract, move and compare. */
8280 case 0: case 1: case 2: case 3:
8281 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8282 "shift/add/sub/mov/cmp",
8286 switch (op_bit_10_11)
8288 case 0: /* Data-processing */
8289 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8293 case 1: /* Special data instructions and branch and exchange. */
8295 unsigned short op = bits (insn1, 7, 9);
8296 if (op == 6 || op == 7) /* BX or BLX */
8297 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8298 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8299 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8301 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8305 default: /* LDR (literal) */
8306 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8309 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8310 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8313 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8314 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8315 else /* Generate SP-relative address */
8316 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8318 case 11: /* Misc 16-bit instructions */
8320 switch (bits (insn1, 8, 11))
8322 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8323 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8325 case 12: case 13: /* POP */
8326 if (bit (insn1, 8)) /* PC is in register list. */
8327 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8329 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8331 case 15: /* If-Then, and hints */
8332 if (bits (insn1, 0, 3))
8333 /* If-Then makes up to four following instructions conditional.
8334 IT instruction itself is not conditional, so handle it as a
8335 common unmodified instruction. */
8336 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8339 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8342 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8347 if (op_bit_10_11 < 2) /* Store multiple registers */
8348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8349 else /* Load multiple registers */
8350 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8352 case 13: /* Conditional branch and supervisor call */
8353 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8354 err = thumb_copy_b (gdbarch, insn1, dsc);
8356 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8358 case 14: /* Unconditional branch */
8359 err = thumb_copy_b (gdbarch, insn1, dsc);
8366 internal_error (__FILE__, __LINE__,
8367 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8371 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8372 uint16_t insn1, uint16_t insn2,
8373 struct regcache *regs,
8374 struct displaced_step_closure *dsc)
8376 int rt = bits (insn2, 12, 15);
8377 int rn = bits (insn1, 0, 3);
8378 int op1 = bits (insn1, 7, 8);
8381 switch (bits (insn1, 5, 6))
8383 case 0: /* Load byte and memory hints */
8384 if (rt == 0xf) /* PLD/PLI */
8387 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8388 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8390 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8395 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8396 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8399 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8400 "ldrb{reg, immediate}/ldrbt",
8405 case 1: /* Load halfword and memory hints. */
8406 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8407 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8408 "pld/unalloc memhint", dsc);
8412 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8415 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8419 case 2: /* Load word */
8421 int insn2_bit_8_11 = bits (insn2, 8, 11);
8424 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8425 else if (op1 == 0x1) /* Encoding T3 */
8426 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8428 else /* op1 == 0x0 */
8430 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8431 /* LDR (immediate) */
8432 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8433 dsc, bit (insn2, 8), 1);
8434 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8435 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8438 /* LDR (register) */
8439 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8445 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8452 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8453 uint16_t insn2, struct regcache *regs,
8454 struct displaced_step_closure *dsc)
8457 unsigned short op = bit (insn2, 15);
8458 unsigned int op1 = bits (insn1, 11, 12);
8464 switch (bits (insn1, 9, 10))
8469 /* Load/store {dual, execlusive}, table branch. */
8470 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8471 && bits (insn2, 5, 7) == 0)
8472 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8475 /* PC is not allowed to use in load/store {dual, exclusive}
8477 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8478 "load/store dual/ex", dsc);
8480 else /* load/store multiple */
8482 switch (bits (insn1, 7, 8))
8484 case 0: case 3: /* SRS, RFE */
8485 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8488 case 1: case 2: /* LDM/STM/PUSH/POP */
8489 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8496 /* Data-processing (shift register). */
8497 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8500 default: /* Coprocessor instructions. */
8501 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8506 case 2: /* op1 = 2 */
8507 if (op) /* Branch and misc control. */
8509 if (bit (insn2, 14) /* BLX/BL */
8510 || bit (insn2, 12) /* Unconditional branch */
8511 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8512 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8514 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8519 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8521 int op = bits (insn1, 4, 8);
8522 int rn = bits (insn1, 0, 3);
8523 if ((op == 0 || op == 0xa) && rn == 0xf)
8524 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8527 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8530 else /* Data processing (modified immeidate) */
8531 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8535 case 3: /* op1 = 3 */
8536 switch (bits (insn1, 9, 10))
8540 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8542 else /* NEON Load/Store and Store single data item */
8543 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8544 "neon elt/struct load/store",
8547 case 1: /* op1 = 3, bits (9, 10) == 1 */
8548 switch (bits (insn1, 7, 8))
8550 case 0: case 1: /* Data processing (register) */
8551 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8554 case 2: /* Multiply and absolute difference */
8555 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8556 "mul/mua/diff", dsc);
8558 case 3: /* Long multiply and divide */
8559 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8564 default: /* Coprocessor instructions */
8565 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8574 internal_error (__FILE__, __LINE__,
8575 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8580 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8581 CORE_ADDR to, struct regcache *regs,
8582 struct displaced_step_closure *dsc)
8584 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8586 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8588 if (debug_displaced)
8589 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8590 "at %.8lx\n", insn1, (unsigned long) from);
8593 dsc->insn_size = thumb_insn_size (insn1);
8594 if (thumb_insn_size (insn1) == 4)
8597 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8598 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8601 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8605 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8606 CORE_ADDR to, struct regcache *regs,
8607 struct displaced_step_closure *dsc)
8610 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8613 /* Most displaced instructions use a 1-instruction scratch space, so set this
8614 here and override below if/when necessary. */
8616 dsc->insn_addr = from;
8617 dsc->scratch_base = to;
8618 dsc->cleanup = NULL;
8619 dsc->wrote_to_pc = 0;
8621 if (!displaced_in_arm_mode (regs))
8622 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8626 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8627 if (debug_displaced)
8628 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8629 "at %.8lx\n", (unsigned long) insn,
8630 (unsigned long) from);
8632 if ((insn & 0xf0000000) == 0xf0000000)
8633 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8634 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8636 case 0x0: case 0x1: case 0x2: case 0x3:
8637 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8640 case 0x4: case 0x5: case 0x6:
8641 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8645 err = arm_decode_media (gdbarch, insn, dsc);
8648 case 0x8: case 0x9: case 0xa: case 0xb:
8649 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8652 case 0xc: case 0xd: case 0xe: case 0xf:
8653 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8658 internal_error (__FILE__, __LINE__,
8659 _("arm_process_displaced_insn: Instruction decode error"));
8662 /* Actually set up the scratch space for a displaced instruction. */
8665 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8666 CORE_ADDR to, struct displaced_step_closure *dsc)
8668 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8669 unsigned int i, len, offset;
8670 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8671 int size = dsc->is_thumb? 2 : 4;
8672 const gdb_byte *bkp_insn;
8675 /* Poke modified instruction(s). */
8676 for (i = 0; i < dsc->numinsns; i++)
8678 if (debug_displaced)
8680 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8682 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8685 fprintf_unfiltered (gdb_stdlog, "%.4x",
8686 (unsigned short)dsc->modinsn[i]);
8688 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8689 (unsigned long) to + offset);
8692 write_memory_unsigned_integer (to + offset, size,
8693 byte_order_for_code,
8698 /* Choose the correct breakpoint instruction. */
8701 bkp_insn = tdep->thumb_breakpoint;
8702 len = tdep->thumb_breakpoint_size;
8706 bkp_insn = tdep->arm_breakpoint;
8707 len = tdep->arm_breakpoint_size;
8710 /* Put breakpoint afterwards. */
8711 write_memory (to + offset, bkp_insn, len);
8713 if (debug_displaced)
8714 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8715 paddress (gdbarch, from), paddress (gdbarch, to));
8718 /* Entry point for copying an instruction into scratch space for displaced
8721 struct displaced_step_closure *
8722 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8723 CORE_ADDR from, CORE_ADDR to,
8724 struct regcache *regs)
8726 struct displaced_step_closure *dsc
8727 = xmalloc (sizeof (struct displaced_step_closure));
8728 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8729 arm_displaced_init_closure (gdbarch, from, to, dsc);
8734 /* Entry point for cleaning things up after a displaced instruction has been
8738 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8739 struct displaced_step_closure *dsc,
8740 CORE_ADDR from, CORE_ADDR to,
8741 struct regcache *regs)
8744 dsc->cleanup (gdbarch, regs, dsc);
8746 if (!dsc->wrote_to_pc)
8747 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8748 dsc->insn_addr + dsc->insn_size);
8752 #include "bfd-in2.h"
8753 #include "libcoff.h"
8756 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8758 struct gdbarch *gdbarch = info->application_data;
8760 if (arm_pc_is_thumb (gdbarch, memaddr))
8762 static asymbol *asym;
8763 static combined_entry_type ce;
8764 static struct coff_symbol_struct csym;
8765 static struct bfd fake_bfd;
8766 static bfd_target fake_target;
8768 if (csym.native == NULL)
8770 /* Create a fake symbol vector containing a Thumb symbol.
8771 This is solely so that the code in print_insn_little_arm()
8772 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8773 the presence of a Thumb symbol and switch to decoding
8774 Thumb instructions. */
8776 fake_target.flavour = bfd_target_coff_flavour;
8777 fake_bfd.xvec = &fake_target;
8778 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8780 csym.symbol.the_bfd = &fake_bfd;
8781 csym.symbol.name = "fake";
8782 asym = (asymbol *) & csym;
8785 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8786 info->symbols = &asym;
8789 info->symbols = NULL;
8791 if (info->endian == BFD_ENDIAN_BIG)
8792 return print_insn_big_arm (memaddr, info);
8794 return print_insn_little_arm (memaddr, info);
8797 /* The following define instruction sequences that will cause ARM
8798 cpu's to take an undefined instruction trap. These are used to
8799 signal a breakpoint to GDB.
8801 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8802 modes. A different instruction is required for each mode. The ARM
8803 cpu's can also be big or little endian. Thus four different
8804 instructions are needed to support all cases.
8806 Note: ARMv4 defines several new instructions that will take the
8807 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8808 not in fact add the new instructions. The new undefined
8809 instructions in ARMv4 are all instructions that had no defined
8810 behaviour in earlier chips. There is no guarantee that they will
8811 raise an exception, but may be treated as NOP's. In practice, it
8812 may only safe to rely on instructions matching:
8814 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8815 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8816 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8818 Even this may only true if the condition predicate is true. The
8819 following use a condition predicate of ALWAYS so it is always TRUE.
8821 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8822 and NetBSD all use a software interrupt rather than an undefined
8823 instruction to force a trap. This can be handled by by the
8824 abi-specific code during establishment of the gdbarch vector. */
8826 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8827 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8828 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8829 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8831 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8832 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8833 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8834 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8836 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8837 the program counter value to determine whether a 16-bit or 32-bit
8838 breakpoint should be used. It returns a pointer to a string of
8839 bytes that encode a breakpoint instruction, stores the length of
8840 the string to *lenptr, and adjusts the program counter (if
8841 necessary) to point to the actual memory location where the
8842 breakpoint should be inserted. */
8844 static const unsigned char *
8845 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8847 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8848 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8850 if (arm_pc_is_thumb (gdbarch, *pcptr))
8852 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8854 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8855 check whether we are replacing a 32-bit instruction. */
8856 if (tdep->thumb2_breakpoint != NULL)
8859 if (target_read_memory (*pcptr, buf, 2) == 0)
8861 unsigned short inst1;
8862 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8863 if (thumb_insn_size (inst1) == 4)
8865 *lenptr = tdep->thumb2_breakpoint_size;
8866 return tdep->thumb2_breakpoint;
8871 *lenptr = tdep->thumb_breakpoint_size;
8872 return tdep->thumb_breakpoint;
8876 *lenptr = tdep->arm_breakpoint_size;
8877 return tdep->arm_breakpoint;
8882 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8885 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8887 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8888 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8889 that this is not confused with a 32-bit ARM breakpoint. */
8893 /* Extract from an array REGBUF containing the (raw) register state a
8894 function return value of type TYPE, and copy that, in virtual
8895 format, into VALBUF. */
8898 arm_extract_return_value (struct type *type, struct regcache *regs,
8901 struct gdbarch *gdbarch = get_regcache_arch (regs);
8902 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8904 if (TYPE_CODE_FLT == TYPE_CODE (type))
8906 switch (gdbarch_tdep (gdbarch)->fp_model)
8910 /* The value is in register F0 in internal format. We need to
8911 extract the raw value and then convert it to the desired
8913 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8915 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8916 convert_from_extended (floatformat_from_type (type), tmpbuf,
8917 valbuf, gdbarch_byte_order (gdbarch));
8921 case ARM_FLOAT_SOFT_FPA:
8922 case ARM_FLOAT_SOFT_VFP:
8923 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8924 not using the VFP ABI code. */
8926 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8927 if (TYPE_LENGTH (type) > 4)
8928 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8929 valbuf + INT_REGISTER_SIZE);
8933 internal_error (__FILE__, __LINE__,
8934 _("arm_extract_return_value: "
8935 "Floating point model not supported"));
8939 else if (TYPE_CODE (type) == TYPE_CODE_INT
8940 || TYPE_CODE (type) == TYPE_CODE_CHAR
8941 || TYPE_CODE (type) == TYPE_CODE_BOOL
8942 || TYPE_CODE (type) == TYPE_CODE_PTR
8943 || TYPE_CODE (type) == TYPE_CODE_REF
8944 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8946 /* If the type is a plain integer, then the access is
8947 straight-forward. Otherwise we have to play around a bit
8949 int len = TYPE_LENGTH (type);
8950 int regno = ARM_A1_REGNUM;
8955 /* By using store_unsigned_integer we avoid having to do
8956 anything special for small big-endian values. */
8957 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8958 store_unsigned_integer (valbuf,
8959 (len > INT_REGISTER_SIZE
8960 ? INT_REGISTER_SIZE : len),
8962 len -= INT_REGISTER_SIZE;
8963 valbuf += INT_REGISTER_SIZE;
8968 /* For a structure or union the behaviour is as if the value had
8969 been stored to word-aligned memory and then loaded into
8970 registers with 32-bit load instruction(s). */
8971 int len = TYPE_LENGTH (type);
8972 int regno = ARM_A1_REGNUM;
8973 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8977 regcache_cooked_read (regs, regno++, tmpbuf);
8978 memcpy (valbuf, tmpbuf,
8979 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8980 len -= INT_REGISTER_SIZE;
8981 valbuf += INT_REGISTER_SIZE;
8987 /* Will a function return an aggregate type in memory or in a
8988 register? Return 0 if an aggregate type can be returned in a
8989 register, 1 if it must be returned in memory. */
8992 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8995 enum type_code code;
8997 CHECK_TYPEDEF (type);
8999 /* In the ARM ABI, "integer" like aggregate types are returned in
9000 registers. For an aggregate type to be integer like, its size
9001 must be less than or equal to INT_REGISTER_SIZE and the
9002 offset of each addressable subfield must be zero. Note that bit
9003 fields are not addressable, and all addressable subfields of
9004 unions always start at offset zero.
9006 This function is based on the behaviour of GCC 2.95.1.
9007 See: gcc/arm.c: arm_return_in_memory() for details.
9009 Note: All versions of GCC before GCC 2.95.2 do not set up the
9010 parameters correctly for a function returning the following
9011 structure: struct { float f;}; This should be returned in memory,
9012 not a register. Richard Earnshaw sent me a patch, but I do not
9013 know of any way to detect if a function like the above has been
9014 compiled with the correct calling convention. */
9016 /* All aggregate types that won't fit in a register must be returned
9018 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9023 /* The AAPCS says all aggregates not larger than a word are returned
9025 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9028 /* The only aggregate types that can be returned in a register are
9029 structs and unions. Arrays must be returned in memory. */
9030 code = TYPE_CODE (type);
9031 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9036 /* Assume all other aggregate types can be returned in a register.
9037 Run a check for structures, unions and arrays. */
9040 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9043 /* Need to check if this struct/union is "integer" like. For
9044 this to be true, its size must be less than or equal to
9045 INT_REGISTER_SIZE and the offset of each addressable
9046 subfield must be zero. Note that bit fields are not
9047 addressable, and unions always start at offset zero. If any
9048 of the subfields is a floating point type, the struct/union
9049 cannot be an integer type. */
9051 /* For each field in the object, check:
9052 1) Is it FP? --> yes, nRc = 1;
9053 2) Is it addressable (bitpos != 0) and
9054 not packed (bitsize == 0)?
9058 for (i = 0; i < TYPE_NFIELDS (type); i++)
9060 enum type_code field_type_code;
9061 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9064 /* Is it a floating point type field? */
9065 if (field_type_code == TYPE_CODE_FLT)
9071 /* If bitpos != 0, then we have to care about it. */
9072 if (TYPE_FIELD_BITPOS (type, i) != 0)
9074 /* Bitfields are not addressable. If the field bitsize is
9075 zero, then the field is not packed. Hence it cannot be
9076 a bitfield or any other packed type. */
9077 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9089 /* Write into appropriate registers a function return value of type
9090 TYPE, given in virtual format. */
9093 arm_store_return_value (struct type *type, struct regcache *regs,
9094 const gdb_byte *valbuf)
9096 struct gdbarch *gdbarch = get_regcache_arch (regs);
9097 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9099 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9101 gdb_byte buf[MAX_REGISTER_SIZE];
9103 switch (gdbarch_tdep (gdbarch)->fp_model)
9107 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9108 gdbarch_byte_order (gdbarch));
9109 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9112 case ARM_FLOAT_SOFT_FPA:
9113 case ARM_FLOAT_SOFT_VFP:
9114 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9115 not using the VFP ABI code. */
9117 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9118 if (TYPE_LENGTH (type) > 4)
9119 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9120 valbuf + INT_REGISTER_SIZE);
9124 internal_error (__FILE__, __LINE__,
9125 _("arm_store_return_value: Floating "
9126 "point model not supported"));
9130 else if (TYPE_CODE (type) == TYPE_CODE_INT
9131 || TYPE_CODE (type) == TYPE_CODE_CHAR
9132 || TYPE_CODE (type) == TYPE_CODE_BOOL
9133 || TYPE_CODE (type) == TYPE_CODE_PTR
9134 || TYPE_CODE (type) == TYPE_CODE_REF
9135 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9137 if (TYPE_LENGTH (type) <= 4)
9139 /* Values of one word or less are zero/sign-extended and
9141 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9142 LONGEST val = unpack_long (type, valbuf);
9144 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9145 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9149 /* Integral values greater than one word are stored in consecutive
9150 registers starting with r0. This will always be a multiple of
9151 the regiser size. */
9152 int len = TYPE_LENGTH (type);
9153 int regno = ARM_A1_REGNUM;
9157 regcache_cooked_write (regs, regno++, valbuf);
9158 len -= INT_REGISTER_SIZE;
9159 valbuf += INT_REGISTER_SIZE;
9165 /* For a structure or union the behaviour is as if the value had
9166 been stored to word-aligned memory and then loaded into
9167 registers with 32-bit load instruction(s). */
9168 int len = TYPE_LENGTH (type);
9169 int regno = ARM_A1_REGNUM;
9170 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9174 memcpy (tmpbuf, valbuf,
9175 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9176 regcache_cooked_write (regs, regno++, tmpbuf);
9177 len -= INT_REGISTER_SIZE;
9178 valbuf += INT_REGISTER_SIZE;
9184 /* Handle function return values. */
9186 static enum return_value_convention
9187 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9188 struct type *valtype, struct regcache *regcache,
9189 gdb_byte *readbuf, const gdb_byte *writebuf)
9191 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9192 struct type *func_type = function ? value_type (function) : NULL;
9193 enum arm_vfp_cprc_base_type vfp_base_type;
9196 if (arm_vfp_abi_for_function (gdbarch, func_type)
9197 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9199 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9200 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9202 for (i = 0; i < vfp_base_count; i++)
9204 if (reg_char == 'q')
9207 arm_neon_quad_write (gdbarch, regcache, i,
9208 writebuf + i * unit_length);
9211 arm_neon_quad_read (gdbarch, regcache, i,
9212 readbuf + i * unit_length);
9219 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9220 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9223 regcache_cooked_write (regcache, regnum,
9224 writebuf + i * unit_length);
9226 regcache_cooked_read (regcache, regnum,
9227 readbuf + i * unit_length);
9230 return RETURN_VALUE_REGISTER_CONVENTION;
9233 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9234 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9235 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9237 if (tdep->struct_return == pcc_struct_return
9238 || arm_return_in_memory (gdbarch, valtype))
9239 return RETURN_VALUE_STRUCT_CONVENTION;
9242 /* AAPCS returns complex types longer than a register in memory. */
9243 if (tdep->arm_abi != ARM_ABI_APCS
9244 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9245 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9246 return RETURN_VALUE_STRUCT_CONVENTION;
9249 arm_store_return_value (valtype, regcache, writebuf);
9252 arm_extract_return_value (valtype, regcache, readbuf);
9254 return RETURN_VALUE_REGISTER_CONVENTION;
9259 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9261 struct gdbarch *gdbarch = get_frame_arch (frame);
9262 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9263 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9265 gdb_byte buf[INT_REGISTER_SIZE];
9267 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9269 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9273 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9277 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9278 return the target PC. Otherwise return 0. */
9281 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9285 CORE_ADDR start_addr;
9287 /* Find the starting address and name of the function containing the PC. */
9288 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9290 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9292 start_addr = arm_skip_bx_reg (frame, pc);
9293 if (start_addr != 0)
9299 /* If PC is in a Thumb call or return stub, return the address of the
9300 target PC, which is in a register. The thunk functions are called
9301 _call_via_xx, where x is the register name. The possible names
9302 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9303 functions, named __ARM_call_via_r[0-7]. */
9304 if (strncmp (name, "_call_via_", 10) == 0
9305 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9307 /* Use the name suffix to determine which register contains the
9309 static char *table[15] =
9310 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9311 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9314 int offset = strlen (name) - 2;
9316 for (regno = 0; regno <= 14; regno++)
9317 if (strcmp (&name[offset], table[regno]) == 0)
9318 return get_frame_register_unsigned (frame, regno);
9321 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9322 non-interworking calls to foo. We could decode the stubs
9323 to find the target but it's easier to use the symbol table. */
9324 namelen = strlen (name);
9325 if (name[0] == '_' && name[1] == '_'
9326 && ((namelen > 2 + strlen ("_from_thumb")
9327 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9328 strlen ("_from_thumb")) == 0)
9329 || (namelen > 2 + strlen ("_from_arm")
9330 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9331 strlen ("_from_arm")) == 0)))
9334 int target_len = namelen - 2;
9335 struct bound_minimal_symbol minsym;
9336 struct objfile *objfile;
9337 struct obj_section *sec;
9339 if (name[namelen - 1] == 'b')
9340 target_len -= strlen ("_from_thumb");
9342 target_len -= strlen ("_from_arm");
9344 target_name = alloca (target_len + 1);
9345 memcpy (target_name, name + 2, target_len);
9346 target_name[target_len] = '\0';
9348 sec = find_pc_section (pc);
9349 objfile = (sec == NULL) ? NULL : sec->objfile;
9350 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9351 if (minsym.minsym != NULL)
9352 return BMSYMBOL_VALUE_ADDRESS (minsym);
9357 return 0; /* not a stub */
9361 set_arm_command (char *args, int from_tty)
9363 printf_unfiltered (_("\
9364 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9365 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9369 show_arm_command (char *args, int from_tty)
9371 cmd_show_list (showarmcmdlist, from_tty, "");
9375 arm_update_current_architecture (void)
9377 struct gdbarch_info info;
9379 /* If the current architecture is not ARM, we have nothing to do. */
9380 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9383 /* Update the architecture. */
9384 gdbarch_info_init (&info);
9386 if (!gdbarch_update_p (info))
9387 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9391 set_fp_model_sfunc (char *args, int from_tty,
9392 struct cmd_list_element *c)
9394 enum arm_float_model fp_model;
9396 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9397 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9399 arm_fp_model = fp_model;
9403 if (fp_model == ARM_FLOAT_LAST)
9404 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9407 arm_update_current_architecture ();
9411 show_fp_model (struct ui_file *file, int from_tty,
9412 struct cmd_list_element *c, const char *value)
9414 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9416 if (arm_fp_model == ARM_FLOAT_AUTO
9417 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9418 fprintf_filtered (file, _("\
9419 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9420 fp_model_strings[tdep->fp_model]);
9422 fprintf_filtered (file, _("\
9423 The current ARM floating point model is \"%s\".\n"),
9424 fp_model_strings[arm_fp_model]);
9428 arm_set_abi (char *args, int from_tty,
9429 struct cmd_list_element *c)
9431 enum arm_abi_kind arm_abi;
9433 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9434 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9436 arm_abi_global = arm_abi;
9440 if (arm_abi == ARM_ABI_LAST)
9441 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9444 arm_update_current_architecture ();
9448 arm_show_abi (struct ui_file *file, int from_tty,
9449 struct cmd_list_element *c, const char *value)
9451 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9453 if (arm_abi_global == ARM_ABI_AUTO
9454 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9455 fprintf_filtered (file, _("\
9456 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9457 arm_abi_strings[tdep->arm_abi]);
9459 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9464 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9465 struct cmd_list_element *c, const char *value)
9467 fprintf_filtered (file,
9468 _("The current execution mode assumed "
9469 "(when symbols are unavailable) is \"%s\".\n"),
9470 arm_fallback_mode_string);
9474 arm_show_force_mode (struct ui_file *file, int from_tty,
9475 struct cmd_list_element *c, const char *value)
9477 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9479 fprintf_filtered (file,
9480 _("The current execution mode assumed "
9481 "(even when symbols are available) is \"%s\".\n"),
9482 arm_force_mode_string);
9485 /* If the user changes the register disassembly style used for info
9486 register and other commands, we have to also switch the style used
9487 in opcodes for disassembly output. This function is run in the "set
9488 arm disassembly" command, and does that. */
9491 set_disassembly_style_sfunc (char *args, int from_tty,
9492 struct cmd_list_element *c)
9494 set_disassembly_style ();
9497 /* Return the ARM register name corresponding to register I. */
9499 arm_register_name (struct gdbarch *gdbarch, int i)
9501 const int num_regs = gdbarch_num_regs (gdbarch);
9503 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9504 && i >= num_regs && i < num_regs + 32)
9506 static const char *const vfp_pseudo_names[] = {
9507 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9508 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9509 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9510 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9513 return vfp_pseudo_names[i - num_regs];
9516 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9517 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9519 static const char *const neon_pseudo_names[] = {
9520 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9521 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9524 return neon_pseudo_names[i - num_regs - 32];
9527 if (i >= ARRAY_SIZE (arm_register_names))
9528 /* These registers are only supported on targets which supply
9529 an XML description. */
9532 return arm_register_names[i];
9536 set_disassembly_style (void)
9540 /* Find the style that the user wants. */
9541 for (current = 0; current < num_disassembly_options; current++)
9542 if (disassembly_style == valid_disassembly_styles[current])
9544 gdb_assert (current < num_disassembly_options);
9546 /* Synchronize the disassembler. */
9547 set_arm_regname_option (current);
9550 /* Test whether the coff symbol specific value corresponds to a Thumb
9554 coff_sym_is_thumb (int val)
9556 return (val == C_THUMBEXT
9557 || val == C_THUMBSTAT
9558 || val == C_THUMBEXTFUNC
9559 || val == C_THUMBSTATFUNC
9560 || val == C_THUMBLABEL);
9563 /* arm_coff_make_msymbol_special()
9564 arm_elf_make_msymbol_special()
9566 These functions test whether the COFF or ELF symbol corresponds to
9567 an address in thumb code, and set a "special" bit in a minimal
9568 symbol to indicate that it does. */
9571 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9573 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9574 == ST_BRANCH_TO_THUMB)
9575 MSYMBOL_SET_SPECIAL (msym);
9579 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9581 if (coff_sym_is_thumb (val))
9582 MSYMBOL_SET_SPECIAL (msym);
9586 arm_objfile_data_free (struct objfile *objfile, void *arg)
9588 struct arm_per_objfile *data = arg;
9591 for (i = 0; i < objfile->obfd->section_count; i++)
9592 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9596 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9599 const char *name = bfd_asymbol_name (sym);
9600 struct arm_per_objfile *data;
9601 VEC(arm_mapping_symbol_s) **map_p;
9602 struct arm_mapping_symbol new_map_sym;
9604 gdb_assert (name[0] == '$');
9605 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9608 data = objfile_data (objfile, arm_objfile_data_key);
9611 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9612 struct arm_per_objfile);
9613 set_objfile_data (objfile, arm_objfile_data_key, data);
9614 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9615 objfile->obfd->section_count,
9616 VEC(arm_mapping_symbol_s) *);
9618 map_p = &data->section_maps[bfd_get_section (sym)->index];
9620 new_map_sym.value = sym->value;
9621 new_map_sym.type = name[1];
9623 /* Assume that most mapping symbols appear in order of increasing
9624 value. If they were randomly distributed, it would be faster to
9625 always push here and then sort at first use. */
9626 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9628 struct arm_mapping_symbol *prev_map_sym;
9630 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9631 if (prev_map_sym->value >= sym->value)
9634 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9635 arm_compare_mapping_symbols);
9636 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9641 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9645 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9647 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9648 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9650 /* If necessary, set the T bit. */
9653 ULONGEST val, t_bit;
9654 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9655 t_bit = arm_psr_thumb_bit (gdbarch);
9656 if (arm_pc_is_thumb (gdbarch, pc))
9657 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9660 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9665 /* Read the contents of a NEON quad register, by reading from two
9666 double registers. This is used to implement the quad pseudo
9667 registers, and for argument passing in case the quad registers are
9668 missing; vectors are passed in quad registers when using the VFP
9669 ABI, even if a NEON unit is not present. REGNUM is the index of
9670 the quad register, in [0, 15]. */
9672 static enum register_status
9673 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9674 int regnum, gdb_byte *buf)
9677 gdb_byte reg_buf[8];
9678 int offset, double_regnum;
9679 enum register_status status;
9681 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9682 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9685 /* d0 is always the least significant half of q0. */
9686 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9691 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9692 if (status != REG_VALID)
9694 memcpy (buf + offset, reg_buf, 8);
9696 offset = 8 - offset;
9697 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9698 if (status != REG_VALID)
9700 memcpy (buf + offset, reg_buf, 8);
9705 static enum register_status
9706 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9707 int regnum, gdb_byte *buf)
9709 const int num_regs = gdbarch_num_regs (gdbarch);
9711 gdb_byte reg_buf[8];
9712 int offset, double_regnum;
9714 gdb_assert (regnum >= num_regs);
9717 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9718 /* Quad-precision register. */
9719 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9722 enum register_status status;
9724 /* Single-precision register. */
9725 gdb_assert (regnum < 32);
9727 /* s0 is always the least significant half of d0. */
9728 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9729 offset = (regnum & 1) ? 0 : 4;
9731 offset = (regnum & 1) ? 4 : 0;
9733 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9734 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9737 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9738 if (status == REG_VALID)
9739 memcpy (buf, reg_buf + offset, 4);
9744 /* Store the contents of BUF to a NEON quad register, by writing to
9745 two double registers. This is used to implement the quad pseudo
9746 registers, and for argument passing in case the quad registers are
9747 missing; vectors are passed in quad registers when using the VFP
9748 ABI, even if a NEON unit is not present. REGNUM is the index
9749 of the quad register, in [0, 15]. */
9752 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9753 int regnum, const gdb_byte *buf)
9756 int offset, double_regnum;
9758 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9759 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9762 /* d0 is always the least significant half of q0. */
9763 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9768 regcache_raw_write (regcache, double_regnum, buf + offset);
9769 offset = 8 - offset;
9770 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9774 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9775 int regnum, const gdb_byte *buf)
9777 const int num_regs = gdbarch_num_regs (gdbarch);
9779 gdb_byte reg_buf[8];
9780 int offset, double_regnum;
9782 gdb_assert (regnum >= num_regs);
9785 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9786 /* Quad-precision register. */
9787 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9790 /* Single-precision register. */
9791 gdb_assert (regnum < 32);
9793 /* s0 is always the least significant half of d0. */
9794 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9795 offset = (regnum & 1) ? 0 : 4;
9797 offset = (regnum & 1) ? 4 : 0;
9799 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9800 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9803 regcache_raw_read (regcache, double_regnum, reg_buf);
9804 memcpy (reg_buf + offset, buf, 4);
9805 regcache_raw_write (regcache, double_regnum, reg_buf);
9809 static struct value *
9810 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9812 const int *reg_p = baton;
9813 return value_of_register (*reg_p, frame);
9816 static enum gdb_osabi
9817 arm_elf_osabi_sniffer (bfd *abfd)
9819 unsigned int elfosabi;
9820 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9822 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9824 if (elfosabi == ELFOSABI_ARM)
9825 /* GNU tools use this value. Check note sections in this case,
9827 bfd_map_over_sections (abfd,
9828 generic_elf_osabi_sniff_abi_tag_sections,
9831 /* Anything else will be handled by the generic ELF sniffer. */
9836 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9837 struct reggroup *group)
9839 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9840 this, FPS register belongs to save_regroup, restore_reggroup, and
9841 all_reggroup, of course. */
9842 if (regnum == ARM_FPS_REGNUM)
9843 return (group == float_reggroup
9844 || group == save_reggroup
9845 || group == restore_reggroup
9846 || group == all_reggroup);
9848 return default_register_reggroup_p (gdbarch, regnum, group);
9852 /* For backward-compatibility we allow two 'g' packet lengths with
9853 the remote protocol depending on whether FPA registers are
9854 supplied. M-profile targets do not have FPA registers, but some
9855 stubs already exist in the wild which use a 'g' packet which
9856 supplies them albeit with dummy values. The packet format which
9857 includes FPA registers should be considered deprecated for
9858 M-profile targets. */
9861 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9863 if (gdbarch_tdep (gdbarch)->is_m)
9865 /* If we know from the executable this is an M-profile target,
9866 cater for remote targets whose register set layout is the
9867 same as the FPA layout. */
9868 register_remote_g_packet_guess (gdbarch,
9869 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9870 (16 * INT_REGISTER_SIZE)
9871 + (8 * FP_REGISTER_SIZE)
9872 + (2 * INT_REGISTER_SIZE),
9873 tdesc_arm_with_m_fpa_layout);
9875 /* The regular M-profile layout. */
9876 register_remote_g_packet_guess (gdbarch,
9877 /* r0-r12,sp,lr,pc; xpsr */
9878 (16 * INT_REGISTER_SIZE)
9879 + INT_REGISTER_SIZE,
9882 /* M-profile plus M4F VFP. */
9883 register_remote_g_packet_guess (gdbarch,
9884 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9885 (16 * INT_REGISTER_SIZE)
9886 + (16 * VFP_REGISTER_SIZE)
9887 + (2 * INT_REGISTER_SIZE),
9888 tdesc_arm_with_m_vfp_d16);
9891 /* Otherwise we don't have a useful guess. */
9895 /* Initialize the current architecture based on INFO. If possible,
9896 re-use an architecture from ARCHES, which is a list of
9897 architectures already created during this debugging session.
9899 Called e.g. at program startup, when reading a core file, and when
9900 reading a binary file. */
9902 static struct gdbarch *
9903 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9905 struct gdbarch_tdep *tdep;
9906 struct gdbarch *gdbarch;
9907 struct gdbarch_list *best_arch;
9908 enum arm_abi_kind arm_abi = arm_abi_global;
9909 enum arm_float_model fp_model = arm_fp_model;
9910 struct tdesc_arch_data *tdesc_data = NULL;
9912 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9914 int have_fpa_registers = 1;
9915 const struct target_desc *tdesc = info.target_desc;
9917 /* If we have an object to base this architecture on, try to determine
9920 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9922 int ei_osabi, e_flags;
9924 switch (bfd_get_flavour (info.abfd))
9926 case bfd_target_aout_flavour:
9927 /* Assume it's an old APCS-style ABI. */
9928 arm_abi = ARM_ABI_APCS;
9931 case bfd_target_coff_flavour:
9932 /* Assume it's an old APCS-style ABI. */
9934 arm_abi = ARM_ABI_APCS;
9937 case bfd_target_elf_flavour:
9938 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9939 e_flags = elf_elfheader (info.abfd)->e_flags;
9941 if (ei_osabi == ELFOSABI_ARM)
9943 /* GNU tools used to use this value, but do not for EABI
9944 objects. There's nowhere to tag an EABI version
9945 anyway, so assume APCS. */
9946 arm_abi = ARM_ABI_APCS;
9948 else if (ei_osabi == ELFOSABI_NONE)
9950 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9951 int attr_arch, attr_profile;
9955 case EF_ARM_EABI_UNKNOWN:
9956 /* Assume GNU tools. */
9957 arm_abi = ARM_ABI_APCS;
9960 case EF_ARM_EABI_VER4:
9961 case EF_ARM_EABI_VER5:
9962 arm_abi = ARM_ABI_AAPCS;
9963 /* EABI binaries default to VFP float ordering.
9964 They may also contain build attributes that can
9965 be used to identify if the VFP argument-passing
9967 if (fp_model == ARM_FLOAT_AUTO)
9970 switch (bfd_elf_get_obj_attr_int (info.abfd,
9975 /* "The user intended FP parameter/result
9976 passing to conform to AAPCS, base
9978 fp_model = ARM_FLOAT_SOFT_VFP;
9981 /* "The user intended FP parameter/result
9982 passing to conform to AAPCS, VFP
9984 fp_model = ARM_FLOAT_VFP;
9987 /* "The user intended FP parameter/result
9988 passing to conform to tool chain-specific
9989 conventions" - we don't know any such
9990 conventions, so leave it as "auto". */
9993 /* Attribute value not mentioned in the
9994 October 2008 ABI, so leave it as
9999 fp_model = ARM_FLOAT_SOFT_VFP;
10005 /* Leave it as "auto". */
10006 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10011 /* Detect M-profile programs. This only works if the
10012 executable file includes build attributes; GCC does
10013 copy them to the executable, but e.g. RealView does
10015 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10017 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10019 Tag_CPU_arch_profile);
10020 /* GCC specifies the profile for v6-M; RealView only
10021 specifies the profile for architectures starting with
10022 V7 (as opposed to architectures with a tag
10023 numerically greater than TAG_CPU_ARCH_V7). */
10024 if (!tdesc_has_registers (tdesc)
10025 && (attr_arch == TAG_CPU_ARCH_V6_M
10026 || attr_arch == TAG_CPU_ARCH_V6S_M
10027 || attr_profile == 'M'))
10032 if (fp_model == ARM_FLOAT_AUTO)
10034 int e_flags = elf_elfheader (info.abfd)->e_flags;
10036 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10039 /* Leave it as "auto". Strictly speaking this case
10040 means FPA, but almost nobody uses that now, and
10041 many toolchains fail to set the appropriate bits
10042 for the floating-point model they use. */
10044 case EF_ARM_SOFT_FLOAT:
10045 fp_model = ARM_FLOAT_SOFT_FPA;
10047 case EF_ARM_VFP_FLOAT:
10048 fp_model = ARM_FLOAT_VFP;
10050 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10051 fp_model = ARM_FLOAT_SOFT_VFP;
10056 if (e_flags & EF_ARM_BE8)
10057 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10062 /* Leave it as "auto". */
10067 /* Check any target description for validity. */
10068 if (tdesc_has_registers (tdesc))
10070 /* For most registers we require GDB's default names; but also allow
10071 the numeric names for sp / lr / pc, as a convenience. */
10072 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10073 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10074 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10076 const struct tdesc_feature *feature;
10079 feature = tdesc_find_feature (tdesc,
10080 "org.gnu.gdb.arm.core");
10081 if (feature == NULL)
10083 feature = tdesc_find_feature (tdesc,
10084 "org.gnu.gdb.arm.m-profile");
10085 if (feature == NULL)
10091 tdesc_data = tdesc_data_alloc ();
10094 for (i = 0; i < ARM_SP_REGNUM; i++)
10095 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10096 arm_register_names[i]);
10097 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10100 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10103 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10107 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10108 ARM_PS_REGNUM, "xpsr");
10110 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10111 ARM_PS_REGNUM, "cpsr");
10115 tdesc_data_cleanup (tdesc_data);
10119 feature = tdesc_find_feature (tdesc,
10120 "org.gnu.gdb.arm.fpa");
10121 if (feature != NULL)
10124 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10125 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10126 arm_register_names[i]);
10129 tdesc_data_cleanup (tdesc_data);
10134 have_fpa_registers = 0;
10136 feature = tdesc_find_feature (tdesc,
10137 "org.gnu.gdb.xscale.iwmmxt");
10138 if (feature != NULL)
10140 static const char *const iwmmxt_names[] = {
10141 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10142 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10143 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10144 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10148 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10150 &= tdesc_numbered_register (feature, tdesc_data, i,
10151 iwmmxt_names[i - ARM_WR0_REGNUM]);
10153 /* Check for the control registers, but do not fail if they
10155 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10156 tdesc_numbered_register (feature, tdesc_data, i,
10157 iwmmxt_names[i - ARM_WR0_REGNUM]);
10159 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10161 &= tdesc_numbered_register (feature, tdesc_data, i,
10162 iwmmxt_names[i - ARM_WR0_REGNUM]);
10166 tdesc_data_cleanup (tdesc_data);
10171 /* If we have a VFP unit, check whether the single precision registers
10172 are present. If not, then we will synthesize them as pseudo
10174 feature = tdesc_find_feature (tdesc,
10175 "org.gnu.gdb.arm.vfp");
10176 if (feature != NULL)
10178 static const char *const vfp_double_names[] = {
10179 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10180 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10181 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10182 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10185 /* Require the double precision registers. There must be either
10188 for (i = 0; i < 32; i++)
10190 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10192 vfp_double_names[i]);
10196 if (!valid_p && i == 16)
10199 /* Also require FPSCR. */
10200 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10201 ARM_FPSCR_REGNUM, "fpscr");
10204 tdesc_data_cleanup (tdesc_data);
10208 if (tdesc_unnumbered_register (feature, "s0") == 0)
10209 have_vfp_pseudos = 1;
10211 have_vfp_registers = 1;
10213 /* If we have VFP, also check for NEON. The architecture allows
10214 NEON without VFP (integer vector operations only), but GDB
10215 does not support that. */
10216 feature = tdesc_find_feature (tdesc,
10217 "org.gnu.gdb.arm.neon");
10218 if (feature != NULL)
10220 /* NEON requires 32 double-precision registers. */
10223 tdesc_data_cleanup (tdesc_data);
10227 /* If there are quad registers defined by the stub, use
10228 their type; otherwise (normally) provide them with
10229 the default type. */
10230 if (tdesc_unnumbered_register (feature, "q0") == 0)
10231 have_neon_pseudos = 1;
10238 /* If there is already a candidate, use it. */
10239 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10241 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10243 if (arm_abi != ARM_ABI_AUTO
10244 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10247 if (fp_model != ARM_FLOAT_AUTO
10248 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10251 /* There are various other properties in tdep that we do not
10252 need to check here: those derived from a target description,
10253 since gdbarches with a different target description are
10254 automatically disqualified. */
10256 /* Do check is_m, though, since it might come from the binary. */
10257 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10260 /* Found a match. */
10264 if (best_arch != NULL)
10266 if (tdesc_data != NULL)
10267 tdesc_data_cleanup (tdesc_data);
10268 return best_arch->gdbarch;
10271 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10272 gdbarch = gdbarch_alloc (&info, tdep);
10274 /* Record additional information about the architecture we are defining.
10275 These are gdbarch discriminators, like the OSABI. */
10276 tdep->arm_abi = arm_abi;
10277 tdep->fp_model = fp_model;
10279 tdep->have_fpa_registers = have_fpa_registers;
10280 tdep->have_vfp_registers = have_vfp_registers;
10281 tdep->have_vfp_pseudos = have_vfp_pseudos;
10282 tdep->have_neon_pseudos = have_neon_pseudos;
10283 tdep->have_neon = have_neon;
10285 arm_register_g_packet_guesses (gdbarch);
10288 switch (info.byte_order_for_code)
10290 case BFD_ENDIAN_BIG:
10291 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10292 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10293 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10294 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10298 case BFD_ENDIAN_LITTLE:
10299 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10300 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10301 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10302 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10307 internal_error (__FILE__, __LINE__,
10308 _("arm_gdbarch_init: bad byte order for float format"));
10311 /* On ARM targets char defaults to unsigned. */
10312 set_gdbarch_char_signed (gdbarch, 0);
10314 /* Note: for displaced stepping, this includes the breakpoint, and one word
10315 of additional scratch space. This setting isn't used for anything beside
10316 displaced stepping at present. */
10317 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10319 /* This should be low enough for everything. */
10320 tdep->lowest_pc = 0x20;
10321 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10323 /* The default, for both APCS and AAPCS, is to return small
10324 structures in registers. */
10325 tdep->struct_return = reg_struct_return;
10327 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10328 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10330 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10332 /* Frame handling. */
10333 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10334 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10335 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10337 frame_base_set_default (gdbarch, &arm_normal_base);
10339 /* Address manipulation. */
10340 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10342 /* Advance PC across function entry code. */
10343 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10345 /* Detect whether PC is in function epilogue. */
10346 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10348 /* Skip trampolines. */
10349 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10351 /* The stack grows downward. */
10352 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10354 /* Breakpoint manipulation. */
10355 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10356 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10357 arm_remote_breakpoint_from_pc);
10359 /* Information about registers, etc. */
10360 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10361 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10362 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10363 set_gdbarch_register_type (gdbarch, arm_register_type);
10364 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10366 /* This "info float" is FPA-specific. Use the generic version if we
10367 do not have FPA. */
10368 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10369 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10371 /* Internal <-> external register number maps. */
10372 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10373 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10375 set_gdbarch_register_name (gdbarch, arm_register_name);
10377 /* Returning results. */
10378 set_gdbarch_return_value (gdbarch, arm_return_value);
10381 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10383 /* Minsymbol frobbing. */
10384 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10385 set_gdbarch_coff_make_msymbol_special (gdbarch,
10386 arm_coff_make_msymbol_special);
10387 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10389 /* Thumb-2 IT block support. */
10390 set_gdbarch_adjust_breakpoint_address (gdbarch,
10391 arm_adjust_breakpoint_address);
10393 /* Virtual tables. */
10394 set_gdbarch_vbit_in_delta (gdbarch, 1);
10396 /* Hook in the ABI-specific overrides, if they have been registered. */
10397 gdbarch_init_osabi (info, gdbarch);
10399 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10401 /* Add some default predicates. */
10403 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10404 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10405 dwarf2_append_unwinders (gdbarch);
10406 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10407 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10409 /* Now we have tuned the configuration, set a few final things,
10410 based on what the OS ABI has told us. */
10412 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10413 binaries are always marked. */
10414 if (tdep->arm_abi == ARM_ABI_AUTO)
10415 tdep->arm_abi = ARM_ABI_APCS;
10417 /* Watchpoints are not steppable. */
10418 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10420 /* We used to default to FPA for generic ARM, but almost nobody
10421 uses that now, and we now provide a way for the user to force
10422 the model. So default to the most useful variant. */
10423 if (tdep->fp_model == ARM_FLOAT_AUTO)
10424 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10426 if (tdep->jb_pc >= 0)
10427 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10429 /* Floating point sizes and format. */
10430 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10431 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10433 set_gdbarch_double_format
10434 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10435 set_gdbarch_long_double_format
10436 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10440 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10441 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10444 if (have_vfp_pseudos)
10446 /* NOTE: These are the only pseudo registers used by
10447 the ARM target at the moment. If more are added, a
10448 little more care in numbering will be needed. */
10450 int num_pseudos = 32;
10451 if (have_neon_pseudos)
10453 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10454 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10455 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10460 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10462 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10464 /* Override tdesc_register_type to adjust the types of VFP
10465 registers for NEON. */
10466 set_gdbarch_register_type (gdbarch, arm_register_type);
10469 /* Add standard register aliases. We add aliases even for those
10470 nanes which are used by the current architecture - it's simpler,
10471 and does no harm, since nothing ever lists user registers. */
10472 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10473 user_reg_add (gdbarch, arm_register_aliases[i].name,
10474 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10480 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10482 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10487 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10488 (unsigned long) tdep->lowest_pc);
10491 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10494 _initialize_arm_tdep (void)
10496 struct ui_file *stb;
10498 struct cmd_list_element *new_set, *new_show;
10499 const char *setname;
10500 const char *setdesc;
10501 const char *const *regnames;
10503 static char *helptext;
10504 char regdesc[1024], *rdptr = regdesc;
10505 size_t rest = sizeof (regdesc);
10507 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10509 arm_objfile_data_key
10510 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10512 /* Add ourselves to objfile event chain. */
10513 observer_attach_new_objfile (arm_exidx_new_objfile);
10515 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10517 /* Register an ELF OS ABI sniffer for ARM binaries. */
10518 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10519 bfd_target_elf_flavour,
10520 arm_elf_osabi_sniffer);
10522 /* Initialize the standard target descriptions. */
10523 initialize_tdesc_arm_with_m ();
10524 initialize_tdesc_arm_with_m_fpa_layout ();
10525 initialize_tdesc_arm_with_m_vfp_d16 ();
10526 initialize_tdesc_arm_with_iwmmxt ();
10527 initialize_tdesc_arm_with_vfpv2 ();
10528 initialize_tdesc_arm_with_vfpv3 ();
10529 initialize_tdesc_arm_with_neon ();
10531 /* Get the number of possible sets of register names defined in opcodes. */
10532 num_disassembly_options = get_arm_regname_num_options ();
10534 /* Add root prefix command for all "set arm"/"show arm" commands. */
10535 add_prefix_cmd ("arm", no_class, set_arm_command,
10536 _("Various ARM-specific commands."),
10537 &setarmcmdlist, "set arm ", 0, &setlist);
10539 add_prefix_cmd ("arm", no_class, show_arm_command,
10540 _("Various ARM-specific commands."),
10541 &showarmcmdlist, "show arm ", 0, &showlist);
10543 /* Sync the opcode insn printer with our register viewer. */
10544 parse_arm_disassembler_option ("reg-names-std");
10546 /* Initialize the array that will be passed to
10547 add_setshow_enum_cmd(). */
10548 valid_disassembly_styles
10549 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10550 for (i = 0; i < num_disassembly_options; i++)
10552 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10553 valid_disassembly_styles[i] = setname;
10554 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10557 /* When we find the default names, tell the disassembler to use
10559 if (!strcmp (setname, "std"))
10561 disassembly_style = setname;
10562 set_arm_regname_option (i);
10565 /* Mark the end of valid options. */
10566 valid_disassembly_styles[num_disassembly_options] = NULL;
10568 /* Create the help text. */
10569 stb = mem_fileopen ();
10570 fprintf_unfiltered (stb, "%s%s%s",
10571 _("The valid values are:\n"),
10573 _("The default is \"std\"."));
10574 helptext = ui_file_xstrdup (stb, NULL);
10575 ui_file_delete (stb);
10577 add_setshow_enum_cmd("disassembler", no_class,
10578 valid_disassembly_styles, &disassembly_style,
10579 _("Set the disassembly style."),
10580 _("Show the disassembly style."),
10582 set_disassembly_style_sfunc,
10583 NULL, /* FIXME: i18n: The disassembly style is
10585 &setarmcmdlist, &showarmcmdlist);
10587 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10588 _("Set usage of ARM 32-bit mode."),
10589 _("Show usage of ARM 32-bit mode."),
10590 _("When off, a 26-bit PC will be used."),
10592 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10594 &setarmcmdlist, &showarmcmdlist);
10596 /* Add a command to allow the user to force the FPU model. */
10597 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10598 _("Set the floating point type."),
10599 _("Show the floating point type."),
10600 _("auto - Determine the FP typefrom the OS-ABI.\n\
10601 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10602 fpa - FPA co-processor (GCC compiled).\n\
10603 softvfp - Software FP with pure-endian doubles.\n\
10604 vfp - VFP co-processor."),
10605 set_fp_model_sfunc, show_fp_model,
10606 &setarmcmdlist, &showarmcmdlist);
10608 /* Add a command to allow the user to force the ABI. */
10609 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10611 _("Show the ABI."),
10612 NULL, arm_set_abi, arm_show_abi,
10613 &setarmcmdlist, &showarmcmdlist);
10615 /* Add two commands to allow the user to force the assumed
10617 add_setshow_enum_cmd ("fallback-mode", class_support,
10618 arm_mode_strings, &arm_fallback_mode_string,
10619 _("Set the mode assumed when symbols are unavailable."),
10620 _("Show the mode assumed when symbols are unavailable."),
10621 NULL, NULL, arm_show_fallback_mode,
10622 &setarmcmdlist, &showarmcmdlist);
10623 add_setshow_enum_cmd ("force-mode", class_support,
10624 arm_mode_strings, &arm_force_mode_string,
10625 _("Set the mode assumed even when symbols are available."),
10626 _("Show the mode assumed even when symbols are available."),
10627 NULL, NULL, arm_show_force_mode,
10628 &setarmcmdlist, &showarmcmdlist);
10630 /* Debugging flag. */
10631 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10632 _("Set ARM debugging."),
10633 _("Show ARM debugging."),
10634 _("When on, arm-specific debugging is enabled."),
10636 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10637 &setdebuglist, &showdebuglist);
10640 /* ARM-reversible process record data structures. */
10642 #define ARM_INSN_SIZE_BYTES 4
10643 #define THUMB_INSN_SIZE_BYTES 2
10644 #define THUMB2_INSN_SIZE_BYTES 4
10647 #define INSN_S_L_BIT_NUM 20
10649 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10652 unsigned int reg_len = LENGTH; \
10655 REGS = XNEWVEC (uint32_t, reg_len); \
10656 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10661 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10664 unsigned int mem_len = LENGTH; \
10667 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10668 memcpy(&MEMS->len, &RECORD_BUF[0], \
10669 sizeof(struct arm_mem_r) * LENGTH); \
10674 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10675 #define INSN_RECORDED(ARM_RECORD) \
10676 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10678 /* ARM memory record structure. */
10681 uint32_t len; /* Record length. */
10682 uint32_t addr; /* Memory address. */
10685 /* ARM instruction record contains opcode of current insn
10686 and execution state (before entry to decode_insn()),
10687 contains list of to-be-modified registers and
10688 memory blocks (on return from decode_insn()). */
10690 typedef struct insn_decode_record_t
10692 struct gdbarch *gdbarch;
10693 struct regcache *regcache;
10694 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10695 uint32_t arm_insn; /* Should accommodate thumb. */
10696 uint32_t cond; /* Condition code. */
10697 uint32_t opcode; /* Insn opcode. */
10698 uint32_t decode; /* Insn decode bits. */
10699 uint32_t mem_rec_count; /* No of mem records. */
10700 uint32_t reg_rec_count; /* No of reg records. */
10701 uint32_t *arm_regs; /* Registers to be saved for this record. */
10702 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10703 } insn_decode_record;
10706 /* Checks ARM SBZ and SBO mandatory fields. */
10709 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10711 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10730 enum arm_record_result
10732 ARM_RECORD_SUCCESS = 0,
10733 ARM_RECORD_FAILURE = 1
10740 } arm_record_strx_t;
10751 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10752 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10755 struct regcache *reg_cache = arm_insn_r->regcache;
10756 ULONGEST u_regval[2]= {0};
10758 uint32_t reg_src1 = 0, reg_src2 = 0;
10759 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10760 uint32_t opcode1 = 0;
10762 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10763 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10764 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10767 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10769 /* 1) Handle misc store, immediate offset. */
10770 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10771 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10772 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10773 regcache_raw_read_unsigned (reg_cache, reg_src1,
10775 if (ARM_PC_REGNUM == reg_src1)
10777 /* If R15 was used as Rn, hence current PC+8. */
10778 u_regval[0] = u_regval[0] + 8;
10780 offset_8 = (immed_high << 4) | immed_low;
10781 /* Calculate target store address. */
10782 if (14 == arm_insn_r->opcode)
10784 tgt_mem_addr = u_regval[0] + offset_8;
10788 tgt_mem_addr = u_regval[0] - offset_8;
10790 if (ARM_RECORD_STRH == str_type)
10792 record_buf_mem[0] = 2;
10793 record_buf_mem[1] = tgt_mem_addr;
10794 arm_insn_r->mem_rec_count = 1;
10796 else if (ARM_RECORD_STRD == str_type)
10798 record_buf_mem[0] = 4;
10799 record_buf_mem[1] = tgt_mem_addr;
10800 record_buf_mem[2] = 4;
10801 record_buf_mem[3] = tgt_mem_addr + 4;
10802 arm_insn_r->mem_rec_count = 2;
10805 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10807 /* 2) Store, register offset. */
10809 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10811 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10812 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10813 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10814 if (15 == reg_src2)
10816 /* If R15 was used as Rn, hence current PC+8. */
10817 u_regval[0] = u_regval[0] + 8;
10819 /* Calculate target store address, Rn +/- Rm, register offset. */
10820 if (12 == arm_insn_r->opcode)
10822 tgt_mem_addr = u_regval[0] + u_regval[1];
10826 tgt_mem_addr = u_regval[1] - u_regval[0];
10828 if (ARM_RECORD_STRH == str_type)
10830 record_buf_mem[0] = 2;
10831 record_buf_mem[1] = tgt_mem_addr;
10832 arm_insn_r->mem_rec_count = 1;
10834 else if (ARM_RECORD_STRD == str_type)
10836 record_buf_mem[0] = 4;
10837 record_buf_mem[1] = tgt_mem_addr;
10838 record_buf_mem[2] = 4;
10839 record_buf_mem[3] = tgt_mem_addr + 4;
10840 arm_insn_r->mem_rec_count = 2;
10843 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10844 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10846 /* 3) Store, immediate pre-indexed. */
10847 /* 5) Store, immediate post-indexed. */
10848 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10849 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10850 offset_8 = (immed_high << 4) | immed_low;
10851 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10852 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10853 /* Calculate target store address, Rn +/- Rm, register offset. */
10854 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10856 tgt_mem_addr = u_regval[0] + offset_8;
10860 tgt_mem_addr = u_regval[0] - offset_8;
10862 if (ARM_RECORD_STRH == str_type)
10864 record_buf_mem[0] = 2;
10865 record_buf_mem[1] = tgt_mem_addr;
10866 arm_insn_r->mem_rec_count = 1;
10868 else if (ARM_RECORD_STRD == str_type)
10870 record_buf_mem[0] = 4;
10871 record_buf_mem[1] = tgt_mem_addr;
10872 record_buf_mem[2] = 4;
10873 record_buf_mem[3] = tgt_mem_addr + 4;
10874 arm_insn_r->mem_rec_count = 2;
10876 /* Record Rn also as it changes. */
10877 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10878 arm_insn_r->reg_rec_count = 1;
10880 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10881 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10883 /* 4) Store, register pre-indexed. */
10884 /* 6) Store, register post -indexed. */
10885 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10886 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10887 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10888 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10889 /* Calculate target store address, Rn +/- Rm, register offset. */
10890 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10892 tgt_mem_addr = u_regval[0] + u_regval[1];
10896 tgt_mem_addr = u_regval[1] - u_regval[0];
10898 if (ARM_RECORD_STRH == str_type)
10900 record_buf_mem[0] = 2;
10901 record_buf_mem[1] = tgt_mem_addr;
10902 arm_insn_r->mem_rec_count = 1;
10904 else if (ARM_RECORD_STRD == str_type)
10906 record_buf_mem[0] = 4;
10907 record_buf_mem[1] = tgt_mem_addr;
10908 record_buf_mem[2] = 4;
10909 record_buf_mem[3] = tgt_mem_addr + 4;
10910 arm_insn_r->mem_rec_count = 2;
10912 /* Record Rn also as it changes. */
10913 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10914 arm_insn_r->reg_rec_count = 1;
10919 /* Handling ARM extension space insns. */
10922 arm_record_extension_space (insn_decode_record *arm_insn_r)
10924 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10925 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10926 uint32_t record_buf[8], record_buf_mem[8];
10927 uint32_t reg_src1 = 0;
10928 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10929 struct regcache *reg_cache = arm_insn_r->regcache;
10930 ULONGEST u_regval = 0;
10932 gdb_assert (!INSN_RECORDED(arm_insn_r));
10933 /* Handle unconditional insn extension space. */
10935 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10936 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10937 if (arm_insn_r->cond)
10939 /* PLD has no affect on architectural state, it just affects
10941 if (5 == ((opcode1 & 0xE0) >> 5))
10944 record_buf[0] = ARM_PS_REGNUM;
10945 record_buf[1] = ARM_LR_REGNUM;
10946 arm_insn_r->reg_rec_count = 2;
10948 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10952 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10953 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10956 /* Undefined instruction on ARM V5; need to handle if later
10957 versions define it. */
10960 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10961 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10962 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10964 /* Handle arithmetic insn extension space. */
10965 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10966 && !INSN_RECORDED(arm_insn_r))
10968 /* Handle MLA(S) and MUL(S). */
10969 if (0 <= insn_op1 && 3 >= insn_op1)
10971 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10972 record_buf[1] = ARM_PS_REGNUM;
10973 arm_insn_r->reg_rec_count = 2;
10975 else if (4 <= insn_op1 && 15 >= insn_op1)
10977 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10978 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10979 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10980 record_buf[2] = ARM_PS_REGNUM;
10981 arm_insn_r->reg_rec_count = 3;
10985 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10986 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10987 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10989 /* Handle control insn extension space. */
10991 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10992 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10994 if (!bit (arm_insn_r->arm_insn,25))
10996 if (!bits (arm_insn_r->arm_insn, 4, 7))
10998 if ((0 == insn_op1) || (2 == insn_op1))
11001 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11002 arm_insn_r->reg_rec_count = 1;
11004 else if (1 == insn_op1)
11006 /* CSPR is going to be changed. */
11007 record_buf[0] = ARM_PS_REGNUM;
11008 arm_insn_r->reg_rec_count = 1;
11010 else if (3 == insn_op1)
11012 /* SPSR is going to be changed. */
11013 /* We need to get SPSR value, which is yet to be done. */
11014 printf_unfiltered (_("Process record does not support "
11015 "instruction 0x%0x at address %s.\n"),
11016 arm_insn_r->arm_insn,
11017 paddress (arm_insn_r->gdbarch,
11018 arm_insn_r->this_addr));
11022 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11027 record_buf[0] = ARM_PS_REGNUM;
11028 arm_insn_r->reg_rec_count = 1;
11030 else if (3 == insn_op1)
11033 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11034 arm_insn_r->reg_rec_count = 1;
11037 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11040 record_buf[0] = ARM_PS_REGNUM;
11041 record_buf[1] = ARM_LR_REGNUM;
11042 arm_insn_r->reg_rec_count = 2;
11044 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11046 /* QADD, QSUB, QDADD, QDSUB */
11047 record_buf[0] = ARM_PS_REGNUM;
11048 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11049 arm_insn_r->reg_rec_count = 2;
11051 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11054 record_buf[0] = ARM_PS_REGNUM;
11055 record_buf[1] = ARM_LR_REGNUM;
11056 arm_insn_r->reg_rec_count = 2;
11058 /* Save SPSR also;how? */
11059 printf_unfiltered (_("Process record does not support "
11060 "instruction 0x%0x at address %s.\n"),
11061 arm_insn_r->arm_insn,
11062 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11065 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11066 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11067 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11068 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11071 if (0 == insn_op1 || 1 == insn_op1)
11073 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11074 /* We dont do optimization for SMULW<y> where we
11076 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11077 record_buf[1] = ARM_PS_REGNUM;
11078 arm_insn_r->reg_rec_count = 2;
11080 else if (2 == insn_op1)
11083 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11084 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11085 arm_insn_r->reg_rec_count = 2;
11087 else if (3 == insn_op1)
11090 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11091 arm_insn_r->reg_rec_count = 1;
11097 /* MSR : immediate form. */
11100 /* CSPR is going to be changed. */
11101 record_buf[0] = ARM_PS_REGNUM;
11102 arm_insn_r->reg_rec_count = 1;
11104 else if (3 == insn_op1)
11106 /* SPSR is going to be changed. */
11107 /* we need to get SPSR value, which is yet to be done */
11108 printf_unfiltered (_("Process record does not support "
11109 "instruction 0x%0x at address %s.\n"),
11110 arm_insn_r->arm_insn,
11111 paddress (arm_insn_r->gdbarch,
11112 arm_insn_r->this_addr));
11118 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11119 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11120 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11122 /* Handle load/store insn extension space. */
11124 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11125 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11126 && !INSN_RECORDED(arm_insn_r))
11131 /* These insn, changes register and memory as well. */
11132 /* SWP or SWPB insn. */
11133 /* Get memory address given by Rn. */
11134 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11135 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11136 /* SWP insn ?, swaps word. */
11137 if (8 == arm_insn_r->opcode)
11139 record_buf_mem[0] = 4;
11143 /* SWPB insn, swaps only byte. */
11144 record_buf_mem[0] = 1;
11146 record_buf_mem[1] = u_regval;
11147 arm_insn_r->mem_rec_count = 1;
11148 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11149 arm_insn_r->reg_rec_count = 1;
11151 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11154 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11157 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11160 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11161 record_buf[1] = record_buf[0] + 1;
11162 arm_insn_r->reg_rec_count = 2;
11164 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11167 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11170 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11172 /* LDRH, LDRSB, LDRSH. */
11173 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11174 arm_insn_r->reg_rec_count = 1;
11179 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11180 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11181 && !INSN_RECORDED(arm_insn_r))
11184 /* Handle coprocessor insn extension space. */
11187 /* To be done for ARMv5 and later; as of now we return -1. */
11189 printf_unfiltered (_("Process record does not support instruction x%0x "
11190 "at address %s.\n"),arm_insn_r->arm_insn,
11191 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11194 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11195 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11200 /* Handling opcode 000 insns. */
11203 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11205 struct regcache *reg_cache = arm_insn_r->regcache;
11206 uint32_t record_buf[8], record_buf_mem[8];
11207 ULONGEST u_regval[2] = {0};
11209 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11210 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11211 uint32_t opcode1 = 0;
11213 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11214 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11215 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11217 /* Data processing insn /multiply insn. */
11218 if (9 == arm_insn_r->decode
11219 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11220 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11222 /* Handle multiply instructions. */
11223 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11224 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11226 /* Handle MLA and MUL. */
11227 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11228 record_buf[1] = ARM_PS_REGNUM;
11229 arm_insn_r->reg_rec_count = 2;
11231 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11233 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11234 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11235 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11236 record_buf[2] = ARM_PS_REGNUM;
11237 arm_insn_r->reg_rec_count = 3;
11240 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11241 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11243 /* Handle misc load insns, as 20th bit (L = 1). */
11244 /* LDR insn has a capability to do branching, if
11245 MOV LR, PC is precceded by LDR insn having Rn as R15
11246 in that case, it emulates branch and link insn, and hence we
11247 need to save CSPR and PC as well. I am not sure this is right
11248 place; as opcode = 010 LDR insn make this happen, if R15 was
11250 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11251 if (15 != reg_dest)
11253 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11254 arm_insn_r->reg_rec_count = 1;
11258 record_buf[0] = reg_dest;
11259 record_buf[1] = ARM_PS_REGNUM;
11260 arm_insn_r->reg_rec_count = 2;
11263 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11264 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11265 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11266 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11268 /* Handle MSR insn. */
11269 if (9 == arm_insn_r->opcode)
11271 /* CSPR is going to be changed. */
11272 record_buf[0] = ARM_PS_REGNUM;
11273 arm_insn_r->reg_rec_count = 1;
11277 /* SPSR is going to be changed. */
11278 /* How to read SPSR value? */
11279 printf_unfiltered (_("Process record does not support instruction "
11280 "0x%0x at address %s.\n"),
11281 arm_insn_r->arm_insn,
11282 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11286 else if (9 == arm_insn_r->decode
11287 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11288 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11290 /* Handling SWP, SWPB. */
11291 /* These insn, changes register and memory as well. */
11292 /* SWP or SWPB insn. */
11294 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11295 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11296 /* SWP insn ?, swaps word. */
11297 if (8 == arm_insn_r->opcode)
11299 record_buf_mem[0] = 4;
11303 /* SWPB insn, swaps only byte. */
11304 record_buf_mem[0] = 1;
11306 record_buf_mem[1] = u_regval[0];
11307 arm_insn_r->mem_rec_count = 1;
11308 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11309 arm_insn_r->reg_rec_count = 1;
11311 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11312 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11314 /* Handle BLX, branch and link/exchange. */
11315 if (9 == arm_insn_r->opcode)
11317 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11318 and R14 stores the return address. */
11319 record_buf[0] = ARM_PS_REGNUM;
11320 record_buf[1] = ARM_LR_REGNUM;
11321 arm_insn_r->reg_rec_count = 2;
11324 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11326 /* Handle enhanced software breakpoint insn, BKPT. */
11327 /* CPSR is changed to be executed in ARM state, disabling normal
11328 interrupts, entering abort mode. */
11329 /* According to high vector configuration PC is set. */
11330 /* user hit breakpoint and type reverse, in
11331 that case, we need to go back with previous CPSR and
11332 Program Counter. */
11333 record_buf[0] = ARM_PS_REGNUM;
11334 record_buf[1] = ARM_LR_REGNUM;
11335 arm_insn_r->reg_rec_count = 2;
11337 /* Save SPSR also; how? */
11338 printf_unfiltered (_("Process record does not support instruction "
11339 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11340 paddress (arm_insn_r->gdbarch,
11341 arm_insn_r->this_addr));
11344 else if (11 == arm_insn_r->decode
11345 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11347 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11349 /* Handle str(x) insn */
11350 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11353 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11354 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11356 /* Handle BX, branch and link/exchange. */
11357 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11358 record_buf[0] = ARM_PS_REGNUM;
11359 arm_insn_r->reg_rec_count = 1;
11361 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11362 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11363 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11365 /* Count leading zeros: CLZ. */
11366 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11367 arm_insn_r->reg_rec_count = 1;
11369 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11370 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11371 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11372 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11375 /* Handle MRS insn. */
11376 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11377 arm_insn_r->reg_rec_count = 1;
11379 else if (arm_insn_r->opcode <= 15)
11381 /* Normal data processing insns. */
11382 /* Out of 11 shifter operands mode, all the insn modifies destination
11383 register, which is specified by 13-16 decode. */
11384 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11385 record_buf[1] = ARM_PS_REGNUM;
11386 arm_insn_r->reg_rec_count = 2;
11393 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11394 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11398 /* Handling opcode 001 insns. */
11401 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11403 uint32_t record_buf[8], record_buf_mem[8];
11405 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11406 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11408 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11409 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11410 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11413 /* Handle MSR insn. */
11414 if (9 == arm_insn_r->opcode)
11416 /* CSPR is going to be changed. */
11417 record_buf[0] = ARM_PS_REGNUM;
11418 arm_insn_r->reg_rec_count = 1;
11422 /* SPSR is going to be changed. */
11425 else if (arm_insn_r->opcode <= 15)
11427 /* Normal data processing insns. */
11428 /* Out of 11 shifter operands mode, all the insn modifies destination
11429 register, which is specified by 13-16 decode. */
11430 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11431 record_buf[1] = ARM_PS_REGNUM;
11432 arm_insn_r->reg_rec_count = 2;
11439 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11440 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11444 /* Handling opcode 010 insns. */
11447 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11449 struct regcache *reg_cache = arm_insn_r->regcache;
11451 uint32_t reg_src1 = 0 , reg_dest = 0;
11452 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11453 uint32_t record_buf[8], record_buf_mem[8];
11455 ULONGEST u_regval = 0;
11457 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11458 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11460 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11462 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11463 /* LDR insn has a capability to do branching, if
11464 MOV LR, PC is precedded by LDR insn having Rn as R15
11465 in that case, it emulates branch and link insn, and hence we
11466 need to save CSPR and PC as well. */
11467 if (ARM_PC_REGNUM != reg_dest)
11469 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11470 arm_insn_r->reg_rec_count = 1;
11474 record_buf[0] = reg_dest;
11475 record_buf[1] = ARM_PS_REGNUM;
11476 arm_insn_r->reg_rec_count = 2;
11481 /* Store, immediate offset, immediate pre-indexed,
11482 immediate post-indexed. */
11483 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11484 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11485 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11487 if (bit (arm_insn_r->arm_insn, 23))
11489 tgt_mem_addr = u_regval + offset_12;
11493 tgt_mem_addr = u_regval - offset_12;
11496 switch (arm_insn_r->opcode)
11510 record_buf_mem[0] = 4;
11525 record_buf_mem[0] = 1;
11529 gdb_assert_not_reached ("no decoding pattern found");
11532 record_buf_mem[1] = tgt_mem_addr;
11533 arm_insn_r->mem_rec_count = 1;
11535 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11536 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11537 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11538 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11539 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11540 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11543 /* We are handling pre-indexed mode; post-indexed mode;
11544 where Rn is going to be changed. */
11545 record_buf[0] = reg_src1;
11546 arm_insn_r->reg_rec_count = 1;
11550 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11551 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11555 /* Handling opcode 011 insns. */
11558 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11560 struct regcache *reg_cache = arm_insn_r->regcache;
11562 uint32_t shift_imm = 0;
11563 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11564 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11565 uint32_t record_buf[8], record_buf_mem[8];
11568 ULONGEST u_regval[2];
11570 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11571 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11573 /* Handle enhanced store insns and LDRD DSP insn,
11574 order begins according to addressing modes for store insns
11578 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11580 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11581 /* LDR insn has a capability to do branching, if
11582 MOV LR, PC is precedded by LDR insn having Rn as R15
11583 in that case, it emulates branch and link insn, and hence we
11584 need to save CSPR and PC as well. */
11585 if (15 != reg_dest)
11587 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11588 arm_insn_r->reg_rec_count = 1;
11592 record_buf[0] = reg_dest;
11593 record_buf[1] = ARM_PS_REGNUM;
11594 arm_insn_r->reg_rec_count = 2;
11599 if (! bits (arm_insn_r->arm_insn, 4, 11))
11601 /* Store insn, register offset and register pre-indexed,
11602 register post-indexed. */
11604 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11606 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11607 regcache_raw_read_unsigned (reg_cache, reg_src1
11609 regcache_raw_read_unsigned (reg_cache, reg_src2
11611 if (15 == reg_src2)
11613 /* If R15 was used as Rn, hence current PC+8. */
11614 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11615 u_regval[0] = u_regval[0] + 8;
11617 /* Calculate target store address, Rn +/- Rm, register offset. */
11619 if (bit (arm_insn_r->arm_insn, 23))
11621 tgt_mem_addr = u_regval[0] + u_regval[1];
11625 tgt_mem_addr = u_regval[1] - u_regval[0];
11628 switch (arm_insn_r->opcode)
11642 record_buf_mem[0] = 4;
11657 record_buf_mem[0] = 1;
11661 gdb_assert_not_reached ("no decoding pattern found");
11664 record_buf_mem[1] = tgt_mem_addr;
11665 arm_insn_r->mem_rec_count = 1;
11667 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11668 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11669 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11670 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11671 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11672 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11675 /* Rn is going to be changed in pre-indexed mode and
11676 post-indexed mode as well. */
11677 record_buf[0] = reg_src2;
11678 arm_insn_r->reg_rec_count = 1;
11683 /* Store insn, scaled register offset; scaled pre-indexed. */
11684 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11686 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11688 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11689 /* Get shift_imm. */
11690 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11691 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11692 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11693 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11694 /* Offset_12 used as shift. */
11698 /* Offset_12 used as index. */
11699 offset_12 = u_regval[0] << shift_imm;
11703 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11709 if (bit (u_regval[0], 31))
11711 offset_12 = 0xFFFFFFFF;
11720 /* This is arithmetic shift. */
11721 offset_12 = s_word >> shift_imm;
11728 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11730 /* Get C flag value and shift it by 31. */
11731 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11732 | (u_regval[0]) >> 1);
11736 offset_12 = (u_regval[0] >> shift_imm) \
11738 (sizeof(uint32_t) - shift_imm));
11743 gdb_assert_not_reached ("no decoding pattern found");
11747 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11749 if (bit (arm_insn_r->arm_insn, 23))
11751 tgt_mem_addr = u_regval[1] + offset_12;
11755 tgt_mem_addr = u_regval[1] - offset_12;
11758 switch (arm_insn_r->opcode)
11772 record_buf_mem[0] = 4;
11787 record_buf_mem[0] = 1;
11791 gdb_assert_not_reached ("no decoding pattern found");
11794 record_buf_mem[1] = tgt_mem_addr;
11795 arm_insn_r->mem_rec_count = 1;
11797 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11798 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11799 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11800 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11801 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11802 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11805 /* Rn is going to be changed in register scaled pre-indexed
11806 mode,and scaled post indexed mode. */
11807 record_buf[0] = reg_src2;
11808 arm_insn_r->reg_rec_count = 1;
11813 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11814 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11818 /* Handling opcode 100 insns. */
11821 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11823 struct regcache *reg_cache = arm_insn_r->regcache;
11825 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11826 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11827 uint32_t start_address = 0, index = 0;
11828 uint32_t record_buf[24], record_buf_mem[48];
11830 ULONGEST u_regval[2] = {0};
11832 /* This mode is exclusively for load and store multiple. */
11833 /* Handle incremenrt after/before and decrment after.before mode;
11834 Rn is changing depending on W bit, but as of now we store Rn too
11835 without optimization. */
11837 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11839 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11841 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11843 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11848 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11852 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11853 while (register_bits)
11855 if (register_bits & 0x00000001)
11856 record_buf[index++] = register_count;
11857 register_bits = register_bits >> 1;
11861 /* Extra space for Base Register and CPSR; wihtout optimization. */
11862 record_buf[index++] = reg_src1;
11863 record_buf[index++] = ARM_PS_REGNUM;
11864 arm_insn_r->reg_rec_count = index;
11868 /* It handles both STM(1) and STM(2). */
11869 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11871 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11873 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11874 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11875 while (register_bits)
11877 if (register_bits & 0x00000001)
11879 register_bits = register_bits >> 1;
11884 /* Decrement after. */
11886 start_address = (u_regval[0]) - (register_count * 4) + 4;
11887 arm_insn_r->mem_rec_count = register_count;
11888 while (register_count)
11890 record_buf_mem[(register_count * 2) - 1] = start_address;
11891 record_buf_mem[(register_count * 2) - 2] = 4;
11892 start_address = start_address + 4;
11897 /* Increment after. */
11899 start_address = u_regval[0];
11900 arm_insn_r->mem_rec_count = register_count;
11901 while (register_count)
11903 record_buf_mem[(register_count * 2) - 1] = start_address;
11904 record_buf_mem[(register_count * 2) - 2] = 4;
11905 start_address = start_address + 4;
11910 /* Decrement before. */
11913 start_address = (u_regval[0]) - (register_count * 4);
11914 arm_insn_r->mem_rec_count = register_count;
11915 while (register_count)
11917 record_buf_mem[(register_count * 2) - 1] = start_address;
11918 record_buf_mem[(register_count * 2) - 2] = 4;
11919 start_address = start_address + 4;
11924 /* Increment before. */
11926 start_address = u_regval[0] + 4;
11927 arm_insn_r->mem_rec_count = register_count;
11928 while (register_count)
11930 record_buf_mem[(register_count * 2) - 1] = start_address;
11931 record_buf_mem[(register_count * 2) - 2] = 4;
11932 start_address = start_address + 4;
11938 gdb_assert_not_reached ("no decoding pattern found");
11942 /* Base register also changes; based on condition and W bit. */
11943 /* We save it anyway without optimization. */
11944 record_buf[0] = reg_src1;
11945 arm_insn_r->reg_rec_count = 1;
11948 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11949 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11953 /* Handling opcode 101 insns. */
11956 arm_record_b_bl (insn_decode_record *arm_insn_r)
11958 uint32_t record_buf[8];
11960 /* Handle B, BL, BLX(1) insns. */
11961 /* B simply branches so we do nothing here. */
11962 /* Note: BLX(1) doesnt fall here but instead it falls into
11963 extension space. */
11964 if (bit (arm_insn_r->arm_insn, 24))
11966 record_buf[0] = ARM_LR_REGNUM;
11967 arm_insn_r->reg_rec_count = 1;
11970 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11975 /* Handling opcode 110 insns. */
11978 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11980 printf_unfiltered (_("Process record does not support instruction "
11981 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11982 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11987 /* Handling opcode 111 insns. */
11990 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11992 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11993 struct regcache *reg_cache = arm_insn_r->regcache;
11994 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
11995 ULONGEST u_regval = 0;
11997 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11999 /* Handle arm SWI/SVC system call instructions. */
12000 if (15 == arm_insn_r->opcode)
12002 if (tdep->arm_syscall_record != NULL)
12004 ULONGEST svc_operand, svc_number;
12006 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12008 if (svc_operand) /* OABI. */
12009 svc_number = svc_operand - 0x900000;
12011 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12013 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12017 printf_unfiltered (_("no syscall record support\n"));
12023 arm_record_unsupported_insn (arm_insn_r);
12030 /* Handling opcode 000 insns. */
12033 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12035 uint32_t record_buf[8];
12036 uint32_t reg_src1 = 0;
12038 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12040 record_buf[0] = ARM_PS_REGNUM;
12041 record_buf[1] = reg_src1;
12042 thumb_insn_r->reg_rec_count = 2;
12044 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12050 /* Handling opcode 001 insns. */
12053 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12055 uint32_t record_buf[8];
12056 uint32_t reg_src1 = 0;
12058 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12060 record_buf[0] = ARM_PS_REGNUM;
12061 record_buf[1] = reg_src1;
12062 thumb_insn_r->reg_rec_count = 2;
12064 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12069 /* Handling opcode 010 insns. */
12072 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12074 struct regcache *reg_cache = thumb_insn_r->regcache;
12075 uint32_t record_buf[8], record_buf_mem[8];
12077 uint32_t reg_src1 = 0, reg_src2 = 0;
12078 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12080 ULONGEST u_regval[2] = {0};
12082 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12084 if (bit (thumb_insn_r->arm_insn, 12))
12086 /* Handle load/store register offset. */
12087 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12088 if (opcode2 >= 12 && opcode2 <= 15)
12090 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12091 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12092 record_buf[0] = reg_src1;
12093 thumb_insn_r->reg_rec_count = 1;
12095 else if (opcode2 >= 8 && opcode2 <= 10)
12097 /* STR(2), STRB(2), STRH(2) . */
12098 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12099 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12100 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12101 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12103 record_buf_mem[0] = 4; /* STR (2). */
12104 else if (10 == opcode2)
12105 record_buf_mem[0] = 1; /* STRB (2). */
12106 else if (9 == opcode2)
12107 record_buf_mem[0] = 2; /* STRH (2). */
12108 record_buf_mem[1] = u_regval[0] + u_regval[1];
12109 thumb_insn_r->mem_rec_count = 1;
12112 else if (bit (thumb_insn_r->arm_insn, 11))
12114 /* Handle load from literal pool. */
12116 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12117 record_buf[0] = reg_src1;
12118 thumb_insn_r->reg_rec_count = 1;
12122 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12123 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12124 if ((3 == opcode2) && (!opcode3))
12126 /* Branch with exchange. */
12127 record_buf[0] = ARM_PS_REGNUM;
12128 thumb_insn_r->reg_rec_count = 1;
12132 /* Format 8; special data processing insns. */
12133 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12134 record_buf[0] = ARM_PS_REGNUM;
12135 record_buf[1] = reg_src1;
12136 thumb_insn_r->reg_rec_count = 2;
12141 /* Format 5; data processing insns. */
12142 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12143 if (bit (thumb_insn_r->arm_insn, 7))
12145 reg_src1 = reg_src1 + 8;
12147 record_buf[0] = ARM_PS_REGNUM;
12148 record_buf[1] = reg_src1;
12149 thumb_insn_r->reg_rec_count = 2;
12152 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12153 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12159 /* Handling opcode 001 insns. */
12162 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12164 struct regcache *reg_cache = thumb_insn_r->regcache;
12165 uint32_t record_buf[8], record_buf_mem[8];
12167 uint32_t reg_src1 = 0;
12168 uint32_t opcode = 0, immed_5 = 0;
12170 ULONGEST u_regval = 0;
12172 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12177 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12178 record_buf[0] = reg_src1;
12179 thumb_insn_r->reg_rec_count = 1;
12184 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12185 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12186 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12187 record_buf_mem[0] = 4;
12188 record_buf_mem[1] = u_regval + (immed_5 * 4);
12189 thumb_insn_r->mem_rec_count = 1;
12192 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12193 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12199 /* Handling opcode 100 insns. */
12202 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12204 struct regcache *reg_cache = thumb_insn_r->regcache;
12205 uint32_t record_buf[8], record_buf_mem[8];
12207 uint32_t reg_src1 = 0;
12208 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12210 ULONGEST u_regval = 0;
12212 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12217 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12218 record_buf[0] = reg_src1;
12219 thumb_insn_r->reg_rec_count = 1;
12221 else if (1 == opcode)
12224 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12225 record_buf[0] = reg_src1;
12226 thumb_insn_r->reg_rec_count = 1;
12228 else if (2 == opcode)
12231 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12232 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12233 record_buf_mem[0] = 4;
12234 record_buf_mem[1] = u_regval + (immed_8 * 4);
12235 thumb_insn_r->mem_rec_count = 1;
12237 else if (0 == opcode)
12240 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12241 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12242 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12243 record_buf_mem[0] = 2;
12244 record_buf_mem[1] = u_regval + (immed_5 * 2);
12245 thumb_insn_r->mem_rec_count = 1;
12248 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12249 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12255 /* Handling opcode 101 insns. */
12258 thumb_record_misc (insn_decode_record *thumb_insn_r)
12260 struct regcache *reg_cache = thumb_insn_r->regcache;
12262 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12263 uint32_t register_bits = 0, register_count = 0;
12264 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12265 uint32_t record_buf[24], record_buf_mem[48];
12268 ULONGEST u_regval = 0;
12270 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12271 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12272 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12277 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12278 while (register_bits)
12280 if (register_bits & 0x00000001)
12281 record_buf[index++] = register_count;
12282 register_bits = register_bits >> 1;
12285 record_buf[index++] = ARM_PS_REGNUM;
12286 record_buf[index++] = ARM_SP_REGNUM;
12287 thumb_insn_r->reg_rec_count = index;
12289 else if (10 == opcode2)
12292 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12293 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12294 while (register_bits)
12296 if (register_bits & 0x00000001)
12298 register_bits = register_bits >> 1;
12300 start_address = u_regval - \
12301 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12302 thumb_insn_r->mem_rec_count = register_count;
12303 while (register_count)
12305 record_buf_mem[(register_count * 2) - 1] = start_address;
12306 record_buf_mem[(register_count * 2) - 2] = 4;
12307 start_address = start_address + 4;
12310 record_buf[0] = ARM_SP_REGNUM;
12311 thumb_insn_r->reg_rec_count = 1;
12313 else if (0x1E == opcode1)
12316 /* Handle enhanced software breakpoint insn, BKPT. */
12317 /* CPSR is changed to be executed in ARM state, disabling normal
12318 interrupts, entering abort mode. */
12319 /* According to high vector configuration PC is set. */
12320 /* User hits breakpoint and type reverse, in that case, we need to go back with
12321 previous CPSR and Program Counter. */
12322 record_buf[0] = ARM_PS_REGNUM;
12323 record_buf[1] = ARM_LR_REGNUM;
12324 thumb_insn_r->reg_rec_count = 2;
12325 /* We need to save SPSR value, which is not yet done. */
12326 printf_unfiltered (_("Process record does not support instruction "
12327 "0x%0x at address %s.\n"),
12328 thumb_insn_r->arm_insn,
12329 paddress (thumb_insn_r->gdbarch,
12330 thumb_insn_r->this_addr));
12333 else if ((0 == opcode) || (1 == opcode))
12335 /* ADD(5), ADD(6). */
12336 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12337 record_buf[0] = reg_src1;
12338 thumb_insn_r->reg_rec_count = 1;
12340 else if (2 == opcode)
12342 /* ADD(7), SUB(4). */
12343 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12344 record_buf[0] = ARM_SP_REGNUM;
12345 thumb_insn_r->reg_rec_count = 1;
12348 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12349 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12355 /* Handling opcode 110 insns. */
12358 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12360 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12361 struct regcache *reg_cache = thumb_insn_r->regcache;
12363 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12364 uint32_t reg_src1 = 0;
12365 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12366 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12367 uint32_t record_buf[24], record_buf_mem[48];
12369 ULONGEST u_regval = 0;
12371 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12372 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12378 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12380 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12381 while (register_bits)
12383 if (register_bits & 0x00000001)
12384 record_buf[index++] = register_count;
12385 register_bits = register_bits >> 1;
12388 record_buf[index++] = reg_src1;
12389 thumb_insn_r->reg_rec_count = index;
12391 else if (0 == opcode2)
12393 /* It handles both STMIA. */
12394 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12396 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12397 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12398 while (register_bits)
12400 if (register_bits & 0x00000001)
12402 register_bits = register_bits >> 1;
12404 start_address = u_regval;
12405 thumb_insn_r->mem_rec_count = register_count;
12406 while (register_count)
12408 record_buf_mem[(register_count * 2) - 1] = start_address;
12409 record_buf_mem[(register_count * 2) - 2] = 4;
12410 start_address = start_address + 4;
12414 else if (0x1F == opcode1)
12416 /* Handle arm syscall insn. */
12417 if (tdep->arm_syscall_record != NULL)
12419 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12420 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12424 printf_unfiltered (_("no syscall record support\n"));
12429 /* B (1), conditional branch is automatically taken care in process_record,
12430 as PC is saved there. */
12432 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12433 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12439 /* Handling opcode 111 insns. */
12442 thumb_record_branch (insn_decode_record *thumb_insn_r)
12444 uint32_t record_buf[8];
12445 uint32_t bits_h = 0;
12447 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12449 if (2 == bits_h || 3 == bits_h)
12452 record_buf[0] = ARM_LR_REGNUM;
12453 thumb_insn_r->reg_rec_count = 1;
12455 else if (1 == bits_h)
12458 record_buf[0] = ARM_PS_REGNUM;
12459 record_buf[1] = ARM_LR_REGNUM;
12460 thumb_insn_r->reg_rec_count = 2;
12463 /* B(2) is automatically taken care in process_record, as PC is
12466 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12471 /* Handler for thumb2 load/store multiple instructions. */
12474 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12476 struct regcache *reg_cache = thumb2_insn_r->regcache;
12478 uint32_t reg_rn, op;
12479 uint32_t register_bits = 0, register_count = 0;
12480 uint32_t index = 0, start_address = 0;
12481 uint32_t record_buf[24], record_buf_mem[48];
12483 ULONGEST u_regval = 0;
12485 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12486 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12488 if (0 == op || 3 == op)
12490 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12492 /* Handle RFE instruction. */
12493 record_buf[0] = ARM_PS_REGNUM;
12494 thumb2_insn_r->reg_rec_count = 1;
12498 /* Handle SRS instruction after reading banked SP. */
12499 return arm_record_unsupported_insn (thumb2_insn_r);
12502 else if (1 == op || 2 == op)
12504 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12506 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12507 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12508 while (register_bits)
12510 if (register_bits & 0x00000001)
12511 record_buf[index++] = register_count;
12514 register_bits = register_bits >> 1;
12516 record_buf[index++] = reg_rn;
12517 record_buf[index++] = ARM_PS_REGNUM;
12518 thumb2_insn_r->reg_rec_count = index;
12522 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12523 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12524 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12525 while (register_bits)
12527 if (register_bits & 0x00000001)
12530 register_bits = register_bits >> 1;
12535 /* Start address calculation for LDMDB/LDMEA. */
12536 start_address = u_regval;
12540 /* Start address calculation for LDMDB/LDMEA. */
12541 start_address = u_regval - register_count * 4;
12544 thumb2_insn_r->mem_rec_count = register_count;
12545 while (register_count)
12547 record_buf_mem[register_count * 2 - 1] = start_address;
12548 record_buf_mem[register_count * 2 - 2] = 4;
12549 start_address = start_address + 4;
12552 record_buf[0] = reg_rn;
12553 record_buf[1] = ARM_PS_REGNUM;
12554 thumb2_insn_r->reg_rec_count = 2;
12558 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12560 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12562 return ARM_RECORD_SUCCESS;
12565 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12569 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12571 struct regcache *reg_cache = thumb2_insn_r->regcache;
12573 uint32_t reg_rd, reg_rn, offset_imm;
12574 uint32_t reg_dest1, reg_dest2;
12575 uint32_t address, offset_addr;
12576 uint32_t record_buf[8], record_buf_mem[8];
12577 uint32_t op1, op2, op3;
12580 ULONGEST u_regval[2];
12582 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12583 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12584 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12586 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12588 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12590 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12591 record_buf[0] = reg_dest1;
12592 record_buf[1] = ARM_PS_REGNUM;
12593 thumb2_insn_r->reg_rec_count = 2;
12596 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12598 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12599 record_buf[2] = reg_dest2;
12600 thumb2_insn_r->reg_rec_count = 3;
12605 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12606 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12608 if (0 == op1 && 0 == op2)
12610 /* Handle STREX. */
12611 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12612 address = u_regval[0] + (offset_imm * 4);
12613 record_buf_mem[0] = 4;
12614 record_buf_mem[1] = address;
12615 thumb2_insn_r->mem_rec_count = 1;
12616 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12617 record_buf[0] = reg_rd;
12618 thumb2_insn_r->reg_rec_count = 1;
12620 else if (1 == op1 && 0 == op2)
12622 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12623 record_buf[0] = reg_rd;
12624 thumb2_insn_r->reg_rec_count = 1;
12625 address = u_regval[0];
12626 record_buf_mem[1] = address;
12630 /* Handle STREXB. */
12631 record_buf_mem[0] = 1;
12632 thumb2_insn_r->mem_rec_count = 1;
12636 /* Handle STREXH. */
12637 record_buf_mem[0] = 2 ;
12638 thumb2_insn_r->mem_rec_count = 1;
12642 /* Handle STREXD. */
12643 address = u_regval[0];
12644 record_buf_mem[0] = 4;
12645 record_buf_mem[2] = 4;
12646 record_buf_mem[3] = address + 4;
12647 thumb2_insn_r->mem_rec_count = 2;
12652 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12654 if (bit (thumb2_insn_r->arm_insn, 24))
12656 if (bit (thumb2_insn_r->arm_insn, 23))
12657 offset_addr = u_regval[0] + (offset_imm * 4);
12659 offset_addr = u_regval[0] - (offset_imm * 4);
12661 address = offset_addr;
12664 address = u_regval[0];
12666 record_buf_mem[0] = 4;
12667 record_buf_mem[1] = address;
12668 record_buf_mem[2] = 4;
12669 record_buf_mem[3] = address + 4;
12670 thumb2_insn_r->mem_rec_count = 2;
12671 record_buf[0] = reg_rn;
12672 thumb2_insn_r->reg_rec_count = 1;
12676 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12678 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12680 return ARM_RECORD_SUCCESS;
12683 /* Handler for thumb2 data processing (shift register and modified immediate)
12687 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12689 uint32_t reg_rd, op;
12690 uint32_t record_buf[8];
12692 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12693 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12695 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12697 record_buf[0] = ARM_PS_REGNUM;
12698 thumb2_insn_r->reg_rec_count = 1;
12702 record_buf[0] = reg_rd;
12703 record_buf[1] = ARM_PS_REGNUM;
12704 thumb2_insn_r->reg_rec_count = 2;
12707 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12709 return ARM_RECORD_SUCCESS;
12712 /* Generic handler for thumb2 instructions which effect destination and PS
12716 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12719 uint32_t record_buf[8];
12721 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12723 record_buf[0] = reg_rd;
12724 record_buf[1] = ARM_PS_REGNUM;
12725 thumb2_insn_r->reg_rec_count = 2;
12727 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12729 return ARM_RECORD_SUCCESS;
12732 /* Handler for thumb2 branch and miscellaneous control instructions. */
12735 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12737 uint32_t op, op1, op2;
12738 uint32_t record_buf[8];
12740 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12741 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12742 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12744 /* Handle MSR insn. */
12745 if (!(op1 & 0x2) && 0x38 == op)
12749 /* CPSR is going to be changed. */
12750 record_buf[0] = ARM_PS_REGNUM;
12751 thumb2_insn_r->reg_rec_count = 1;
12755 arm_record_unsupported_insn(thumb2_insn_r);
12759 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12762 record_buf[0] = ARM_PS_REGNUM;
12763 record_buf[1] = ARM_LR_REGNUM;
12764 thumb2_insn_r->reg_rec_count = 2;
12767 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12769 return ARM_RECORD_SUCCESS;
12772 /* Handler for thumb2 store single data item instructions. */
12775 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12777 struct regcache *reg_cache = thumb2_insn_r->regcache;
12779 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12780 uint32_t address, offset_addr;
12781 uint32_t record_buf[8], record_buf_mem[8];
12784 ULONGEST u_regval[2];
12786 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12787 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12788 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12789 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12791 if (bit (thumb2_insn_r->arm_insn, 23))
12794 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12795 offset_addr = u_regval[0] + offset_imm;
12796 address = offset_addr;
12801 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12803 /* Handle STRB (register). */
12804 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12805 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12806 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12807 offset_addr = u_regval[1] << shift_imm;
12808 address = u_regval[0] + offset_addr;
12812 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12813 if (bit (thumb2_insn_r->arm_insn, 10))
12815 if (bit (thumb2_insn_r->arm_insn, 9))
12816 offset_addr = u_regval[0] + offset_imm;
12818 offset_addr = u_regval[0] - offset_imm;
12820 address = offset_addr;
12823 address = u_regval[0];
12829 /* Store byte instructions. */
12832 record_buf_mem[0] = 1;
12834 /* Store half word instructions. */
12837 record_buf_mem[0] = 2;
12839 /* Store word instructions. */
12842 record_buf_mem[0] = 4;
12846 gdb_assert_not_reached ("no decoding pattern found");
12850 record_buf_mem[1] = address;
12851 thumb2_insn_r->mem_rec_count = 1;
12852 record_buf[0] = reg_rn;
12853 thumb2_insn_r->reg_rec_count = 1;
12855 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12857 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12859 return ARM_RECORD_SUCCESS;
12862 /* Handler for thumb2 load memory hints instructions. */
12865 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12867 uint32_t record_buf[8];
12868 uint32_t reg_rt, reg_rn;
12870 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12871 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12873 if (ARM_PC_REGNUM != reg_rt)
12875 record_buf[0] = reg_rt;
12876 record_buf[1] = reg_rn;
12877 record_buf[2] = ARM_PS_REGNUM;
12878 thumb2_insn_r->reg_rec_count = 3;
12880 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12882 return ARM_RECORD_SUCCESS;
12885 return ARM_RECORD_FAILURE;
12888 /* Handler for thumb2 load word instructions. */
12891 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12893 uint32_t opcode1 = 0, opcode2 = 0;
12894 uint32_t record_buf[8];
12896 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12897 record_buf[1] = ARM_PS_REGNUM;
12898 thumb2_insn_r->reg_rec_count = 2;
12900 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12902 return ARM_RECORD_SUCCESS;
12905 /* Handler for thumb2 long multiply, long multiply accumulate, and
12906 divide instructions. */
12909 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12911 uint32_t opcode1 = 0, opcode2 = 0;
12912 uint32_t record_buf[8];
12913 uint32_t reg_src1 = 0;
12915 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12916 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12918 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12920 /* Handle SMULL, UMULL, SMULAL. */
12921 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12922 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12923 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12924 record_buf[2] = ARM_PS_REGNUM;
12925 thumb2_insn_r->reg_rec_count = 3;
12927 else if (1 == opcode1 || 3 == opcode2)
12929 /* Handle SDIV and UDIV. */
12930 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12931 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12932 record_buf[2] = ARM_PS_REGNUM;
12933 thumb2_insn_r->reg_rec_count = 3;
12936 return ARM_RECORD_FAILURE;
12938 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12940 return ARM_RECORD_SUCCESS;
12943 /* Decodes thumb2 instruction type and invokes its record handler. */
12945 static unsigned int
12946 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12948 uint32_t op, op1, op2;
12950 op = bit (thumb2_insn_r->arm_insn, 15);
12951 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12952 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12956 if (!(op2 & 0x64 ))
12958 /* Load/store multiple instruction. */
12959 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12961 else if (!((op2 & 0x64) ^ 0x04))
12963 /* Load/store (dual/exclusive) and table branch instruction. */
12964 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12966 else if (!((op2 & 0x20) ^ 0x20))
12968 /* Data-processing (shifted register). */
12969 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12971 else if (op2 & 0x40)
12973 /* Co-processor instructions. */
12974 arm_record_unsupported_insn (thumb2_insn_r);
12977 else if (op1 == 0x02)
12981 /* Branches and miscellaneous control instructions. */
12982 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12984 else if (op2 & 0x20)
12986 /* Data-processing (plain binary immediate) instruction. */
12987 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12991 /* Data-processing (modified immediate). */
12992 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12995 else if (op1 == 0x03)
12997 if (!(op2 & 0x71 ))
12999 /* Store single data item. */
13000 return thumb2_record_str_single_data (thumb2_insn_r);
13002 else if (!((op2 & 0x71) ^ 0x10))
13004 /* Advanced SIMD or structure load/store instructions. */
13005 return arm_record_unsupported_insn (thumb2_insn_r);
13007 else if (!((op2 & 0x67) ^ 0x01))
13009 /* Load byte, memory hints instruction. */
13010 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13012 else if (!((op2 & 0x67) ^ 0x03))
13014 /* Load halfword, memory hints instruction. */
13015 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13017 else if (!((op2 & 0x67) ^ 0x05))
13019 /* Load word instruction. */
13020 return thumb2_record_ld_word (thumb2_insn_r);
13022 else if (!((op2 & 0x70) ^ 0x20))
13024 /* Data-processing (register) instruction. */
13025 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13027 else if (!((op2 & 0x78) ^ 0x30))
13029 /* Multiply, multiply accumulate, abs diff instruction. */
13030 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13032 else if (!((op2 & 0x78) ^ 0x38))
13034 /* Long multiply, long multiply accumulate, and divide. */
13035 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13037 else if (op2 & 0x40)
13039 /* Co-processor instructions. */
13040 return arm_record_unsupported_insn (thumb2_insn_r);
13047 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13048 and positive val on fauilure. */
13051 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13053 gdb_byte buf[insn_size];
13055 memset (&buf[0], 0, insn_size);
13057 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13059 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13061 gdbarch_byte_order (insn_record->gdbarch));
13065 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13067 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13071 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13072 uint32_t insn_size)
13075 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13076 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13078 arm_record_data_proc_misc_ld_str, /* 000. */
13079 arm_record_data_proc_imm, /* 001. */
13080 arm_record_ld_st_imm_offset, /* 010. */
13081 arm_record_ld_st_reg_offset, /* 011. */
13082 arm_record_ld_st_multiple, /* 100. */
13083 arm_record_b_bl, /* 101. */
13084 arm_record_unsupported_insn, /* 110. */
13085 arm_record_coproc_data_proc /* 111. */
13088 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13089 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13091 thumb_record_shift_add_sub, /* 000. */
13092 thumb_record_add_sub_cmp_mov, /* 001. */
13093 thumb_record_ld_st_reg_offset, /* 010. */
13094 thumb_record_ld_st_imm_offset, /* 011. */
13095 thumb_record_ld_st_stack, /* 100. */
13096 thumb_record_misc, /* 101. */
13097 thumb_record_ldm_stm_swi, /* 110. */
13098 thumb_record_branch /* 111. */
13101 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13102 uint32_t insn_id = 0;
13104 if (extract_arm_insn (arm_record, insn_size))
13108 printf_unfiltered (_("Process record: error reading memory at "
13109 "addr %s len = %d.\n"),
13110 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13114 else if (ARM_RECORD == record_type)
13116 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13117 insn_id = bits (arm_record->arm_insn, 25, 27);
13118 ret = arm_record_extension_space (arm_record);
13119 /* If this insn has fallen into extension space
13120 then we need not decode it anymore. */
13121 if (ret != -1 && !INSN_RECORDED(arm_record))
13123 ret = arm_handle_insn[insn_id] (arm_record);
13126 else if (THUMB_RECORD == record_type)
13128 /* As thumb does not have condition codes, we set negative. */
13129 arm_record->cond = -1;
13130 insn_id = bits (arm_record->arm_insn, 13, 15);
13131 ret = thumb_handle_insn[insn_id] (arm_record);
13133 else if (THUMB2_RECORD == record_type)
13135 /* As thumb does not have condition codes, we set negative. */
13136 arm_record->cond = -1;
13138 /* Swap first half of 32bit thumb instruction with second half. */
13139 arm_record->arm_insn
13140 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13142 insn_id = thumb2_record_decode_insn_handler (arm_record);
13144 if (insn_id != ARM_RECORD_SUCCESS)
13146 arm_record_unsupported_insn (arm_record);
13152 /* Throw assertion. */
13153 gdb_assert_not_reached ("not a valid instruction, could not decode");
13160 /* Cleans up local record registers and memory allocations. */
13163 deallocate_reg_mem (insn_decode_record *record)
13165 xfree (record->arm_regs);
13166 xfree (record->arm_mems);
13170 /* Parse the current instruction and record the values of the registers and
13171 memory that will be changed in current instruction to record_arch_list".
13172 Return -1 if something is wrong. */
13175 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13176 CORE_ADDR insn_addr)
13179 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13180 uint32_t no_of_rec = 0;
13181 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13182 ULONGEST t_bit = 0, insn_id = 0;
13184 ULONGEST u_regval = 0;
13186 insn_decode_record arm_record;
13188 memset (&arm_record, 0, sizeof (insn_decode_record));
13189 arm_record.regcache = regcache;
13190 arm_record.this_addr = insn_addr;
13191 arm_record.gdbarch = gdbarch;
13194 if (record_debug > 1)
13196 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13198 paddress (gdbarch, arm_record.this_addr));
13201 if (extract_arm_insn (&arm_record, 2))
13205 printf_unfiltered (_("Process record: error reading memory at "
13206 "addr %s len = %d.\n"),
13207 paddress (arm_record.gdbarch,
13208 arm_record.this_addr), 2);
13213 /* Check the insn, whether it is thumb or arm one. */
13215 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13216 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13219 if (!(u_regval & t_bit))
13221 /* We are decoding arm insn. */
13222 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13226 insn_id = bits (arm_record.arm_insn, 11, 15);
13227 /* is it thumb2 insn? */
13228 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13230 ret = decode_insn (&arm_record, THUMB2_RECORD,
13231 THUMB2_INSN_SIZE_BYTES);
13235 /* We are decoding thumb insn. */
13236 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13242 /* Record registers. */
13243 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13244 if (arm_record.arm_regs)
13246 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13248 if (record_full_arch_list_add_reg
13249 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13253 /* Record memories. */
13254 if (arm_record.arm_mems)
13256 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13258 if (record_full_arch_list_add_mem
13259 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13260 arm_record.arm_mems[no_of_rec].len))
13265 if (record_full_arch_list_add_end ())
13270 deallocate_reg_mem (&arm_record);