1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
30 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "gdb/sim-arm.h"
53 #include "coff/internal.h"
56 #include "gdb_assert.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73 it as Thumb function. The MSB of the minimal symbol's "info" field
74 is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
222 /* This is used to keep the bfd arch_info in sync with the disassembly
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
240 static int thumb_insn_size (unsigned short inst1);
242 struct arm_prologue_cache
244 /* The stack pointer at the time this frame was created; i.e. the
245 caller's stack pointer when this function was called. It is used
246 to identify this frame. */
249 /* The frame base for this frame is just prev_sp - frame size.
250 FRAMESIZE is the distance from the frame pointer to the
251 initial stack pointer. */
255 /* The register used to hold the frame pointer for this frame. */
258 /* Saved register offsets. */
259 struct trad_frame_saved_reg *saved_regs;
262 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
263 CORE_ADDR prologue_start,
264 CORE_ADDR prologue_end,
265 struct arm_prologue_cache *cache);
267 /* Architecture version for displaced stepping. This effects the behaviour of
268 certain instructions, and really should not be hard-wired. */
270 #define DISPLACED_STEPPING_ARCH_VERSION 5
272 /* Addresses for calling Thumb functions have the bit 0 set.
273 Here are some macros to test, set, or clear bit 0 of addresses. */
274 #define IS_THUMB_ADDR(addr) ((addr) & 1)
275 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
276 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
278 /* Set to true if the 32-bit mode is in use. */
282 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
285 arm_psr_thumb_bit (struct gdbarch *gdbarch)
287 if (gdbarch_tdep (gdbarch)->is_m)
293 /* Determine if FRAME is executing in Thumb mode. */
296 arm_frame_is_thumb (struct frame_info *frame)
299 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
301 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
302 directly (from a signal frame or dummy frame) or by interpreting
303 the saved LR (from a prologue or DWARF frame). So consult it and
304 trust the unwinders. */
305 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
307 return (cpsr & t_bit) != 0;
310 /* Callback for VEC_lower_bound. */
313 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
314 const struct arm_mapping_symbol *rhs)
316 return lhs->value < rhs->value;
319 /* Search for the mapping symbol covering MEMADDR. If one is found,
320 return its type. Otherwise, return 0. If START is non-NULL,
321 set *START to the location of the mapping symbol. */
324 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
326 struct obj_section *sec;
328 /* If there are mapping symbols, consult them. */
329 sec = find_pc_section (memaddr);
332 struct arm_per_objfile *data;
333 VEC(arm_mapping_symbol_s) *map;
334 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
338 data = objfile_data (sec->objfile, arm_objfile_data_key);
341 map = data->section_maps[sec->the_bfd_section->index];
342 if (!VEC_empty (arm_mapping_symbol_s, map))
344 struct arm_mapping_symbol *map_sym;
346 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
347 arm_compare_mapping_symbols);
349 /* VEC_lower_bound finds the earliest ordered insertion
350 point. If the following symbol starts at this exact
351 address, we use that; otherwise, the preceding
352 mapping symbol covers this address. */
353 if (idx < VEC_length (arm_mapping_symbol_s, map))
355 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
356 if (map_sym->value == map_key.value)
359 *start = map_sym->value + obj_section_addr (sec);
360 return map_sym->type;
366 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
368 *start = map_sym->value + obj_section_addr (sec);
369 return map_sym->type;
378 /* Determine if the program counter specified in MEMADDR is in a Thumb
379 function. This function should be called for addresses unrelated to
380 any executing frame; otherwise, prefer arm_frame_is_thumb. */
383 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
385 struct bound_minimal_symbol sym;
387 struct displaced_step_closure* dsc
388 = get_displaced_step_closure_by_addr(memaddr);
390 /* If checking the mode of displaced instruction in copy area, the mode
391 should be determined by instruction on the original address. */
395 fprintf_unfiltered (gdb_stdlog,
396 "displaced: check mode of %.8lx instead of %.8lx\n",
397 (unsigned long) dsc->insn_addr,
398 (unsigned long) memaddr);
399 memaddr = dsc->insn_addr;
402 /* If bit 0 of the address is set, assume this is a Thumb address. */
403 if (IS_THUMB_ADDR (memaddr))
406 /* Respect internal mode override if active. */
407 if (arm_override_mode != -1)
408 return arm_override_mode;
410 /* If the user wants to override the symbol table, let him. */
411 if (strcmp (arm_force_mode_string, "arm") == 0)
413 if (strcmp (arm_force_mode_string, "thumb") == 0)
416 /* ARM v6-M and v7-M are always in Thumb mode. */
417 if (gdbarch_tdep (gdbarch)->is_m)
420 /* If there are mapping symbols, consult them. */
421 type = arm_find_mapping_symbol (memaddr, NULL);
425 /* Thumb functions have a "special" bit set in minimal symbols. */
426 sym = lookup_minimal_symbol_by_pc (memaddr);
428 return (MSYMBOL_IS_SPECIAL (sym.minsym));
430 /* If the user wants to override the fallback mode, let them. */
431 if (strcmp (arm_fallback_mode_string, "arm") == 0)
433 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
436 /* If we couldn't find any symbol, but we're talking to a running
437 target, then trust the current value of $cpsr. This lets
438 "display/i $pc" always show the correct mode (though if there is
439 a symbol table we will not reach here, so it still may not be
440 displayed in the mode it will be executed). */
441 if (target_has_registers)
442 return arm_frame_is_thumb (get_current_frame ());
444 /* Otherwise we're out of luck; we assume ARM. */
448 /* Remove useless bits from addresses in a running program. */
450 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
452 /* On M-profile devices, do not strip the low bit from EXC_RETURN
453 (the magic exception return address). */
454 if (gdbarch_tdep (gdbarch)->is_m
455 && (val & 0xfffffff0) == 0xfffffff0)
459 return UNMAKE_THUMB_ADDR (val);
461 return (val & 0x03fffffc);
464 /* Return 1 if PC is the start of a compiler helper function which
465 can be safely ignored during prologue skipping. IS_THUMB is true
466 if the function is known to be a Thumb function due to the way it
469 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
471 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
472 struct bound_minimal_symbol msym;
474 msym = lookup_minimal_symbol_by_pc (pc);
475 if (msym.minsym != NULL
476 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
477 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
479 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
481 /* The GNU linker's Thumb call stub to foo is named
483 if (strstr (name, "_from_thumb") != NULL)
486 /* On soft-float targets, __truncdfsf2 is called to convert promoted
487 arguments to their argument types in non-prototyped
489 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
491 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
494 /* Internal functions related to thread-local storage. */
495 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
497 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
502 /* If we run against a stripped glibc, we may be unable to identify
503 special functions by name. Check for one important case,
504 __aeabi_read_tp, by comparing the *code* against the default
505 implementation (this is hand-written ARM assembler in glibc). */
508 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
509 == 0xe3e00a0f /* mov r0, #0xffff0fff */
510 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
511 == 0xe240f01f) /* sub pc, r0, #31 */
518 /* Support routines for instruction parsing. */
519 #define submask(x) ((1L << ((x) + 1)) - 1)
520 #define bit(obj,st) (((obj) >> (st)) & 1)
521 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
522 #define sbits(obj,st,fn) \
523 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
524 #define BranchDest(addr,instr) \
525 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
527 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
528 the first 16-bit of instruction, and INSN2 is the second 16-bit of
530 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
531 ((bits ((insn1), 0, 3) << 12) \
532 | (bits ((insn1), 10, 10) << 11) \
533 | (bits ((insn2), 12, 14) << 8) \
534 | bits ((insn2), 0, 7))
536 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
537 the 32-bit instruction. */
538 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
539 ((bits ((insn), 16, 19) << 12) \
540 | bits ((insn), 0, 11))
542 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
545 thumb_expand_immediate (unsigned int imm)
547 unsigned int count = imm >> 7;
555 return (imm & 0xff) | ((imm & 0xff) << 16);
557 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
559 return (imm & 0xff) | ((imm & 0xff) << 8)
560 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
563 return (0x80 | (imm & 0x7f)) << (32 - count);
566 /* Return 1 if the 16-bit Thumb instruction INST might change
567 control flow, 0 otherwise. */
570 thumb_instruction_changes_pc (unsigned short inst)
572 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
575 if ((inst & 0xf000) == 0xd000) /* conditional branch */
578 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
581 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
584 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
587 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
593 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
594 might change control flow, 0 otherwise. */
597 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
599 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
601 /* Branches and miscellaneous control instructions. */
603 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
608 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
610 /* SUBS PC, LR, #imm8. */
613 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
615 /* Conditional branch. */
622 if ((inst1 & 0xfe50) == 0xe810)
624 /* Load multiple or RFE. */
626 if (bit (inst1, 7) && !bit (inst1, 8))
632 else if (!bit (inst1, 7) && bit (inst1, 8))
638 else if (bit (inst1, 7) && bit (inst1, 8))
643 else if (!bit (inst1, 7) && !bit (inst1, 8))
652 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
654 /* MOV PC or MOVS PC. */
658 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
661 if (bits (inst1, 0, 3) == 15)
667 if ((inst2 & 0x0fc0) == 0x0000)
673 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
679 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
688 /* Analyze a Thumb prologue, looking for a recognizable stack frame
689 and frame pointer. Scan until we encounter a store that could
690 clobber the stack frame unexpectedly, or an unknown instruction.
691 Return the last address which is definitely safe to skip for an
692 initial breakpoint. */
695 thumb_analyze_prologue (struct gdbarch *gdbarch,
696 CORE_ADDR start, CORE_ADDR limit,
697 struct arm_prologue_cache *cache)
699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
703 struct pv_area *stack;
704 struct cleanup *back_to;
706 CORE_ADDR unrecognized_pc = 0;
708 for (i = 0; i < 16; i++)
709 regs[i] = pv_register (i, 0);
710 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
711 back_to = make_cleanup_free_pv_area (stack);
713 while (start < limit)
717 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
719 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
724 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
727 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
728 whether to save LR (R14). */
729 mask = (insn & 0xff) | ((insn & 0x100) << 6);
731 /* Calculate offsets of saved R0-R7 and LR. */
732 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
733 if (mask & (1 << regno))
735 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
737 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
740 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
743 offset = (insn & 0x7f) << 2; /* get scaled offset */
744 if (insn & 0x80) /* Check for SUB. */
745 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
748 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
751 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
752 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
754 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
755 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
756 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
758 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
759 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
760 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
762 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
763 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
764 && pv_is_constant (regs[bits (insn, 3, 5)]))
765 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
766 regs[bits (insn, 6, 8)]);
767 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
768 && pv_is_constant (regs[bits (insn, 3, 6)]))
770 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
771 int rm = bits (insn, 3, 6);
772 regs[rd] = pv_add (regs[rd], regs[rm]);
774 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
776 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
777 int src_reg = (insn & 0x78) >> 3;
778 regs[dst_reg] = regs[src_reg];
780 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
782 /* Handle stores to the stack. Normally pushes are used,
783 but with GCC -mtpcs-frame, there may be other stores
784 in the prologue to create the frame. */
785 int regno = (insn >> 8) & 0x7;
788 offset = (insn & 0xff) << 2;
789 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
791 if (pv_area_store_would_trash (stack, addr))
794 pv_area_store (stack, addr, 4, regs[regno]);
796 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
798 int rd = bits (insn, 0, 2);
799 int rn = bits (insn, 3, 5);
802 offset = bits (insn, 6, 10) << 2;
803 addr = pv_add_constant (regs[rn], offset);
805 if (pv_area_store_would_trash (stack, addr))
808 pv_area_store (stack, addr, 4, regs[rd]);
810 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
811 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
812 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
813 /* Ignore stores of argument registers to the stack. */
815 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
816 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
817 /* Ignore block loads from the stack, potentially copying
818 parameters from memory. */
820 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
821 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
822 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
823 /* Similarly ignore single loads from the stack. */
825 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
826 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
827 /* Skip register copies, i.e. saves to another register
828 instead of the stack. */
830 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
831 /* Recognize constant loads; even with small stacks these are necessary
833 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
834 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
836 /* Constant pool loads, for the same reason. */
837 unsigned int constant;
840 loc = start + 4 + bits (insn, 0, 7) * 4;
841 constant = read_memory_unsigned_integer (loc, 4, byte_order);
842 regs[bits (insn, 8, 10)] = pv_constant (constant);
844 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
846 unsigned short inst2;
848 inst2 = read_memory_unsigned_integer (start + 2, 2,
849 byte_order_for_code);
851 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
853 /* BL, BLX. Allow some special function calls when
854 skipping the prologue; GCC generates these before
855 storing arguments to the stack. */
857 int j1, j2, imm1, imm2;
859 imm1 = sbits (insn, 0, 10);
860 imm2 = bits (inst2, 0, 10);
861 j1 = bit (inst2, 13);
862 j2 = bit (inst2, 11);
864 offset = ((imm1 << 12) + (imm2 << 1));
865 offset ^= ((!j2) << 22) | ((!j1) << 23);
867 nextpc = start + 4 + offset;
868 /* For BLX make sure to clear the low bits. */
869 if (bit (inst2, 12) == 0)
870 nextpc = nextpc & 0xfffffffc;
872 if (!skip_prologue_function (gdbarch, nextpc,
873 bit (inst2, 12) != 0))
877 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
881 pv_t addr = regs[bits (insn, 0, 3)];
884 if (pv_area_store_would_trash (stack, addr))
887 /* Calculate offsets of saved registers. */
888 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
889 if (inst2 & (1 << regno))
891 addr = pv_add_constant (addr, -4);
892 pv_area_store (stack, addr, 4, regs[regno]);
896 regs[bits (insn, 0, 3)] = addr;
899 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
903 int regno1 = bits (inst2, 12, 15);
904 int regno2 = bits (inst2, 8, 11);
905 pv_t addr = regs[bits (insn, 0, 3)];
907 offset = inst2 & 0xff;
909 addr = pv_add_constant (addr, offset);
911 addr = pv_add_constant (addr, -offset);
913 if (pv_area_store_would_trash (stack, addr))
916 pv_area_store (stack, addr, 4, regs[regno1]);
917 pv_area_store (stack, pv_add_constant (addr, 4),
921 regs[bits (insn, 0, 3)] = addr;
924 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
925 && (inst2 & 0x0c00) == 0x0c00
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 int regno = bits (inst2, 12, 15);
929 pv_t addr = regs[bits (insn, 0, 3)];
931 offset = inst2 & 0xff;
933 addr = pv_add_constant (addr, offset);
935 addr = pv_add_constant (addr, -offset);
937 if (pv_area_store_would_trash (stack, addr))
940 pv_area_store (stack, addr, 4, regs[regno]);
943 regs[bits (insn, 0, 3)] = addr;
946 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 int regno = bits (inst2, 12, 15);
952 offset = inst2 & 0xfff;
953 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
955 if (pv_area_store_would_trash (stack, addr))
958 pv_area_store (stack, addr, 4, regs[regno]);
961 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
962 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
963 /* Ignore stores of argument registers to the stack. */
966 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
967 && (inst2 & 0x0d00) == 0x0c00
968 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
969 /* Ignore stores of argument registers to the stack. */
972 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
974 && (inst2 & 0x8000) == 0x0000
975 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
976 /* Ignore block loads from the stack, potentially copying
977 parameters from memory. */
980 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore dual loads from the stack. */
986 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
987 && (inst2 & 0x0d00) == 0x0c00
988 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
989 /* Similarly ignore single loads from the stack. */
992 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
993 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
994 /* Similarly ignore single loads from the stack. */
997 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
998 && (inst2 & 0x8000) == 0x0000)
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1004 regs[bits (inst2, 8, 11)]
1005 = pv_add_constant (regs[bits (insn, 0, 3)],
1006 thumb_expand_immediate (imm));
1009 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1020 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1021 && (inst2 & 0x8000) == 0x0000)
1023 unsigned int imm = ((bits (insn, 10, 10) << 11)
1024 | (bits (inst2, 12, 14) << 8)
1025 | bits (inst2, 0, 7));
1027 regs[bits (inst2, 8, 11)]
1028 = pv_add_constant (regs[bits (insn, 0, 3)],
1029 - (CORE_ADDR) thumb_expand_immediate (imm));
1032 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1033 && (inst2 & 0x8000) == 0x0000)
1035 unsigned int imm = ((bits (insn, 10, 10) << 11)
1036 | (bits (inst2, 12, 14) << 8)
1037 | bits (inst2, 0, 7));
1039 regs[bits (inst2, 8, 11)]
1040 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1043 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1045 unsigned int imm = ((bits (insn, 10, 10) << 11)
1046 | (bits (inst2, 12, 14) << 8)
1047 | bits (inst2, 0, 7));
1049 regs[bits (inst2, 8, 11)]
1050 = pv_constant (thumb_expand_immediate (imm));
1053 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1056 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1058 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1061 else if (insn == 0xea5f /* mov.w Rd,Rm */
1062 && (inst2 & 0xf0f0) == 0)
1064 int dst_reg = (inst2 & 0x0f00) >> 8;
1065 int src_reg = inst2 & 0xf;
1066 regs[dst_reg] = regs[src_reg];
1069 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1071 /* Constant pool loads. */
1072 unsigned int constant;
1075 offset = bits (inst2, 0, 11);
1077 loc = start + 4 + offset;
1079 loc = start + 4 - offset;
1081 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1082 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1085 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1087 /* Constant pool loads. */
1088 unsigned int constant;
1091 offset = bits (inst2, 0, 7) << 2;
1093 loc = start + 4 + offset;
1095 loc = start + 4 - offset;
1097 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1098 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1100 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1101 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1104 else if (thumb2_instruction_changes_pc (insn, inst2))
1106 /* Don't scan past anything that might change control flow. */
1111 /* The optimizer might shove anything into the prologue,
1112 so we just skip what we don't recognize. */
1113 unrecognized_pc = start;
1118 else if (thumb_instruction_changes_pc (insn))
1120 /* Don't scan past anything that might change control flow. */
1125 /* The optimizer might shove anything into the prologue,
1126 so we just skip what we don't recognize. */
1127 unrecognized_pc = start;
1134 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1135 paddress (gdbarch, start));
1137 if (unrecognized_pc == 0)
1138 unrecognized_pc = start;
1142 do_cleanups (back_to);
1143 return unrecognized_pc;
1146 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1148 /* Frame pointer is fp. Frame size is constant. */
1149 cache->framereg = ARM_FP_REGNUM;
1150 cache->framesize = -regs[ARM_FP_REGNUM].k;
1152 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1154 /* Frame pointer is r7. Frame size is constant. */
1155 cache->framereg = THUMB_FP_REGNUM;
1156 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1160 /* Try the stack pointer... this is a bit desperate. */
1161 cache->framereg = ARM_SP_REGNUM;
1162 cache->framesize = -regs[ARM_SP_REGNUM].k;
1165 for (i = 0; i < 16; i++)
1166 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1167 cache->saved_regs[i].addr = offset;
1169 do_cleanups (back_to);
1170 return unrecognized_pc;
1174 /* Try to analyze the instructions starting from PC, which load symbol
1175 __stack_chk_guard. Return the address of instruction after loading this
1176 symbol, set the dest register number to *BASEREG, and set the size of
1177 instructions for loading symbol in OFFSET. Return 0 if instructions are
1181 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1182 unsigned int *destreg, int *offset)
1184 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1185 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1186 unsigned int low, high, address;
1191 unsigned short insn1
1192 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1194 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1196 *destreg = bits (insn1, 8, 10);
1198 address = bits (insn1, 0, 7);
1200 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1202 unsigned short insn2
1203 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1205 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1208 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1210 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1212 /* movt Rd, #const */
1213 if ((insn1 & 0xfbc0) == 0xf2c0)
1215 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1216 *destreg = bits (insn2, 8, 11);
1218 address = (high << 16 | low);
1225 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1227 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1229 address = bits (insn, 0, 11);
1230 *destreg = bits (insn, 12, 15);
1233 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1235 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1238 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1240 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1242 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1243 *destreg = bits (insn, 12, 15);
1245 address = (high << 16 | low);
1253 /* Try to skip a sequence of instructions used for stack protector. If PC
1254 points to the first instruction of this sequence, return the address of
1255 first instruction after this sequence, otherwise, return original PC.
1257 On arm, this sequence of instructions is composed of mainly three steps,
1258 Step 1: load symbol __stack_chk_guard,
1259 Step 2: load from address of __stack_chk_guard,
1260 Step 3: store it to somewhere else.
1262 Usually, instructions on step 2 and step 3 are the same on various ARM
1263 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1264 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1265 instructions in step 1 vary from different ARM architectures. On ARMv7,
1268 movw Rn, #:lower16:__stack_chk_guard
1269 movt Rn, #:upper16:__stack_chk_guard
1276 .word __stack_chk_guard
1278 Since ldr/str is a very popular instruction, we can't use them as
1279 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1280 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1281 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1284 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1286 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1287 unsigned int basereg;
1288 struct bound_minimal_symbol stack_chk_guard;
1290 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1293 /* Try to parse the instructions in Step 1. */
1294 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1299 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1300 /* If name of symbol doesn't start with '__stack_chk_guard', this
1301 instruction sequence is not for stack protector. If symbol is
1302 removed, we conservatively think this sequence is for stack protector. */
1303 if (stack_chk_guard.minsym
1304 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1305 "__stack_chk_guard",
1306 strlen ("__stack_chk_guard")) != 0)
1311 unsigned int destreg;
1313 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1315 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1316 if ((insn & 0xf800) != 0x6800)
1318 if (bits (insn, 3, 5) != basereg)
1320 destreg = bits (insn, 0, 2);
1322 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1323 byte_order_for_code);
1324 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1325 if ((insn & 0xf800) != 0x6000)
1327 if (destreg != bits (insn, 0, 2))
1332 unsigned int destreg;
1334 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1336 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1337 if ((insn & 0x0e500000) != 0x04100000)
1339 if (bits (insn, 16, 19) != basereg)
1341 destreg = bits (insn, 12, 15);
1342 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1343 insn = read_memory_unsigned_integer (pc + offset + 4,
1344 4, byte_order_for_code);
1345 if ((insn & 0x0e500000) != 0x04000000)
1347 if (bits (insn, 12, 15) != destreg)
1350 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1353 return pc + offset + 4;
1355 return pc + offset + 8;
1358 /* Advance the PC across any function entry prologue instructions to
1359 reach some "real" code.
1361 The APCS (ARM Procedure Call Standard) defines the following
1365 [stmfd sp!, {a1,a2,a3,a4}]
1366 stmfd sp!, {...,fp,ip,lr,pc}
1367 [stfe f7, [sp, #-12]!]
1368 [stfe f6, [sp, #-12]!]
1369 [stfe f5, [sp, #-12]!]
1370 [stfe f4, [sp, #-12]!]
1371 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1374 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1376 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1379 CORE_ADDR func_addr, limit_pc;
1381 /* See if we can determine the end of the prologue via the symbol table.
1382 If so, then return either PC, or the PC after the prologue, whichever
1384 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1386 CORE_ADDR post_prologue_pc
1387 = skip_prologue_using_sal (gdbarch, func_addr);
1388 struct symtab *s = find_pc_symtab (func_addr);
1390 if (post_prologue_pc)
1392 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1395 /* GCC always emits a line note before the prologue and another
1396 one after, even if the two are at the same address or on the
1397 same line. Take advantage of this so that we do not need to
1398 know every instruction that might appear in the prologue. We
1399 will have producer information for most binaries; if it is
1400 missing (e.g. for -gstabs), assuming the GNU tools. */
1401 if (post_prologue_pc
1403 || s->producer == NULL
1404 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1405 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1406 return post_prologue_pc;
1408 if (post_prologue_pc != 0)
1410 CORE_ADDR analyzed_limit;
1412 /* For non-GCC compilers, make sure the entire line is an
1413 acceptable prologue; GDB will round this function's
1414 return value up to the end of the following line so we
1415 can not skip just part of a line (and we do not want to).
1417 RealView does not treat the prologue specially, but does
1418 associate prologue code with the opening brace; so this
1419 lets us skip the first line if we think it is the opening
1421 if (arm_pc_is_thumb (gdbarch, func_addr))
1422 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1423 post_prologue_pc, NULL);
1425 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1426 post_prologue_pc, NULL);
1428 if (analyzed_limit != post_prologue_pc)
1431 return post_prologue_pc;
1435 /* Can't determine prologue from the symbol table, need to examine
1438 /* Find an upper limit on the function prologue using the debug
1439 information. If the debug information could not be used to provide
1440 that bound, then use an arbitrary large number as the upper bound. */
1441 /* Like arm_scan_prologue, stop no later than pc + 64. */
1442 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1444 limit_pc = pc + 64; /* Magic. */
1447 /* Check if this is Thumb code. */
1448 if (arm_pc_is_thumb (gdbarch, pc))
1449 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1451 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1453 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1455 /* "mov ip, sp" is no longer a required part of the prologue. */
1456 if (inst == 0xe1a0c00d) /* mov ip, sp */
1459 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1462 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1465 /* Some prologues begin with "str lr, [sp, #-4]!". */
1466 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1469 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1472 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1475 /* Any insns after this point may float into the code, if it makes
1476 for better instruction scheduling, so we skip them only if we
1477 find them, but still consider the function to be frame-ful. */
1479 /* We may have either one sfmfd instruction here, or several stfe
1480 insns, depending on the version of floating point code we
1482 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1485 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1488 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1491 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1494 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1495 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1496 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1499 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1500 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1501 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1504 /* Un-recognized instruction; stop scanning. */
1508 return skip_pc; /* End of prologue. */
1512 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1513 This function decodes a Thumb function prologue to determine:
1514 1) the size of the stack frame
1515 2) which registers are saved on it
1516 3) the offsets of saved regs
1517 4) the offset from the stack pointer to the frame pointer
1519 A typical Thumb function prologue would create this stack frame
1520 (offsets relative to FP)
1521 old SP -> 24 stack parameters
1524 R7 -> 0 local variables (16 bytes)
1525 SP -> -12 additional stack space (12 bytes)
1526 The frame size would thus be 36 bytes, and the frame offset would be
1527 12 bytes. The frame register is R7.
1529 The comments for thumb_skip_prolog() describe the algorithm we use
1530 to detect the end of the prolog. */
1534 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1535 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1537 CORE_ADDR prologue_start;
1538 CORE_ADDR prologue_end;
1540 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1543 /* See comment in arm_scan_prologue for an explanation of
1545 if (prologue_end > prologue_start + 64)
1547 prologue_end = prologue_start + 64;
1551 /* We're in the boondocks: we have no idea where the start of the
1555 prologue_end = min (prologue_end, prev_pc);
1557 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1560 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1563 arm_instruction_changes_pc (uint32_t this_instr)
1565 if (bits (this_instr, 28, 31) == INST_NV)
1566 /* Unconditional instructions. */
1567 switch (bits (this_instr, 24, 27))
1571 /* Branch with Link and change to Thumb. */
1576 /* Coprocessor register transfer. */
1577 if (bits (this_instr, 12, 15) == 15)
1578 error (_("Invalid update to pc in instruction"));
1584 switch (bits (this_instr, 25, 27))
1587 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1589 /* Multiplies and extra load/stores. */
1590 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1591 /* Neither multiplies nor extension load/stores are allowed
1595 /* Otherwise, miscellaneous instructions. */
1597 /* BX <reg>, BXJ <reg>, BLX <reg> */
1598 if (bits (this_instr, 4, 27) == 0x12fff1
1599 || bits (this_instr, 4, 27) == 0x12fff2
1600 || bits (this_instr, 4, 27) == 0x12fff3)
1603 /* Other miscellaneous instructions are unpredictable if they
1607 /* Data processing instruction. Fall through. */
1610 if (bits (this_instr, 12, 15) == 15)
1617 /* Media instructions and architecturally undefined instructions. */
1618 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1622 if (bit (this_instr, 20) == 0)
1626 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1632 /* Load/store multiple. */
1633 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1639 /* Branch and branch with link. */
1644 /* Coprocessor transfers or SWIs can not affect PC. */
1648 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1652 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1653 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1654 fill it in. Return the first address not recognized as a prologue
1657 We recognize all the instructions typically found in ARM prologues,
1658 plus harmless instructions which can be skipped (either for analysis
1659 purposes, or a more restrictive set that can be skipped when finding
1660 the end of the prologue). */
1663 arm_analyze_prologue (struct gdbarch *gdbarch,
1664 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1665 struct arm_prologue_cache *cache)
1667 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1668 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1670 CORE_ADDR offset, current_pc;
1671 pv_t regs[ARM_FPS_REGNUM];
1672 struct pv_area *stack;
1673 struct cleanup *back_to;
1674 int framereg, framesize;
1675 CORE_ADDR unrecognized_pc = 0;
1677 /* Search the prologue looking for instructions that set up the
1678 frame pointer, adjust the stack pointer, and save registers.
1680 Be careful, however, and if it doesn't look like a prologue,
1681 don't try to scan it. If, for instance, a frameless function
1682 begins with stmfd sp!, then we will tell ourselves there is
1683 a frame, which will confuse stack traceback, as well as "finish"
1684 and other operations that rely on a knowledge of the stack
1687 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1688 regs[regno] = pv_register (regno, 0);
1689 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1690 back_to = make_cleanup_free_pv_area (stack);
1692 for (current_pc = prologue_start;
1693 current_pc < prologue_end;
1697 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1699 if (insn == 0xe1a0c00d) /* mov ip, sp */
1701 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1704 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1705 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1707 unsigned imm = insn & 0xff; /* immediate value */
1708 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1709 int rd = bits (insn, 12, 15);
1710 imm = (imm >> rot) | (imm << (32 - rot));
1711 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1714 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1715 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1717 unsigned imm = insn & 0xff; /* immediate value */
1718 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1719 int rd = bits (insn, 12, 15);
1720 imm = (imm >> rot) | (imm << (32 - rot));
1721 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1724 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1727 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1729 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1730 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1731 regs[bits (insn, 12, 15)]);
1734 else if ((insn & 0xffff0000) == 0xe92d0000)
1735 /* stmfd sp!, {..., fp, ip, lr, pc}
1737 stmfd sp!, {a1, a2, a3, a4} */
1739 int mask = insn & 0xffff;
1741 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1744 /* Calculate offsets of saved registers. */
1745 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1746 if (mask & (1 << regno))
1749 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1750 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1753 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1754 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1755 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1757 /* No need to add this to saved_regs -- it's just an arg reg. */
1760 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1761 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1762 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1764 /* No need to add this to saved_regs -- it's just an arg reg. */
1767 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1769 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1771 /* No need to add this to saved_regs -- it's just arg regs. */
1774 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1776 unsigned imm = insn & 0xff; /* immediate value */
1777 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1778 imm = (imm >> rot) | (imm << (32 - rot));
1779 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1781 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1783 unsigned imm = insn & 0xff; /* immediate value */
1784 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1785 imm = (imm >> rot) | (imm << (32 - rot));
1786 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1788 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1790 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1792 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1795 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1796 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1797 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1799 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1801 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1803 int n_saved_fp_regs;
1804 unsigned int fp_start_reg, fp_bound_reg;
1806 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1809 if ((insn & 0x800) == 0x800) /* N0 is set */
1811 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1812 n_saved_fp_regs = 3;
1814 n_saved_fp_regs = 1;
1818 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1819 n_saved_fp_regs = 2;
1821 n_saved_fp_regs = 4;
1824 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1825 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1826 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1828 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1829 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1830 regs[fp_start_reg++]);
1833 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1835 /* Allow some special function calls when skipping the
1836 prologue; GCC generates these before storing arguments to
1838 CORE_ADDR dest = BranchDest (current_pc, insn);
1840 if (skip_prologue_function (gdbarch, dest, 0))
1845 else if ((insn & 0xf0000000) != 0xe0000000)
1846 break; /* Condition not true, exit early. */
1847 else if (arm_instruction_changes_pc (insn))
1848 /* Don't scan past anything that might change control flow. */
1850 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1851 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1852 /* Ignore block loads from the stack, potentially copying
1853 parameters from memory. */
1855 else if ((insn & 0xfc500000) == 0xe4100000
1856 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1857 /* Similarly ignore single loads from the stack. */
1859 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1860 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1861 register instead of the stack. */
1865 /* The optimizer might shove anything into the prologue,
1866 so we just skip what we don't recognize. */
1867 unrecognized_pc = current_pc;
1872 if (unrecognized_pc == 0)
1873 unrecognized_pc = current_pc;
1875 /* The frame size is just the distance from the frame register
1876 to the original stack pointer. */
1877 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1879 /* Frame pointer is fp. */
1880 framereg = ARM_FP_REGNUM;
1881 framesize = -regs[ARM_FP_REGNUM].k;
1885 /* Try the stack pointer... this is a bit desperate. */
1886 framereg = ARM_SP_REGNUM;
1887 framesize = -regs[ARM_SP_REGNUM].k;
1892 cache->framereg = framereg;
1893 cache->framesize = framesize;
1895 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1896 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1897 cache->saved_regs[regno].addr = offset;
1901 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1902 paddress (gdbarch, unrecognized_pc));
1904 do_cleanups (back_to);
1905 return unrecognized_pc;
1909 arm_scan_prologue (struct frame_info *this_frame,
1910 struct arm_prologue_cache *cache)
1912 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1915 CORE_ADDR prologue_start, prologue_end, current_pc;
1916 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1917 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1918 pv_t regs[ARM_FPS_REGNUM];
1919 struct pv_area *stack;
1920 struct cleanup *back_to;
1923 /* Assume there is no frame until proven otherwise. */
1924 cache->framereg = ARM_SP_REGNUM;
1925 cache->framesize = 0;
1927 /* Check for Thumb prologue. */
1928 if (arm_frame_is_thumb (this_frame))
1930 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1934 /* Find the function prologue. If we can't find the function in
1935 the symbol table, peek in the stack frame to find the PC. */
1936 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1939 /* One way to find the end of the prologue (which works well
1940 for unoptimized code) is to do the following:
1942 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1945 prologue_end = prev_pc;
1946 else if (sal.end < prologue_end)
1947 prologue_end = sal.end;
1949 This mechanism is very accurate so long as the optimizer
1950 doesn't move any instructions from the function body into the
1951 prologue. If this happens, sal.end will be the last
1952 instruction in the first hunk of prologue code just before
1953 the first instruction that the scheduler has moved from
1954 the body to the prologue.
1956 In order to make sure that we scan all of the prologue
1957 instructions, we use a slightly less accurate mechanism which
1958 may scan more than necessary. To help compensate for this
1959 lack of accuracy, the prologue scanning loop below contains
1960 several clauses which'll cause the loop to terminate early if
1961 an implausible prologue instruction is encountered.
1967 is a suitable endpoint since it accounts for the largest
1968 possible prologue plus up to five instructions inserted by
1971 if (prologue_end > prologue_start + 64)
1973 prologue_end = prologue_start + 64; /* See above. */
1978 /* We have no symbol information. Our only option is to assume this
1979 function has a standard stack frame and the normal frame register.
1980 Then, we can find the value of our frame pointer on entrance to
1981 the callee (or at the present moment if this is the innermost frame).
1982 The value stored there should be the address of the stmfd + 8. */
1983 CORE_ADDR frame_loc;
1984 LONGEST return_value;
1986 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1987 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1991 prologue_start = gdbarch_addr_bits_remove
1992 (gdbarch, return_value) - 8;
1993 prologue_end = prologue_start + 64; /* See above. */
1997 if (prev_pc < prologue_end)
1998 prologue_end = prev_pc;
2000 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2003 static struct arm_prologue_cache *
2004 arm_make_prologue_cache (struct frame_info *this_frame)
2007 struct arm_prologue_cache *cache;
2008 CORE_ADDR unwound_fp;
2010 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2011 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2013 arm_scan_prologue (this_frame, cache);
2015 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2016 if (unwound_fp == 0)
2019 cache->prev_sp = unwound_fp + cache->framesize;
2021 /* Calculate actual addresses of saved registers using offsets
2022 determined by arm_scan_prologue. */
2023 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2024 if (trad_frame_addr_p (cache->saved_regs, reg))
2025 cache->saved_regs[reg].addr += cache->prev_sp;
2030 /* Our frame ID for a normal frame is the current function's starting PC
2031 and the caller's SP when we were called. */
2034 arm_prologue_this_id (struct frame_info *this_frame,
2036 struct frame_id *this_id)
2038 struct arm_prologue_cache *cache;
2042 if (*this_cache == NULL)
2043 *this_cache = arm_make_prologue_cache (this_frame);
2044 cache = *this_cache;
2046 /* This is meant to halt the backtrace at "_start". */
2047 pc = get_frame_pc (this_frame);
2048 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2051 /* If we've hit a wall, stop. */
2052 if (cache->prev_sp == 0)
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 func = get_frame_func (this_frame);
2062 id = frame_id_build (cache->prev_sp, func);
2066 static struct value *
2067 arm_prologue_prev_register (struct frame_info *this_frame,
2071 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2072 struct arm_prologue_cache *cache;
2074 if (*this_cache == NULL)
2075 *this_cache = arm_make_prologue_cache (this_frame);
2076 cache = *this_cache;
2078 /* If we are asked to unwind the PC, then we need to return the LR
2079 instead. The prologue may save PC, but it will point into this
2080 frame's prologue, not the next frame's resume location. Also
2081 strip the saved T bit. A valid LR may have the low bit set, but
2082 a valid PC never does. */
2083 if (prev_regnum == ARM_PC_REGNUM)
2087 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2088 return frame_unwind_got_constant (this_frame, prev_regnum,
2089 arm_addr_bits_remove (gdbarch, lr));
2092 /* SP is generally not saved to the stack, but this frame is
2093 identified by the next frame's stack pointer at the time of the call.
2094 The value was already reconstructed into PREV_SP. */
2095 if (prev_regnum == ARM_SP_REGNUM)
2096 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2098 /* The CPSR may have been changed by the call instruction and by the
2099 called function. The only bit we can reconstruct is the T bit,
2100 by checking the low bit of LR as of the call. This is a reliable
2101 indicator of Thumb-ness except for some ARM v4T pre-interworking
2102 Thumb code, which could get away with a clear low bit as long as
2103 the called function did not use bx. Guess that all other
2104 bits are unchanged; the condition flags are presumably lost,
2105 but the processor status is likely valid. */
2106 if (prev_regnum == ARM_PS_REGNUM)
2109 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2111 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2112 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2113 if (IS_THUMB_ADDR (lr))
2117 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2120 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2124 struct frame_unwind arm_prologue_unwind = {
2126 default_frame_unwind_stop_reason,
2127 arm_prologue_this_id,
2128 arm_prologue_prev_register,
2130 default_frame_sniffer
2133 /* Maintain a list of ARM exception table entries per objfile, similar to the
2134 list of mapping symbols. We only cache entries for standard ARM-defined
2135 personality routines; the cache will contain only the frame unwinding
2136 instructions associated with the entry (not the descriptors). */
2138 static const struct objfile_data *arm_exidx_data_key;
2140 struct arm_exidx_entry
2145 typedef struct arm_exidx_entry arm_exidx_entry_s;
2146 DEF_VEC_O(arm_exidx_entry_s);
2148 struct arm_exidx_data
2150 VEC(arm_exidx_entry_s) **section_maps;
2154 arm_exidx_data_free (struct objfile *objfile, void *arg)
2156 struct arm_exidx_data *data = arg;
2159 for (i = 0; i < objfile->obfd->section_count; i++)
2160 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2164 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2165 const struct arm_exidx_entry *rhs)
2167 return lhs->addr < rhs->addr;
2170 static struct obj_section *
2171 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2173 struct obj_section *osect;
2175 ALL_OBJFILE_OSECTIONS (objfile, osect)
2176 if (bfd_get_section_flags (objfile->obfd,
2177 osect->the_bfd_section) & SEC_ALLOC)
2179 bfd_vma start, size;
2180 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2181 size = bfd_get_section_size (osect->the_bfd_section);
2183 if (start <= vma && vma < start + size)
2190 /* Parse contents of exception table and exception index sections
2191 of OBJFILE, and fill in the exception table entry cache.
2193 For each entry that refers to a standard ARM-defined personality
2194 routine, extract the frame unwinding instructions (from either
2195 the index or the table section). The unwinding instructions
2197 - extracting them from the rest of the table data
2198 - converting to host endianness
2199 - appending the implicit 0xb0 ("Finish") code
2201 The extracted and normalized instructions are stored for later
2202 retrieval by the arm_find_exidx_entry routine. */
2205 arm_exidx_new_objfile (struct objfile *objfile)
2207 struct cleanup *cleanups;
2208 struct arm_exidx_data *data;
2209 asection *exidx, *extab;
2210 bfd_vma exidx_vma = 0, extab_vma = 0;
2211 bfd_size_type exidx_size = 0, extab_size = 0;
2212 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2215 /* If we've already touched this file, do nothing. */
2216 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2218 cleanups = make_cleanup (null_cleanup, NULL);
2220 /* Read contents of exception table and index. */
2221 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2224 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2225 exidx_size = bfd_get_section_size (exidx);
2226 exidx_data = xmalloc (exidx_size);
2227 make_cleanup (xfree, exidx_data);
2229 if (!bfd_get_section_contents (objfile->obfd, exidx,
2230 exidx_data, 0, exidx_size))
2232 do_cleanups (cleanups);
2237 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2240 extab_vma = bfd_section_vma (objfile->obfd, extab);
2241 extab_size = bfd_get_section_size (extab);
2242 extab_data = xmalloc (extab_size);
2243 make_cleanup (xfree, extab_data);
2245 if (!bfd_get_section_contents (objfile->obfd, extab,
2246 extab_data, 0, extab_size))
2248 do_cleanups (cleanups);
2253 /* Allocate exception table data structure. */
2254 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2255 set_objfile_data (objfile, arm_exidx_data_key, data);
2256 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2257 objfile->obfd->section_count,
2258 VEC(arm_exidx_entry_s) *);
2260 /* Fill in exception table. */
2261 for (i = 0; i < exidx_size / 8; i++)
2263 struct arm_exidx_entry new_exidx_entry;
2264 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2265 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2266 bfd_vma addr = 0, word = 0;
2267 int n_bytes = 0, n_words = 0;
2268 struct obj_section *sec;
2269 gdb_byte *entry = NULL;
2271 /* Extract address of start of function. */
2272 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2273 idx += exidx_vma + i * 8;
2275 /* Find section containing function and compute section offset. */
2276 sec = arm_obj_section_from_vma (objfile, idx);
2279 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2281 /* Determine address of exception table entry. */
2284 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2286 else if ((val & 0xff000000) == 0x80000000)
2288 /* Exception table entry embedded in .ARM.exidx
2289 -- must be short form. */
2293 else if (!(val & 0x80000000))
2295 /* Exception table entry in .ARM.extab. */
2296 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2297 addr += exidx_vma + i * 8 + 4;
2299 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2301 word = bfd_h_get_32 (objfile->obfd,
2302 extab_data + addr - extab_vma);
2305 if ((word & 0xff000000) == 0x80000000)
2310 else if ((word & 0xff000000) == 0x81000000
2311 || (word & 0xff000000) == 0x82000000)
2315 n_words = ((word >> 16) & 0xff);
2317 else if (!(word & 0x80000000))
2320 struct obj_section *pers_sec;
2321 int gnu_personality = 0;
2323 /* Custom personality routine. */
2324 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2325 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2327 /* Check whether we've got one of the variants of the
2328 GNU personality routines. */
2329 pers_sec = arm_obj_section_from_vma (objfile, pers);
2332 static const char *personality[] =
2334 "__gcc_personality_v0",
2335 "__gxx_personality_v0",
2336 "__gcj_personality_v0",
2337 "__gnu_objc_personality_v0",
2341 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2344 for (k = 0; personality[k]; k++)
2345 if (lookup_minimal_symbol_by_pc_name
2346 (pc, personality[k], objfile))
2348 gnu_personality = 1;
2353 /* If so, the next word contains a word count in the high
2354 byte, followed by the same unwind instructions as the
2355 pre-defined forms. */
2357 && addr + 4 <= extab_vma + extab_size)
2359 word = bfd_h_get_32 (objfile->obfd,
2360 extab_data + addr - extab_vma);
2363 n_words = ((word >> 24) & 0xff);
2369 /* Sanity check address. */
2371 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2372 n_words = n_bytes = 0;
2374 /* The unwind instructions reside in WORD (only the N_BYTES least
2375 significant bytes are valid), followed by N_WORDS words in the
2376 extab section starting at ADDR. */
2377 if (n_bytes || n_words)
2379 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2380 n_bytes + n_words * 4 + 1);
2383 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2387 word = bfd_h_get_32 (objfile->obfd,
2388 extab_data + addr - extab_vma);
2391 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2392 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2393 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2394 *p++ = (gdb_byte) (word & 0xff);
2397 /* Implied "Finish" to terminate the list. */
2401 /* Push entry onto vector. They are guaranteed to always
2402 appear in order of increasing addresses. */
2403 new_exidx_entry.addr = idx;
2404 new_exidx_entry.entry = entry;
2405 VEC_safe_push (arm_exidx_entry_s,
2406 data->section_maps[sec->the_bfd_section->index],
2410 do_cleanups (cleanups);
2413 /* Search for the exception table entry covering MEMADDR. If one is found,
2414 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2415 set *START to the start of the region covered by this entry. */
2418 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2420 struct obj_section *sec;
2422 sec = find_pc_section (memaddr);
2425 struct arm_exidx_data *data;
2426 VEC(arm_exidx_entry_s) *map;
2427 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2430 data = objfile_data (sec->objfile, arm_exidx_data_key);
2433 map = data->section_maps[sec->the_bfd_section->index];
2434 if (!VEC_empty (arm_exidx_entry_s, map))
2436 struct arm_exidx_entry *map_sym;
2438 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2439 arm_compare_exidx_entries);
2441 /* VEC_lower_bound finds the earliest ordered insertion
2442 point. If the following symbol starts at this exact
2443 address, we use that; otherwise, the preceding
2444 exception table entry covers this address. */
2445 if (idx < VEC_length (arm_exidx_entry_s, map))
2447 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2448 if (map_sym->addr == map_key.addr)
2451 *start = map_sym->addr + obj_section_addr (sec);
2452 return map_sym->entry;
2458 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2460 *start = map_sym->addr + obj_section_addr (sec);
2461 return map_sym->entry;
2470 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2471 instruction list from the ARM exception table entry ENTRY, allocate and
2472 return a prologue cache structure describing how to unwind this frame.
2474 Return NULL if the unwinding instruction list contains a "spare",
2475 "reserved" or "refuse to unwind" instruction as defined in section
2476 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2477 for the ARM Architecture" document. */
2479 static struct arm_prologue_cache *
2480 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2485 struct arm_prologue_cache *cache;
2486 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2487 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2493 /* Whenever we reload SP, we actually have to retrieve its
2494 actual value in the current frame. */
2497 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2499 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2500 vsp = get_frame_register_unsigned (this_frame, reg);
2504 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2505 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2511 /* Decode next unwind instruction. */
2514 if ((insn & 0xc0) == 0)
2516 int offset = insn & 0x3f;
2517 vsp += (offset << 2) + 4;
2519 else if ((insn & 0xc0) == 0x40)
2521 int offset = insn & 0x3f;
2522 vsp -= (offset << 2) + 4;
2524 else if ((insn & 0xf0) == 0x80)
2526 int mask = ((insn & 0xf) << 8) | *entry++;
2529 /* The special case of an all-zero mask identifies
2530 "Refuse to unwind". We return NULL to fall back
2531 to the prologue analyzer. */
2535 /* Pop registers r4..r15 under mask. */
2536 for (i = 0; i < 12; i++)
2537 if (mask & (1 << i))
2539 cache->saved_regs[4 + i].addr = vsp;
2543 /* Special-case popping SP -- we need to reload vsp. */
2544 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2547 else if ((insn & 0xf0) == 0x90)
2549 int reg = insn & 0xf;
2551 /* Reserved cases. */
2552 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2555 /* Set SP from another register and mark VSP for reload. */
2556 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2559 else if ((insn & 0xf0) == 0xa0)
2561 int count = insn & 0x7;
2562 int pop_lr = (insn & 0x8) != 0;
2565 /* Pop r4..r[4+count]. */
2566 for (i = 0; i <= count; i++)
2568 cache->saved_regs[4 + i].addr = vsp;
2572 /* If indicated by flag, pop LR as well. */
2575 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2579 else if (insn == 0xb0)
2581 /* We could only have updated PC by popping into it; if so, it
2582 will show up as address. Otherwise, copy LR into PC. */
2583 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2584 cache->saved_regs[ARM_PC_REGNUM]
2585 = cache->saved_regs[ARM_LR_REGNUM];
2590 else if (insn == 0xb1)
2592 int mask = *entry++;
2595 /* All-zero mask and mask >= 16 is "spare". */
2596 if (mask == 0 || mask >= 16)
2599 /* Pop r0..r3 under mask. */
2600 for (i = 0; i < 4; i++)
2601 if (mask & (1 << i))
2603 cache->saved_regs[i].addr = vsp;
2607 else if (insn == 0xb2)
2609 ULONGEST offset = 0;
2614 offset |= (*entry & 0x7f) << shift;
2617 while (*entry++ & 0x80);
2619 vsp += 0x204 + (offset << 2);
2621 else if (insn == 0xb3)
2623 int start = *entry >> 4;
2624 int count = (*entry++) & 0xf;
2627 /* Only registers D0..D15 are valid here. */
2628 if (start + count >= 16)
2631 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2632 for (i = 0; i <= count; i++)
2634 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2638 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2641 else if ((insn & 0xf8) == 0xb8)
2643 int count = insn & 0x7;
2646 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2647 for (i = 0; i <= count; i++)
2649 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2653 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2656 else if (insn == 0xc6)
2658 int start = *entry >> 4;
2659 int count = (*entry++) & 0xf;
2662 /* Only registers WR0..WR15 are valid. */
2663 if (start + count >= 16)
2666 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2667 for (i = 0; i <= count; i++)
2669 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2673 else if (insn == 0xc7)
2675 int mask = *entry++;
2678 /* All-zero mask and mask >= 16 is "spare". */
2679 if (mask == 0 || mask >= 16)
2682 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2683 for (i = 0; i < 4; i++)
2684 if (mask & (1 << i))
2686 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2690 else if ((insn & 0xf8) == 0xc0)
2692 int count = insn & 0x7;
2695 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2696 for (i = 0; i <= count; i++)
2698 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2702 else if (insn == 0xc8)
2704 int start = *entry >> 4;
2705 int count = (*entry++) & 0xf;
2708 /* Only registers D0..D31 are valid. */
2709 if (start + count >= 16)
2712 /* Pop VFP double-precision registers
2713 D[16+start]..D[16+start+count]. */
2714 for (i = 0; i <= count; i++)
2716 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2720 else if (insn == 0xc9)
2722 int start = *entry >> 4;
2723 int count = (*entry++) & 0xf;
2726 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2727 for (i = 0; i <= count; i++)
2729 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2733 else if ((insn & 0xf8) == 0xd0)
2735 int count = insn & 0x7;
2738 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2739 for (i = 0; i <= count; i++)
2741 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2747 /* Everything else is "spare". */
2752 /* If we restore SP from a register, assume this was the frame register.
2753 Otherwise just fall back to SP as frame register. */
2754 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2755 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2757 cache->framereg = ARM_SP_REGNUM;
2759 /* Determine offset to previous frame. */
2761 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2763 /* We already got the previous SP. */
2764 cache->prev_sp = vsp;
2769 /* Unwinding via ARM exception table entries. Note that the sniffer
2770 already computes a filled-in prologue cache, which is then used
2771 with the same arm_prologue_this_id and arm_prologue_prev_register
2772 routines also used for prologue-parsing based unwinding. */
2775 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2776 struct frame_info *this_frame,
2777 void **this_prologue_cache)
2779 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2780 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2781 CORE_ADDR addr_in_block, exidx_region, func_start;
2782 struct arm_prologue_cache *cache;
2785 /* See if we have an ARM exception table entry covering this address. */
2786 addr_in_block = get_frame_address_in_block (this_frame);
2787 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2791 /* The ARM exception table does not describe unwind information
2792 for arbitrary PC values, but is guaranteed to be correct only
2793 at call sites. We have to decide here whether we want to use
2794 ARM exception table information for this frame, or fall back
2795 to using prologue parsing. (Note that if we have DWARF CFI,
2796 this sniffer isn't even called -- CFI is always preferred.)
2798 Before we make this decision, however, we check whether we
2799 actually have *symbol* information for the current frame.
2800 If not, prologue parsing would not work anyway, so we might
2801 as well use the exception table and hope for the best. */
2802 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2806 /* If the next frame is "normal", we are at a call site in this
2807 frame, so exception information is guaranteed to be valid. */
2808 if (get_next_frame (this_frame)
2809 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2812 /* We also assume exception information is valid if we're currently
2813 blocked in a system call. The system library is supposed to
2814 ensure this, so that e.g. pthread cancellation works. */
2815 if (arm_frame_is_thumb (this_frame))
2819 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2820 byte_order_for_code, &insn)
2821 && (insn & 0xff00) == 0xdf00 /* svc */)
2828 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2829 byte_order_for_code, &insn)
2830 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2834 /* Bail out if we don't know that exception information is valid. */
2838 /* The ARM exception index does not mark the *end* of the region
2839 covered by the entry, and some functions will not have any entry.
2840 To correctly recognize the end of the covered region, the linker
2841 should have inserted dummy records with a CANTUNWIND marker.
2843 Unfortunately, current versions of GNU ld do not reliably do
2844 this, and thus we may have found an incorrect entry above.
2845 As a (temporary) sanity check, we only use the entry if it
2846 lies *within* the bounds of the function. Note that this check
2847 might reject perfectly valid entries that just happen to cover
2848 multiple functions; therefore this check ought to be removed
2849 once the linker is fixed. */
2850 if (func_start > exidx_region)
2854 /* Decode the list of unwinding instructions into a prologue cache.
2855 Note that this may fail due to e.g. a "refuse to unwind" code. */
2856 cache = arm_exidx_fill_cache (this_frame, entry);
2860 *this_prologue_cache = cache;
2864 struct frame_unwind arm_exidx_unwind = {
2866 default_frame_unwind_stop_reason,
2867 arm_prologue_this_id,
2868 arm_prologue_prev_register,
2870 arm_exidx_unwind_sniffer
2873 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2874 trampoline, return the target PC. Otherwise return 0.
2876 void call0a (char c, short s, int i, long l) {}
2880 (*pointer_to_call0a) (c, s, i, l);
2883 Instead of calling a stub library function _call_via_xx (xx is
2884 the register name), GCC may inline the trampoline in the object
2885 file as below (register r2 has the address of call0a).
2888 .type main, %function
2897 The trampoline 'bx r2' doesn't belong to main. */
2900 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2902 /* The heuristics of recognizing such trampoline is that FRAME is
2903 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2904 if (arm_frame_is_thumb (frame))
2908 if (target_read_memory (pc, buf, 2) == 0)
2910 struct gdbarch *gdbarch = get_frame_arch (frame);
2911 enum bfd_endian byte_order_for_code
2912 = gdbarch_byte_order_for_code (gdbarch);
2914 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2916 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2919 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2921 /* Clear the LSB so that gdb core sets step-resume
2922 breakpoint at the right address. */
2923 return UNMAKE_THUMB_ADDR (dest);
2931 static struct arm_prologue_cache *
2932 arm_make_stub_cache (struct frame_info *this_frame)
2934 struct arm_prologue_cache *cache;
2936 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2937 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2939 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2944 /* Our frame ID for a stub frame is the current SP and LR. */
2947 arm_stub_this_id (struct frame_info *this_frame,
2949 struct frame_id *this_id)
2951 struct arm_prologue_cache *cache;
2953 if (*this_cache == NULL)
2954 *this_cache = arm_make_stub_cache (this_frame);
2955 cache = *this_cache;
2957 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2961 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2962 struct frame_info *this_frame,
2963 void **this_prologue_cache)
2965 CORE_ADDR addr_in_block;
2968 addr_in_block = get_frame_address_in_block (this_frame);
2969 if (in_plt_section (addr_in_block)
2970 /* We also use the stub winder if the target memory is unreadable
2971 to avoid having the prologue unwinder trying to read it. */
2972 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2978 struct frame_unwind arm_stub_unwind = {
2980 default_frame_unwind_stop_reason,
2982 arm_prologue_prev_register,
2984 arm_stub_unwind_sniffer
2987 /* Put here the code to store, into CACHE->saved_regs, the addresses
2988 of the saved registers of frame described by THIS_FRAME. CACHE is
2991 static struct arm_prologue_cache *
2992 arm_m_exception_cache (struct frame_info *this_frame)
2994 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2995 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2996 struct arm_prologue_cache *cache;
2997 CORE_ADDR unwound_sp;
3000 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3001 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3003 unwound_sp = get_frame_register_unsigned (this_frame,
3006 /* The hardware saves eight 32-bit words, comprising xPSR,
3007 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3008 "B1.5.6 Exception entry behavior" in
3009 "ARMv7-M Architecture Reference Manual". */
3010 cache->saved_regs[0].addr = unwound_sp;
3011 cache->saved_regs[1].addr = unwound_sp + 4;
3012 cache->saved_regs[2].addr = unwound_sp + 8;
3013 cache->saved_regs[3].addr = unwound_sp + 12;
3014 cache->saved_regs[12].addr = unwound_sp + 16;
3015 cache->saved_regs[14].addr = unwound_sp + 20;
3016 cache->saved_regs[15].addr = unwound_sp + 24;
3017 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3019 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3020 aligner between the top of the 32-byte stack frame and the
3021 previous context's stack pointer. */
3022 cache->prev_sp = unwound_sp + 32;
3023 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3024 && (xpsr & (1 << 9)) != 0)
3025 cache->prev_sp += 4;
3030 /* Implementation of function hook 'this_id' in
3031 'struct frame_uwnind'. */
3034 arm_m_exception_this_id (struct frame_info *this_frame,
3036 struct frame_id *this_id)
3038 struct arm_prologue_cache *cache;
3040 if (*this_cache == NULL)
3041 *this_cache = arm_m_exception_cache (this_frame);
3042 cache = *this_cache;
3044 /* Our frame ID for a stub frame is the current SP and LR. */
3045 *this_id = frame_id_build (cache->prev_sp,
3046 get_frame_pc (this_frame));
3049 /* Implementation of function hook 'prev_register' in
3050 'struct frame_uwnind'. */
3052 static struct value *
3053 arm_m_exception_prev_register (struct frame_info *this_frame,
3057 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3058 struct arm_prologue_cache *cache;
3060 if (*this_cache == NULL)
3061 *this_cache = arm_m_exception_cache (this_frame);
3062 cache = *this_cache;
3064 /* The value was already reconstructed into PREV_SP. */
3065 if (prev_regnum == ARM_SP_REGNUM)
3066 return frame_unwind_got_constant (this_frame, prev_regnum,
3069 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3073 /* Implementation of function hook 'sniffer' in
3074 'struct frame_uwnind'. */
3077 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3078 struct frame_info *this_frame,
3079 void **this_prologue_cache)
3081 CORE_ADDR this_pc = get_frame_pc (this_frame);
3083 /* No need to check is_m; this sniffer is only registered for
3084 M-profile architectures. */
3086 /* Exception frames return to one of these magic PCs. Other values
3087 are not defined as of v7-M. See details in "B1.5.8 Exception
3088 return behavior" in "ARMv7-M Architecture Reference Manual". */
3089 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3090 || this_pc == 0xfffffffd)
3096 /* Frame unwinder for M-profile exceptions. */
3098 struct frame_unwind arm_m_exception_unwind =
3101 default_frame_unwind_stop_reason,
3102 arm_m_exception_this_id,
3103 arm_m_exception_prev_register,
3105 arm_m_exception_unwind_sniffer
3109 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3111 struct arm_prologue_cache *cache;
3113 if (*this_cache == NULL)
3114 *this_cache = arm_make_prologue_cache (this_frame);
3115 cache = *this_cache;
3117 return cache->prev_sp - cache->framesize;
3120 struct frame_base arm_normal_base = {
3121 &arm_prologue_unwind,
3122 arm_normal_frame_base,
3123 arm_normal_frame_base,
3124 arm_normal_frame_base
3127 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3128 dummy frame. The frame ID's base needs to match the TOS value
3129 saved by save_dummy_frame_tos() and returned from
3130 arm_push_dummy_call, and the PC needs to match the dummy frame's
3133 static struct frame_id
3134 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3136 return frame_id_build (get_frame_register_unsigned (this_frame,
3138 get_frame_pc (this_frame));
3141 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3142 be used to construct the previous frame's ID, after looking up the
3143 containing function). */
3146 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3149 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3150 return arm_addr_bits_remove (gdbarch, pc);
3154 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3156 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3159 static struct value *
3160 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3163 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3165 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3170 /* The PC is normally copied from the return column, which
3171 describes saves of LR. However, that version may have an
3172 extra bit set to indicate Thumb state. The bit is not
3174 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3175 return frame_unwind_got_constant (this_frame, regnum,
3176 arm_addr_bits_remove (gdbarch, lr));
3179 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3180 cpsr = get_frame_register_unsigned (this_frame, regnum);
3181 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3182 if (IS_THUMB_ADDR (lr))
3186 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3189 internal_error (__FILE__, __LINE__,
3190 _("Unexpected register %d"), regnum);
3195 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3196 struct dwarf2_frame_state_reg *reg,
3197 struct frame_info *this_frame)
3203 reg->how = DWARF2_FRAME_REG_FN;
3204 reg->loc.fn = arm_dwarf2_prev_register;
3207 reg->how = DWARF2_FRAME_REG_CFA;
3212 /* Return true if we are in the function's epilogue, i.e. after the
3213 instruction that destroyed the function's stack frame. */
3216 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3218 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3219 unsigned int insn, insn2;
3220 int found_return = 0, found_stack_adjust = 0;
3221 CORE_ADDR func_start, func_end;
3225 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3228 /* The epilogue is a sequence of instructions along the following lines:
3230 - add stack frame size to SP or FP
3231 - [if frame pointer used] restore SP from FP
3232 - restore registers from SP [may include PC]
3233 - a return-type instruction [if PC wasn't already restored]
3235 In a first pass, we scan forward from the current PC and verify the
3236 instructions we find as compatible with this sequence, ending in a
3239 However, this is not sufficient to distinguish indirect function calls
3240 within a function from indirect tail calls in the epilogue in some cases.
3241 Therefore, if we didn't already find any SP-changing instruction during
3242 forward scan, we add a backward scanning heuristic to ensure we actually
3243 are in the epilogue. */
3246 while (scan_pc < func_end && !found_return)
3248 if (target_read_memory (scan_pc, buf, 2))
3252 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3254 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3256 else if (insn == 0x46f7) /* mov pc, lr */
3258 else if (insn == 0x46bd) /* mov sp, r7 */
3259 found_stack_adjust = 1;
3260 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3261 found_stack_adjust = 1;
3262 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3264 found_stack_adjust = 1;
3265 if (insn & 0x0100) /* <registers> include PC. */
3268 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3270 if (target_read_memory (scan_pc, buf, 2))
3274 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3276 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3278 found_stack_adjust = 1;
3279 if (insn2 & 0x8000) /* <registers> include PC. */
3282 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3283 && (insn2 & 0x0fff) == 0x0b04)
3285 found_stack_adjust = 1;
3286 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3289 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3290 && (insn2 & 0x0e00) == 0x0a00)
3291 found_stack_adjust = 1;
3302 /* Since any instruction in the epilogue sequence, with the possible
3303 exception of return itself, updates the stack pointer, we need to
3304 scan backwards for at most one instruction. Try either a 16-bit or
3305 a 32-bit instruction. This is just a heuristic, so we do not worry
3306 too much about false positives. */
3308 if (!found_stack_adjust)
3310 if (pc - 4 < func_start)
3312 if (target_read_memory (pc - 4, buf, 4))
3315 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3316 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3318 if (insn2 == 0x46bd) /* mov sp, r7 */
3319 found_stack_adjust = 1;
3320 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3321 found_stack_adjust = 1;
3322 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3323 found_stack_adjust = 1;
3324 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3325 found_stack_adjust = 1;
3326 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3327 && (insn2 & 0x0fff) == 0x0b04)
3328 found_stack_adjust = 1;
3329 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3330 && (insn2 & 0x0e00) == 0x0a00)
3331 found_stack_adjust = 1;
3334 return found_stack_adjust;
3337 /* Return true if we are in the function's epilogue, i.e. after the
3338 instruction that destroyed the function's stack frame. */
3341 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3343 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3345 int found_return, found_stack_adjust;
3346 CORE_ADDR func_start, func_end;
3348 if (arm_pc_is_thumb (gdbarch, pc))
3349 return thumb_in_function_epilogue_p (gdbarch, pc);
3351 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3354 /* We are in the epilogue if the previous instruction was a stack
3355 adjustment and the next instruction is a possible return (bx, mov
3356 pc, or pop). We could have to scan backwards to find the stack
3357 adjustment, or forwards to find the return, but this is a decent
3358 approximation. First scan forwards. */
3361 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3362 if (bits (insn, 28, 31) != INST_NV)
3364 if ((insn & 0x0ffffff0) == 0x012fff10)
3367 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3370 else if ((insn & 0x0fff0000) == 0x08bd0000
3371 && (insn & 0x0000c000) != 0)
3372 /* POP (LDMIA), including PC or LR. */
3379 /* Scan backwards. This is just a heuristic, so do not worry about
3380 false positives from mode changes. */
3382 if (pc < func_start + 4)
3385 found_stack_adjust = 0;
3386 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3387 if (bits (insn, 28, 31) != INST_NV)
3389 if ((insn & 0x0df0f000) == 0x0080d000)
3390 /* ADD SP (register or immediate). */
3391 found_stack_adjust = 1;
3392 else if ((insn & 0x0df0f000) == 0x0040d000)
3393 /* SUB SP (register or immediate). */
3394 found_stack_adjust = 1;
3395 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3397 found_stack_adjust = 1;
3398 else if ((insn & 0x0fff0000) == 0x08bd0000)
3400 found_stack_adjust = 1;
3401 else if ((insn & 0x0fff0000) == 0x049d0000)
3402 /* POP of a single register. */
3403 found_stack_adjust = 1;
3406 if (found_stack_adjust)
3413 /* When arguments must be pushed onto the stack, they go on in reverse
3414 order. The code below implements a FILO (stack) to do this. */
3419 struct stack_item *prev;
3423 static struct stack_item *
3424 push_stack_item (struct stack_item *prev, const void *contents, int len)
3426 struct stack_item *si;
3427 si = xmalloc (sizeof (struct stack_item));
3428 si->data = xmalloc (len);
3431 memcpy (si->data, contents, len);
3435 static struct stack_item *
3436 pop_stack_item (struct stack_item *si)
3438 struct stack_item *dead = si;
3446 /* Return the alignment (in bytes) of the given type. */
3449 arm_type_align (struct type *t)
3455 t = check_typedef (t);
3456 switch (TYPE_CODE (t))
3459 /* Should never happen. */
3460 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3464 case TYPE_CODE_ENUM:
3468 case TYPE_CODE_RANGE:
3470 case TYPE_CODE_CHAR:
3471 case TYPE_CODE_BOOL:
3472 return TYPE_LENGTH (t);
3474 case TYPE_CODE_ARRAY:
3475 case TYPE_CODE_COMPLEX:
3476 /* TODO: What about vector types? */
3477 return arm_type_align (TYPE_TARGET_TYPE (t));
3479 case TYPE_CODE_STRUCT:
3480 case TYPE_CODE_UNION:
3482 for (n = 0; n < TYPE_NFIELDS (t); n++)
3484 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3492 /* Possible base types for a candidate for passing and returning in
3495 enum arm_vfp_cprc_base_type
3504 /* The length of one element of base type B. */
3507 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3511 case VFP_CPRC_SINGLE:
3513 case VFP_CPRC_DOUBLE:
3515 case VFP_CPRC_VEC64:
3517 case VFP_CPRC_VEC128:
3520 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3525 /* The character ('s', 'd' or 'q') for the type of VFP register used
3526 for passing base type B. */
3529 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3533 case VFP_CPRC_SINGLE:
3535 case VFP_CPRC_DOUBLE:
3537 case VFP_CPRC_VEC64:
3539 case VFP_CPRC_VEC128:
3542 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3547 /* Determine whether T may be part of a candidate for passing and
3548 returning in VFP registers, ignoring the limit on the total number
3549 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3550 classification of the first valid component found; if it is not
3551 VFP_CPRC_UNKNOWN, all components must have the same classification
3552 as *BASE_TYPE. If it is found that T contains a type not permitted
3553 for passing and returning in VFP registers, a type differently
3554 classified from *BASE_TYPE, or two types differently classified
3555 from each other, return -1, otherwise return the total number of
3556 base-type elements found (possibly 0 in an empty structure or
3557 array). Vectors and complex types are not currently supported,
3558 matching the generic AAPCS support. */
3561 arm_vfp_cprc_sub_candidate (struct type *t,
3562 enum arm_vfp_cprc_base_type *base_type)
3564 t = check_typedef (t);
3565 switch (TYPE_CODE (t))
3568 switch (TYPE_LENGTH (t))
3571 if (*base_type == VFP_CPRC_UNKNOWN)
3572 *base_type = VFP_CPRC_SINGLE;
3573 else if (*base_type != VFP_CPRC_SINGLE)
3578 if (*base_type == VFP_CPRC_UNKNOWN)
3579 *base_type = VFP_CPRC_DOUBLE;
3580 else if (*base_type != VFP_CPRC_DOUBLE)
3589 case TYPE_CODE_ARRAY:
3593 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3596 if (TYPE_LENGTH (t) == 0)
3598 gdb_assert (count == 0);
3601 else if (count == 0)
3603 unitlen = arm_vfp_cprc_unit_length (*base_type);
3604 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3605 return TYPE_LENGTH (t) / unitlen;
3609 case TYPE_CODE_STRUCT:
3614 for (i = 0; i < TYPE_NFIELDS (t); i++)
3616 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3618 if (sub_count == -1)
3622 if (TYPE_LENGTH (t) == 0)
3624 gdb_assert (count == 0);
3627 else if (count == 0)
3629 unitlen = arm_vfp_cprc_unit_length (*base_type);
3630 if (TYPE_LENGTH (t) != unitlen * count)
3635 case TYPE_CODE_UNION:
3640 for (i = 0; i < TYPE_NFIELDS (t); i++)
3642 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3644 if (sub_count == -1)
3646 count = (count > sub_count ? count : sub_count);
3648 if (TYPE_LENGTH (t) == 0)
3650 gdb_assert (count == 0);
3653 else if (count == 0)
3655 unitlen = arm_vfp_cprc_unit_length (*base_type);
3656 if (TYPE_LENGTH (t) != unitlen * count)
3668 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3669 if passed to or returned from a non-variadic function with the VFP
3670 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3671 *BASE_TYPE to the base type for T and *COUNT to the number of
3672 elements of that base type before returning. */
3675 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3678 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3679 int c = arm_vfp_cprc_sub_candidate (t, &b);
3680 if (c <= 0 || c > 4)
3687 /* Return 1 if the VFP ABI should be used for passing arguments to and
3688 returning values from a function of type FUNC_TYPE, 0
3692 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3694 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3695 /* Variadic functions always use the base ABI. Assume that functions
3696 without debug info are not variadic. */
3697 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3699 /* The VFP ABI is only supported as a variant of AAPCS. */
3700 if (tdep->arm_abi != ARM_ABI_AAPCS)
3702 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3705 /* We currently only support passing parameters in integer registers, which
3706 conforms with GCC's default model, and VFP argument passing following
3707 the VFP variant of AAPCS. Several other variants exist and
3708 we should probably support some of them based on the selected ABI. */
3711 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3712 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3713 struct value **args, CORE_ADDR sp, int struct_return,
3714 CORE_ADDR struct_addr)
3716 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3720 struct stack_item *si = NULL;
3723 unsigned vfp_regs_free = (1 << 16) - 1;
3725 /* Determine the type of this function and whether the VFP ABI
3727 ftype = check_typedef (value_type (function));
3728 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3729 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3730 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3732 /* Set the return address. For the ARM, the return breakpoint is
3733 always at BP_ADDR. */
3734 if (arm_pc_is_thumb (gdbarch, bp_addr))
3736 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3738 /* Walk through the list of args and determine how large a temporary
3739 stack is required. Need to take care here as structs may be
3740 passed on the stack, and we have to push them. */
3743 argreg = ARM_A1_REGNUM;
3746 /* The struct_return pointer occupies the first parameter
3747 passing register. */
3751 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3752 gdbarch_register_name (gdbarch, argreg),
3753 paddress (gdbarch, struct_addr));
3754 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3758 for (argnum = 0; argnum < nargs; argnum++)
3761 struct type *arg_type;
3762 struct type *target_type;
3763 enum type_code typecode;
3764 const bfd_byte *val;
3766 enum arm_vfp_cprc_base_type vfp_base_type;
3768 int may_use_core_reg = 1;
3770 arg_type = check_typedef (value_type (args[argnum]));
3771 len = TYPE_LENGTH (arg_type);
3772 target_type = TYPE_TARGET_TYPE (arg_type);
3773 typecode = TYPE_CODE (arg_type);
3774 val = value_contents (args[argnum]);
3776 align = arm_type_align (arg_type);
3777 /* Round alignment up to a whole number of words. */
3778 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3779 /* Different ABIs have different maximum alignments. */
3780 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3782 /* The APCS ABI only requires word alignment. */
3783 align = INT_REGISTER_SIZE;
3787 /* The AAPCS requires at most doubleword alignment. */
3788 if (align > INT_REGISTER_SIZE * 2)
3789 align = INT_REGISTER_SIZE * 2;
3793 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3801 /* Because this is a CPRC it cannot go in a core register or
3802 cause a core register to be skipped for alignment.
3803 Either it goes in VFP registers and the rest of this loop
3804 iteration is skipped for this argument, or it goes on the
3805 stack (and the stack alignment code is correct for this
3807 may_use_core_reg = 0;
3809 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3810 shift = unit_length / 4;
3811 mask = (1 << (shift * vfp_base_count)) - 1;
3812 for (regno = 0; regno < 16; regno += shift)
3813 if (((vfp_regs_free >> regno) & mask) == mask)
3822 vfp_regs_free &= ~(mask << regno);
3823 reg_scaled = regno / shift;
3824 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3825 for (i = 0; i < vfp_base_count; i++)
3829 if (reg_char == 'q')
3830 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3831 val + i * unit_length);
3834 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3835 reg_char, reg_scaled + i);
3836 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3838 regcache_cooked_write (regcache, regnum,
3839 val + i * unit_length);
3846 /* This CPRC could not go in VFP registers, so all VFP
3847 registers are now marked as used. */
3852 /* Push stack padding for dowubleword alignment. */
3853 if (nstack & (align - 1))
3855 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3856 nstack += INT_REGISTER_SIZE;
3859 /* Doubleword aligned quantities must go in even register pairs. */
3860 if (may_use_core_reg
3861 && argreg <= ARM_LAST_ARG_REGNUM
3862 && align > INT_REGISTER_SIZE
3866 /* If the argument is a pointer to a function, and it is a
3867 Thumb function, create a LOCAL copy of the value and set
3868 the THUMB bit in it. */
3869 if (TYPE_CODE_PTR == typecode
3870 && target_type != NULL
3871 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3873 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3874 if (arm_pc_is_thumb (gdbarch, regval))
3876 bfd_byte *copy = alloca (len);
3877 store_unsigned_integer (copy, len, byte_order,
3878 MAKE_THUMB_ADDR (regval));
3883 /* Copy the argument to general registers or the stack in
3884 register-sized pieces. Large arguments are split between
3885 registers and stack. */
3888 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3890 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3892 /* The argument is being passed in a general purpose
3895 = extract_unsigned_integer (val, partial_len, byte_order);
3896 if (byte_order == BFD_ENDIAN_BIG)
3897 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3899 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3901 gdbarch_register_name
3903 phex (regval, INT_REGISTER_SIZE));
3904 regcache_cooked_write_unsigned (regcache, argreg, regval);
3909 /* Push the arguments onto the stack. */
3911 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3913 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3914 nstack += INT_REGISTER_SIZE;
3921 /* If we have an odd number of words to push, then decrement the stack
3922 by one word now, so first stack argument will be dword aligned. */
3929 write_memory (sp, si->data, si->len);
3930 si = pop_stack_item (si);
3933 /* Finally, update teh SP register. */
3934 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3940 /* Always align the frame to an 8-byte boundary. This is required on
3941 some platforms and harmless on the rest. */
3944 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3946 /* Align the stack to eight bytes. */
3947 return sp & ~ (CORE_ADDR) 7;
3951 print_fpu_flags (struct ui_file *file, int flags)
3953 if (flags & (1 << 0))
3954 fputs_filtered ("IVO ", file);
3955 if (flags & (1 << 1))
3956 fputs_filtered ("DVZ ", file);
3957 if (flags & (1 << 2))
3958 fputs_filtered ("OFL ", file);
3959 if (flags & (1 << 3))
3960 fputs_filtered ("UFL ", file);
3961 if (flags & (1 << 4))
3962 fputs_filtered ("INX ", file);
3963 fputc_filtered ('\n', file);
3966 /* Print interesting information about the floating point processor
3967 (if present) or emulator. */
3969 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3970 struct frame_info *frame, const char *args)
3972 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3975 type = (status >> 24) & 127;
3976 if (status & (1 << 31))
3977 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3979 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3980 /* i18n: [floating point unit] mask */
3981 fputs_filtered (_("mask: "), file);
3982 print_fpu_flags (file, status >> 16);
3983 /* i18n: [floating point unit] flags */
3984 fputs_filtered (_("flags: "), file);
3985 print_fpu_flags (file, status);
3988 /* Construct the ARM extended floating point type. */
3989 static struct type *
3990 arm_ext_type (struct gdbarch *gdbarch)
3992 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3994 if (!tdep->arm_ext_type)
3996 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3997 floatformats_arm_ext);
3999 return tdep->arm_ext_type;
4002 static struct type *
4003 arm_neon_double_type (struct gdbarch *gdbarch)
4005 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4007 if (tdep->neon_double_type == NULL)
4009 struct type *t, *elem;
4011 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4013 elem = builtin_type (gdbarch)->builtin_uint8;
4014 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4015 elem = builtin_type (gdbarch)->builtin_uint16;
4016 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4017 elem = builtin_type (gdbarch)->builtin_uint32;
4018 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4019 elem = builtin_type (gdbarch)->builtin_uint64;
4020 append_composite_type_field (t, "u64", elem);
4021 elem = builtin_type (gdbarch)->builtin_float;
4022 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4023 elem = builtin_type (gdbarch)->builtin_double;
4024 append_composite_type_field (t, "f64", elem);
4026 TYPE_VECTOR (t) = 1;
4027 TYPE_NAME (t) = "neon_d";
4028 tdep->neon_double_type = t;
4031 return tdep->neon_double_type;
4034 /* FIXME: The vector types are not correctly ordered on big-endian
4035 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4036 bits of d0 - regardless of what unit size is being held in d0. So
4037 the offset of the first uint8 in d0 is 7, but the offset of the
4038 first float is 4. This code works as-is for little-endian
4041 static struct type *
4042 arm_neon_quad_type (struct gdbarch *gdbarch)
4044 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4046 if (tdep->neon_quad_type == NULL)
4048 struct type *t, *elem;
4050 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4052 elem = builtin_type (gdbarch)->builtin_uint8;
4053 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4054 elem = builtin_type (gdbarch)->builtin_uint16;
4055 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4056 elem = builtin_type (gdbarch)->builtin_uint32;
4057 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4058 elem = builtin_type (gdbarch)->builtin_uint64;
4059 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4060 elem = builtin_type (gdbarch)->builtin_float;
4061 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4062 elem = builtin_type (gdbarch)->builtin_double;
4063 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4065 TYPE_VECTOR (t) = 1;
4066 TYPE_NAME (t) = "neon_q";
4067 tdep->neon_quad_type = t;
4070 return tdep->neon_quad_type;
4073 /* Return the GDB type object for the "standard" data type of data in
4076 static struct type *
4077 arm_register_type (struct gdbarch *gdbarch, int regnum)
4079 int num_regs = gdbarch_num_regs (gdbarch);
4081 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4082 && regnum >= num_regs && regnum < num_regs + 32)
4083 return builtin_type (gdbarch)->builtin_float;
4085 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4086 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4087 return arm_neon_quad_type (gdbarch);
4089 /* If the target description has register information, we are only
4090 in this function so that we can override the types of
4091 double-precision registers for NEON. */
4092 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4094 struct type *t = tdesc_register_type (gdbarch, regnum);
4096 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4097 && TYPE_CODE (t) == TYPE_CODE_FLT
4098 && gdbarch_tdep (gdbarch)->have_neon)
4099 return arm_neon_double_type (gdbarch);
4104 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4106 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4107 return builtin_type (gdbarch)->builtin_void;
4109 return arm_ext_type (gdbarch);
4111 else if (regnum == ARM_SP_REGNUM)
4112 return builtin_type (gdbarch)->builtin_data_ptr;
4113 else if (regnum == ARM_PC_REGNUM)
4114 return builtin_type (gdbarch)->builtin_func_ptr;
4115 else if (regnum >= ARRAY_SIZE (arm_register_names))
4116 /* These registers are only supported on targets which supply
4117 an XML description. */
4118 return builtin_type (gdbarch)->builtin_int0;
4120 return builtin_type (gdbarch)->builtin_uint32;
4123 /* Map a DWARF register REGNUM onto the appropriate GDB register
4127 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4129 /* Core integer regs. */
4130 if (reg >= 0 && reg <= 15)
4133 /* Legacy FPA encoding. These were once used in a way which
4134 overlapped with VFP register numbering, so their use is
4135 discouraged, but GDB doesn't support the ARM toolchain
4136 which used them for VFP. */
4137 if (reg >= 16 && reg <= 23)
4138 return ARM_F0_REGNUM + reg - 16;
4140 /* New assignments for the FPA registers. */
4141 if (reg >= 96 && reg <= 103)
4142 return ARM_F0_REGNUM + reg - 96;
4144 /* WMMX register assignments. */
4145 if (reg >= 104 && reg <= 111)
4146 return ARM_WCGR0_REGNUM + reg - 104;
4148 if (reg >= 112 && reg <= 127)
4149 return ARM_WR0_REGNUM + reg - 112;
4151 if (reg >= 192 && reg <= 199)
4152 return ARM_WC0_REGNUM + reg - 192;
4154 /* VFP v2 registers. A double precision value is actually
4155 in d1 rather than s2, but the ABI only defines numbering
4156 for the single precision registers. This will "just work"
4157 in GDB for little endian targets (we'll read eight bytes,
4158 starting in s0 and then progressing to s1), but will be
4159 reversed on big endian targets with VFP. This won't
4160 be a problem for the new Neon quad registers; you're supposed
4161 to use DW_OP_piece for those. */
4162 if (reg >= 64 && reg <= 95)
4166 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4167 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4171 /* VFP v3 / Neon registers. This range is also used for VFP v2
4172 registers, except that it now describes d0 instead of s0. */
4173 if (reg >= 256 && reg <= 287)
4177 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4178 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4185 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4187 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4190 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4192 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4193 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4195 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4196 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4198 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4199 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4201 if (reg < NUM_GREGS)
4202 return SIM_ARM_R0_REGNUM + reg;
4205 if (reg < NUM_FREGS)
4206 return SIM_ARM_FP0_REGNUM + reg;
4209 if (reg < NUM_SREGS)
4210 return SIM_ARM_FPS_REGNUM + reg;
4213 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4216 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4217 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4218 It is thought that this is is the floating-point register format on
4219 little-endian systems. */
4222 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4223 void *dbl, int endianess)
4227 if (endianess == BFD_ENDIAN_BIG)
4228 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4230 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4232 floatformat_from_doublest (fmt, &d, dbl);
4236 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4241 floatformat_to_doublest (fmt, ptr, &d);
4242 if (endianess == BFD_ENDIAN_BIG)
4243 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4245 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4250 condition_true (unsigned long cond, unsigned long status_reg)
4252 if (cond == INST_AL || cond == INST_NV)
4258 return ((status_reg & FLAG_Z) != 0);
4260 return ((status_reg & FLAG_Z) == 0);
4262 return ((status_reg & FLAG_C) != 0);
4264 return ((status_reg & FLAG_C) == 0);
4266 return ((status_reg & FLAG_N) != 0);
4268 return ((status_reg & FLAG_N) == 0);
4270 return ((status_reg & FLAG_V) != 0);
4272 return ((status_reg & FLAG_V) == 0);
4274 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4276 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4278 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4280 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4282 return (((status_reg & FLAG_Z) == 0)
4283 && (((status_reg & FLAG_N) == 0)
4284 == ((status_reg & FLAG_V) == 0)));
4286 return (((status_reg & FLAG_Z) != 0)
4287 || (((status_reg & FLAG_N) == 0)
4288 != ((status_reg & FLAG_V) == 0)));
4293 static unsigned long
4294 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4295 unsigned long pc_val, unsigned long status_reg)
4297 unsigned long res, shift;
4298 int rm = bits (inst, 0, 3);
4299 unsigned long shifttype = bits (inst, 5, 6);
4303 int rs = bits (inst, 8, 11);
4304 shift = (rs == 15 ? pc_val + 8
4305 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4308 shift = bits (inst, 7, 11);
4310 res = (rm == ARM_PC_REGNUM
4311 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4312 : get_frame_register_unsigned (frame, rm));
4317 res = shift >= 32 ? 0 : res << shift;
4321 res = shift >= 32 ? 0 : res >> shift;
4327 res = ((res & 0x80000000L)
4328 ? ~((~res) >> shift) : res >> shift);
4331 case 3: /* ROR/RRX */
4334 res = (res >> 1) | (carry ? 0x80000000L : 0);
4336 res = (res >> shift) | (res << (32 - shift));
4340 return res & 0xffffffff;
4343 /* Return number of 1-bits in VAL. */
4346 bitcount (unsigned long val)
4349 for (nbits = 0; val != 0; nbits++)
4350 val &= val - 1; /* Delete rightmost 1-bit in val. */
4354 /* Return the size in bytes of the complete Thumb instruction whose
4355 first halfword is INST1. */
4358 thumb_insn_size (unsigned short inst1)
4360 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4367 thumb_advance_itstate (unsigned int itstate)
4369 /* Preserve IT[7:5], the first three bits of the condition. Shift
4370 the upcoming condition flags left by one bit. */
4371 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4373 /* If we have finished the IT block, clear the state. */
4374 if ((itstate & 0x0f) == 0)
4380 /* Find the next PC after the current instruction executes. In some
4381 cases we can not statically determine the answer (see the IT state
4382 handling in this function); in that case, a breakpoint may be
4383 inserted in addition to the returned PC, which will be used to set
4384 another breakpoint by our caller. */
4387 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4389 struct gdbarch *gdbarch = get_frame_arch (frame);
4390 struct address_space *aspace = get_frame_address_space (frame);
4391 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4392 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4393 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4394 unsigned short inst1;
4395 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4396 unsigned long offset;
4397 ULONGEST status, itstate;
4399 nextpc = MAKE_THUMB_ADDR (nextpc);
4400 pc_val = MAKE_THUMB_ADDR (pc_val);
4402 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4404 /* Thumb-2 conditional execution support. There are eight bits in
4405 the CPSR which describe conditional execution state. Once
4406 reconstructed (they're in a funny order), the low five bits
4407 describe the low bit of the condition for each instruction and
4408 how many instructions remain. The high three bits describe the
4409 base condition. One of the low four bits will be set if an IT
4410 block is active. These bits read as zero on earlier
4412 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4413 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4415 /* If-Then handling. On GNU/Linux, where this routine is used, we
4416 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4417 can disable execution of the undefined instruction. So we might
4418 miss the breakpoint if we set it on a skipped conditional
4419 instruction. Because conditional instructions can change the
4420 flags, affecting the execution of further instructions, we may
4421 need to set two breakpoints. */
4423 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4425 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4427 /* An IT instruction. Because this instruction does not
4428 modify the flags, we can accurately predict the next
4429 executed instruction. */
4430 itstate = inst1 & 0x00ff;
4431 pc += thumb_insn_size (inst1);
4433 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4435 inst1 = read_memory_unsigned_integer (pc, 2,
4436 byte_order_for_code);
4437 pc += thumb_insn_size (inst1);
4438 itstate = thumb_advance_itstate (itstate);
4441 return MAKE_THUMB_ADDR (pc);
4443 else if (itstate != 0)
4445 /* We are in a conditional block. Check the condition. */
4446 if (! condition_true (itstate >> 4, status))
4448 /* Advance to the next executed instruction. */
4449 pc += thumb_insn_size (inst1);
4450 itstate = thumb_advance_itstate (itstate);
4452 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4454 inst1 = read_memory_unsigned_integer (pc, 2,
4455 byte_order_for_code);
4456 pc += thumb_insn_size (inst1);
4457 itstate = thumb_advance_itstate (itstate);
4460 return MAKE_THUMB_ADDR (pc);
4462 else if ((itstate & 0x0f) == 0x08)
4464 /* This is the last instruction of the conditional
4465 block, and it is executed. We can handle it normally
4466 because the following instruction is not conditional,
4467 and we must handle it normally because it is
4468 permitted to branch. Fall through. */
4474 /* There are conditional instructions after this one.
4475 If this instruction modifies the flags, then we can
4476 not predict what the next executed instruction will
4477 be. Fortunately, this instruction is architecturally
4478 forbidden to branch; we know it will fall through.
4479 Start by skipping past it. */
4480 pc += thumb_insn_size (inst1);
4481 itstate = thumb_advance_itstate (itstate);
4483 /* Set a breakpoint on the following instruction. */
4484 gdb_assert ((itstate & 0x0f) != 0);
4485 arm_insert_single_step_breakpoint (gdbarch, aspace,
4486 MAKE_THUMB_ADDR (pc));
4487 cond_negated = (itstate >> 4) & 1;
4489 /* Skip all following instructions with the same
4490 condition. If there is a later instruction in the IT
4491 block with the opposite condition, set the other
4492 breakpoint there. If not, then set a breakpoint on
4493 the instruction after the IT block. */
4496 inst1 = read_memory_unsigned_integer (pc, 2,
4497 byte_order_for_code);
4498 pc += thumb_insn_size (inst1);
4499 itstate = thumb_advance_itstate (itstate);
4501 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4503 return MAKE_THUMB_ADDR (pc);
4507 else if (itstate & 0x0f)
4509 /* We are in a conditional block. Check the condition. */
4510 int cond = itstate >> 4;
4512 if (! condition_true (cond, status))
4513 /* Advance to the next instruction. All the 32-bit
4514 instructions share a common prefix. */
4515 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4517 /* Otherwise, handle the instruction normally. */
4520 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4524 /* Fetch the saved PC from the stack. It's stored above
4525 all of the other registers. */
4526 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4527 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4528 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4530 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4532 unsigned long cond = bits (inst1, 8, 11);
4533 if (cond == 0x0f) /* 0x0f = SWI */
4535 struct gdbarch_tdep *tdep;
4536 tdep = gdbarch_tdep (gdbarch);
4538 if (tdep->syscall_next_pc != NULL)
4539 nextpc = tdep->syscall_next_pc (frame);
4542 else if (cond != 0x0f && condition_true (cond, status))
4543 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4545 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4547 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4549 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4551 unsigned short inst2;
4552 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4554 /* Default to the next instruction. */
4556 nextpc = MAKE_THUMB_ADDR (nextpc);
4558 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4560 /* Branches and miscellaneous control instructions. */
4562 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4565 int j1, j2, imm1, imm2;
4567 imm1 = sbits (inst1, 0, 10);
4568 imm2 = bits (inst2, 0, 10);
4569 j1 = bit (inst2, 13);
4570 j2 = bit (inst2, 11);
4572 offset = ((imm1 << 12) + (imm2 << 1));
4573 offset ^= ((!j2) << 22) | ((!j1) << 23);
4575 nextpc = pc_val + offset;
4576 /* For BLX make sure to clear the low bits. */
4577 if (bit (inst2, 12) == 0)
4578 nextpc = nextpc & 0xfffffffc;
4580 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4582 /* SUBS PC, LR, #imm8. */
4583 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4584 nextpc -= inst2 & 0x00ff;
4586 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4588 /* Conditional branch. */
4589 if (condition_true (bits (inst1, 6, 9), status))
4591 int sign, j1, j2, imm1, imm2;
4593 sign = sbits (inst1, 10, 10);
4594 imm1 = bits (inst1, 0, 5);
4595 imm2 = bits (inst2, 0, 10);
4596 j1 = bit (inst2, 13);
4597 j2 = bit (inst2, 11);
4599 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4600 offset += (imm1 << 12) + (imm2 << 1);
4602 nextpc = pc_val + offset;
4606 else if ((inst1 & 0xfe50) == 0xe810)
4608 /* Load multiple or RFE. */
4609 int rn, offset, load_pc = 1;
4611 rn = bits (inst1, 0, 3);
4612 if (bit (inst1, 7) && !bit (inst1, 8))
4615 if (!bit (inst2, 15))
4617 offset = bitcount (inst2) * 4 - 4;
4619 else if (!bit (inst1, 7) && bit (inst1, 8))
4622 if (!bit (inst2, 15))
4626 else if (bit (inst1, 7) && bit (inst1, 8))
4631 else if (!bit (inst1, 7) && !bit (inst1, 8))
4641 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4642 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4645 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4647 /* MOV PC or MOVS PC. */
4648 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4649 nextpc = MAKE_THUMB_ADDR (nextpc);
4651 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4655 int rn, load_pc = 1;
4657 rn = bits (inst1, 0, 3);
4658 base = get_frame_register_unsigned (frame, rn);
4659 if (rn == ARM_PC_REGNUM)
4661 base = (base + 4) & ~(CORE_ADDR) 0x3;
4663 base += bits (inst2, 0, 11);
4665 base -= bits (inst2, 0, 11);
4667 else if (bit (inst1, 7))
4668 base += bits (inst2, 0, 11);
4669 else if (bit (inst2, 11))
4671 if (bit (inst2, 10))
4674 base += bits (inst2, 0, 7);
4676 base -= bits (inst2, 0, 7);
4679 else if ((inst2 & 0x0fc0) == 0x0000)
4681 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4682 base += get_frame_register_unsigned (frame, rm) << shift;
4689 nextpc = get_frame_memory_unsigned (frame, base, 4);
4691 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4694 CORE_ADDR tbl_reg, table, offset, length;
4696 tbl_reg = bits (inst1, 0, 3);
4697 if (tbl_reg == 0x0f)
4698 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4700 table = get_frame_register_unsigned (frame, tbl_reg);
4702 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4703 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4704 nextpc = pc_val + length;
4706 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4709 CORE_ADDR tbl_reg, table, offset, length;
4711 tbl_reg = bits (inst1, 0, 3);
4712 if (tbl_reg == 0x0f)
4713 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4715 table = get_frame_register_unsigned (frame, tbl_reg);
4717 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4718 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4719 nextpc = pc_val + length;
4722 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4724 if (bits (inst1, 3, 6) == 0x0f)
4725 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4727 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4729 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4731 if (bits (inst1, 3, 6) == 0x0f)
4734 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4736 nextpc = MAKE_THUMB_ADDR (nextpc);
4738 else if ((inst1 & 0xf500) == 0xb100)
4741 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4742 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4744 if (bit (inst1, 11) && reg != 0)
4745 nextpc = pc_val + imm;
4746 else if (!bit (inst1, 11) && reg == 0)
4747 nextpc = pc_val + imm;
4752 /* Get the raw next address. PC is the current program counter, in
4753 FRAME, which is assumed to be executing in ARM mode.
4755 The value returned has the execution state of the next instruction
4756 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4757 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4761 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4763 struct gdbarch *gdbarch = get_frame_arch (frame);
4764 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4765 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4766 unsigned long pc_val;
4767 unsigned long this_instr;
4768 unsigned long status;
4771 pc_val = (unsigned long) pc;
4772 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4774 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4775 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4777 if (bits (this_instr, 28, 31) == INST_NV)
4778 switch (bits (this_instr, 24, 27))
4783 /* Branch with Link and change to Thumb. */
4784 nextpc = BranchDest (pc, this_instr);
4785 nextpc |= bit (this_instr, 24) << 1;
4786 nextpc = MAKE_THUMB_ADDR (nextpc);
4792 /* Coprocessor register transfer. */
4793 if (bits (this_instr, 12, 15) == 15)
4794 error (_("Invalid update to pc in instruction"));
4797 else if (condition_true (bits (this_instr, 28, 31), status))
4799 switch (bits (this_instr, 24, 27))
4802 case 0x1: /* data processing */
4806 unsigned long operand1, operand2, result = 0;
4810 if (bits (this_instr, 12, 15) != 15)
4813 if (bits (this_instr, 22, 25) == 0
4814 && bits (this_instr, 4, 7) == 9) /* multiply */
4815 error (_("Invalid update to pc in instruction"));
4817 /* BX <reg>, BLX <reg> */
4818 if (bits (this_instr, 4, 27) == 0x12fff1
4819 || bits (this_instr, 4, 27) == 0x12fff3)
4821 rn = bits (this_instr, 0, 3);
4822 nextpc = ((rn == ARM_PC_REGNUM)
4824 : get_frame_register_unsigned (frame, rn));
4829 /* Multiply into PC. */
4830 c = (status & FLAG_C) ? 1 : 0;
4831 rn = bits (this_instr, 16, 19);
4832 operand1 = ((rn == ARM_PC_REGNUM)
4834 : get_frame_register_unsigned (frame, rn));
4836 if (bit (this_instr, 25))
4838 unsigned long immval = bits (this_instr, 0, 7);
4839 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4840 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4843 else /* operand 2 is a shifted register. */
4844 operand2 = shifted_reg_val (frame, this_instr, c,
4847 switch (bits (this_instr, 21, 24))
4850 result = operand1 & operand2;
4854 result = operand1 ^ operand2;
4858 result = operand1 - operand2;
4862 result = operand2 - operand1;
4866 result = operand1 + operand2;
4870 result = operand1 + operand2 + c;
4874 result = operand1 - operand2 + c;
4878 result = operand2 - operand1 + c;
4884 case 0xb: /* tst, teq, cmp, cmn */
4885 result = (unsigned long) nextpc;
4889 result = operand1 | operand2;
4893 /* Always step into a function. */
4898 result = operand1 & ~operand2;
4906 /* In 26-bit APCS the bottom two bits of the result are
4907 ignored, and we always end up in ARM state. */
4909 nextpc = arm_addr_bits_remove (gdbarch, result);
4917 case 0x5: /* data transfer */
4920 if (bit (this_instr, 20))
4923 if (bits (this_instr, 12, 15) == 15)
4929 if (bit (this_instr, 22))
4930 error (_("Invalid update to pc in instruction"));
4932 /* byte write to PC */
4933 rn = bits (this_instr, 16, 19);
4934 base = ((rn == ARM_PC_REGNUM)
4936 : get_frame_register_unsigned (frame, rn));
4938 if (bit (this_instr, 24))
4941 int c = (status & FLAG_C) ? 1 : 0;
4942 unsigned long offset =
4943 (bit (this_instr, 25)
4944 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4945 : bits (this_instr, 0, 11));
4947 if (bit (this_instr, 23))
4953 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4960 case 0x9: /* block transfer */
4961 if (bit (this_instr, 20))
4964 if (bit (this_instr, 15))
4968 unsigned long rn_val
4969 = get_frame_register_unsigned (frame,
4970 bits (this_instr, 16, 19));
4972 if (bit (this_instr, 23))
4975 unsigned long reglist = bits (this_instr, 0, 14);
4976 offset = bitcount (reglist) * 4;
4977 if (bit (this_instr, 24)) /* pre */
4980 else if (bit (this_instr, 24))
4984 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4991 case 0xb: /* branch & link */
4992 case 0xa: /* branch */
4994 nextpc = BranchDest (pc, this_instr);
5000 case 0xe: /* coproc ops */
5004 struct gdbarch_tdep *tdep;
5005 tdep = gdbarch_tdep (gdbarch);
5007 if (tdep->syscall_next_pc != NULL)
5008 nextpc = tdep->syscall_next_pc (frame);
5014 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5022 /* Determine next PC after current instruction executes. Will call either
5023 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5024 loop is detected. */
5027 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5031 if (arm_frame_is_thumb (frame))
5032 nextpc = thumb_get_next_pc_raw (frame, pc);
5034 nextpc = arm_get_next_pc_raw (frame, pc);
5039 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5040 of the appropriate mode (as encoded in the PC value), even if this
5041 differs from what would be expected according to the symbol tables. */
5044 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5045 struct address_space *aspace,
5048 struct cleanup *old_chain
5049 = make_cleanup_restore_integer (&arm_override_mode);
5051 arm_override_mode = IS_THUMB_ADDR (pc);
5052 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5054 insert_single_step_breakpoint (gdbarch, aspace, pc);
5056 do_cleanups (old_chain);
5059 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5060 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5061 is found, attempt to step through it. A breakpoint is placed at the end of
5065 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5067 struct gdbarch *gdbarch = get_frame_arch (frame);
5068 struct address_space *aspace = get_frame_address_space (frame);
5069 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5070 CORE_ADDR pc = get_frame_pc (frame);
5071 CORE_ADDR breaks[2] = {-1, -1};
5073 unsigned short insn1, insn2;
5076 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5077 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5078 ULONGEST status, itstate;
5080 /* We currently do not support atomic sequences within an IT block. */
5081 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5082 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5086 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5087 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5089 if (thumb_insn_size (insn1) != 4)
5092 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5094 if (!((insn1 & 0xfff0) == 0xe850
5095 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5098 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5100 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5102 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5105 if (thumb_insn_size (insn1) != 4)
5107 /* Assume that there is at most one conditional branch in the
5108 atomic sequence. If a conditional branch is found, put a
5109 breakpoint in its destination address. */
5110 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5112 if (last_breakpoint > 0)
5113 return 0; /* More than one conditional branch found,
5114 fallback to the standard code. */
5116 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5120 /* We do not support atomic sequences that use any *other*
5121 instructions but conditional branches to change the PC.
5122 Fall back to standard code to avoid losing control of
5124 else if (thumb_instruction_changes_pc (insn1))
5129 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5132 /* Assume that there is at most one conditional branch in the
5133 atomic sequence. If a conditional branch is found, put a
5134 breakpoint in its destination address. */
5135 if ((insn1 & 0xf800) == 0xf000
5136 && (insn2 & 0xd000) == 0x8000
5137 && (insn1 & 0x0380) != 0x0380)
5139 int sign, j1, j2, imm1, imm2;
5140 unsigned int offset;
5142 sign = sbits (insn1, 10, 10);
5143 imm1 = bits (insn1, 0, 5);
5144 imm2 = bits (insn2, 0, 10);
5145 j1 = bit (insn2, 13);
5146 j2 = bit (insn2, 11);
5148 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5149 offset += (imm1 << 12) + (imm2 << 1);
5151 if (last_breakpoint > 0)
5152 return 0; /* More than one conditional branch found,
5153 fallback to the standard code. */
5155 breaks[1] = loc + offset;
5159 /* We do not support atomic sequences that use any *other*
5160 instructions but conditional branches to change the PC.
5161 Fall back to standard code to avoid losing control of
5163 else if (thumb2_instruction_changes_pc (insn1, insn2))
5166 /* If we find a strex{,b,h,d}, we're done. */
5167 if ((insn1 & 0xfff0) == 0xe840
5168 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5173 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5174 if (insn_count == atomic_sequence_length)
5177 /* Insert a breakpoint right after the end of the atomic sequence. */
5180 /* Check for duplicated breakpoints. Check also for a breakpoint
5181 placed (branch instruction's destination) anywhere in sequence. */
5183 && (breaks[1] == breaks[0]
5184 || (breaks[1] >= pc && breaks[1] < loc)))
5185 last_breakpoint = 0;
5187 /* Effectively inserts the breakpoints. */
5188 for (index = 0; index <= last_breakpoint; index++)
5189 arm_insert_single_step_breakpoint (gdbarch, aspace,
5190 MAKE_THUMB_ADDR (breaks[index]));
5196 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5198 struct gdbarch *gdbarch = get_frame_arch (frame);
5199 struct address_space *aspace = get_frame_address_space (frame);
5200 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5201 CORE_ADDR pc = get_frame_pc (frame);
5202 CORE_ADDR breaks[2] = {-1, -1};
5207 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5208 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5210 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5211 Note that we do not currently support conditionally executed atomic
5213 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5215 if ((insn & 0xff9000f0) != 0xe1900090)
5218 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5220 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5222 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5225 /* Assume that there is at most one conditional branch in the atomic
5226 sequence. If a conditional branch is found, put a breakpoint in
5227 its destination address. */
5228 if (bits (insn, 24, 27) == 0xa)
5230 if (last_breakpoint > 0)
5231 return 0; /* More than one conditional branch found, fallback
5232 to the standard single-step code. */
5234 breaks[1] = BranchDest (loc - 4, insn);
5238 /* We do not support atomic sequences that use any *other* instructions
5239 but conditional branches to change the PC. Fall back to standard
5240 code to avoid losing control of execution. */
5241 else if (arm_instruction_changes_pc (insn))
5244 /* If we find a strex{,b,h,d}, we're done. */
5245 if ((insn & 0xff9000f0) == 0xe1800090)
5249 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5250 if (insn_count == atomic_sequence_length)
5253 /* Insert a breakpoint right after the end of the atomic sequence. */
5256 /* Check for duplicated breakpoints. Check also for a breakpoint
5257 placed (branch instruction's destination) anywhere in sequence. */
5259 && (breaks[1] == breaks[0]
5260 || (breaks[1] >= pc && breaks[1] < loc)))
5261 last_breakpoint = 0;
5263 /* Effectively inserts the breakpoints. */
5264 for (index = 0; index <= last_breakpoint; index++)
5265 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5271 arm_deal_with_atomic_sequence (struct frame_info *frame)
5273 if (arm_frame_is_thumb (frame))
5274 return thumb_deal_with_atomic_sequence_raw (frame);
5276 return arm_deal_with_atomic_sequence_raw (frame);
5279 /* single_step() is called just before we want to resume the inferior,
5280 if we want to single-step it but there is no hardware or kernel
5281 single-step support. We find the target of the coming instruction
5282 and breakpoint it. */
5285 arm_software_single_step (struct frame_info *frame)
5287 struct gdbarch *gdbarch = get_frame_arch (frame);
5288 struct address_space *aspace = get_frame_address_space (frame);
5291 if (arm_deal_with_atomic_sequence (frame))
5294 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5295 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5300 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5301 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5302 NULL if an error occurs. BUF is freed. */
5305 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5306 int old_len, int new_len)
5309 int bytes_to_read = new_len - old_len;
5311 new_buf = xmalloc (new_len);
5312 memcpy (new_buf + bytes_to_read, buf, old_len);
5314 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5322 /* An IT block is at most the 2-byte IT instruction followed by
5323 four 4-byte instructions. The furthest back we must search to
5324 find an IT block that affects the current instruction is thus
5325 2 + 3 * 4 == 14 bytes. */
5326 #define MAX_IT_BLOCK_PREFIX 14
5328 /* Use a quick scan if there are more than this many bytes of
5330 #define IT_SCAN_THRESHOLD 32
5332 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5333 A breakpoint in an IT block may not be hit, depending on the
5336 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5340 CORE_ADDR boundary, func_start;
5342 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5343 int i, any, last_it, last_it_count;
5345 /* If we are using BKPT breakpoints, none of this is necessary. */
5346 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5349 /* ARM mode does not have this problem. */
5350 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5353 /* We are setting a breakpoint in Thumb code that could potentially
5354 contain an IT block. The first step is to find how much Thumb
5355 code there is; we do not need to read outside of known Thumb
5357 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5359 /* Thumb-2 code must have mapping symbols to have a chance. */
5362 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5364 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5365 && func_start > boundary)
5366 boundary = func_start;
5368 /* Search for a candidate IT instruction. We have to do some fancy
5369 footwork to distinguish a real IT instruction from the second
5370 half of a 32-bit instruction, but there is no need for that if
5371 there's no candidate. */
5372 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5374 /* No room for an IT instruction. */
5377 buf = xmalloc (buf_len);
5378 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5381 for (i = 0; i < buf_len; i += 2)
5383 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5384 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5396 /* OK, the code bytes before this instruction contain at least one
5397 halfword which resembles an IT instruction. We know that it's
5398 Thumb code, but there are still two possibilities. Either the
5399 halfword really is an IT instruction, or it is the second half of
5400 a 32-bit Thumb instruction. The only way we can tell is to
5401 scan forwards from a known instruction boundary. */
5402 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5406 /* There's a lot of code before this instruction. Start with an
5407 optimistic search; it's easy to recognize halfwords that can
5408 not be the start of a 32-bit instruction, and use that to
5409 lock on to the instruction boundaries. */
5410 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5413 buf_len = IT_SCAN_THRESHOLD;
5416 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5418 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5419 if (thumb_insn_size (inst1) == 2)
5426 /* At this point, if DEFINITE, BUF[I] is the first place we
5427 are sure that we know the instruction boundaries, and it is far
5428 enough from BPADDR that we could not miss an IT instruction
5429 affecting BPADDR. If ! DEFINITE, give up - start from a
5433 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5437 buf_len = bpaddr - boundary;
5443 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5446 buf_len = bpaddr - boundary;
5450 /* Scan forwards. Find the last IT instruction before BPADDR. */
5455 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5457 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5462 else if (inst1 & 0x0002)
5464 else if (inst1 & 0x0004)
5469 i += thumb_insn_size (inst1);
5475 /* There wasn't really an IT instruction after all. */
5478 if (last_it_count < 1)
5479 /* It was too far away. */
5482 /* This really is a trouble spot. Move the breakpoint to the IT
5484 return bpaddr - buf_len + last_it;
5487 /* ARM displaced stepping support.
5489 Generally ARM displaced stepping works as follows:
5491 1. When an instruction is to be single-stepped, it is first decoded by
5492 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5493 Depending on the type of instruction, it is then copied to a scratch
5494 location, possibly in a modified form. The copy_* set of functions
5495 performs such modification, as necessary. A breakpoint is placed after
5496 the modified instruction in the scratch space to return control to GDB.
5497 Note in particular that instructions which modify the PC will no longer
5498 do so after modification.
5500 2. The instruction is single-stepped, by setting the PC to the scratch
5501 location address, and resuming. Control returns to GDB when the
5504 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5505 function used for the current instruction. This function's job is to
5506 put the CPU/memory state back to what it would have been if the
5507 instruction had been executed unmodified in its original location. */
5509 /* NOP instruction (mov r0, r0). */
5510 #define ARM_NOP 0xe1a00000
5511 #define THUMB_NOP 0x4600
5513 /* Helper for register reads for displaced stepping. In particular, this
5514 returns the PC as it would be seen by the instruction at its original
5518 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5522 CORE_ADDR from = dsc->insn_addr;
5524 if (regno == ARM_PC_REGNUM)
5526 /* Compute pipeline offset:
5527 - When executing an ARM instruction, PC reads as the address of the
5528 current instruction plus 8.
5529 - When executing a Thumb instruction, PC reads as the address of the
5530 current instruction plus 4. */
5537 if (debug_displaced)
5538 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5539 (unsigned long) from);
5540 return (ULONGEST) from;
5544 regcache_cooked_read_unsigned (regs, regno, &ret);
5545 if (debug_displaced)
5546 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5547 regno, (unsigned long) ret);
5553 displaced_in_arm_mode (struct regcache *regs)
5556 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5558 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5560 return (ps & t_bit) == 0;
5563 /* Write to the PC as from a branch instruction. */
5566 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5570 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5571 architecture versions < 6. */
5572 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5573 val & ~(ULONGEST) 0x3);
5575 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5576 val & ~(ULONGEST) 0x1);
5579 /* Write to the PC as from a branch-exchange instruction. */
5582 bx_write_pc (struct regcache *regs, ULONGEST val)
5585 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5587 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5591 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5592 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5594 else if ((val & 2) == 0)
5596 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5597 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5601 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5602 mode, align dest to 4 bytes). */
5603 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5604 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5605 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5609 /* Write to the PC as if from a load instruction. */
5612 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5615 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5616 bx_write_pc (regs, val);
5618 branch_write_pc (regs, dsc, val);
5621 /* Write to the PC as if from an ALU instruction. */
5624 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5627 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5628 bx_write_pc (regs, val);
5630 branch_write_pc (regs, dsc, val);
5633 /* Helper for writing to registers for displaced stepping. Writing to the PC
5634 has a varying effects depending on the instruction which does the write:
5635 this is controlled by the WRITE_PC argument. */
5638 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5639 int regno, ULONGEST val, enum pc_write_style write_pc)
5641 if (regno == ARM_PC_REGNUM)
5643 if (debug_displaced)
5644 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5645 (unsigned long) val);
5648 case BRANCH_WRITE_PC:
5649 branch_write_pc (regs, dsc, val);
5653 bx_write_pc (regs, val);
5657 load_write_pc (regs, dsc, val);
5661 alu_write_pc (regs, dsc, val);
5664 case CANNOT_WRITE_PC:
5665 warning (_("Instruction wrote to PC in an unexpected way when "
5666 "single-stepping"));
5670 internal_error (__FILE__, __LINE__,
5671 _("Invalid argument to displaced_write_reg"));
5674 dsc->wrote_to_pc = 1;
5678 if (debug_displaced)
5679 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5680 regno, (unsigned long) val);
5681 regcache_cooked_write_unsigned (regs, regno, val);
5685 /* This function is used to concisely determine if an instruction INSN
5686 references PC. Register fields of interest in INSN should have the
5687 corresponding fields of BITMASK set to 0b1111. The function
5688 returns return 1 if any of these fields in INSN reference the PC
5689 (also 0b1111, r15), else it returns 0. */
5692 insn_references_pc (uint32_t insn, uint32_t bitmask)
5694 uint32_t lowbit = 1;
5696 while (bitmask != 0)
5700 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5706 mask = lowbit * 0xf;
5708 if ((insn & mask) == mask)
5717 /* The simplest copy function. Many instructions have the same effect no
5718 matter what address they are executed at: in those cases, use this. */
5721 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5722 const char *iname, struct displaced_step_closure *dsc)
5724 if (debug_displaced)
5725 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5726 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5729 dsc->modinsn[0] = insn;
5735 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5736 uint16_t insn2, const char *iname,
5737 struct displaced_step_closure *dsc)
5739 if (debug_displaced)
5740 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5741 "opcode/class '%s' unmodified\n", insn1, insn2,
5744 dsc->modinsn[0] = insn1;
5745 dsc->modinsn[1] = insn2;
5751 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5754 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5756 struct displaced_step_closure *dsc)
5758 if (debug_displaced)
5759 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5760 "opcode/class '%s' unmodified\n", insn,
5763 dsc->modinsn[0] = insn;
5768 /* Preload instructions with immediate offset. */
5771 cleanup_preload (struct gdbarch *gdbarch,
5772 struct regcache *regs, struct displaced_step_closure *dsc)
5774 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5775 if (!dsc->u.preload.immed)
5776 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5780 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5781 struct displaced_step_closure *dsc, unsigned int rn)
5784 /* Preload instructions:
5786 {pli/pld} [rn, #+/-imm]
5788 {pli/pld} [r0, #+/-imm]. */
5790 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5791 rn_val = displaced_read_reg (regs, dsc, rn);
5792 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5793 dsc->u.preload.immed = 1;
5795 dsc->cleanup = &cleanup_preload;
5799 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5800 struct displaced_step_closure *dsc)
5802 unsigned int rn = bits (insn, 16, 19);
5804 if (!insn_references_pc (insn, 0x000f0000ul))
5805 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5807 if (debug_displaced)
5808 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5809 (unsigned long) insn);
5811 dsc->modinsn[0] = insn & 0xfff0ffff;
5813 install_preload (gdbarch, regs, dsc, rn);
5819 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5820 struct regcache *regs, struct displaced_step_closure *dsc)
5822 unsigned int rn = bits (insn1, 0, 3);
5823 unsigned int u_bit = bit (insn1, 7);
5824 int imm12 = bits (insn2, 0, 11);
5827 if (rn != ARM_PC_REGNUM)
5828 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5830 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5831 PLD (literal) Encoding T1. */
5832 if (debug_displaced)
5833 fprintf_unfiltered (gdb_stdlog,
5834 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5835 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5841 /* Rewrite instruction {pli/pld} PC imm12 into:
5842 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5846 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5848 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5849 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5851 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5853 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5854 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5855 dsc->u.preload.immed = 0;
5857 /* {pli/pld} [r0, r1] */
5858 dsc->modinsn[0] = insn1 & 0xfff0;
5859 dsc->modinsn[1] = 0xf001;
5862 dsc->cleanup = &cleanup_preload;
5866 /* Preload instructions with register offset. */
5869 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5870 struct displaced_step_closure *dsc, unsigned int rn,
5873 ULONGEST rn_val, rm_val;
5875 /* Preload register-offset instructions:
5877 {pli/pld} [rn, rm {, shift}]
5879 {pli/pld} [r0, r1 {, shift}]. */
5881 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5882 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5883 rn_val = displaced_read_reg (regs, dsc, rn);
5884 rm_val = displaced_read_reg (regs, dsc, rm);
5885 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5886 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5887 dsc->u.preload.immed = 0;
5889 dsc->cleanup = &cleanup_preload;
5893 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5894 struct regcache *regs,
5895 struct displaced_step_closure *dsc)
5897 unsigned int rn = bits (insn, 16, 19);
5898 unsigned int rm = bits (insn, 0, 3);
5901 if (!insn_references_pc (insn, 0x000f000ful))
5902 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5904 if (debug_displaced)
5905 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5906 (unsigned long) insn);
5908 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5910 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5914 /* Copy/cleanup coprocessor load and store instructions. */
5917 cleanup_copro_load_store (struct gdbarch *gdbarch,
5918 struct regcache *regs,
5919 struct displaced_step_closure *dsc)
5921 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5923 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5925 if (dsc->u.ldst.writeback)
5926 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5930 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5931 struct displaced_step_closure *dsc,
5932 int writeback, unsigned int rn)
5936 /* Coprocessor load/store instructions:
5938 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5940 {stc/stc2} [r0, #+/-imm].
5942 ldc/ldc2 are handled identically. */
5944 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5945 rn_val = displaced_read_reg (regs, dsc, rn);
5946 /* PC should be 4-byte aligned. */
5947 rn_val = rn_val & 0xfffffffc;
5948 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5950 dsc->u.ldst.writeback = writeback;
5951 dsc->u.ldst.rn = rn;
5953 dsc->cleanup = &cleanup_copro_load_store;
5957 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5958 struct regcache *regs,
5959 struct displaced_step_closure *dsc)
5961 unsigned int rn = bits (insn, 16, 19);
5963 if (!insn_references_pc (insn, 0x000f0000ul))
5964 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5966 if (debug_displaced)
5967 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5968 "load/store insn %.8lx\n", (unsigned long) insn);
5970 dsc->modinsn[0] = insn & 0xfff0ffff;
5972 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5978 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5979 uint16_t insn2, struct regcache *regs,
5980 struct displaced_step_closure *dsc)
5982 unsigned int rn = bits (insn1, 0, 3);
5984 if (rn != ARM_PC_REGNUM)
5985 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5986 "copro load/store", dsc);
5988 if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5990 "load/store insn %.4x%.4x\n", insn1, insn2);
5992 dsc->modinsn[0] = insn1 & 0xfff0;
5993 dsc->modinsn[1] = insn2;
5996 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5997 doesn't support writeback, so pass 0. */
5998 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6003 /* Clean up branch instructions (actually perform the branch, by setting
6007 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6008 struct displaced_step_closure *dsc)
6010 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6011 int branch_taken = condition_true (dsc->u.branch.cond, status);
6012 enum pc_write_style write_pc = dsc->u.branch.exchange
6013 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6018 if (dsc->u.branch.link)
6020 /* The value of LR should be the next insn of current one. In order
6021 not to confuse logic hanlding later insn `bx lr', if current insn mode
6022 is Thumb, the bit 0 of LR value should be set to 1. */
6023 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6026 next_insn_addr |= 0x1;
6028 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6032 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6035 /* Copy B/BL/BLX instructions with immediate destinations. */
6038 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6039 struct displaced_step_closure *dsc,
6040 unsigned int cond, int exchange, int link, long offset)
6042 /* Implement "BL<cond> <label>" as:
6044 Preparation: cond <- instruction condition
6045 Insn: mov r0, r0 (nop)
6046 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6048 B<cond> similar, but don't set r14 in cleanup. */
6050 dsc->u.branch.cond = cond;
6051 dsc->u.branch.link = link;
6052 dsc->u.branch.exchange = exchange;
6054 dsc->u.branch.dest = dsc->insn_addr;
6055 if (link && exchange)
6056 /* For BLX, offset is computed from the Align (PC, 4). */
6057 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6060 dsc->u.branch.dest += 4 + offset;
6062 dsc->u.branch.dest += 8 + offset;
6064 dsc->cleanup = &cleanup_branch;
6067 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6068 struct regcache *regs, struct displaced_step_closure *dsc)
6070 unsigned int cond = bits (insn, 28, 31);
6071 int exchange = (cond == 0xf);
6072 int link = exchange || bit (insn, 24);
6075 if (debug_displaced)
6076 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6077 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6078 (unsigned long) insn);
6080 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6081 then arrange the switch into Thumb mode. */
6082 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6084 offset = bits (insn, 0, 23) << 2;
6086 if (bit (offset, 25))
6087 offset = offset | ~0x3ffffff;
6089 dsc->modinsn[0] = ARM_NOP;
6091 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6096 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6097 uint16_t insn2, struct regcache *regs,
6098 struct displaced_step_closure *dsc)
6100 int link = bit (insn2, 14);
6101 int exchange = link && !bit (insn2, 12);
6104 int j1 = bit (insn2, 13);
6105 int j2 = bit (insn2, 11);
6106 int s = sbits (insn1, 10, 10);
6107 int i1 = !(j1 ^ bit (insn1, 10));
6108 int i2 = !(j2 ^ bit (insn1, 10));
6110 if (!link && !exchange) /* B */
6112 offset = (bits (insn2, 0, 10) << 1);
6113 if (bit (insn2, 12)) /* Encoding T4 */
6115 offset |= (bits (insn1, 0, 9) << 12)
6121 else /* Encoding T3 */
6123 offset |= (bits (insn1, 0, 5) << 12)
6127 cond = bits (insn1, 6, 9);
6132 offset = (bits (insn1, 0, 9) << 12);
6133 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6134 offset |= exchange ?
6135 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6138 if (debug_displaced)
6139 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6140 "%.4x %.4x with offset %.8lx\n",
6141 link ? (exchange) ? "blx" : "bl" : "b",
6142 insn1, insn2, offset);
6144 dsc->modinsn[0] = THUMB_NOP;
6146 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6150 /* Copy B Thumb instructions. */
6152 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6153 struct displaced_step_closure *dsc)
6155 unsigned int cond = 0;
6157 unsigned short bit_12_15 = bits (insn, 12, 15);
6158 CORE_ADDR from = dsc->insn_addr;
6160 if (bit_12_15 == 0xd)
6162 /* offset = SignExtend (imm8:0, 32) */
6163 offset = sbits ((insn << 1), 0, 8);
6164 cond = bits (insn, 8, 11);
6166 else if (bit_12_15 == 0xe) /* Encoding T2 */
6168 offset = sbits ((insn << 1), 0, 11);
6172 if (debug_displaced)
6173 fprintf_unfiltered (gdb_stdlog,
6174 "displaced: copying b immediate insn %.4x "
6175 "with offset %d\n", insn, offset);
6177 dsc->u.branch.cond = cond;
6178 dsc->u.branch.link = 0;
6179 dsc->u.branch.exchange = 0;
6180 dsc->u.branch.dest = from + 4 + offset;
6182 dsc->modinsn[0] = THUMB_NOP;
6184 dsc->cleanup = &cleanup_branch;
6189 /* Copy BX/BLX with register-specified destinations. */
6192 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6193 struct displaced_step_closure *dsc, int link,
6194 unsigned int cond, unsigned int rm)
6196 /* Implement {BX,BLX}<cond> <reg>" as:
6198 Preparation: cond <- instruction condition
6199 Insn: mov r0, r0 (nop)
6200 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6202 Don't set r14 in cleanup for BX. */
6204 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6206 dsc->u.branch.cond = cond;
6207 dsc->u.branch.link = link;
6209 dsc->u.branch.exchange = 1;
6211 dsc->cleanup = &cleanup_branch;
6215 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6216 struct regcache *regs, struct displaced_step_closure *dsc)
6218 unsigned int cond = bits (insn, 28, 31);
6221 int link = bit (insn, 5);
6222 unsigned int rm = bits (insn, 0, 3);
6224 if (debug_displaced)
6225 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6226 (unsigned long) insn);
6228 dsc->modinsn[0] = ARM_NOP;
6230 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6235 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6236 struct regcache *regs,
6237 struct displaced_step_closure *dsc)
6239 int link = bit (insn, 7);
6240 unsigned int rm = bits (insn, 3, 6);
6242 if (debug_displaced)
6243 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6244 (unsigned short) insn);
6246 dsc->modinsn[0] = THUMB_NOP;
6248 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6254 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6257 cleanup_alu_imm (struct gdbarch *gdbarch,
6258 struct regcache *regs, struct displaced_step_closure *dsc)
6260 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6261 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6262 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6263 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6267 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6268 struct displaced_step_closure *dsc)
6270 unsigned int rn = bits (insn, 16, 19);
6271 unsigned int rd = bits (insn, 12, 15);
6272 unsigned int op = bits (insn, 21, 24);
6273 int is_mov = (op == 0xd);
6274 ULONGEST rd_val, rn_val;
6276 if (!insn_references_pc (insn, 0x000ff000ul))
6277 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6279 if (debug_displaced)
6280 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6281 "%.8lx\n", is_mov ? "move" : "ALU",
6282 (unsigned long) insn);
6284 /* Instruction is of form:
6286 <op><cond> rd, [rn,] #imm
6290 Preparation: tmp1, tmp2 <- r0, r1;
6292 Insn: <op><cond> r0, r1, #imm
6293 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6296 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6297 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6298 rn_val = displaced_read_reg (regs, dsc, rn);
6299 rd_val = displaced_read_reg (regs, dsc, rd);
6300 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6301 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6305 dsc->modinsn[0] = insn & 0xfff00fff;
6307 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6309 dsc->cleanup = &cleanup_alu_imm;
6315 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6316 uint16_t insn2, struct regcache *regs,
6317 struct displaced_step_closure *dsc)
6319 unsigned int op = bits (insn1, 5, 8);
6320 unsigned int rn, rm, rd;
6321 ULONGEST rd_val, rn_val;
6323 rn = bits (insn1, 0, 3); /* Rn */
6324 rm = bits (insn2, 0, 3); /* Rm */
6325 rd = bits (insn2, 8, 11); /* Rd */
6327 /* This routine is only called for instruction MOV. */
6328 gdb_assert (op == 0x2 && rn == 0xf);
6330 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6331 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6333 if (debug_displaced)
6334 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6335 "ALU", insn1, insn2);
6337 /* Instruction is of form:
6339 <op><cond> rd, [rn,] #imm
6343 Preparation: tmp1, tmp2 <- r0, r1;
6345 Insn: <op><cond> r0, r1, #imm
6346 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6349 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6350 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6351 rn_val = displaced_read_reg (regs, dsc, rn);
6352 rd_val = displaced_read_reg (regs, dsc, rd);
6353 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6354 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6357 dsc->modinsn[0] = insn1;
6358 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6361 dsc->cleanup = &cleanup_alu_imm;
6366 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6369 cleanup_alu_reg (struct gdbarch *gdbarch,
6370 struct regcache *regs, struct displaced_step_closure *dsc)
6375 rd_val = displaced_read_reg (regs, dsc, 0);
6377 for (i = 0; i < 3; i++)
6378 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6380 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6384 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6385 struct displaced_step_closure *dsc,
6386 unsigned int rd, unsigned int rn, unsigned int rm)
6388 ULONGEST rd_val, rn_val, rm_val;
6390 /* Instruction is of form:
6392 <op><cond> rd, [rn,] rm [, <shift>]
6396 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6397 r0, r1, r2 <- rd, rn, rm
6398 Insn: <op><cond> r0, r1, r2 [, <shift>]
6399 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6402 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6403 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6404 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6405 rd_val = displaced_read_reg (regs, dsc, rd);
6406 rn_val = displaced_read_reg (regs, dsc, rn);
6407 rm_val = displaced_read_reg (regs, dsc, rm);
6408 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6409 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6410 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6413 dsc->cleanup = &cleanup_alu_reg;
6417 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6418 struct displaced_step_closure *dsc)
6420 unsigned int op = bits (insn, 21, 24);
6421 int is_mov = (op == 0xd);
6423 if (!insn_references_pc (insn, 0x000ff00ful))
6424 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6426 if (debug_displaced)
6427 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6428 is_mov ? "move" : "ALU", (unsigned long) insn);
6431 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6433 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6435 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6441 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6442 struct regcache *regs,
6443 struct displaced_step_closure *dsc)
6445 unsigned rn, rm, rd;
6447 rd = bits (insn, 3, 6);
6448 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6451 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6452 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6454 if (debug_displaced)
6455 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6456 "ALU", (unsigned short) insn);
6458 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6460 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6465 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6468 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6469 struct regcache *regs,
6470 struct displaced_step_closure *dsc)
6472 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6475 for (i = 0; i < 4; i++)
6476 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6478 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6482 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6483 struct displaced_step_closure *dsc,
6484 unsigned int rd, unsigned int rn, unsigned int rm,
6488 ULONGEST rd_val, rn_val, rm_val, rs_val;
6490 /* Instruction is of form:
6492 <op><cond> rd, [rn,] rm, <shift> rs
6496 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6497 r0, r1, r2, r3 <- rd, rn, rm, rs
6498 Insn: <op><cond> r0, r1, r2, <shift> r3
6500 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6504 for (i = 0; i < 4; i++)
6505 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6507 rd_val = displaced_read_reg (regs, dsc, rd);
6508 rn_val = displaced_read_reg (regs, dsc, rn);
6509 rm_val = displaced_read_reg (regs, dsc, rm);
6510 rs_val = displaced_read_reg (regs, dsc, rs);
6511 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6512 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6513 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6514 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6516 dsc->cleanup = &cleanup_alu_shifted_reg;
6520 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6521 struct regcache *regs,
6522 struct displaced_step_closure *dsc)
6524 unsigned int op = bits (insn, 21, 24);
6525 int is_mov = (op == 0xd);
6526 unsigned int rd, rn, rm, rs;
6528 if (!insn_references_pc (insn, 0x000fff0ful))
6529 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6531 if (debug_displaced)
6532 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6533 "%.8lx\n", is_mov ? "move" : "ALU",
6534 (unsigned long) insn);
6536 rn = bits (insn, 16, 19);
6537 rm = bits (insn, 0, 3);
6538 rs = bits (insn, 8, 11);
6539 rd = bits (insn, 12, 15);
6542 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6544 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6546 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6551 /* Clean up load instructions. */
6554 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6555 struct displaced_step_closure *dsc)
6557 ULONGEST rt_val, rt_val2 = 0, rn_val;
6559 rt_val = displaced_read_reg (regs, dsc, 0);
6560 if (dsc->u.ldst.xfersize == 8)
6561 rt_val2 = displaced_read_reg (regs, dsc, 1);
6562 rn_val = displaced_read_reg (regs, dsc, 2);
6564 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6565 if (dsc->u.ldst.xfersize > 4)
6566 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6567 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6568 if (!dsc->u.ldst.immed)
6569 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6571 /* Handle register writeback. */
6572 if (dsc->u.ldst.writeback)
6573 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6574 /* Put result in right place. */
6575 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6576 if (dsc->u.ldst.xfersize == 8)
6577 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6580 /* Clean up store instructions. */
6583 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6584 struct displaced_step_closure *dsc)
6586 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6588 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6589 if (dsc->u.ldst.xfersize > 4)
6590 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6591 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6592 if (!dsc->u.ldst.immed)
6593 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6594 if (!dsc->u.ldst.restore_r4)
6595 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6598 if (dsc->u.ldst.writeback)
6599 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6602 /* Copy "extra" load/store instructions. These are halfword/doubleword
6603 transfers, which have a different encoding to byte/word transfers. */
6606 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6607 struct regcache *regs, struct displaced_step_closure *dsc)
6609 unsigned int op1 = bits (insn, 20, 24);
6610 unsigned int op2 = bits (insn, 5, 6);
6611 unsigned int rt = bits (insn, 12, 15);
6612 unsigned int rn = bits (insn, 16, 19);
6613 unsigned int rm = bits (insn, 0, 3);
6614 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6615 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6616 int immed = (op1 & 0x4) != 0;
6618 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6620 if (!insn_references_pc (insn, 0x000ff00ful))
6621 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6623 if (debug_displaced)
6624 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6625 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6626 (unsigned long) insn);
6628 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6631 internal_error (__FILE__, __LINE__,
6632 _("copy_extra_ld_st: instruction decode error"));
6634 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6635 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6636 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6638 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6640 rt_val = displaced_read_reg (regs, dsc, rt);
6641 if (bytesize[opcode] == 8)
6642 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6643 rn_val = displaced_read_reg (regs, dsc, rn);
6645 rm_val = displaced_read_reg (regs, dsc, rm);
6647 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6648 if (bytesize[opcode] == 8)
6649 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6650 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6652 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6655 dsc->u.ldst.xfersize = bytesize[opcode];
6656 dsc->u.ldst.rn = rn;
6657 dsc->u.ldst.immed = immed;
6658 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6659 dsc->u.ldst.restore_r4 = 0;
6662 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6664 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6665 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6667 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6669 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6670 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6672 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6677 /* Copy byte/half word/word loads and stores. */
6680 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6681 struct displaced_step_closure *dsc, int load,
6682 int immed, int writeback, int size, int usermode,
6683 int rt, int rm, int rn)
6685 ULONGEST rt_val, rn_val, rm_val = 0;
6687 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6688 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6690 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6692 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6694 rt_val = displaced_read_reg (regs, dsc, rt);
6695 rn_val = displaced_read_reg (regs, dsc, rn);
6697 rm_val = displaced_read_reg (regs, dsc, rm);
6699 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6700 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6702 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6704 dsc->u.ldst.xfersize = size;
6705 dsc->u.ldst.rn = rn;
6706 dsc->u.ldst.immed = immed;
6707 dsc->u.ldst.writeback = writeback;
6709 /* To write PC we can do:
6711 Before this sequence of instructions:
6712 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6713 r2 is the Rn value got from dispalced_read_reg.
6715 Insn1: push {pc} Write address of STR instruction + offset on stack
6716 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6717 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6718 = addr(Insn1) + offset - addr(Insn3) - 8
6720 Insn4: add r4, r4, #8 r4 = offset - 8
6721 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6723 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6725 Otherwise we don't know what value to write for PC, since the offset is
6726 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6727 of this can be found in Section "Saving from r15" in
6728 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6730 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6735 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6736 uint16_t insn2, struct regcache *regs,
6737 struct displaced_step_closure *dsc, int size)
6739 unsigned int u_bit = bit (insn1, 7);
6740 unsigned int rt = bits (insn2, 12, 15);
6741 int imm12 = bits (insn2, 0, 11);
6744 if (debug_displaced)
6745 fprintf_unfiltered (gdb_stdlog,
6746 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6747 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6753 /* Rewrite instruction LDR Rt imm12 into:
6755 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6759 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6762 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6763 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6764 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6766 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6768 pc_val = pc_val & 0xfffffffc;
6770 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6771 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6775 dsc->u.ldst.xfersize = size;
6776 dsc->u.ldst.immed = 0;
6777 dsc->u.ldst.writeback = 0;
6778 dsc->u.ldst.restore_r4 = 0;
6780 /* LDR R0, R2, R3 */
6781 dsc->modinsn[0] = 0xf852;
6782 dsc->modinsn[1] = 0x3;
6785 dsc->cleanup = &cleanup_load;
6791 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6792 uint16_t insn2, struct regcache *regs,
6793 struct displaced_step_closure *dsc,
6794 int writeback, int immed)
6796 unsigned int rt = bits (insn2, 12, 15);
6797 unsigned int rn = bits (insn1, 0, 3);
6798 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6799 /* In LDR (register), there is also a register Rm, which is not allowed to
6800 be PC, so we don't have to check it. */
6802 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6803 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6806 if (debug_displaced)
6807 fprintf_unfiltered (gdb_stdlog,
6808 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6809 rt, rn, insn1, insn2);
6811 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6814 dsc->u.ldst.restore_r4 = 0;
6817 /* ldr[b]<cond> rt, [rn, #imm], etc.
6819 ldr[b]<cond> r0, [r2, #imm]. */
6821 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6822 dsc->modinsn[1] = insn2 & 0x0fff;
6825 /* ldr[b]<cond> rt, [rn, rm], etc.
6827 ldr[b]<cond> r0, [r2, r3]. */
6829 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6830 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6840 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6841 struct regcache *regs,
6842 struct displaced_step_closure *dsc,
6843 int load, int size, int usermode)
6845 int immed = !bit (insn, 25);
6846 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6847 unsigned int rt = bits (insn, 12, 15);
6848 unsigned int rn = bits (insn, 16, 19);
6849 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6851 if (!insn_references_pc (insn, 0x000ff00ful))
6852 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6854 if (debug_displaced)
6855 fprintf_unfiltered (gdb_stdlog,
6856 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6857 load ? (size == 1 ? "ldrb" : "ldr")
6858 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6860 (unsigned long) insn);
6862 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6863 usermode, rt, rm, rn);
6865 if (load || rt != ARM_PC_REGNUM)
6867 dsc->u.ldst.restore_r4 = 0;
6870 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6872 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6873 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6875 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6877 {ldr,str}[b]<cond> r0, [r2, r3]. */
6878 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6882 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6883 dsc->u.ldst.restore_r4 = 1;
6884 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6885 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6886 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6887 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6888 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6892 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6894 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6899 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6904 /* Cleanup LDM instructions with fully-populated register list. This is an
6905 unfortunate corner case: it's impossible to implement correctly by modifying
6906 the instruction. The issue is as follows: we have an instruction,
6910 which we must rewrite to avoid loading PC. A possible solution would be to
6911 do the load in two halves, something like (with suitable cleanup
6915 ldm[id][ab] r8!, {r0-r7}
6917 ldm[id][ab] r8, {r7-r14}
6920 but at present there's no suitable place for <temp>, since the scratch space
6921 is overwritten before the cleanup routine is called. For now, we simply
6922 emulate the instruction. */
6925 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6926 struct displaced_step_closure *dsc)
6928 int inc = dsc->u.block.increment;
6929 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6930 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6931 uint32_t regmask = dsc->u.block.regmask;
6932 int regno = inc ? 0 : 15;
6933 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6934 int exception_return = dsc->u.block.load && dsc->u.block.user
6935 && (regmask & 0x8000) != 0;
6936 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6937 int do_transfer = condition_true (dsc->u.block.cond, status);
6938 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6943 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6944 sensible we can do here. Complain loudly. */
6945 if (exception_return)
6946 error (_("Cannot single-step exception return"));
6948 /* We don't handle any stores here for now. */
6949 gdb_assert (dsc->u.block.load != 0);
6951 if (debug_displaced)
6952 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6953 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6954 dsc->u.block.increment ? "inc" : "dec",
6955 dsc->u.block.before ? "before" : "after");
6962 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6965 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6968 xfer_addr += bump_before;
6970 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6971 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6973 xfer_addr += bump_after;
6975 regmask &= ~(1 << regno);
6978 if (dsc->u.block.writeback)
6979 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6983 /* Clean up an STM which included the PC in the register list. */
6986 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6987 struct displaced_step_closure *dsc)
6989 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6990 int store_executed = condition_true (dsc->u.block.cond, status);
6991 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6992 CORE_ADDR stm_insn_addr;
6995 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6997 /* If condition code fails, there's nothing else to do. */
6998 if (!store_executed)
7001 if (dsc->u.block.increment)
7003 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7005 if (dsc->u.block.before)
7010 pc_stored_at = dsc->u.block.xfer_addr;
7012 if (dsc->u.block.before)
7016 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7017 stm_insn_addr = dsc->scratch_base;
7018 offset = pc_val - stm_insn_addr;
7020 if (debug_displaced)
7021 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7022 "STM instruction\n", offset);
7024 /* Rewrite the stored PC to the proper value for the non-displaced original
7026 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7027 dsc->insn_addr + offset);
7030 /* Clean up an LDM which includes the PC in the register list. We clumped all
7031 the registers in the transferred list into a contiguous range r0...rX (to
7032 avoid loading PC directly and losing control of the debugged program), so we
7033 must undo that here. */
7036 cleanup_block_load_pc (struct gdbarch *gdbarch,
7037 struct regcache *regs,
7038 struct displaced_step_closure *dsc)
7040 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7041 int load_executed = condition_true (dsc->u.block.cond, status);
7042 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7043 unsigned int regs_loaded = bitcount (mask);
7044 unsigned int num_to_shuffle = regs_loaded, clobbered;
7046 /* The method employed here will fail if the register list is fully populated
7047 (we need to avoid loading PC directly). */
7048 gdb_assert (num_to_shuffle < 16);
7053 clobbered = (1 << num_to_shuffle) - 1;
7055 while (num_to_shuffle > 0)
7057 if ((mask & (1 << write_reg)) != 0)
7059 unsigned int read_reg = num_to_shuffle - 1;
7061 if (read_reg != write_reg)
7063 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7064 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7065 if (debug_displaced)
7066 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7067 "loaded register r%d to r%d\n"), read_reg,
7070 else if (debug_displaced)
7071 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7072 "r%d already in the right place\n"),
7075 clobbered &= ~(1 << write_reg);
7083 /* Restore any registers we scribbled over. */
7084 for (write_reg = 0; clobbered != 0; write_reg++)
7086 if ((clobbered & (1 << write_reg)) != 0)
7088 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7090 if (debug_displaced)
7091 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7092 "clobbered register r%d\n"), write_reg);
7093 clobbered &= ~(1 << write_reg);
7097 /* Perform register writeback manually. */
7098 if (dsc->u.block.writeback)
7100 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7102 if (dsc->u.block.increment)
7103 new_rn_val += regs_loaded * 4;
7105 new_rn_val -= regs_loaded * 4;
7107 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7112 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7113 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7116 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7117 struct regcache *regs,
7118 struct displaced_step_closure *dsc)
7120 int load = bit (insn, 20);
7121 int user = bit (insn, 22);
7122 int increment = bit (insn, 23);
7123 int before = bit (insn, 24);
7124 int writeback = bit (insn, 21);
7125 int rn = bits (insn, 16, 19);
7127 /* Block transfers which don't mention PC can be run directly
7129 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7130 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7132 if (rn == ARM_PC_REGNUM)
7134 warning (_("displaced: Unpredictable LDM or STM with "
7135 "base register r15"));
7136 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7139 if (debug_displaced)
7140 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7141 "%.8lx\n", (unsigned long) insn);
7143 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7144 dsc->u.block.rn = rn;
7146 dsc->u.block.load = load;
7147 dsc->u.block.user = user;
7148 dsc->u.block.increment = increment;
7149 dsc->u.block.before = before;
7150 dsc->u.block.writeback = writeback;
7151 dsc->u.block.cond = bits (insn, 28, 31);
7153 dsc->u.block.regmask = insn & 0xffff;
7157 if ((insn & 0xffff) == 0xffff)
7159 /* LDM with a fully-populated register list. This case is
7160 particularly tricky. Implement for now by fully emulating the
7161 instruction (which might not behave perfectly in all cases, but
7162 these instructions should be rare enough for that not to matter
7164 dsc->modinsn[0] = ARM_NOP;
7166 dsc->cleanup = &cleanup_block_load_all;
7170 /* LDM of a list of registers which includes PC. Implement by
7171 rewriting the list of registers to be transferred into a
7172 contiguous chunk r0...rX before doing the transfer, then shuffling
7173 registers into the correct places in the cleanup routine. */
7174 unsigned int regmask = insn & 0xffff;
7175 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7176 unsigned int to = 0, from = 0, i, new_rn;
7178 for (i = 0; i < num_in_list; i++)
7179 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7181 /* Writeback makes things complicated. We need to avoid clobbering
7182 the base register with one of the registers in our modified
7183 register list, but just using a different register can't work in
7186 ldm r14!, {r0-r13,pc}
7188 which would need to be rewritten as:
7192 but that can't work, because there's no free register for N.
7194 Solve this by turning off the writeback bit, and emulating
7195 writeback manually in the cleanup routine. */
7200 new_regmask = (1 << num_in_list) - 1;
7202 if (debug_displaced)
7203 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7204 "{..., pc}: original reg list %.4x, modified "
7205 "list %.4x\n"), rn, writeback ? "!" : "",
7206 (int) insn & 0xffff, new_regmask);
7208 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7210 dsc->cleanup = &cleanup_block_load_pc;
7215 /* STM of a list of registers which includes PC. Run the instruction
7216 as-is, but out of line: this will store the wrong value for the PC,
7217 so we must manually fix up the memory in the cleanup routine.
7218 Doing things this way has the advantage that we can auto-detect
7219 the offset of the PC write (which is architecture-dependent) in
7220 the cleanup routine. */
7221 dsc->modinsn[0] = insn;
7223 dsc->cleanup = &cleanup_block_store_pc;
7230 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7231 struct regcache *regs,
7232 struct displaced_step_closure *dsc)
7234 int rn = bits (insn1, 0, 3);
7235 int load = bit (insn1, 4);
7236 int writeback = bit (insn1, 5);
7238 /* Block transfers which don't mention PC can be run directly
7240 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7241 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7243 if (rn == ARM_PC_REGNUM)
7245 warning (_("displaced: Unpredictable LDM or STM with "
7246 "base register r15"));
7247 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7248 "unpredictable ldm/stm", dsc);
7251 if (debug_displaced)
7252 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7253 "%.4x%.4x\n", insn1, insn2);
7255 /* Clear bit 13, since it should be always zero. */
7256 dsc->u.block.regmask = (insn2 & 0xdfff);
7257 dsc->u.block.rn = rn;
7259 dsc->u.block.load = load;
7260 dsc->u.block.user = 0;
7261 dsc->u.block.increment = bit (insn1, 7);
7262 dsc->u.block.before = bit (insn1, 8);
7263 dsc->u.block.writeback = writeback;
7264 dsc->u.block.cond = INST_AL;
7265 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7269 if (dsc->u.block.regmask == 0xffff)
7271 /* This branch is impossible to happen. */
7276 unsigned int regmask = dsc->u.block.regmask;
7277 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7278 unsigned int to = 0, from = 0, i, new_rn;
7280 for (i = 0; i < num_in_list; i++)
7281 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7286 new_regmask = (1 << num_in_list) - 1;
7288 if (debug_displaced)
7289 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7290 "{..., pc}: original reg list %.4x, modified "
7291 "list %.4x\n"), rn, writeback ? "!" : "",
7292 (int) dsc->u.block.regmask, new_regmask);
7294 dsc->modinsn[0] = insn1;
7295 dsc->modinsn[1] = (new_regmask & 0xffff);
7298 dsc->cleanup = &cleanup_block_load_pc;
7303 dsc->modinsn[0] = insn1;
7304 dsc->modinsn[1] = insn2;
7306 dsc->cleanup = &cleanup_block_store_pc;
7311 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7312 for Linux, where some SVC instructions must be treated specially. */
7315 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7316 struct displaced_step_closure *dsc)
7318 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7320 if (debug_displaced)
7321 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7322 "%.8lx\n", (unsigned long) resume_addr);
7324 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7328 /* Common copy routine for svc instruciton. */
7331 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7332 struct displaced_step_closure *dsc)
7334 /* Preparation: none.
7335 Insn: unmodified svc.
7336 Cleanup: pc <- insn_addr + insn_size. */
7338 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7340 dsc->wrote_to_pc = 1;
7342 /* Allow OS-specific code to override SVC handling. */
7343 if (dsc->u.svc.copy_svc_os)
7344 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7347 dsc->cleanup = &cleanup_svc;
7353 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7354 struct regcache *regs, struct displaced_step_closure *dsc)
7357 if (debug_displaced)
7358 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7359 (unsigned long) insn);
7361 dsc->modinsn[0] = insn;
7363 return install_svc (gdbarch, regs, dsc);
7367 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7368 struct regcache *regs, struct displaced_step_closure *dsc)
7371 if (debug_displaced)
7372 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7375 dsc->modinsn[0] = insn;
7377 return install_svc (gdbarch, regs, dsc);
7380 /* Copy undefined instructions. */
7383 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7384 struct displaced_step_closure *dsc)
7386 if (debug_displaced)
7387 fprintf_unfiltered (gdb_stdlog,
7388 "displaced: copying undefined insn %.8lx\n",
7389 (unsigned long) insn);
7391 dsc->modinsn[0] = insn;
7397 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7398 struct displaced_step_closure *dsc)
7401 if (debug_displaced)
7402 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7403 "%.4x %.4x\n", (unsigned short) insn1,
7404 (unsigned short) insn2);
7406 dsc->modinsn[0] = insn1;
7407 dsc->modinsn[1] = insn2;
7413 /* Copy unpredictable instructions. */
7416 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7417 struct displaced_step_closure *dsc)
7419 if (debug_displaced)
7420 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7421 "%.8lx\n", (unsigned long) insn);
7423 dsc->modinsn[0] = insn;
7428 /* The decode_* functions are instruction decoding helpers. They mostly follow
7429 the presentation in the ARM ARM. */
7432 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7433 struct regcache *regs,
7434 struct displaced_step_closure *dsc)
7436 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7437 unsigned int rn = bits (insn, 16, 19);
7439 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7440 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7441 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7442 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7443 else if ((op1 & 0x60) == 0x20)
7444 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7445 else if ((op1 & 0x71) == 0x40)
7446 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7448 else if ((op1 & 0x77) == 0x41)
7449 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7450 else if ((op1 & 0x77) == 0x45)
7451 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7452 else if ((op1 & 0x77) == 0x51)
7455 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7457 return arm_copy_unpred (gdbarch, insn, dsc);
7459 else if ((op1 & 0x77) == 0x55)
7460 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7461 else if (op1 == 0x57)
7464 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7465 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7466 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7467 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7468 default: return arm_copy_unpred (gdbarch, insn, dsc);
7470 else if ((op1 & 0x63) == 0x43)
7471 return arm_copy_unpred (gdbarch, insn, dsc);
7472 else if ((op2 & 0x1) == 0x0)
7473 switch (op1 & ~0x80)
7476 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7478 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7479 case 0x71: case 0x75:
7481 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7482 case 0x63: case 0x67: case 0x73: case 0x77:
7483 return arm_copy_unpred (gdbarch, insn, dsc);
7485 return arm_copy_undef (gdbarch, insn, dsc);
7488 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7492 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7493 struct regcache *regs,
7494 struct displaced_step_closure *dsc)
7496 if (bit (insn, 27) == 0)
7497 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7498 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7499 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7502 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7505 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7507 case 0x4: case 0x5: case 0x6: case 0x7:
7508 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7511 switch ((insn & 0xe00000) >> 21)
7513 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7515 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7518 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7521 return arm_copy_undef (gdbarch, insn, dsc);
7526 int rn_f = (bits (insn, 16, 19) == 0xf);
7527 switch ((insn & 0xe00000) >> 21)
7530 /* ldc/ldc2 imm (undefined for rn == pc). */
7531 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7532 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7535 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7537 case 0x4: case 0x5: case 0x6: case 0x7:
7538 /* ldc/ldc2 lit (undefined for rn != pc). */
7539 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7540 : arm_copy_undef (gdbarch, insn, dsc);
7543 return arm_copy_undef (gdbarch, insn, dsc);
7548 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7551 if (bits (insn, 16, 19) == 0xf)
7553 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7555 return arm_copy_undef (gdbarch, insn, dsc);
7559 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7561 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7565 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7567 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7570 return arm_copy_undef (gdbarch, insn, dsc);
7574 /* Decode miscellaneous instructions in dp/misc encoding space. */
7577 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7578 struct regcache *regs,
7579 struct displaced_step_closure *dsc)
7581 unsigned int op2 = bits (insn, 4, 6);
7582 unsigned int op = bits (insn, 21, 22);
7583 unsigned int op1 = bits (insn, 16, 19);
7588 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7591 if (op == 0x1) /* bx. */
7592 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7594 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7596 return arm_copy_undef (gdbarch, insn, dsc);
7600 /* Not really supported. */
7601 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7603 return arm_copy_undef (gdbarch, insn, dsc);
7607 return arm_copy_bx_blx_reg (gdbarch, insn,
7608 regs, dsc); /* blx register. */
7610 return arm_copy_undef (gdbarch, insn, dsc);
7613 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7617 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7619 /* Not really supported. */
7620 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7623 return arm_copy_undef (gdbarch, insn, dsc);
7628 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7629 struct regcache *regs,
7630 struct displaced_step_closure *dsc)
7633 switch (bits (insn, 20, 24))
7636 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7639 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7641 case 0x12: case 0x16:
7642 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7645 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7649 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7651 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7652 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7653 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7654 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7655 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7656 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7657 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7658 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7659 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7660 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7661 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7662 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7663 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7664 /* 2nd arg means "unpriveleged". */
7665 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7669 /* Should be unreachable. */
7674 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7675 struct regcache *regs,
7676 struct displaced_step_closure *dsc)
7678 int a = bit (insn, 25), b = bit (insn, 4);
7679 uint32_t op1 = bits (insn, 20, 24);
7680 int rn_f = bits (insn, 16, 19) == 0xf;
7682 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7683 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7684 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7685 else if ((!a && (op1 & 0x17) == 0x02)
7686 || (a && (op1 & 0x17) == 0x02 && !b))
7687 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7688 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7689 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7690 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7691 else if ((!a && (op1 & 0x17) == 0x03)
7692 || (a && (op1 & 0x17) == 0x03 && !b))
7693 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7694 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7695 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7696 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7697 else if ((!a && (op1 & 0x17) == 0x06)
7698 || (a && (op1 & 0x17) == 0x06 && !b))
7699 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7700 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7701 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7702 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7703 else if ((!a && (op1 & 0x17) == 0x07)
7704 || (a && (op1 & 0x17) == 0x07 && !b))
7705 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7707 /* Should be unreachable. */
7712 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7713 struct displaced_step_closure *dsc)
7715 switch (bits (insn, 20, 24))
7717 case 0x00: case 0x01: case 0x02: case 0x03:
7718 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7720 case 0x04: case 0x05: case 0x06: case 0x07:
7721 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7723 case 0x08: case 0x09: case 0x0a: case 0x0b:
7724 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7725 return arm_copy_unmodified (gdbarch, insn,
7726 "decode/pack/unpack/saturate/reverse", dsc);
7729 if (bits (insn, 5, 7) == 0) /* op2. */
7731 if (bits (insn, 12, 15) == 0xf)
7732 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7734 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7737 return arm_copy_undef (gdbarch, insn, dsc);
7739 case 0x1a: case 0x1b:
7740 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7741 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7743 return arm_copy_undef (gdbarch, insn, dsc);
7745 case 0x1c: case 0x1d:
7746 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7748 if (bits (insn, 0, 3) == 0xf)
7749 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7751 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7754 return arm_copy_undef (gdbarch, insn, dsc);
7756 case 0x1e: case 0x1f:
7757 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7758 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7760 return arm_copy_undef (gdbarch, insn, dsc);
7763 /* Should be unreachable. */
7768 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7769 struct regcache *regs,
7770 struct displaced_step_closure *dsc)
7773 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7775 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7779 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7780 struct regcache *regs,
7781 struct displaced_step_closure *dsc)
7783 unsigned int opcode = bits (insn, 20, 24);
7787 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7788 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7790 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7791 case 0x12: case 0x16:
7792 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7794 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7795 case 0x13: case 0x17:
7796 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7798 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7799 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7800 /* Note: no writeback for these instructions. Bit 25 will always be
7801 zero though (via caller), so the following works OK. */
7802 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7805 /* Should be unreachable. */
7809 /* Decode shifted register instructions. */
7812 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7813 uint16_t insn2, struct regcache *regs,
7814 struct displaced_step_closure *dsc)
7816 /* PC is only allowed to be used in instruction MOV. */
7818 unsigned int op = bits (insn1, 5, 8);
7819 unsigned int rn = bits (insn1, 0, 3);
7821 if (op == 0x2 && rn == 0xf) /* MOV */
7822 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7824 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7825 "dp (shift reg)", dsc);
7829 /* Decode extension register load/store. Exactly the same as
7830 arm_decode_ext_reg_ld_st. */
7833 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7834 uint16_t insn2, struct regcache *regs,
7835 struct displaced_step_closure *dsc)
7837 unsigned int opcode = bits (insn1, 4, 8);
7841 case 0x04: case 0x05:
7842 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7843 "vfp/neon vmov", dsc);
7845 case 0x08: case 0x0c: /* 01x00 */
7846 case 0x0a: case 0x0e: /* 01x10 */
7847 case 0x12: case 0x16: /* 10x10 */
7848 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7849 "vfp/neon vstm/vpush", dsc);
7851 case 0x09: case 0x0d: /* 01x01 */
7852 case 0x0b: case 0x0f: /* 01x11 */
7853 case 0x13: case 0x17: /* 10x11 */
7854 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7855 "vfp/neon vldm/vpop", dsc);
7857 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7858 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7860 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7861 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7864 /* Should be unreachable. */
7869 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7870 struct regcache *regs, struct displaced_step_closure *dsc)
7872 unsigned int op1 = bits (insn, 20, 25);
7873 int op = bit (insn, 4);
7874 unsigned int coproc = bits (insn, 8, 11);
7875 unsigned int rn = bits (insn, 16, 19);
7877 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7878 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7879 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7880 && (coproc & 0xe) != 0xa)
7882 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7883 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7884 && (coproc & 0xe) != 0xa)
7885 /* ldc/ldc2 imm/lit. */
7886 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7887 else if ((op1 & 0x3e) == 0x00)
7888 return arm_copy_undef (gdbarch, insn, dsc);
7889 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7890 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7891 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7892 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7893 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7894 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7895 else if ((op1 & 0x30) == 0x20 && !op)
7897 if ((coproc & 0xe) == 0xa)
7898 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7900 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7902 else if ((op1 & 0x30) == 0x20 && op)
7903 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7904 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7905 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7906 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7907 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7908 else if ((op1 & 0x30) == 0x30)
7909 return arm_copy_svc (gdbarch, insn, regs, dsc);
7911 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7915 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7916 uint16_t insn2, struct regcache *regs,
7917 struct displaced_step_closure *dsc)
7919 unsigned int coproc = bits (insn2, 8, 11);
7920 unsigned int op1 = bits (insn1, 4, 9);
7921 unsigned int bit_5_8 = bits (insn1, 5, 8);
7922 unsigned int bit_9 = bit (insn1, 9);
7923 unsigned int bit_4 = bit (insn1, 4);
7924 unsigned int rn = bits (insn1, 0, 3);
7929 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7930 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7932 else if (bit_5_8 == 0) /* UNDEFINED. */
7933 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7936 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7937 if ((coproc & 0xe) == 0xa)
7938 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7940 else /* coproc is not 101x. */
7942 if (bit_4 == 0) /* STC/STC2. */
7943 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7945 else /* LDC/LDC2 {literal, immeidate}. */
7946 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7952 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7958 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7959 struct displaced_step_closure *dsc, int rd)
7965 Preparation: Rd <- PC
7971 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7972 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7976 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7977 struct displaced_step_closure *dsc,
7978 int rd, unsigned int imm)
7981 /* Encoding T2: ADDS Rd, #imm */
7982 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7984 install_pc_relative (gdbarch, regs, dsc, rd);
7990 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7991 struct regcache *regs,
7992 struct displaced_step_closure *dsc)
7994 unsigned int rd = bits (insn, 8, 10);
7995 unsigned int imm8 = bits (insn, 0, 7);
7997 if (debug_displaced)
7998 fprintf_unfiltered (gdb_stdlog,
7999 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8002 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8006 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8007 uint16_t insn2, struct regcache *regs,
8008 struct displaced_step_closure *dsc)
8010 unsigned int rd = bits (insn2, 8, 11);
8011 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8012 extract raw immediate encoding rather than computing immediate. When
8013 generating ADD or SUB instruction, we can simply perform OR operation to
8014 set immediate into ADD. */
8015 unsigned int imm_3_8 = insn2 & 0x70ff;
8016 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8018 if (debug_displaced)
8019 fprintf_unfiltered (gdb_stdlog,
8020 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8021 rd, imm_i, imm_3_8, insn1, insn2);
8023 if (bit (insn1, 7)) /* Encoding T2 */
8025 /* Encoding T3: SUB Rd, Rd, #imm */
8026 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8027 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8029 else /* Encoding T3 */
8031 /* Encoding T3: ADD Rd, Rd, #imm */
8032 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8033 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8037 install_pc_relative (gdbarch, regs, dsc, rd);
8043 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8044 struct regcache *regs,
8045 struct displaced_step_closure *dsc)
8047 unsigned int rt = bits (insn1, 8, 10);
8049 int imm8 = (bits (insn1, 0, 7) << 2);
8050 CORE_ADDR from = dsc->insn_addr;
8056 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8058 Insn: LDR R0, [R2, R3];
8059 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8061 if (debug_displaced)
8062 fprintf_unfiltered (gdb_stdlog,
8063 "displaced: copying thumb ldr r%d [pc #%d]\n"
8066 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8067 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8068 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8069 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8070 /* The assembler calculates the required value of the offset from the
8071 Align(PC,4) value of this instruction to the label. */
8072 pc = pc & 0xfffffffc;
8074 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8075 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8078 dsc->u.ldst.xfersize = 4;
8080 dsc->u.ldst.immed = 0;
8081 dsc->u.ldst.writeback = 0;
8082 dsc->u.ldst.restore_r4 = 0;
8084 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8086 dsc->cleanup = &cleanup_load;
8091 /* Copy Thumb cbnz/cbz insruction. */
8094 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8095 struct regcache *regs,
8096 struct displaced_step_closure *dsc)
8098 int non_zero = bit (insn1, 11);
8099 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8100 CORE_ADDR from = dsc->insn_addr;
8101 int rn = bits (insn1, 0, 2);
8102 int rn_val = displaced_read_reg (regs, dsc, rn);
8104 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8105 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8106 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8107 condition is false, let it be, cleanup_branch will do nothing. */
8108 if (dsc->u.branch.cond)
8110 dsc->u.branch.cond = INST_AL;
8111 dsc->u.branch.dest = from + 4 + imm5;
8114 dsc->u.branch.dest = from + 2;
8116 dsc->u.branch.link = 0;
8117 dsc->u.branch.exchange = 0;
8119 if (debug_displaced)
8120 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8121 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8122 rn, rn_val, insn1, dsc->u.branch.dest);
8124 dsc->modinsn[0] = THUMB_NOP;
8126 dsc->cleanup = &cleanup_branch;
8130 /* Copy Table Branch Byte/Halfword */
8132 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8133 uint16_t insn2, struct regcache *regs,
8134 struct displaced_step_closure *dsc)
8136 ULONGEST rn_val, rm_val;
8137 int is_tbh = bit (insn2, 4);
8138 CORE_ADDR halfwords = 0;
8139 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8141 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8142 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8148 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8149 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8155 target_read_memory (rn_val + rm_val, buf, 1);
8156 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8159 if (debug_displaced)
8160 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8161 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8162 (unsigned int) rn_val, (unsigned int) rm_val,
8163 (unsigned int) halfwords);
8165 dsc->u.branch.cond = INST_AL;
8166 dsc->u.branch.link = 0;
8167 dsc->u.branch.exchange = 0;
8168 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8170 dsc->cleanup = &cleanup_branch;
8176 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8177 struct displaced_step_closure *dsc)
8180 int val = displaced_read_reg (regs, dsc, 7);
8181 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8184 val = displaced_read_reg (regs, dsc, 8);
8185 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8188 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8193 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8194 struct regcache *regs,
8195 struct displaced_step_closure *dsc)
8197 dsc->u.block.regmask = insn1 & 0x00ff;
8199 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8202 (1) register list is full, that is, r0-r7 are used.
8203 Prepare: tmp[0] <- r8
8205 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8206 MOV r8, r7; Move value of r7 to r8;
8207 POP {r7}; Store PC value into r7.
8209 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8211 (2) register list is not full, supposing there are N registers in
8212 register list (except PC, 0 <= N <= 7).
8213 Prepare: for each i, 0 - N, tmp[i] <- ri.
8215 POP {r0, r1, ...., rN};
8217 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8218 from tmp[] properly.
8220 if (debug_displaced)
8221 fprintf_unfiltered (gdb_stdlog,
8222 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8223 dsc->u.block.regmask, insn1);
8225 if (dsc->u.block.regmask == 0xff)
8227 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8229 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8230 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8231 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8234 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8238 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8239 unsigned int new_regmask, bit = 1;
8240 unsigned int to = 0, from = 0, i, new_rn;
8242 for (i = 0; i < num_in_list + 1; i++)
8243 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8245 new_regmask = (1 << (num_in_list + 1)) - 1;
8247 if (debug_displaced)
8248 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8249 "{..., pc}: original reg list %.4x,"
8250 " modified list %.4x\n"),
8251 (int) dsc->u.block.regmask, new_regmask);
8253 dsc->u.block.regmask |= 0x8000;
8254 dsc->u.block.writeback = 0;
8255 dsc->u.block.cond = INST_AL;
8257 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8259 dsc->cleanup = &cleanup_block_load_pc;
8266 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8267 struct regcache *regs,
8268 struct displaced_step_closure *dsc)
8270 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8271 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8274 /* 16-bit thumb instructions. */
8275 switch (op_bit_12_15)
8277 /* Shift (imme), add, subtract, move and compare. */
8278 case 0: case 1: case 2: case 3:
8279 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8280 "shift/add/sub/mov/cmp",
8284 switch (op_bit_10_11)
8286 case 0: /* Data-processing */
8287 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8291 case 1: /* Special data instructions and branch and exchange. */
8293 unsigned short op = bits (insn1, 7, 9);
8294 if (op == 6 || op == 7) /* BX or BLX */
8295 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8296 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8297 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8299 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8303 default: /* LDR (literal) */
8304 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8307 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8308 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8311 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8312 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8313 else /* Generate SP-relative address */
8314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8316 case 11: /* Misc 16-bit instructions */
8318 switch (bits (insn1, 8, 11))
8320 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8321 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8323 case 12: case 13: /* POP */
8324 if (bit (insn1, 8)) /* PC is in register list. */
8325 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8327 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8329 case 15: /* If-Then, and hints */
8330 if (bits (insn1, 0, 3))
8331 /* If-Then makes up to four following instructions conditional.
8332 IT instruction itself is not conditional, so handle it as a
8333 common unmodified instruction. */
8334 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8337 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8340 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8345 if (op_bit_10_11 < 2) /* Store multiple registers */
8346 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8347 else /* Load multiple registers */
8348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8350 case 13: /* Conditional branch and supervisor call */
8351 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8352 err = thumb_copy_b (gdbarch, insn1, dsc);
8354 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8356 case 14: /* Unconditional branch */
8357 err = thumb_copy_b (gdbarch, insn1, dsc);
8364 internal_error (__FILE__, __LINE__,
8365 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8369 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8370 uint16_t insn1, uint16_t insn2,
8371 struct regcache *regs,
8372 struct displaced_step_closure *dsc)
8374 int rt = bits (insn2, 12, 15);
8375 int rn = bits (insn1, 0, 3);
8376 int op1 = bits (insn1, 7, 8);
8379 switch (bits (insn1, 5, 6))
8381 case 0: /* Load byte and memory hints */
8382 if (rt == 0xf) /* PLD/PLI */
8385 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8386 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8388 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8393 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8394 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8397 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8398 "ldrb{reg, immediate}/ldrbt",
8403 case 1: /* Load halfword and memory hints. */
8404 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8405 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8406 "pld/unalloc memhint", dsc);
8410 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8413 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8417 case 2: /* Load word */
8419 int insn2_bit_8_11 = bits (insn2, 8, 11);
8422 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8423 else if (op1 == 0x1) /* Encoding T3 */
8424 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8426 else /* op1 == 0x0 */
8428 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8429 /* LDR (immediate) */
8430 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8431 dsc, bit (insn2, 8), 1);
8432 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8433 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8436 /* LDR (register) */
8437 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8443 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8450 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8451 uint16_t insn2, struct regcache *regs,
8452 struct displaced_step_closure *dsc)
8455 unsigned short op = bit (insn2, 15);
8456 unsigned int op1 = bits (insn1, 11, 12);
8462 switch (bits (insn1, 9, 10))
8467 /* Load/store {dual, execlusive}, table branch. */
8468 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8469 && bits (insn2, 5, 7) == 0)
8470 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8473 /* PC is not allowed to use in load/store {dual, exclusive}
8475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8476 "load/store dual/ex", dsc);
8478 else /* load/store multiple */
8480 switch (bits (insn1, 7, 8))
8482 case 0: case 3: /* SRS, RFE */
8483 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8486 case 1: case 2: /* LDM/STM/PUSH/POP */
8487 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8494 /* Data-processing (shift register). */
8495 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8498 default: /* Coprocessor instructions. */
8499 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8504 case 2: /* op1 = 2 */
8505 if (op) /* Branch and misc control. */
8507 if (bit (insn2, 14) /* BLX/BL */
8508 || bit (insn2, 12) /* Unconditional branch */
8509 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8510 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8512 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8517 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8519 int op = bits (insn1, 4, 8);
8520 int rn = bits (insn1, 0, 3);
8521 if ((op == 0 || op == 0xa) && rn == 0xf)
8522 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8525 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8528 else /* Data processing (modified immeidate) */
8529 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8533 case 3: /* op1 = 3 */
8534 switch (bits (insn1, 9, 10))
8538 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8540 else /* NEON Load/Store and Store single data item */
8541 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8542 "neon elt/struct load/store",
8545 case 1: /* op1 = 3, bits (9, 10) == 1 */
8546 switch (bits (insn1, 7, 8))
8548 case 0: case 1: /* Data processing (register) */
8549 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8552 case 2: /* Multiply and absolute difference */
8553 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8554 "mul/mua/diff", dsc);
8556 case 3: /* Long multiply and divide */
8557 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8562 default: /* Coprocessor instructions */
8563 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8572 internal_error (__FILE__, __LINE__,
8573 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8578 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8579 CORE_ADDR to, struct regcache *regs,
8580 struct displaced_step_closure *dsc)
8582 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8584 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8586 if (debug_displaced)
8587 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8588 "at %.8lx\n", insn1, (unsigned long) from);
8591 dsc->insn_size = thumb_insn_size (insn1);
8592 if (thumb_insn_size (insn1) == 4)
8595 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8596 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8599 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8603 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8604 CORE_ADDR to, struct regcache *regs,
8605 struct displaced_step_closure *dsc)
8608 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8611 /* Most displaced instructions use a 1-instruction scratch space, so set this
8612 here and override below if/when necessary. */
8614 dsc->insn_addr = from;
8615 dsc->scratch_base = to;
8616 dsc->cleanup = NULL;
8617 dsc->wrote_to_pc = 0;
8619 if (!displaced_in_arm_mode (regs))
8620 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8624 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8625 if (debug_displaced)
8626 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8627 "at %.8lx\n", (unsigned long) insn,
8628 (unsigned long) from);
8630 if ((insn & 0xf0000000) == 0xf0000000)
8631 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8632 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8634 case 0x0: case 0x1: case 0x2: case 0x3:
8635 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8638 case 0x4: case 0x5: case 0x6:
8639 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8643 err = arm_decode_media (gdbarch, insn, dsc);
8646 case 0x8: case 0x9: case 0xa: case 0xb:
8647 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8650 case 0xc: case 0xd: case 0xe: case 0xf:
8651 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8656 internal_error (__FILE__, __LINE__,
8657 _("arm_process_displaced_insn: Instruction decode error"));
8660 /* Actually set up the scratch space for a displaced instruction. */
8663 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8664 CORE_ADDR to, struct displaced_step_closure *dsc)
8666 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8667 unsigned int i, len, offset;
8668 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8669 int size = dsc->is_thumb? 2 : 4;
8670 const gdb_byte *bkp_insn;
8673 /* Poke modified instruction(s). */
8674 for (i = 0; i < dsc->numinsns; i++)
8676 if (debug_displaced)
8678 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8680 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8683 fprintf_unfiltered (gdb_stdlog, "%.4x",
8684 (unsigned short)dsc->modinsn[i]);
8686 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8687 (unsigned long) to + offset);
8690 write_memory_unsigned_integer (to + offset, size,
8691 byte_order_for_code,
8696 /* Choose the correct breakpoint instruction. */
8699 bkp_insn = tdep->thumb_breakpoint;
8700 len = tdep->thumb_breakpoint_size;
8704 bkp_insn = tdep->arm_breakpoint;
8705 len = tdep->arm_breakpoint_size;
8708 /* Put breakpoint afterwards. */
8709 write_memory (to + offset, bkp_insn, len);
8711 if (debug_displaced)
8712 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8713 paddress (gdbarch, from), paddress (gdbarch, to));
8716 /* Entry point for copying an instruction into scratch space for displaced
8719 struct displaced_step_closure *
8720 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8721 CORE_ADDR from, CORE_ADDR to,
8722 struct regcache *regs)
8724 struct displaced_step_closure *dsc
8725 = xmalloc (sizeof (struct displaced_step_closure));
8726 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8727 arm_displaced_init_closure (gdbarch, from, to, dsc);
8732 /* Entry point for cleaning things up after a displaced instruction has been
8736 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8737 struct displaced_step_closure *dsc,
8738 CORE_ADDR from, CORE_ADDR to,
8739 struct regcache *regs)
8742 dsc->cleanup (gdbarch, regs, dsc);
8744 if (!dsc->wrote_to_pc)
8745 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8746 dsc->insn_addr + dsc->insn_size);
8750 #include "bfd-in2.h"
8751 #include "libcoff.h"
8754 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8756 struct gdbarch *gdbarch = info->application_data;
8758 if (arm_pc_is_thumb (gdbarch, memaddr))
8760 static asymbol *asym;
8761 static combined_entry_type ce;
8762 static struct coff_symbol_struct csym;
8763 static struct bfd fake_bfd;
8764 static bfd_target fake_target;
8766 if (csym.native == NULL)
8768 /* Create a fake symbol vector containing a Thumb symbol.
8769 This is solely so that the code in print_insn_little_arm()
8770 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8771 the presence of a Thumb symbol and switch to decoding
8772 Thumb instructions. */
8774 fake_target.flavour = bfd_target_coff_flavour;
8775 fake_bfd.xvec = &fake_target;
8776 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8778 csym.symbol.the_bfd = &fake_bfd;
8779 csym.symbol.name = "fake";
8780 asym = (asymbol *) & csym;
8783 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8784 info->symbols = &asym;
8787 info->symbols = NULL;
8789 if (info->endian == BFD_ENDIAN_BIG)
8790 return print_insn_big_arm (memaddr, info);
8792 return print_insn_little_arm (memaddr, info);
8795 /* The following define instruction sequences that will cause ARM
8796 cpu's to take an undefined instruction trap. These are used to
8797 signal a breakpoint to GDB.
8799 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8800 modes. A different instruction is required for each mode. The ARM
8801 cpu's can also be big or little endian. Thus four different
8802 instructions are needed to support all cases.
8804 Note: ARMv4 defines several new instructions that will take the
8805 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8806 not in fact add the new instructions. The new undefined
8807 instructions in ARMv4 are all instructions that had no defined
8808 behaviour in earlier chips. There is no guarantee that they will
8809 raise an exception, but may be treated as NOP's. In practice, it
8810 may only safe to rely on instructions matching:
8812 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8813 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8814 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8816 Even this may only true if the condition predicate is true. The
8817 following use a condition predicate of ALWAYS so it is always TRUE.
8819 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8820 and NetBSD all use a software interrupt rather than an undefined
8821 instruction to force a trap. This can be handled by by the
8822 abi-specific code during establishment of the gdbarch vector. */
8824 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8825 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8826 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8827 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8829 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8830 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8831 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8832 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8834 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8835 the program counter value to determine whether a 16-bit or 32-bit
8836 breakpoint should be used. It returns a pointer to a string of
8837 bytes that encode a breakpoint instruction, stores the length of
8838 the string to *lenptr, and adjusts the program counter (if
8839 necessary) to point to the actual memory location where the
8840 breakpoint should be inserted. */
8842 static const unsigned char *
8843 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8845 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8846 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8848 if (arm_pc_is_thumb (gdbarch, *pcptr))
8850 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8852 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8853 check whether we are replacing a 32-bit instruction. */
8854 if (tdep->thumb2_breakpoint != NULL)
8857 if (target_read_memory (*pcptr, buf, 2) == 0)
8859 unsigned short inst1;
8860 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8861 if (thumb_insn_size (inst1) == 4)
8863 *lenptr = tdep->thumb2_breakpoint_size;
8864 return tdep->thumb2_breakpoint;
8869 *lenptr = tdep->thumb_breakpoint_size;
8870 return tdep->thumb_breakpoint;
8874 *lenptr = tdep->arm_breakpoint_size;
8875 return tdep->arm_breakpoint;
8880 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8883 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8885 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8886 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8887 that this is not confused with a 32-bit ARM breakpoint. */
8891 /* Extract from an array REGBUF containing the (raw) register state a
8892 function return value of type TYPE, and copy that, in virtual
8893 format, into VALBUF. */
8896 arm_extract_return_value (struct type *type, struct regcache *regs,
8899 struct gdbarch *gdbarch = get_regcache_arch (regs);
8900 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8902 if (TYPE_CODE_FLT == TYPE_CODE (type))
8904 switch (gdbarch_tdep (gdbarch)->fp_model)
8908 /* The value is in register F0 in internal format. We need to
8909 extract the raw value and then convert it to the desired
8911 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8913 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8914 convert_from_extended (floatformat_from_type (type), tmpbuf,
8915 valbuf, gdbarch_byte_order (gdbarch));
8919 case ARM_FLOAT_SOFT_FPA:
8920 case ARM_FLOAT_SOFT_VFP:
8921 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8922 not using the VFP ABI code. */
8924 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8925 if (TYPE_LENGTH (type) > 4)
8926 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8927 valbuf + INT_REGISTER_SIZE);
8931 internal_error (__FILE__, __LINE__,
8932 _("arm_extract_return_value: "
8933 "Floating point model not supported"));
8937 else if (TYPE_CODE (type) == TYPE_CODE_INT
8938 || TYPE_CODE (type) == TYPE_CODE_CHAR
8939 || TYPE_CODE (type) == TYPE_CODE_BOOL
8940 || TYPE_CODE (type) == TYPE_CODE_PTR
8941 || TYPE_CODE (type) == TYPE_CODE_REF
8942 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8944 /* If the type is a plain integer, then the access is
8945 straight-forward. Otherwise we have to play around a bit
8947 int len = TYPE_LENGTH (type);
8948 int regno = ARM_A1_REGNUM;
8953 /* By using store_unsigned_integer we avoid having to do
8954 anything special for small big-endian values. */
8955 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8956 store_unsigned_integer (valbuf,
8957 (len > INT_REGISTER_SIZE
8958 ? INT_REGISTER_SIZE : len),
8960 len -= INT_REGISTER_SIZE;
8961 valbuf += INT_REGISTER_SIZE;
8966 /* For a structure or union the behaviour is as if the value had
8967 been stored to word-aligned memory and then loaded into
8968 registers with 32-bit load instruction(s). */
8969 int len = TYPE_LENGTH (type);
8970 int regno = ARM_A1_REGNUM;
8971 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8975 regcache_cooked_read (regs, regno++, tmpbuf);
8976 memcpy (valbuf, tmpbuf,
8977 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8978 len -= INT_REGISTER_SIZE;
8979 valbuf += INT_REGISTER_SIZE;
8985 /* Will a function return an aggregate type in memory or in a
8986 register? Return 0 if an aggregate type can be returned in a
8987 register, 1 if it must be returned in memory. */
8990 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8993 enum type_code code;
8995 CHECK_TYPEDEF (type);
8997 /* In the ARM ABI, "integer" like aggregate types are returned in
8998 registers. For an aggregate type to be integer like, its size
8999 must be less than or equal to INT_REGISTER_SIZE and the
9000 offset of each addressable subfield must be zero. Note that bit
9001 fields are not addressable, and all addressable subfields of
9002 unions always start at offset zero.
9004 This function is based on the behaviour of GCC 2.95.1.
9005 See: gcc/arm.c: arm_return_in_memory() for details.
9007 Note: All versions of GCC before GCC 2.95.2 do not set up the
9008 parameters correctly for a function returning the following
9009 structure: struct { float f;}; This should be returned in memory,
9010 not a register. Richard Earnshaw sent me a patch, but I do not
9011 know of any way to detect if a function like the above has been
9012 compiled with the correct calling convention. */
9014 /* All aggregate types that won't fit in a register must be returned
9016 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9021 /* The AAPCS says all aggregates not larger than a word are returned
9023 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9026 /* The only aggregate types that can be returned in a register are
9027 structs and unions. Arrays must be returned in memory. */
9028 code = TYPE_CODE (type);
9029 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9034 /* Assume all other aggregate types can be returned in a register.
9035 Run a check for structures, unions and arrays. */
9038 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9041 /* Need to check if this struct/union is "integer" like. For
9042 this to be true, its size must be less than or equal to
9043 INT_REGISTER_SIZE and the offset of each addressable
9044 subfield must be zero. Note that bit fields are not
9045 addressable, and unions always start at offset zero. If any
9046 of the subfields is a floating point type, the struct/union
9047 cannot be an integer type. */
9049 /* For each field in the object, check:
9050 1) Is it FP? --> yes, nRc = 1;
9051 2) Is it addressable (bitpos != 0) and
9052 not packed (bitsize == 0)?
9056 for (i = 0; i < TYPE_NFIELDS (type); i++)
9058 enum type_code field_type_code;
9059 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9062 /* Is it a floating point type field? */
9063 if (field_type_code == TYPE_CODE_FLT)
9069 /* If bitpos != 0, then we have to care about it. */
9070 if (TYPE_FIELD_BITPOS (type, i) != 0)
9072 /* Bitfields are not addressable. If the field bitsize is
9073 zero, then the field is not packed. Hence it cannot be
9074 a bitfield or any other packed type. */
9075 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9087 /* Write into appropriate registers a function return value of type
9088 TYPE, given in virtual format. */
9091 arm_store_return_value (struct type *type, struct regcache *regs,
9092 const gdb_byte *valbuf)
9094 struct gdbarch *gdbarch = get_regcache_arch (regs);
9095 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9097 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9099 gdb_byte buf[MAX_REGISTER_SIZE];
9101 switch (gdbarch_tdep (gdbarch)->fp_model)
9105 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9106 gdbarch_byte_order (gdbarch));
9107 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9110 case ARM_FLOAT_SOFT_FPA:
9111 case ARM_FLOAT_SOFT_VFP:
9112 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9113 not using the VFP ABI code. */
9115 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9116 if (TYPE_LENGTH (type) > 4)
9117 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9118 valbuf + INT_REGISTER_SIZE);
9122 internal_error (__FILE__, __LINE__,
9123 _("arm_store_return_value: Floating "
9124 "point model not supported"));
9128 else if (TYPE_CODE (type) == TYPE_CODE_INT
9129 || TYPE_CODE (type) == TYPE_CODE_CHAR
9130 || TYPE_CODE (type) == TYPE_CODE_BOOL
9131 || TYPE_CODE (type) == TYPE_CODE_PTR
9132 || TYPE_CODE (type) == TYPE_CODE_REF
9133 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9135 if (TYPE_LENGTH (type) <= 4)
9137 /* Values of one word or less are zero/sign-extended and
9139 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9140 LONGEST val = unpack_long (type, valbuf);
9142 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9143 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9147 /* Integral values greater than one word are stored in consecutive
9148 registers starting with r0. This will always be a multiple of
9149 the regiser size. */
9150 int len = TYPE_LENGTH (type);
9151 int regno = ARM_A1_REGNUM;
9155 regcache_cooked_write (regs, regno++, valbuf);
9156 len -= INT_REGISTER_SIZE;
9157 valbuf += INT_REGISTER_SIZE;
9163 /* For a structure or union the behaviour is as if the value had
9164 been stored to word-aligned memory and then loaded into
9165 registers with 32-bit load instruction(s). */
9166 int len = TYPE_LENGTH (type);
9167 int regno = ARM_A1_REGNUM;
9168 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9172 memcpy (tmpbuf, valbuf,
9173 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9174 regcache_cooked_write (regs, regno++, tmpbuf);
9175 len -= INT_REGISTER_SIZE;
9176 valbuf += INT_REGISTER_SIZE;
9182 /* Handle function return values. */
9184 static enum return_value_convention
9185 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9186 struct type *valtype, struct regcache *regcache,
9187 gdb_byte *readbuf, const gdb_byte *writebuf)
9189 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9190 struct type *func_type = function ? value_type (function) : NULL;
9191 enum arm_vfp_cprc_base_type vfp_base_type;
9194 if (arm_vfp_abi_for_function (gdbarch, func_type)
9195 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9197 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9198 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9200 for (i = 0; i < vfp_base_count; i++)
9202 if (reg_char == 'q')
9205 arm_neon_quad_write (gdbarch, regcache, i,
9206 writebuf + i * unit_length);
9209 arm_neon_quad_read (gdbarch, regcache, i,
9210 readbuf + i * unit_length);
9217 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9218 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9221 regcache_cooked_write (regcache, regnum,
9222 writebuf + i * unit_length);
9224 regcache_cooked_read (regcache, regnum,
9225 readbuf + i * unit_length);
9228 return RETURN_VALUE_REGISTER_CONVENTION;
9231 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9232 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9233 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9235 if (tdep->struct_return == pcc_struct_return
9236 || arm_return_in_memory (gdbarch, valtype))
9237 return RETURN_VALUE_STRUCT_CONVENTION;
9240 /* AAPCS returns complex types longer than a register in memory. */
9241 if (tdep->arm_abi != ARM_ABI_APCS
9242 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9243 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9244 return RETURN_VALUE_STRUCT_CONVENTION;
9247 arm_store_return_value (valtype, regcache, writebuf);
9250 arm_extract_return_value (valtype, regcache, readbuf);
9252 return RETURN_VALUE_REGISTER_CONVENTION;
9257 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9259 struct gdbarch *gdbarch = get_frame_arch (frame);
9260 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9261 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9263 gdb_byte buf[INT_REGISTER_SIZE];
9265 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9267 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9271 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9275 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9276 return the target PC. Otherwise return 0. */
9279 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9283 CORE_ADDR start_addr;
9285 /* Find the starting address and name of the function containing the PC. */
9286 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9288 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9290 start_addr = arm_skip_bx_reg (frame, pc);
9291 if (start_addr != 0)
9297 /* If PC is in a Thumb call or return stub, return the address of the
9298 target PC, which is in a register. The thunk functions are called
9299 _call_via_xx, where x is the register name. The possible names
9300 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9301 functions, named __ARM_call_via_r[0-7]. */
9302 if (strncmp (name, "_call_via_", 10) == 0
9303 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9305 /* Use the name suffix to determine which register contains the
9307 static char *table[15] =
9308 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9309 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9312 int offset = strlen (name) - 2;
9314 for (regno = 0; regno <= 14; regno++)
9315 if (strcmp (&name[offset], table[regno]) == 0)
9316 return get_frame_register_unsigned (frame, regno);
9319 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9320 non-interworking calls to foo. We could decode the stubs
9321 to find the target but it's easier to use the symbol table. */
9322 namelen = strlen (name);
9323 if (name[0] == '_' && name[1] == '_'
9324 && ((namelen > 2 + strlen ("_from_thumb")
9325 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9326 strlen ("_from_thumb")) == 0)
9327 || (namelen > 2 + strlen ("_from_arm")
9328 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9329 strlen ("_from_arm")) == 0)))
9332 int target_len = namelen - 2;
9333 struct bound_minimal_symbol minsym;
9334 struct objfile *objfile;
9335 struct obj_section *sec;
9337 if (name[namelen - 1] == 'b')
9338 target_len -= strlen ("_from_thumb");
9340 target_len -= strlen ("_from_arm");
9342 target_name = alloca (target_len + 1);
9343 memcpy (target_name, name + 2, target_len);
9344 target_name[target_len] = '\0';
9346 sec = find_pc_section (pc);
9347 objfile = (sec == NULL) ? NULL : sec->objfile;
9348 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9349 if (minsym.minsym != NULL)
9350 return BMSYMBOL_VALUE_ADDRESS (minsym);
9355 return 0; /* not a stub */
9359 set_arm_command (char *args, int from_tty)
9361 printf_unfiltered (_("\
9362 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9363 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9367 show_arm_command (char *args, int from_tty)
9369 cmd_show_list (showarmcmdlist, from_tty, "");
9373 arm_update_current_architecture (void)
9375 struct gdbarch_info info;
9377 /* If the current architecture is not ARM, we have nothing to do. */
9378 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9381 /* Update the architecture. */
9382 gdbarch_info_init (&info);
9384 if (!gdbarch_update_p (info))
9385 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9389 set_fp_model_sfunc (char *args, int from_tty,
9390 struct cmd_list_element *c)
9392 enum arm_float_model fp_model;
9394 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9395 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9397 arm_fp_model = fp_model;
9401 if (fp_model == ARM_FLOAT_LAST)
9402 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9405 arm_update_current_architecture ();
9409 show_fp_model (struct ui_file *file, int from_tty,
9410 struct cmd_list_element *c, const char *value)
9412 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9414 if (arm_fp_model == ARM_FLOAT_AUTO
9415 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9416 fprintf_filtered (file, _("\
9417 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9418 fp_model_strings[tdep->fp_model]);
9420 fprintf_filtered (file, _("\
9421 The current ARM floating point model is \"%s\".\n"),
9422 fp_model_strings[arm_fp_model]);
9426 arm_set_abi (char *args, int from_tty,
9427 struct cmd_list_element *c)
9429 enum arm_abi_kind arm_abi;
9431 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9432 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9434 arm_abi_global = arm_abi;
9438 if (arm_abi == ARM_ABI_LAST)
9439 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9442 arm_update_current_architecture ();
9446 arm_show_abi (struct ui_file *file, int from_tty,
9447 struct cmd_list_element *c, const char *value)
9449 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9451 if (arm_abi_global == ARM_ABI_AUTO
9452 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9453 fprintf_filtered (file, _("\
9454 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9455 arm_abi_strings[tdep->arm_abi]);
9457 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9462 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9463 struct cmd_list_element *c, const char *value)
9465 fprintf_filtered (file,
9466 _("The current execution mode assumed "
9467 "(when symbols are unavailable) is \"%s\".\n"),
9468 arm_fallback_mode_string);
9472 arm_show_force_mode (struct ui_file *file, int from_tty,
9473 struct cmd_list_element *c, const char *value)
9475 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9477 fprintf_filtered (file,
9478 _("The current execution mode assumed "
9479 "(even when symbols are available) is \"%s\".\n"),
9480 arm_force_mode_string);
9483 /* If the user changes the register disassembly style used for info
9484 register and other commands, we have to also switch the style used
9485 in opcodes for disassembly output. This function is run in the "set
9486 arm disassembly" command, and does that. */
9489 set_disassembly_style_sfunc (char *args, int from_tty,
9490 struct cmd_list_element *c)
9492 set_disassembly_style ();
9495 /* Return the ARM register name corresponding to register I. */
9497 arm_register_name (struct gdbarch *gdbarch, int i)
9499 const int num_regs = gdbarch_num_regs (gdbarch);
9501 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9502 && i >= num_regs && i < num_regs + 32)
9504 static const char *const vfp_pseudo_names[] = {
9505 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9506 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9507 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9508 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9511 return vfp_pseudo_names[i - num_regs];
9514 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9515 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9517 static const char *const neon_pseudo_names[] = {
9518 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9519 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9522 return neon_pseudo_names[i - num_regs - 32];
9525 if (i >= ARRAY_SIZE (arm_register_names))
9526 /* These registers are only supported on targets which supply
9527 an XML description. */
9530 return arm_register_names[i];
9534 set_disassembly_style (void)
9538 /* Find the style that the user wants. */
9539 for (current = 0; current < num_disassembly_options; current++)
9540 if (disassembly_style == valid_disassembly_styles[current])
9542 gdb_assert (current < num_disassembly_options);
9544 /* Synchronize the disassembler. */
9545 set_arm_regname_option (current);
9548 /* Test whether the coff symbol specific value corresponds to a Thumb
9552 coff_sym_is_thumb (int val)
9554 return (val == C_THUMBEXT
9555 || val == C_THUMBSTAT
9556 || val == C_THUMBEXTFUNC
9557 || val == C_THUMBSTATFUNC
9558 || val == C_THUMBLABEL);
9561 /* arm_coff_make_msymbol_special()
9562 arm_elf_make_msymbol_special()
9564 These functions test whether the COFF or ELF symbol corresponds to
9565 an address in thumb code, and set a "special" bit in a minimal
9566 symbol to indicate that it does. */
9569 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9571 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9572 == ST_BRANCH_TO_THUMB)
9573 MSYMBOL_SET_SPECIAL (msym);
9577 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9579 if (coff_sym_is_thumb (val))
9580 MSYMBOL_SET_SPECIAL (msym);
9584 arm_objfile_data_free (struct objfile *objfile, void *arg)
9586 struct arm_per_objfile *data = arg;
9589 for (i = 0; i < objfile->obfd->section_count; i++)
9590 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9594 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9597 const char *name = bfd_asymbol_name (sym);
9598 struct arm_per_objfile *data;
9599 VEC(arm_mapping_symbol_s) **map_p;
9600 struct arm_mapping_symbol new_map_sym;
9602 gdb_assert (name[0] == '$');
9603 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9606 data = objfile_data (objfile, arm_objfile_data_key);
9609 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9610 struct arm_per_objfile);
9611 set_objfile_data (objfile, arm_objfile_data_key, data);
9612 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9613 objfile->obfd->section_count,
9614 VEC(arm_mapping_symbol_s) *);
9616 map_p = &data->section_maps[bfd_get_section (sym)->index];
9618 new_map_sym.value = sym->value;
9619 new_map_sym.type = name[1];
9621 /* Assume that most mapping symbols appear in order of increasing
9622 value. If they were randomly distributed, it would be faster to
9623 always push here and then sort at first use. */
9624 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9626 struct arm_mapping_symbol *prev_map_sym;
9628 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9629 if (prev_map_sym->value >= sym->value)
9632 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9633 arm_compare_mapping_symbols);
9634 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9639 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9643 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9645 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9646 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9648 /* If necessary, set the T bit. */
9651 ULONGEST val, t_bit;
9652 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9653 t_bit = arm_psr_thumb_bit (gdbarch);
9654 if (arm_pc_is_thumb (gdbarch, pc))
9655 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9658 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9663 /* Read the contents of a NEON quad register, by reading from two
9664 double registers. This is used to implement the quad pseudo
9665 registers, and for argument passing in case the quad registers are
9666 missing; vectors are passed in quad registers when using the VFP
9667 ABI, even if a NEON unit is not present. REGNUM is the index of
9668 the quad register, in [0, 15]. */
9670 static enum register_status
9671 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9672 int regnum, gdb_byte *buf)
9675 gdb_byte reg_buf[8];
9676 int offset, double_regnum;
9677 enum register_status status;
9679 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9680 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9683 /* d0 is always the least significant half of q0. */
9684 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9689 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9690 if (status != REG_VALID)
9692 memcpy (buf + offset, reg_buf, 8);
9694 offset = 8 - offset;
9695 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9696 if (status != REG_VALID)
9698 memcpy (buf + offset, reg_buf, 8);
9703 static enum register_status
9704 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9705 int regnum, gdb_byte *buf)
9707 const int num_regs = gdbarch_num_regs (gdbarch);
9709 gdb_byte reg_buf[8];
9710 int offset, double_regnum;
9712 gdb_assert (regnum >= num_regs);
9715 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9716 /* Quad-precision register. */
9717 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9720 enum register_status status;
9722 /* Single-precision register. */
9723 gdb_assert (regnum < 32);
9725 /* s0 is always the least significant half of d0. */
9726 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9727 offset = (regnum & 1) ? 0 : 4;
9729 offset = (regnum & 1) ? 4 : 0;
9731 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9732 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9735 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9736 if (status == REG_VALID)
9737 memcpy (buf, reg_buf + offset, 4);
9742 /* Store the contents of BUF to a NEON quad register, by writing to
9743 two double registers. This is used to implement the quad pseudo
9744 registers, and for argument passing in case the quad registers are
9745 missing; vectors are passed in quad registers when using the VFP
9746 ABI, even if a NEON unit is not present. REGNUM is the index
9747 of the quad register, in [0, 15]. */
9750 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9751 int regnum, const gdb_byte *buf)
9754 int offset, double_regnum;
9756 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9757 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9760 /* d0 is always the least significant half of q0. */
9761 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9766 regcache_raw_write (regcache, double_regnum, buf + offset);
9767 offset = 8 - offset;
9768 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9772 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9773 int regnum, const gdb_byte *buf)
9775 const int num_regs = gdbarch_num_regs (gdbarch);
9777 gdb_byte reg_buf[8];
9778 int offset, double_regnum;
9780 gdb_assert (regnum >= num_regs);
9783 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9784 /* Quad-precision register. */
9785 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9788 /* Single-precision register. */
9789 gdb_assert (regnum < 32);
9791 /* s0 is always the least significant half of d0. */
9792 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9793 offset = (regnum & 1) ? 0 : 4;
9795 offset = (regnum & 1) ? 4 : 0;
9797 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9798 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9801 regcache_raw_read (regcache, double_regnum, reg_buf);
9802 memcpy (reg_buf + offset, buf, 4);
9803 regcache_raw_write (regcache, double_regnum, reg_buf);
9807 static struct value *
9808 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9810 const int *reg_p = baton;
9811 return value_of_register (*reg_p, frame);
9814 static enum gdb_osabi
9815 arm_elf_osabi_sniffer (bfd *abfd)
9817 unsigned int elfosabi;
9818 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9820 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9822 if (elfosabi == ELFOSABI_ARM)
9823 /* GNU tools use this value. Check note sections in this case,
9825 bfd_map_over_sections (abfd,
9826 generic_elf_osabi_sniff_abi_tag_sections,
9829 /* Anything else will be handled by the generic ELF sniffer. */
9834 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9835 struct reggroup *group)
9837 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9838 this, FPS register belongs to save_regroup, restore_reggroup, and
9839 all_reggroup, of course. */
9840 if (regnum == ARM_FPS_REGNUM)
9841 return (group == float_reggroup
9842 || group == save_reggroup
9843 || group == restore_reggroup
9844 || group == all_reggroup);
9846 return default_register_reggroup_p (gdbarch, regnum, group);
9850 /* For backward-compatibility we allow two 'g' packet lengths with
9851 the remote protocol depending on whether FPA registers are
9852 supplied. M-profile targets do not have FPA registers, but some
9853 stubs already exist in the wild which use a 'g' packet which
9854 supplies them albeit with dummy values. The packet format which
9855 includes FPA registers should be considered deprecated for
9856 M-profile targets. */
9859 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9861 if (gdbarch_tdep (gdbarch)->is_m)
9863 /* If we know from the executable this is an M-profile target,
9864 cater for remote targets whose register set layout is the
9865 same as the FPA layout. */
9866 register_remote_g_packet_guess (gdbarch,
9867 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9868 (16 * INT_REGISTER_SIZE)
9869 + (8 * FP_REGISTER_SIZE)
9870 + (2 * INT_REGISTER_SIZE),
9871 tdesc_arm_with_m_fpa_layout);
9873 /* The regular M-profile layout. */
9874 register_remote_g_packet_guess (gdbarch,
9875 /* r0-r12,sp,lr,pc; xpsr */
9876 (16 * INT_REGISTER_SIZE)
9877 + INT_REGISTER_SIZE,
9880 /* M-profile plus M4F VFP. */
9881 register_remote_g_packet_guess (gdbarch,
9882 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9883 (16 * INT_REGISTER_SIZE)
9884 + (16 * VFP_REGISTER_SIZE)
9885 + (2 * INT_REGISTER_SIZE),
9886 tdesc_arm_with_m_vfp_d16);
9889 /* Otherwise we don't have a useful guess. */
9893 /* Initialize the current architecture based on INFO. If possible,
9894 re-use an architecture from ARCHES, which is a list of
9895 architectures already created during this debugging session.
9897 Called e.g. at program startup, when reading a core file, and when
9898 reading a binary file. */
9900 static struct gdbarch *
9901 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9903 struct gdbarch_tdep *tdep;
9904 struct gdbarch *gdbarch;
9905 struct gdbarch_list *best_arch;
9906 enum arm_abi_kind arm_abi = arm_abi_global;
9907 enum arm_float_model fp_model = arm_fp_model;
9908 struct tdesc_arch_data *tdesc_data = NULL;
9910 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9912 int have_fpa_registers = 1;
9913 const struct target_desc *tdesc = info.target_desc;
9915 /* If we have an object to base this architecture on, try to determine
9918 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9920 int ei_osabi, e_flags;
9922 switch (bfd_get_flavour (info.abfd))
9924 case bfd_target_aout_flavour:
9925 /* Assume it's an old APCS-style ABI. */
9926 arm_abi = ARM_ABI_APCS;
9929 case bfd_target_coff_flavour:
9930 /* Assume it's an old APCS-style ABI. */
9932 arm_abi = ARM_ABI_APCS;
9935 case bfd_target_elf_flavour:
9936 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9937 e_flags = elf_elfheader (info.abfd)->e_flags;
9939 if (ei_osabi == ELFOSABI_ARM)
9941 /* GNU tools used to use this value, but do not for EABI
9942 objects. There's nowhere to tag an EABI version
9943 anyway, so assume APCS. */
9944 arm_abi = ARM_ABI_APCS;
9946 else if (ei_osabi == ELFOSABI_NONE)
9948 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9949 int attr_arch, attr_profile;
9953 case EF_ARM_EABI_UNKNOWN:
9954 /* Assume GNU tools. */
9955 arm_abi = ARM_ABI_APCS;
9958 case EF_ARM_EABI_VER4:
9959 case EF_ARM_EABI_VER5:
9960 arm_abi = ARM_ABI_AAPCS;
9961 /* EABI binaries default to VFP float ordering.
9962 They may also contain build attributes that can
9963 be used to identify if the VFP argument-passing
9965 if (fp_model == ARM_FLOAT_AUTO)
9968 switch (bfd_elf_get_obj_attr_int (info.abfd,
9973 /* "The user intended FP parameter/result
9974 passing to conform to AAPCS, base
9976 fp_model = ARM_FLOAT_SOFT_VFP;
9979 /* "The user intended FP parameter/result
9980 passing to conform to AAPCS, VFP
9982 fp_model = ARM_FLOAT_VFP;
9985 /* "The user intended FP parameter/result
9986 passing to conform to tool chain-specific
9987 conventions" - we don't know any such
9988 conventions, so leave it as "auto". */
9991 /* Attribute value not mentioned in the
9992 October 2008 ABI, so leave it as
9997 fp_model = ARM_FLOAT_SOFT_VFP;
10003 /* Leave it as "auto". */
10004 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10009 /* Detect M-profile programs. This only works if the
10010 executable file includes build attributes; GCC does
10011 copy them to the executable, but e.g. RealView does
10013 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10015 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10017 Tag_CPU_arch_profile);
10018 /* GCC specifies the profile for v6-M; RealView only
10019 specifies the profile for architectures starting with
10020 V7 (as opposed to architectures with a tag
10021 numerically greater than TAG_CPU_ARCH_V7). */
10022 if (!tdesc_has_registers (tdesc)
10023 && (attr_arch == TAG_CPU_ARCH_V6_M
10024 || attr_arch == TAG_CPU_ARCH_V6S_M
10025 || attr_profile == 'M'))
10030 if (fp_model == ARM_FLOAT_AUTO)
10032 int e_flags = elf_elfheader (info.abfd)->e_flags;
10034 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10037 /* Leave it as "auto". Strictly speaking this case
10038 means FPA, but almost nobody uses that now, and
10039 many toolchains fail to set the appropriate bits
10040 for the floating-point model they use. */
10042 case EF_ARM_SOFT_FLOAT:
10043 fp_model = ARM_FLOAT_SOFT_FPA;
10045 case EF_ARM_VFP_FLOAT:
10046 fp_model = ARM_FLOAT_VFP;
10048 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10049 fp_model = ARM_FLOAT_SOFT_VFP;
10054 if (e_flags & EF_ARM_BE8)
10055 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10060 /* Leave it as "auto". */
10065 /* Check any target description for validity. */
10066 if (tdesc_has_registers (tdesc))
10068 /* For most registers we require GDB's default names; but also allow
10069 the numeric names for sp / lr / pc, as a convenience. */
10070 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10071 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10072 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10074 const struct tdesc_feature *feature;
10077 feature = tdesc_find_feature (tdesc,
10078 "org.gnu.gdb.arm.core");
10079 if (feature == NULL)
10081 feature = tdesc_find_feature (tdesc,
10082 "org.gnu.gdb.arm.m-profile");
10083 if (feature == NULL)
10089 tdesc_data = tdesc_data_alloc ();
10092 for (i = 0; i < ARM_SP_REGNUM; i++)
10093 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10094 arm_register_names[i]);
10095 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10098 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10101 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10105 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10106 ARM_PS_REGNUM, "xpsr");
10108 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10109 ARM_PS_REGNUM, "cpsr");
10113 tdesc_data_cleanup (tdesc_data);
10117 feature = tdesc_find_feature (tdesc,
10118 "org.gnu.gdb.arm.fpa");
10119 if (feature != NULL)
10122 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10123 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10124 arm_register_names[i]);
10127 tdesc_data_cleanup (tdesc_data);
10132 have_fpa_registers = 0;
10134 feature = tdesc_find_feature (tdesc,
10135 "org.gnu.gdb.xscale.iwmmxt");
10136 if (feature != NULL)
10138 static const char *const iwmmxt_names[] = {
10139 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10140 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10141 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10142 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10146 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10148 &= tdesc_numbered_register (feature, tdesc_data, i,
10149 iwmmxt_names[i - ARM_WR0_REGNUM]);
10151 /* Check for the control registers, but do not fail if they
10153 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10154 tdesc_numbered_register (feature, tdesc_data, i,
10155 iwmmxt_names[i - ARM_WR0_REGNUM]);
10157 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10159 &= tdesc_numbered_register (feature, tdesc_data, i,
10160 iwmmxt_names[i - ARM_WR0_REGNUM]);
10164 tdesc_data_cleanup (tdesc_data);
10169 /* If we have a VFP unit, check whether the single precision registers
10170 are present. If not, then we will synthesize them as pseudo
10172 feature = tdesc_find_feature (tdesc,
10173 "org.gnu.gdb.arm.vfp");
10174 if (feature != NULL)
10176 static const char *const vfp_double_names[] = {
10177 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10178 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10179 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10180 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10183 /* Require the double precision registers. There must be either
10186 for (i = 0; i < 32; i++)
10188 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10190 vfp_double_names[i]);
10194 if (!valid_p && i == 16)
10197 /* Also require FPSCR. */
10198 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10199 ARM_FPSCR_REGNUM, "fpscr");
10202 tdesc_data_cleanup (tdesc_data);
10206 if (tdesc_unnumbered_register (feature, "s0") == 0)
10207 have_vfp_pseudos = 1;
10209 have_vfp_registers = 1;
10211 /* If we have VFP, also check for NEON. The architecture allows
10212 NEON without VFP (integer vector operations only), but GDB
10213 does not support that. */
10214 feature = tdesc_find_feature (tdesc,
10215 "org.gnu.gdb.arm.neon");
10216 if (feature != NULL)
10218 /* NEON requires 32 double-precision registers. */
10221 tdesc_data_cleanup (tdesc_data);
10225 /* If there are quad registers defined by the stub, use
10226 their type; otherwise (normally) provide them with
10227 the default type. */
10228 if (tdesc_unnumbered_register (feature, "q0") == 0)
10229 have_neon_pseudos = 1;
10236 /* If there is already a candidate, use it. */
10237 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10239 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10241 if (arm_abi != ARM_ABI_AUTO
10242 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10245 if (fp_model != ARM_FLOAT_AUTO
10246 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10249 /* There are various other properties in tdep that we do not
10250 need to check here: those derived from a target description,
10251 since gdbarches with a different target description are
10252 automatically disqualified. */
10254 /* Do check is_m, though, since it might come from the binary. */
10255 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10258 /* Found a match. */
10262 if (best_arch != NULL)
10264 if (tdesc_data != NULL)
10265 tdesc_data_cleanup (tdesc_data);
10266 return best_arch->gdbarch;
10269 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10270 gdbarch = gdbarch_alloc (&info, tdep);
10272 /* Record additional information about the architecture we are defining.
10273 These are gdbarch discriminators, like the OSABI. */
10274 tdep->arm_abi = arm_abi;
10275 tdep->fp_model = fp_model;
10277 tdep->have_fpa_registers = have_fpa_registers;
10278 tdep->have_vfp_registers = have_vfp_registers;
10279 tdep->have_vfp_pseudos = have_vfp_pseudos;
10280 tdep->have_neon_pseudos = have_neon_pseudos;
10281 tdep->have_neon = have_neon;
10283 arm_register_g_packet_guesses (gdbarch);
10286 switch (info.byte_order_for_code)
10288 case BFD_ENDIAN_BIG:
10289 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10290 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10291 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10292 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10296 case BFD_ENDIAN_LITTLE:
10297 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10298 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10299 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10300 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10305 internal_error (__FILE__, __LINE__,
10306 _("arm_gdbarch_init: bad byte order for float format"));
10309 /* On ARM targets char defaults to unsigned. */
10310 set_gdbarch_char_signed (gdbarch, 0);
10312 /* Note: for displaced stepping, this includes the breakpoint, and one word
10313 of additional scratch space. This setting isn't used for anything beside
10314 displaced stepping at present. */
10315 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10317 /* This should be low enough for everything. */
10318 tdep->lowest_pc = 0x20;
10319 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10321 /* The default, for both APCS and AAPCS, is to return small
10322 structures in registers. */
10323 tdep->struct_return = reg_struct_return;
10325 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10326 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10328 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10330 /* Frame handling. */
10331 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10332 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10333 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10335 frame_base_set_default (gdbarch, &arm_normal_base);
10337 /* Address manipulation. */
10338 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10340 /* Advance PC across function entry code. */
10341 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10343 /* Detect whether PC is in function epilogue. */
10344 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10346 /* Skip trampolines. */
10347 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10349 /* The stack grows downward. */
10350 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10352 /* Breakpoint manipulation. */
10353 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10354 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10355 arm_remote_breakpoint_from_pc);
10357 /* Information about registers, etc. */
10358 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10359 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10360 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10361 set_gdbarch_register_type (gdbarch, arm_register_type);
10362 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10364 /* This "info float" is FPA-specific. Use the generic version if we
10365 do not have FPA. */
10366 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10367 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10369 /* Internal <-> external register number maps. */
10370 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10371 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10373 set_gdbarch_register_name (gdbarch, arm_register_name);
10375 /* Returning results. */
10376 set_gdbarch_return_value (gdbarch, arm_return_value);
10379 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10381 /* Minsymbol frobbing. */
10382 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10383 set_gdbarch_coff_make_msymbol_special (gdbarch,
10384 arm_coff_make_msymbol_special);
10385 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10387 /* Thumb-2 IT block support. */
10388 set_gdbarch_adjust_breakpoint_address (gdbarch,
10389 arm_adjust_breakpoint_address);
10391 /* Virtual tables. */
10392 set_gdbarch_vbit_in_delta (gdbarch, 1);
10394 /* Hook in the ABI-specific overrides, if they have been registered. */
10395 gdbarch_init_osabi (info, gdbarch);
10397 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10399 /* Add some default predicates. */
10401 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10402 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10403 dwarf2_append_unwinders (gdbarch);
10404 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10405 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10407 /* Now we have tuned the configuration, set a few final things,
10408 based on what the OS ABI has told us. */
10410 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10411 binaries are always marked. */
10412 if (tdep->arm_abi == ARM_ABI_AUTO)
10413 tdep->arm_abi = ARM_ABI_APCS;
10415 /* Watchpoints are not steppable. */
10416 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10418 /* We used to default to FPA for generic ARM, but almost nobody
10419 uses that now, and we now provide a way for the user to force
10420 the model. So default to the most useful variant. */
10421 if (tdep->fp_model == ARM_FLOAT_AUTO)
10422 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10424 if (tdep->jb_pc >= 0)
10425 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10427 /* Floating point sizes and format. */
10428 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10429 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10431 set_gdbarch_double_format
10432 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10433 set_gdbarch_long_double_format
10434 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10438 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10439 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10442 if (have_vfp_pseudos)
10444 /* NOTE: These are the only pseudo registers used by
10445 the ARM target at the moment. If more are added, a
10446 little more care in numbering will be needed. */
10448 int num_pseudos = 32;
10449 if (have_neon_pseudos)
10451 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10452 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10453 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10458 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10460 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10462 /* Override tdesc_register_type to adjust the types of VFP
10463 registers for NEON. */
10464 set_gdbarch_register_type (gdbarch, arm_register_type);
10467 /* Add standard register aliases. We add aliases even for those
10468 nanes which are used by the current architecture - it's simpler,
10469 and does no harm, since nothing ever lists user registers. */
10470 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10471 user_reg_add (gdbarch, arm_register_aliases[i].name,
10472 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10478 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10480 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10485 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10486 (unsigned long) tdep->lowest_pc);
10489 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10492 _initialize_arm_tdep (void)
10494 struct ui_file *stb;
10496 struct cmd_list_element *new_set, *new_show;
10497 const char *setname;
10498 const char *setdesc;
10499 const char *const *regnames;
10501 static char *helptext;
10502 char regdesc[1024], *rdptr = regdesc;
10503 size_t rest = sizeof (regdesc);
10505 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10507 arm_objfile_data_key
10508 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10510 /* Add ourselves to objfile event chain. */
10511 observer_attach_new_objfile (arm_exidx_new_objfile);
10513 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10515 /* Register an ELF OS ABI sniffer for ARM binaries. */
10516 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10517 bfd_target_elf_flavour,
10518 arm_elf_osabi_sniffer);
10520 /* Initialize the standard target descriptions. */
10521 initialize_tdesc_arm_with_m ();
10522 initialize_tdesc_arm_with_m_fpa_layout ();
10523 initialize_tdesc_arm_with_m_vfp_d16 ();
10524 initialize_tdesc_arm_with_iwmmxt ();
10525 initialize_tdesc_arm_with_vfpv2 ();
10526 initialize_tdesc_arm_with_vfpv3 ();
10527 initialize_tdesc_arm_with_neon ();
10529 /* Get the number of possible sets of register names defined in opcodes. */
10530 num_disassembly_options = get_arm_regname_num_options ();
10532 /* Add root prefix command for all "set arm"/"show arm" commands. */
10533 add_prefix_cmd ("arm", no_class, set_arm_command,
10534 _("Various ARM-specific commands."),
10535 &setarmcmdlist, "set arm ", 0, &setlist);
10537 add_prefix_cmd ("arm", no_class, show_arm_command,
10538 _("Various ARM-specific commands."),
10539 &showarmcmdlist, "show arm ", 0, &showlist);
10541 /* Sync the opcode insn printer with our register viewer. */
10542 parse_arm_disassembler_option ("reg-names-std");
10544 /* Initialize the array that will be passed to
10545 add_setshow_enum_cmd(). */
10546 valid_disassembly_styles
10547 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10548 for (i = 0; i < num_disassembly_options; i++)
10550 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10551 valid_disassembly_styles[i] = setname;
10552 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10555 /* When we find the default names, tell the disassembler to use
10557 if (!strcmp (setname, "std"))
10559 disassembly_style = setname;
10560 set_arm_regname_option (i);
10563 /* Mark the end of valid options. */
10564 valid_disassembly_styles[num_disassembly_options] = NULL;
10566 /* Create the help text. */
10567 stb = mem_fileopen ();
10568 fprintf_unfiltered (stb, "%s%s%s",
10569 _("The valid values are:\n"),
10571 _("The default is \"std\"."));
10572 helptext = ui_file_xstrdup (stb, NULL);
10573 ui_file_delete (stb);
10575 add_setshow_enum_cmd("disassembler", no_class,
10576 valid_disassembly_styles, &disassembly_style,
10577 _("Set the disassembly style."),
10578 _("Show the disassembly style."),
10580 set_disassembly_style_sfunc,
10581 NULL, /* FIXME: i18n: The disassembly style is
10583 &setarmcmdlist, &showarmcmdlist);
10585 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10586 _("Set usage of ARM 32-bit mode."),
10587 _("Show usage of ARM 32-bit mode."),
10588 _("When off, a 26-bit PC will be used."),
10590 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10592 &setarmcmdlist, &showarmcmdlist);
10594 /* Add a command to allow the user to force the FPU model. */
10595 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10596 _("Set the floating point type."),
10597 _("Show the floating point type."),
10598 _("auto - Determine the FP typefrom the OS-ABI.\n\
10599 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10600 fpa - FPA co-processor (GCC compiled).\n\
10601 softvfp - Software FP with pure-endian doubles.\n\
10602 vfp - VFP co-processor."),
10603 set_fp_model_sfunc, show_fp_model,
10604 &setarmcmdlist, &showarmcmdlist);
10606 /* Add a command to allow the user to force the ABI. */
10607 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10609 _("Show the ABI."),
10610 NULL, arm_set_abi, arm_show_abi,
10611 &setarmcmdlist, &showarmcmdlist);
10613 /* Add two commands to allow the user to force the assumed
10615 add_setshow_enum_cmd ("fallback-mode", class_support,
10616 arm_mode_strings, &arm_fallback_mode_string,
10617 _("Set the mode assumed when symbols are unavailable."),
10618 _("Show the mode assumed when symbols are unavailable."),
10619 NULL, NULL, arm_show_fallback_mode,
10620 &setarmcmdlist, &showarmcmdlist);
10621 add_setshow_enum_cmd ("force-mode", class_support,
10622 arm_mode_strings, &arm_force_mode_string,
10623 _("Set the mode assumed even when symbols are available."),
10624 _("Show the mode assumed even when symbols are available."),
10625 NULL, NULL, arm_show_force_mode,
10626 &setarmcmdlist, &showarmcmdlist);
10628 /* Debugging flag. */
10629 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10630 _("Set ARM debugging."),
10631 _("Show ARM debugging."),
10632 _("When on, arm-specific debugging is enabled."),
10634 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10635 &setdebuglist, &showdebuglist);
10638 /* ARM-reversible process record data structures. */
10640 #define ARM_INSN_SIZE_BYTES 4
10641 #define THUMB_INSN_SIZE_BYTES 2
10642 #define THUMB2_INSN_SIZE_BYTES 4
10645 #define INSN_S_L_BIT_NUM 20
10647 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10650 unsigned int reg_len = LENGTH; \
10653 REGS = XNEWVEC (uint32_t, reg_len); \
10654 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10659 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10662 unsigned int mem_len = LENGTH; \
10665 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10666 memcpy(&MEMS->len, &RECORD_BUF[0], \
10667 sizeof(struct arm_mem_r) * LENGTH); \
10672 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10673 #define INSN_RECORDED(ARM_RECORD) \
10674 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10676 /* ARM memory record structure. */
10679 uint32_t len; /* Record length. */
10680 uint32_t addr; /* Memory address. */
10683 /* ARM instruction record contains opcode of current insn
10684 and execution state (before entry to decode_insn()),
10685 contains list of to-be-modified registers and
10686 memory blocks (on return from decode_insn()). */
10688 typedef struct insn_decode_record_t
10690 struct gdbarch *gdbarch;
10691 struct regcache *regcache;
10692 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10693 uint32_t arm_insn; /* Should accommodate thumb. */
10694 uint32_t cond; /* Condition code. */
10695 uint32_t opcode; /* Insn opcode. */
10696 uint32_t decode; /* Insn decode bits. */
10697 uint32_t mem_rec_count; /* No of mem records. */
10698 uint32_t reg_rec_count; /* No of reg records. */
10699 uint32_t *arm_regs; /* Registers to be saved for this record. */
10700 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10701 } insn_decode_record;
10704 /* Checks ARM SBZ and SBO mandatory fields. */
10707 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10709 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10728 enum arm_record_result
10730 ARM_RECORD_SUCCESS = 0,
10731 ARM_RECORD_FAILURE = 1
10738 } arm_record_strx_t;
10749 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10750 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10753 struct regcache *reg_cache = arm_insn_r->regcache;
10754 ULONGEST u_regval[2]= {0};
10756 uint32_t reg_src1 = 0, reg_src2 = 0;
10757 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10758 uint32_t opcode1 = 0;
10760 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10761 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10762 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10765 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10767 /* 1) Handle misc store, immediate offset. */
10768 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10769 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10770 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10771 regcache_raw_read_unsigned (reg_cache, reg_src1,
10773 if (ARM_PC_REGNUM == reg_src1)
10775 /* If R15 was used as Rn, hence current PC+8. */
10776 u_regval[0] = u_regval[0] + 8;
10778 offset_8 = (immed_high << 4) | immed_low;
10779 /* Calculate target store address. */
10780 if (14 == arm_insn_r->opcode)
10782 tgt_mem_addr = u_regval[0] + offset_8;
10786 tgt_mem_addr = u_regval[0] - offset_8;
10788 if (ARM_RECORD_STRH == str_type)
10790 record_buf_mem[0] = 2;
10791 record_buf_mem[1] = tgt_mem_addr;
10792 arm_insn_r->mem_rec_count = 1;
10794 else if (ARM_RECORD_STRD == str_type)
10796 record_buf_mem[0] = 4;
10797 record_buf_mem[1] = tgt_mem_addr;
10798 record_buf_mem[2] = 4;
10799 record_buf_mem[3] = tgt_mem_addr + 4;
10800 arm_insn_r->mem_rec_count = 2;
10803 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10805 /* 2) Store, register offset. */
10807 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10809 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10810 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10811 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10812 if (15 == reg_src2)
10814 /* If R15 was used as Rn, hence current PC+8. */
10815 u_regval[0] = u_regval[0] + 8;
10817 /* Calculate target store address, Rn +/- Rm, register offset. */
10818 if (12 == arm_insn_r->opcode)
10820 tgt_mem_addr = u_regval[0] + u_regval[1];
10824 tgt_mem_addr = u_regval[1] - u_regval[0];
10826 if (ARM_RECORD_STRH == str_type)
10828 record_buf_mem[0] = 2;
10829 record_buf_mem[1] = tgt_mem_addr;
10830 arm_insn_r->mem_rec_count = 1;
10832 else if (ARM_RECORD_STRD == str_type)
10834 record_buf_mem[0] = 4;
10835 record_buf_mem[1] = tgt_mem_addr;
10836 record_buf_mem[2] = 4;
10837 record_buf_mem[3] = tgt_mem_addr + 4;
10838 arm_insn_r->mem_rec_count = 2;
10841 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10842 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10844 /* 3) Store, immediate pre-indexed. */
10845 /* 5) Store, immediate post-indexed. */
10846 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10847 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10848 offset_8 = (immed_high << 4) | immed_low;
10849 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10850 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10851 /* Calculate target store address, Rn +/- Rm, register offset. */
10852 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10854 tgt_mem_addr = u_regval[0] + offset_8;
10858 tgt_mem_addr = u_regval[0] - offset_8;
10860 if (ARM_RECORD_STRH == str_type)
10862 record_buf_mem[0] = 2;
10863 record_buf_mem[1] = tgt_mem_addr;
10864 arm_insn_r->mem_rec_count = 1;
10866 else if (ARM_RECORD_STRD == str_type)
10868 record_buf_mem[0] = 4;
10869 record_buf_mem[1] = tgt_mem_addr;
10870 record_buf_mem[2] = 4;
10871 record_buf_mem[3] = tgt_mem_addr + 4;
10872 arm_insn_r->mem_rec_count = 2;
10874 /* Record Rn also as it changes. */
10875 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10876 arm_insn_r->reg_rec_count = 1;
10878 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10879 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10881 /* 4) Store, register pre-indexed. */
10882 /* 6) Store, register post -indexed. */
10883 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10884 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10885 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10886 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10887 /* Calculate target store address, Rn +/- Rm, register offset. */
10888 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10890 tgt_mem_addr = u_regval[0] + u_regval[1];
10894 tgt_mem_addr = u_regval[1] - u_regval[0];
10896 if (ARM_RECORD_STRH == str_type)
10898 record_buf_mem[0] = 2;
10899 record_buf_mem[1] = tgt_mem_addr;
10900 arm_insn_r->mem_rec_count = 1;
10902 else if (ARM_RECORD_STRD == str_type)
10904 record_buf_mem[0] = 4;
10905 record_buf_mem[1] = tgt_mem_addr;
10906 record_buf_mem[2] = 4;
10907 record_buf_mem[3] = tgt_mem_addr + 4;
10908 arm_insn_r->mem_rec_count = 2;
10910 /* Record Rn also as it changes. */
10911 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10912 arm_insn_r->reg_rec_count = 1;
10917 /* Handling ARM extension space insns. */
10920 arm_record_extension_space (insn_decode_record *arm_insn_r)
10922 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10923 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10924 uint32_t record_buf[8], record_buf_mem[8];
10925 uint32_t reg_src1 = 0;
10926 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10927 struct regcache *reg_cache = arm_insn_r->regcache;
10928 ULONGEST u_regval = 0;
10930 gdb_assert (!INSN_RECORDED(arm_insn_r));
10931 /* Handle unconditional insn extension space. */
10933 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10934 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10935 if (arm_insn_r->cond)
10937 /* PLD has no affect on architectural state, it just affects
10939 if (5 == ((opcode1 & 0xE0) >> 5))
10942 record_buf[0] = ARM_PS_REGNUM;
10943 record_buf[1] = ARM_LR_REGNUM;
10944 arm_insn_r->reg_rec_count = 2;
10946 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10950 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10951 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10954 /* Undefined instruction on ARM V5; need to handle if later
10955 versions define it. */
10958 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10959 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10960 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10962 /* Handle arithmetic insn extension space. */
10963 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10964 && !INSN_RECORDED(arm_insn_r))
10966 /* Handle MLA(S) and MUL(S). */
10967 if (0 <= insn_op1 && 3 >= insn_op1)
10969 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10970 record_buf[1] = ARM_PS_REGNUM;
10971 arm_insn_r->reg_rec_count = 2;
10973 else if (4 <= insn_op1 && 15 >= insn_op1)
10975 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10976 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10977 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10978 record_buf[2] = ARM_PS_REGNUM;
10979 arm_insn_r->reg_rec_count = 3;
10983 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10984 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10985 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10987 /* Handle control insn extension space. */
10989 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10990 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10992 if (!bit (arm_insn_r->arm_insn,25))
10994 if (!bits (arm_insn_r->arm_insn, 4, 7))
10996 if ((0 == insn_op1) || (2 == insn_op1))
10999 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11000 arm_insn_r->reg_rec_count = 1;
11002 else if (1 == insn_op1)
11004 /* CSPR is going to be changed. */
11005 record_buf[0] = ARM_PS_REGNUM;
11006 arm_insn_r->reg_rec_count = 1;
11008 else if (3 == insn_op1)
11010 /* SPSR is going to be changed. */
11011 /* We need to get SPSR value, which is yet to be done. */
11012 printf_unfiltered (_("Process record does not support "
11013 "instruction 0x%0x at address %s.\n"),
11014 arm_insn_r->arm_insn,
11015 paddress (arm_insn_r->gdbarch,
11016 arm_insn_r->this_addr));
11020 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11025 record_buf[0] = ARM_PS_REGNUM;
11026 arm_insn_r->reg_rec_count = 1;
11028 else if (3 == insn_op1)
11031 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11032 arm_insn_r->reg_rec_count = 1;
11035 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11038 record_buf[0] = ARM_PS_REGNUM;
11039 record_buf[1] = ARM_LR_REGNUM;
11040 arm_insn_r->reg_rec_count = 2;
11042 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11044 /* QADD, QSUB, QDADD, QDSUB */
11045 record_buf[0] = ARM_PS_REGNUM;
11046 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11047 arm_insn_r->reg_rec_count = 2;
11049 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11052 record_buf[0] = ARM_PS_REGNUM;
11053 record_buf[1] = ARM_LR_REGNUM;
11054 arm_insn_r->reg_rec_count = 2;
11056 /* Save SPSR also;how? */
11057 printf_unfiltered (_("Process record does not support "
11058 "instruction 0x%0x at address %s.\n"),
11059 arm_insn_r->arm_insn,
11060 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11063 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11064 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11065 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11066 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11069 if (0 == insn_op1 || 1 == insn_op1)
11071 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11072 /* We dont do optimization for SMULW<y> where we
11074 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11075 record_buf[1] = ARM_PS_REGNUM;
11076 arm_insn_r->reg_rec_count = 2;
11078 else if (2 == insn_op1)
11081 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11082 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11083 arm_insn_r->reg_rec_count = 2;
11085 else if (3 == insn_op1)
11088 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11089 arm_insn_r->reg_rec_count = 1;
11095 /* MSR : immediate form. */
11098 /* CSPR is going to be changed. */
11099 record_buf[0] = ARM_PS_REGNUM;
11100 arm_insn_r->reg_rec_count = 1;
11102 else if (3 == insn_op1)
11104 /* SPSR is going to be changed. */
11105 /* we need to get SPSR value, which is yet to be done */
11106 printf_unfiltered (_("Process record does not support "
11107 "instruction 0x%0x at address %s.\n"),
11108 arm_insn_r->arm_insn,
11109 paddress (arm_insn_r->gdbarch,
11110 arm_insn_r->this_addr));
11116 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11117 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11118 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11120 /* Handle load/store insn extension space. */
11122 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11123 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11124 && !INSN_RECORDED(arm_insn_r))
11129 /* These insn, changes register and memory as well. */
11130 /* SWP or SWPB insn. */
11131 /* Get memory address given by Rn. */
11132 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11133 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11134 /* SWP insn ?, swaps word. */
11135 if (8 == arm_insn_r->opcode)
11137 record_buf_mem[0] = 4;
11141 /* SWPB insn, swaps only byte. */
11142 record_buf_mem[0] = 1;
11144 record_buf_mem[1] = u_regval;
11145 arm_insn_r->mem_rec_count = 1;
11146 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11147 arm_insn_r->reg_rec_count = 1;
11149 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11152 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11155 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11158 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11159 record_buf[1] = record_buf[0] + 1;
11160 arm_insn_r->reg_rec_count = 2;
11162 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11165 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11168 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11170 /* LDRH, LDRSB, LDRSH. */
11171 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11172 arm_insn_r->reg_rec_count = 1;
11177 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11178 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11179 && !INSN_RECORDED(arm_insn_r))
11182 /* Handle coprocessor insn extension space. */
11185 /* To be done for ARMv5 and later; as of now we return -1. */
11187 printf_unfiltered (_("Process record does not support instruction x%0x "
11188 "at address %s.\n"),arm_insn_r->arm_insn,
11189 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11192 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11193 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11198 /* Handling opcode 000 insns. */
11201 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11203 struct regcache *reg_cache = arm_insn_r->regcache;
11204 uint32_t record_buf[8], record_buf_mem[8];
11205 ULONGEST u_regval[2] = {0};
11207 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11208 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11209 uint32_t opcode1 = 0;
11211 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11212 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11213 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11215 /* Data processing insn /multiply insn. */
11216 if (9 == arm_insn_r->decode
11217 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11218 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11220 /* Handle multiply instructions. */
11221 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11222 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11224 /* Handle MLA and MUL. */
11225 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11226 record_buf[1] = ARM_PS_REGNUM;
11227 arm_insn_r->reg_rec_count = 2;
11229 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11231 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11232 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11233 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11234 record_buf[2] = ARM_PS_REGNUM;
11235 arm_insn_r->reg_rec_count = 3;
11238 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11239 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11241 /* Handle misc load insns, as 20th bit (L = 1). */
11242 /* LDR insn has a capability to do branching, if
11243 MOV LR, PC is precceded by LDR insn having Rn as R15
11244 in that case, it emulates branch and link insn, and hence we
11245 need to save CSPR and PC as well. I am not sure this is right
11246 place; as opcode = 010 LDR insn make this happen, if R15 was
11248 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11249 if (15 != reg_dest)
11251 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11252 arm_insn_r->reg_rec_count = 1;
11256 record_buf[0] = reg_dest;
11257 record_buf[1] = ARM_PS_REGNUM;
11258 arm_insn_r->reg_rec_count = 2;
11261 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11262 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11263 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11264 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11266 /* Handle MSR insn. */
11267 if (9 == arm_insn_r->opcode)
11269 /* CSPR is going to be changed. */
11270 record_buf[0] = ARM_PS_REGNUM;
11271 arm_insn_r->reg_rec_count = 1;
11275 /* SPSR is going to be changed. */
11276 /* How to read SPSR value? */
11277 printf_unfiltered (_("Process record does not support instruction "
11278 "0x%0x at address %s.\n"),
11279 arm_insn_r->arm_insn,
11280 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11284 else if (9 == arm_insn_r->decode
11285 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11286 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11288 /* Handling SWP, SWPB. */
11289 /* These insn, changes register and memory as well. */
11290 /* SWP or SWPB insn. */
11292 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11293 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11294 /* SWP insn ?, swaps word. */
11295 if (8 == arm_insn_r->opcode)
11297 record_buf_mem[0] = 4;
11301 /* SWPB insn, swaps only byte. */
11302 record_buf_mem[0] = 1;
11304 record_buf_mem[1] = u_regval[0];
11305 arm_insn_r->mem_rec_count = 1;
11306 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11307 arm_insn_r->reg_rec_count = 1;
11309 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11310 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11312 /* Handle BLX, branch and link/exchange. */
11313 if (9 == arm_insn_r->opcode)
11315 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11316 and R14 stores the return address. */
11317 record_buf[0] = ARM_PS_REGNUM;
11318 record_buf[1] = ARM_LR_REGNUM;
11319 arm_insn_r->reg_rec_count = 2;
11322 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11324 /* Handle enhanced software breakpoint insn, BKPT. */
11325 /* CPSR is changed to be executed in ARM state, disabling normal
11326 interrupts, entering abort mode. */
11327 /* According to high vector configuration PC is set. */
11328 /* user hit breakpoint and type reverse, in
11329 that case, we need to go back with previous CPSR and
11330 Program Counter. */
11331 record_buf[0] = ARM_PS_REGNUM;
11332 record_buf[1] = ARM_LR_REGNUM;
11333 arm_insn_r->reg_rec_count = 2;
11335 /* Save SPSR also; how? */
11336 printf_unfiltered (_("Process record does not support instruction "
11337 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11338 paddress (arm_insn_r->gdbarch,
11339 arm_insn_r->this_addr));
11342 else if (11 == arm_insn_r->decode
11343 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11345 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11347 /* Handle str(x) insn */
11348 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11351 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11352 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11354 /* Handle BX, branch and link/exchange. */
11355 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11356 record_buf[0] = ARM_PS_REGNUM;
11357 arm_insn_r->reg_rec_count = 1;
11359 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11360 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11361 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11363 /* Count leading zeros: CLZ. */
11364 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11365 arm_insn_r->reg_rec_count = 1;
11367 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11368 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11369 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11370 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11373 /* Handle MRS insn. */
11374 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11375 arm_insn_r->reg_rec_count = 1;
11377 else if (arm_insn_r->opcode <= 15)
11379 /* Normal data processing insns. */
11380 /* Out of 11 shifter operands mode, all the insn modifies destination
11381 register, which is specified by 13-16 decode. */
11382 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11383 record_buf[1] = ARM_PS_REGNUM;
11384 arm_insn_r->reg_rec_count = 2;
11391 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11392 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11396 /* Handling opcode 001 insns. */
11399 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11401 uint32_t record_buf[8], record_buf_mem[8];
11403 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11404 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11406 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11407 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11408 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11411 /* Handle MSR insn. */
11412 if (9 == arm_insn_r->opcode)
11414 /* CSPR is going to be changed. */
11415 record_buf[0] = ARM_PS_REGNUM;
11416 arm_insn_r->reg_rec_count = 1;
11420 /* SPSR is going to be changed. */
11423 else if (arm_insn_r->opcode <= 15)
11425 /* Normal data processing insns. */
11426 /* Out of 11 shifter operands mode, all the insn modifies destination
11427 register, which is specified by 13-16 decode. */
11428 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11429 record_buf[1] = ARM_PS_REGNUM;
11430 arm_insn_r->reg_rec_count = 2;
11437 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11438 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11442 /* Handling opcode 010 insns. */
11445 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11447 struct regcache *reg_cache = arm_insn_r->regcache;
11449 uint32_t reg_src1 = 0 , reg_dest = 0;
11450 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11451 uint32_t record_buf[8], record_buf_mem[8];
11453 ULONGEST u_regval = 0;
11455 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11456 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11458 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11460 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11461 /* LDR insn has a capability to do branching, if
11462 MOV LR, PC is precedded by LDR insn having Rn as R15
11463 in that case, it emulates branch and link insn, and hence we
11464 need to save CSPR and PC as well. */
11465 if (ARM_PC_REGNUM != reg_dest)
11467 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11468 arm_insn_r->reg_rec_count = 1;
11472 record_buf[0] = reg_dest;
11473 record_buf[1] = ARM_PS_REGNUM;
11474 arm_insn_r->reg_rec_count = 2;
11479 /* Store, immediate offset, immediate pre-indexed,
11480 immediate post-indexed. */
11481 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11482 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11483 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11485 if (bit (arm_insn_r->arm_insn, 23))
11487 tgt_mem_addr = u_regval + offset_12;
11491 tgt_mem_addr = u_regval - offset_12;
11494 switch (arm_insn_r->opcode)
11508 record_buf_mem[0] = 4;
11523 record_buf_mem[0] = 1;
11527 gdb_assert_not_reached ("no decoding pattern found");
11530 record_buf_mem[1] = tgt_mem_addr;
11531 arm_insn_r->mem_rec_count = 1;
11533 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11534 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11535 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11536 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11537 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11538 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11541 /* We are handling pre-indexed mode; post-indexed mode;
11542 where Rn is going to be changed. */
11543 record_buf[0] = reg_src1;
11544 arm_insn_r->reg_rec_count = 1;
11548 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11549 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11553 /* Handling opcode 011 insns. */
11556 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11558 struct regcache *reg_cache = arm_insn_r->regcache;
11560 uint32_t shift_imm = 0;
11561 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11562 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11563 uint32_t record_buf[8], record_buf_mem[8];
11566 ULONGEST u_regval[2];
11568 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11569 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11571 /* Handle enhanced store insns and LDRD DSP insn,
11572 order begins according to addressing modes for store insns
11576 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11578 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11579 /* LDR insn has a capability to do branching, if
11580 MOV LR, PC is precedded by LDR insn having Rn as R15
11581 in that case, it emulates branch and link insn, and hence we
11582 need to save CSPR and PC as well. */
11583 if (15 != reg_dest)
11585 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11586 arm_insn_r->reg_rec_count = 1;
11590 record_buf[0] = reg_dest;
11591 record_buf[1] = ARM_PS_REGNUM;
11592 arm_insn_r->reg_rec_count = 2;
11597 if (! bits (arm_insn_r->arm_insn, 4, 11))
11599 /* Store insn, register offset and register pre-indexed,
11600 register post-indexed. */
11602 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11604 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11605 regcache_raw_read_unsigned (reg_cache, reg_src1
11607 regcache_raw_read_unsigned (reg_cache, reg_src2
11609 if (15 == reg_src2)
11611 /* If R15 was used as Rn, hence current PC+8. */
11612 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11613 u_regval[0] = u_regval[0] + 8;
11615 /* Calculate target store address, Rn +/- Rm, register offset. */
11617 if (bit (arm_insn_r->arm_insn, 23))
11619 tgt_mem_addr = u_regval[0] + u_regval[1];
11623 tgt_mem_addr = u_regval[1] - u_regval[0];
11626 switch (arm_insn_r->opcode)
11640 record_buf_mem[0] = 4;
11655 record_buf_mem[0] = 1;
11659 gdb_assert_not_reached ("no decoding pattern found");
11662 record_buf_mem[1] = tgt_mem_addr;
11663 arm_insn_r->mem_rec_count = 1;
11665 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11666 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11667 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11668 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11669 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11670 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11673 /* Rn is going to be changed in pre-indexed mode and
11674 post-indexed mode as well. */
11675 record_buf[0] = reg_src2;
11676 arm_insn_r->reg_rec_count = 1;
11681 /* Store insn, scaled register offset; scaled pre-indexed. */
11682 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11684 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11686 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11687 /* Get shift_imm. */
11688 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11689 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11690 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11691 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11692 /* Offset_12 used as shift. */
11696 /* Offset_12 used as index. */
11697 offset_12 = u_regval[0] << shift_imm;
11701 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11707 if (bit (u_regval[0], 31))
11709 offset_12 = 0xFFFFFFFF;
11718 /* This is arithmetic shift. */
11719 offset_12 = s_word >> shift_imm;
11726 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11728 /* Get C flag value and shift it by 31. */
11729 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11730 | (u_regval[0]) >> 1);
11734 offset_12 = (u_regval[0] >> shift_imm) \
11736 (sizeof(uint32_t) - shift_imm));
11741 gdb_assert_not_reached ("no decoding pattern found");
11745 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11747 if (bit (arm_insn_r->arm_insn, 23))
11749 tgt_mem_addr = u_regval[1] + offset_12;
11753 tgt_mem_addr = u_regval[1] - offset_12;
11756 switch (arm_insn_r->opcode)
11770 record_buf_mem[0] = 4;
11785 record_buf_mem[0] = 1;
11789 gdb_assert_not_reached ("no decoding pattern found");
11792 record_buf_mem[1] = tgt_mem_addr;
11793 arm_insn_r->mem_rec_count = 1;
11795 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11796 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11797 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11798 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11799 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11800 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11803 /* Rn is going to be changed in register scaled pre-indexed
11804 mode,and scaled post indexed mode. */
11805 record_buf[0] = reg_src2;
11806 arm_insn_r->reg_rec_count = 1;
11811 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11812 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11816 /* Handling opcode 100 insns. */
11819 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11821 struct regcache *reg_cache = arm_insn_r->regcache;
11823 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11824 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11825 uint32_t start_address = 0, index = 0;
11826 uint32_t record_buf[24], record_buf_mem[48];
11828 ULONGEST u_regval[2] = {0};
11830 /* This mode is exclusively for load and store multiple. */
11831 /* Handle incremenrt after/before and decrment after.before mode;
11832 Rn is changing depending on W bit, but as of now we store Rn too
11833 without optimization. */
11835 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11837 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11839 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11841 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11846 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11850 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11851 while (register_bits)
11853 if (register_bits & 0x00000001)
11854 record_buf[index++] = register_count;
11855 register_bits = register_bits >> 1;
11859 /* Extra space for Base Register and CPSR; wihtout optimization. */
11860 record_buf[index++] = reg_src1;
11861 record_buf[index++] = ARM_PS_REGNUM;
11862 arm_insn_r->reg_rec_count = index;
11866 /* It handles both STM(1) and STM(2). */
11867 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11869 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11871 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11872 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11873 while (register_bits)
11875 if (register_bits & 0x00000001)
11877 register_bits = register_bits >> 1;
11882 /* Decrement after. */
11884 start_address = (u_regval[0]) - (register_count * 4) + 4;
11885 arm_insn_r->mem_rec_count = register_count;
11886 while (register_count)
11888 record_buf_mem[(register_count * 2) - 1] = start_address;
11889 record_buf_mem[(register_count * 2) - 2] = 4;
11890 start_address = start_address + 4;
11895 /* Increment after. */
11897 start_address = u_regval[0];
11898 arm_insn_r->mem_rec_count = register_count;
11899 while (register_count)
11901 record_buf_mem[(register_count * 2) - 1] = start_address;
11902 record_buf_mem[(register_count * 2) - 2] = 4;
11903 start_address = start_address + 4;
11908 /* Decrement before. */
11911 start_address = (u_regval[0]) - (register_count * 4);
11912 arm_insn_r->mem_rec_count = register_count;
11913 while (register_count)
11915 record_buf_mem[(register_count * 2) - 1] = start_address;
11916 record_buf_mem[(register_count * 2) - 2] = 4;
11917 start_address = start_address + 4;
11922 /* Increment before. */
11924 start_address = u_regval[0] + 4;
11925 arm_insn_r->mem_rec_count = register_count;
11926 while (register_count)
11928 record_buf_mem[(register_count * 2) - 1] = start_address;
11929 record_buf_mem[(register_count * 2) - 2] = 4;
11930 start_address = start_address + 4;
11936 gdb_assert_not_reached ("no decoding pattern found");
11940 /* Base register also changes; based on condition and W bit. */
11941 /* We save it anyway without optimization. */
11942 record_buf[0] = reg_src1;
11943 arm_insn_r->reg_rec_count = 1;
11946 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11947 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11951 /* Handling opcode 101 insns. */
11954 arm_record_b_bl (insn_decode_record *arm_insn_r)
11956 uint32_t record_buf[8];
11958 /* Handle B, BL, BLX(1) insns. */
11959 /* B simply branches so we do nothing here. */
11960 /* Note: BLX(1) doesnt fall here but instead it falls into
11961 extension space. */
11962 if (bit (arm_insn_r->arm_insn, 24))
11964 record_buf[0] = ARM_LR_REGNUM;
11965 arm_insn_r->reg_rec_count = 1;
11968 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11973 /* Handling opcode 110 insns. */
11976 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11978 printf_unfiltered (_("Process record does not support instruction "
11979 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11980 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11985 /* Handling opcode 111 insns. */
11988 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11990 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11991 struct regcache *reg_cache = arm_insn_r->regcache;
11992 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
11993 ULONGEST u_regval = 0;
11995 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11997 /* Handle arm SWI/SVC system call instructions. */
11998 if (15 == arm_insn_r->opcode)
12000 if (tdep->arm_syscall_record != NULL)
12002 ULONGEST svc_operand, svc_number;
12004 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12006 if (svc_operand) /* OABI. */
12007 svc_number = svc_operand - 0x900000;
12009 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12011 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12015 printf_unfiltered (_("no syscall record support\n"));
12021 arm_record_unsupported_insn (arm_insn_r);
12028 /* Handling opcode 000 insns. */
12031 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12033 uint32_t record_buf[8];
12034 uint32_t reg_src1 = 0;
12036 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12038 record_buf[0] = ARM_PS_REGNUM;
12039 record_buf[1] = reg_src1;
12040 thumb_insn_r->reg_rec_count = 2;
12042 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12048 /* Handling opcode 001 insns. */
12051 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12053 uint32_t record_buf[8];
12054 uint32_t reg_src1 = 0;
12056 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12058 record_buf[0] = ARM_PS_REGNUM;
12059 record_buf[1] = reg_src1;
12060 thumb_insn_r->reg_rec_count = 2;
12062 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12067 /* Handling opcode 010 insns. */
12070 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12072 struct regcache *reg_cache = thumb_insn_r->regcache;
12073 uint32_t record_buf[8], record_buf_mem[8];
12075 uint32_t reg_src1 = 0, reg_src2 = 0;
12076 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12078 ULONGEST u_regval[2] = {0};
12080 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12082 if (bit (thumb_insn_r->arm_insn, 12))
12084 /* Handle load/store register offset. */
12085 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12086 if (opcode2 >= 12 && opcode2 <= 15)
12088 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12089 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12090 record_buf[0] = reg_src1;
12091 thumb_insn_r->reg_rec_count = 1;
12093 else if (opcode2 >= 8 && opcode2 <= 10)
12095 /* STR(2), STRB(2), STRH(2) . */
12096 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12097 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12098 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12099 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12101 record_buf_mem[0] = 4; /* STR (2). */
12102 else if (10 == opcode2)
12103 record_buf_mem[0] = 1; /* STRB (2). */
12104 else if (9 == opcode2)
12105 record_buf_mem[0] = 2; /* STRH (2). */
12106 record_buf_mem[1] = u_regval[0] + u_regval[1];
12107 thumb_insn_r->mem_rec_count = 1;
12110 else if (bit (thumb_insn_r->arm_insn, 11))
12112 /* Handle load from literal pool. */
12114 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12115 record_buf[0] = reg_src1;
12116 thumb_insn_r->reg_rec_count = 1;
12120 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12121 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12122 if ((3 == opcode2) && (!opcode3))
12124 /* Branch with exchange. */
12125 record_buf[0] = ARM_PS_REGNUM;
12126 thumb_insn_r->reg_rec_count = 1;
12130 /* Format 8; special data processing insns. */
12131 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12132 record_buf[0] = ARM_PS_REGNUM;
12133 record_buf[1] = reg_src1;
12134 thumb_insn_r->reg_rec_count = 2;
12139 /* Format 5; data processing insns. */
12140 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12141 if (bit (thumb_insn_r->arm_insn, 7))
12143 reg_src1 = reg_src1 + 8;
12145 record_buf[0] = ARM_PS_REGNUM;
12146 record_buf[1] = reg_src1;
12147 thumb_insn_r->reg_rec_count = 2;
12150 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12151 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12157 /* Handling opcode 001 insns. */
12160 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12162 struct regcache *reg_cache = thumb_insn_r->regcache;
12163 uint32_t record_buf[8], record_buf_mem[8];
12165 uint32_t reg_src1 = 0;
12166 uint32_t opcode = 0, immed_5 = 0;
12168 ULONGEST u_regval = 0;
12170 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12175 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12176 record_buf[0] = reg_src1;
12177 thumb_insn_r->reg_rec_count = 1;
12182 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12183 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12184 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12185 record_buf_mem[0] = 4;
12186 record_buf_mem[1] = u_regval + (immed_5 * 4);
12187 thumb_insn_r->mem_rec_count = 1;
12190 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12191 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12197 /* Handling opcode 100 insns. */
12200 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12202 struct regcache *reg_cache = thumb_insn_r->regcache;
12203 uint32_t record_buf[8], record_buf_mem[8];
12205 uint32_t reg_src1 = 0;
12206 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12208 ULONGEST u_regval = 0;
12210 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12215 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12216 record_buf[0] = reg_src1;
12217 thumb_insn_r->reg_rec_count = 1;
12219 else if (1 == opcode)
12222 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12223 record_buf[0] = reg_src1;
12224 thumb_insn_r->reg_rec_count = 1;
12226 else if (2 == opcode)
12229 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12230 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12231 record_buf_mem[0] = 4;
12232 record_buf_mem[1] = u_regval + (immed_8 * 4);
12233 thumb_insn_r->mem_rec_count = 1;
12235 else if (0 == opcode)
12238 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12239 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12240 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12241 record_buf_mem[0] = 2;
12242 record_buf_mem[1] = u_regval + (immed_5 * 2);
12243 thumb_insn_r->mem_rec_count = 1;
12246 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12247 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12253 /* Handling opcode 101 insns. */
12256 thumb_record_misc (insn_decode_record *thumb_insn_r)
12258 struct regcache *reg_cache = thumb_insn_r->regcache;
12260 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12261 uint32_t register_bits = 0, register_count = 0;
12262 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12263 uint32_t record_buf[24], record_buf_mem[48];
12266 ULONGEST u_regval = 0;
12268 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12269 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12270 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12275 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12276 while (register_bits)
12278 if (register_bits & 0x00000001)
12279 record_buf[index++] = register_count;
12280 register_bits = register_bits >> 1;
12283 record_buf[index++] = ARM_PS_REGNUM;
12284 record_buf[index++] = ARM_SP_REGNUM;
12285 thumb_insn_r->reg_rec_count = index;
12287 else if (10 == opcode2)
12290 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12291 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12292 while (register_bits)
12294 if (register_bits & 0x00000001)
12296 register_bits = register_bits >> 1;
12298 start_address = u_regval - \
12299 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12300 thumb_insn_r->mem_rec_count = register_count;
12301 while (register_count)
12303 record_buf_mem[(register_count * 2) - 1] = start_address;
12304 record_buf_mem[(register_count * 2) - 2] = 4;
12305 start_address = start_address + 4;
12308 record_buf[0] = ARM_SP_REGNUM;
12309 thumb_insn_r->reg_rec_count = 1;
12311 else if (0x1E == opcode1)
12314 /* Handle enhanced software breakpoint insn, BKPT. */
12315 /* CPSR is changed to be executed in ARM state, disabling normal
12316 interrupts, entering abort mode. */
12317 /* According to high vector configuration PC is set. */
12318 /* User hits breakpoint and type reverse, in that case, we need to go back with
12319 previous CPSR and Program Counter. */
12320 record_buf[0] = ARM_PS_REGNUM;
12321 record_buf[1] = ARM_LR_REGNUM;
12322 thumb_insn_r->reg_rec_count = 2;
12323 /* We need to save SPSR value, which is not yet done. */
12324 printf_unfiltered (_("Process record does not support instruction "
12325 "0x%0x at address %s.\n"),
12326 thumb_insn_r->arm_insn,
12327 paddress (thumb_insn_r->gdbarch,
12328 thumb_insn_r->this_addr));
12331 else if ((0 == opcode) || (1 == opcode))
12333 /* ADD(5), ADD(6). */
12334 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12335 record_buf[0] = reg_src1;
12336 thumb_insn_r->reg_rec_count = 1;
12338 else if (2 == opcode)
12340 /* ADD(7), SUB(4). */
12341 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12342 record_buf[0] = ARM_SP_REGNUM;
12343 thumb_insn_r->reg_rec_count = 1;
12346 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12347 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12353 /* Handling opcode 110 insns. */
12356 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12358 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12359 struct regcache *reg_cache = thumb_insn_r->regcache;
12361 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12362 uint32_t reg_src1 = 0;
12363 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12364 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12365 uint32_t record_buf[24], record_buf_mem[48];
12367 ULONGEST u_regval = 0;
12369 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12370 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12376 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12378 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12379 while (register_bits)
12381 if (register_bits & 0x00000001)
12382 record_buf[index++] = register_count;
12383 register_bits = register_bits >> 1;
12386 record_buf[index++] = reg_src1;
12387 thumb_insn_r->reg_rec_count = index;
12389 else if (0 == opcode2)
12391 /* It handles both STMIA. */
12392 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12394 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12395 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12396 while (register_bits)
12398 if (register_bits & 0x00000001)
12400 register_bits = register_bits >> 1;
12402 start_address = u_regval;
12403 thumb_insn_r->mem_rec_count = register_count;
12404 while (register_count)
12406 record_buf_mem[(register_count * 2) - 1] = start_address;
12407 record_buf_mem[(register_count * 2) - 2] = 4;
12408 start_address = start_address + 4;
12412 else if (0x1F == opcode1)
12414 /* Handle arm syscall insn. */
12415 if (tdep->arm_syscall_record != NULL)
12417 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12418 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12422 printf_unfiltered (_("no syscall record support\n"));
12427 /* B (1), conditional branch is automatically taken care in process_record,
12428 as PC is saved there. */
12430 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12431 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12437 /* Handling opcode 111 insns. */
12440 thumb_record_branch (insn_decode_record *thumb_insn_r)
12442 uint32_t record_buf[8];
12443 uint32_t bits_h = 0;
12445 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12447 if (2 == bits_h || 3 == bits_h)
12450 record_buf[0] = ARM_LR_REGNUM;
12451 thumb_insn_r->reg_rec_count = 1;
12453 else if (1 == bits_h)
12456 record_buf[0] = ARM_PS_REGNUM;
12457 record_buf[1] = ARM_LR_REGNUM;
12458 thumb_insn_r->reg_rec_count = 2;
12461 /* B(2) is automatically taken care in process_record, as PC is
12464 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12469 /* Handler for thumb2 load/store multiple instructions. */
12472 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12474 struct regcache *reg_cache = thumb2_insn_r->regcache;
12476 uint32_t reg_rn, op;
12477 uint32_t register_bits = 0, register_count = 0;
12478 uint32_t index = 0, start_address = 0;
12479 uint32_t record_buf[24], record_buf_mem[48];
12481 ULONGEST u_regval = 0;
12483 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12484 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12486 if (0 == op || 3 == op)
12488 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12490 /* Handle RFE instruction. */
12491 record_buf[0] = ARM_PS_REGNUM;
12492 thumb2_insn_r->reg_rec_count = 1;
12496 /* Handle SRS instruction after reading banked SP. */
12497 return arm_record_unsupported_insn (thumb2_insn_r);
12500 else if (1 == op || 2 == op)
12502 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12504 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12505 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12506 while (register_bits)
12508 if (register_bits & 0x00000001)
12509 record_buf[index++] = register_count;
12512 register_bits = register_bits >> 1;
12514 record_buf[index++] = reg_rn;
12515 record_buf[index++] = ARM_PS_REGNUM;
12516 thumb2_insn_r->reg_rec_count = index;
12520 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12521 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12522 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12523 while (register_bits)
12525 if (register_bits & 0x00000001)
12528 register_bits = register_bits >> 1;
12533 /* Start address calculation for LDMDB/LDMEA. */
12534 start_address = u_regval;
12538 /* Start address calculation for LDMDB/LDMEA. */
12539 start_address = u_regval - register_count * 4;
12542 thumb2_insn_r->mem_rec_count = register_count;
12543 while (register_count)
12545 record_buf_mem[register_count * 2 - 1] = start_address;
12546 record_buf_mem[register_count * 2 - 2] = 4;
12547 start_address = start_address + 4;
12550 record_buf[0] = reg_rn;
12551 record_buf[1] = ARM_PS_REGNUM;
12552 thumb2_insn_r->reg_rec_count = 2;
12556 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12558 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12560 return ARM_RECORD_SUCCESS;
12563 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12567 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12569 struct regcache *reg_cache = thumb2_insn_r->regcache;
12571 uint32_t reg_rd, reg_rn, offset_imm;
12572 uint32_t reg_dest1, reg_dest2;
12573 uint32_t address, offset_addr;
12574 uint32_t record_buf[8], record_buf_mem[8];
12575 uint32_t op1, op2, op3;
12578 ULONGEST u_regval[2];
12580 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12581 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12582 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12584 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12586 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12588 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12589 record_buf[0] = reg_dest1;
12590 record_buf[1] = ARM_PS_REGNUM;
12591 thumb2_insn_r->reg_rec_count = 2;
12594 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12596 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12597 record_buf[2] = reg_dest2;
12598 thumb2_insn_r->reg_rec_count = 3;
12603 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12604 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12606 if (0 == op1 && 0 == op2)
12608 /* Handle STREX. */
12609 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12610 address = u_regval[0] + (offset_imm * 4);
12611 record_buf_mem[0] = 4;
12612 record_buf_mem[1] = address;
12613 thumb2_insn_r->mem_rec_count = 1;
12614 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12615 record_buf[0] = reg_rd;
12616 thumb2_insn_r->reg_rec_count = 1;
12618 else if (1 == op1 && 0 == op2)
12620 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12621 record_buf[0] = reg_rd;
12622 thumb2_insn_r->reg_rec_count = 1;
12623 address = u_regval[0];
12624 record_buf_mem[1] = address;
12628 /* Handle STREXB. */
12629 record_buf_mem[0] = 1;
12630 thumb2_insn_r->mem_rec_count = 1;
12634 /* Handle STREXH. */
12635 record_buf_mem[0] = 2 ;
12636 thumb2_insn_r->mem_rec_count = 1;
12640 /* Handle STREXD. */
12641 address = u_regval[0];
12642 record_buf_mem[0] = 4;
12643 record_buf_mem[2] = 4;
12644 record_buf_mem[3] = address + 4;
12645 thumb2_insn_r->mem_rec_count = 2;
12650 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12652 if (bit (thumb2_insn_r->arm_insn, 24))
12654 if (bit (thumb2_insn_r->arm_insn, 23))
12655 offset_addr = u_regval[0] + (offset_imm * 4);
12657 offset_addr = u_regval[0] - (offset_imm * 4);
12659 address = offset_addr;
12662 address = u_regval[0];
12664 record_buf_mem[0] = 4;
12665 record_buf_mem[1] = address;
12666 record_buf_mem[2] = 4;
12667 record_buf_mem[3] = address + 4;
12668 thumb2_insn_r->mem_rec_count = 2;
12669 record_buf[0] = reg_rn;
12670 thumb2_insn_r->reg_rec_count = 1;
12674 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12676 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12678 return ARM_RECORD_SUCCESS;
12681 /* Handler for thumb2 data processing (shift register and modified immediate)
12685 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12687 uint32_t reg_rd, op;
12688 uint32_t record_buf[8];
12690 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12691 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12693 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12695 record_buf[0] = ARM_PS_REGNUM;
12696 thumb2_insn_r->reg_rec_count = 1;
12700 record_buf[0] = reg_rd;
12701 record_buf[1] = ARM_PS_REGNUM;
12702 thumb2_insn_r->reg_rec_count = 2;
12705 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12707 return ARM_RECORD_SUCCESS;
12710 /* Generic handler for thumb2 instructions which effect destination and PS
12714 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12717 uint32_t record_buf[8];
12719 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12721 record_buf[0] = reg_rd;
12722 record_buf[1] = ARM_PS_REGNUM;
12723 thumb2_insn_r->reg_rec_count = 2;
12725 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12727 return ARM_RECORD_SUCCESS;
12730 /* Handler for thumb2 branch and miscellaneous control instructions. */
12733 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12735 uint32_t op, op1, op2;
12736 uint32_t record_buf[8];
12738 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12739 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12740 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12742 /* Handle MSR insn. */
12743 if (!(op1 & 0x2) && 0x38 == op)
12747 /* CPSR is going to be changed. */
12748 record_buf[0] = ARM_PS_REGNUM;
12749 thumb2_insn_r->reg_rec_count = 1;
12753 arm_record_unsupported_insn(thumb2_insn_r);
12757 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12760 record_buf[0] = ARM_PS_REGNUM;
12761 record_buf[1] = ARM_LR_REGNUM;
12762 thumb2_insn_r->reg_rec_count = 2;
12765 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12767 return ARM_RECORD_SUCCESS;
12770 /* Handler for thumb2 store single data item instructions. */
12773 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12775 struct regcache *reg_cache = thumb2_insn_r->regcache;
12777 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12778 uint32_t address, offset_addr;
12779 uint32_t record_buf[8], record_buf_mem[8];
12782 ULONGEST u_regval[2];
12784 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12785 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12786 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12787 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12789 if (bit (thumb2_insn_r->arm_insn, 23))
12792 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12793 offset_addr = u_regval[0] + offset_imm;
12794 address = offset_addr;
12799 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12801 /* Handle STRB (register). */
12802 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12803 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12804 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12805 offset_addr = u_regval[1] << shift_imm;
12806 address = u_regval[0] + offset_addr;
12810 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12811 if (bit (thumb2_insn_r->arm_insn, 10))
12813 if (bit (thumb2_insn_r->arm_insn, 9))
12814 offset_addr = u_regval[0] + offset_imm;
12816 offset_addr = u_regval[0] - offset_imm;
12818 address = offset_addr;
12821 address = u_regval[0];
12827 /* Store byte instructions. */
12830 record_buf_mem[0] = 1;
12832 /* Store half word instructions. */
12835 record_buf_mem[0] = 2;
12837 /* Store word instructions. */
12840 record_buf_mem[0] = 4;
12844 gdb_assert_not_reached ("no decoding pattern found");
12848 record_buf_mem[1] = address;
12849 thumb2_insn_r->mem_rec_count = 1;
12850 record_buf[0] = reg_rn;
12851 thumb2_insn_r->reg_rec_count = 1;
12853 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12855 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12857 return ARM_RECORD_SUCCESS;
12860 /* Handler for thumb2 load memory hints instructions. */
12863 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12865 uint32_t record_buf[8];
12866 uint32_t reg_rt, reg_rn;
12868 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12869 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12871 if (ARM_PC_REGNUM != reg_rt)
12873 record_buf[0] = reg_rt;
12874 record_buf[1] = reg_rn;
12875 record_buf[2] = ARM_PS_REGNUM;
12876 thumb2_insn_r->reg_rec_count = 3;
12878 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12880 return ARM_RECORD_SUCCESS;
12883 return ARM_RECORD_FAILURE;
12886 /* Handler for thumb2 load word instructions. */
12889 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12891 uint32_t opcode1 = 0, opcode2 = 0;
12892 uint32_t record_buf[8];
12894 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12895 record_buf[1] = ARM_PS_REGNUM;
12896 thumb2_insn_r->reg_rec_count = 2;
12898 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12900 return ARM_RECORD_SUCCESS;
12903 /* Handler for thumb2 long multiply, long multiply accumulate, and
12904 divide instructions. */
12907 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12909 uint32_t opcode1 = 0, opcode2 = 0;
12910 uint32_t record_buf[8];
12911 uint32_t reg_src1 = 0;
12913 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12914 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12916 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12918 /* Handle SMULL, UMULL, SMULAL. */
12919 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12920 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12921 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12922 record_buf[2] = ARM_PS_REGNUM;
12923 thumb2_insn_r->reg_rec_count = 3;
12925 else if (1 == opcode1 || 3 == opcode2)
12927 /* Handle SDIV and UDIV. */
12928 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12929 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12930 record_buf[2] = ARM_PS_REGNUM;
12931 thumb2_insn_r->reg_rec_count = 3;
12934 return ARM_RECORD_FAILURE;
12936 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12938 return ARM_RECORD_SUCCESS;
12941 /* Decodes thumb2 instruction type and invokes its record handler. */
12943 static unsigned int
12944 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12946 uint32_t op, op1, op2;
12948 op = bit (thumb2_insn_r->arm_insn, 15);
12949 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12950 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12954 if (!(op2 & 0x64 ))
12956 /* Load/store multiple instruction. */
12957 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12959 else if (!((op2 & 0x64) ^ 0x04))
12961 /* Load/store (dual/exclusive) and table branch instruction. */
12962 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12964 else if (!((op2 & 0x20) ^ 0x20))
12966 /* Data-processing (shifted register). */
12967 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12969 else if (op2 & 0x40)
12971 /* Co-processor instructions. */
12972 arm_record_unsupported_insn (thumb2_insn_r);
12975 else if (op1 == 0x02)
12979 /* Branches and miscellaneous control instructions. */
12980 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12982 else if (op2 & 0x20)
12984 /* Data-processing (plain binary immediate) instruction. */
12985 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12989 /* Data-processing (modified immediate). */
12990 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12993 else if (op1 == 0x03)
12995 if (!(op2 & 0x71 ))
12997 /* Store single data item. */
12998 return thumb2_record_str_single_data (thumb2_insn_r);
13000 else if (!((op2 & 0x71) ^ 0x10))
13002 /* Advanced SIMD or structure load/store instructions. */
13003 return arm_record_unsupported_insn (thumb2_insn_r);
13005 else if (!((op2 & 0x67) ^ 0x01))
13007 /* Load byte, memory hints instruction. */
13008 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13010 else if (!((op2 & 0x67) ^ 0x03))
13012 /* Load halfword, memory hints instruction. */
13013 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13015 else if (!((op2 & 0x67) ^ 0x05))
13017 /* Load word instruction. */
13018 return thumb2_record_ld_word (thumb2_insn_r);
13020 else if (!((op2 & 0x70) ^ 0x20))
13022 /* Data-processing (register) instruction. */
13023 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13025 else if (!((op2 & 0x78) ^ 0x30))
13027 /* Multiply, multiply accumulate, abs diff instruction. */
13028 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13030 else if (!((op2 & 0x78) ^ 0x38))
13032 /* Long multiply, long multiply accumulate, and divide. */
13033 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13035 else if (op2 & 0x40)
13037 /* Co-processor instructions. */
13038 return arm_record_unsupported_insn (thumb2_insn_r);
13045 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13046 and positive val on fauilure. */
13049 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13051 gdb_byte buf[insn_size];
13053 memset (&buf[0], 0, insn_size);
13055 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13057 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13059 gdbarch_byte_order (insn_record->gdbarch));
13063 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13065 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13069 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13070 uint32_t insn_size)
13073 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13074 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13076 arm_record_data_proc_misc_ld_str, /* 000. */
13077 arm_record_data_proc_imm, /* 001. */
13078 arm_record_ld_st_imm_offset, /* 010. */
13079 arm_record_ld_st_reg_offset, /* 011. */
13080 arm_record_ld_st_multiple, /* 100. */
13081 arm_record_b_bl, /* 101. */
13082 arm_record_unsupported_insn, /* 110. */
13083 arm_record_coproc_data_proc /* 111. */
13086 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13087 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13089 thumb_record_shift_add_sub, /* 000. */
13090 thumb_record_add_sub_cmp_mov, /* 001. */
13091 thumb_record_ld_st_reg_offset, /* 010. */
13092 thumb_record_ld_st_imm_offset, /* 011. */
13093 thumb_record_ld_st_stack, /* 100. */
13094 thumb_record_misc, /* 101. */
13095 thumb_record_ldm_stm_swi, /* 110. */
13096 thumb_record_branch /* 111. */
13099 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13100 uint32_t insn_id = 0;
13102 if (extract_arm_insn (arm_record, insn_size))
13106 printf_unfiltered (_("Process record: error reading memory at "
13107 "addr %s len = %d.\n"),
13108 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13112 else if (ARM_RECORD == record_type)
13114 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13115 insn_id = bits (arm_record->arm_insn, 25, 27);
13116 ret = arm_record_extension_space (arm_record);
13117 /* If this insn has fallen into extension space
13118 then we need not decode it anymore. */
13119 if (ret != -1 && !INSN_RECORDED(arm_record))
13121 ret = arm_handle_insn[insn_id] (arm_record);
13124 else if (THUMB_RECORD == record_type)
13126 /* As thumb does not have condition codes, we set negative. */
13127 arm_record->cond = -1;
13128 insn_id = bits (arm_record->arm_insn, 13, 15);
13129 ret = thumb_handle_insn[insn_id] (arm_record);
13131 else if (THUMB2_RECORD == record_type)
13133 /* As thumb does not have condition codes, we set negative. */
13134 arm_record->cond = -1;
13136 /* Swap first half of 32bit thumb instruction with second half. */
13137 arm_record->arm_insn
13138 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13140 insn_id = thumb2_record_decode_insn_handler (arm_record);
13142 if (insn_id != ARM_RECORD_SUCCESS)
13144 arm_record_unsupported_insn (arm_record);
13150 /* Throw assertion. */
13151 gdb_assert_not_reached ("not a valid instruction, could not decode");
13158 /* Cleans up local record registers and memory allocations. */
13161 deallocate_reg_mem (insn_decode_record *record)
13163 xfree (record->arm_regs);
13164 xfree (record->arm_mems);
13168 /* Parse the current instruction and record the values of the registers and
13169 memory that will be changed in current instruction to record_arch_list".
13170 Return -1 if something is wrong. */
13173 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13174 CORE_ADDR insn_addr)
13177 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13178 uint32_t no_of_rec = 0;
13179 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13180 ULONGEST t_bit = 0, insn_id = 0;
13182 ULONGEST u_regval = 0;
13184 insn_decode_record arm_record;
13186 memset (&arm_record, 0, sizeof (insn_decode_record));
13187 arm_record.regcache = regcache;
13188 arm_record.this_addr = insn_addr;
13189 arm_record.gdbarch = gdbarch;
13192 if (record_debug > 1)
13194 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13196 paddress (gdbarch, arm_record.this_addr));
13199 if (extract_arm_insn (&arm_record, 2))
13203 printf_unfiltered (_("Process record: error reading memory at "
13204 "addr %s len = %d.\n"),
13205 paddress (arm_record.gdbarch,
13206 arm_record.this_addr), 2);
13211 /* Check the insn, whether it is thumb or arm one. */
13213 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13214 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13217 if (!(u_regval & t_bit))
13219 /* We are decoding arm insn. */
13220 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13224 insn_id = bits (arm_record.arm_insn, 11, 15);
13225 /* is it thumb2 insn? */
13226 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13228 ret = decode_insn (&arm_record, THUMB2_RECORD,
13229 THUMB2_INSN_SIZE_BYTES);
13233 /* We are decoding thumb insn. */
13234 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13240 /* Record registers. */
13241 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13242 if (arm_record.arm_regs)
13244 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13246 if (record_full_arch_list_add_reg
13247 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13251 /* Record memories. */
13252 if (arm_record.arm_mems)
13254 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13256 if (record_full_arch_list_add_mem
13257 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13258 arm_record.arm_mems[no_of_rec].len))
13263 if (record_full_arch_list_add_end ())
13268 deallocate_reg_mem (&arm_record);