1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
50 #include "gdb/sim-arm.h"
53 #include "coff/internal.h"
59 #include "record-full.h"
61 #include "features/arm-with-m.c"
62 #include "features/arm-with-m-fpa-layout.c"
63 #include "features/arm-with-m-vfp-d16.c"
64 #include "features/arm-with-iwmmxt.c"
65 #include "features/arm-with-vfpv2.c"
66 #include "features/arm-with-vfpv3.c"
67 #include "features/arm-with-neon.c"
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol.
Both macros are thin wrappers around the generic MSYMBOL_TARGET_FLAG_1
accessor; the flag is write-once at symbol-read time and consulted by
arm_pc_is_thumb to classify addresses without mapping symbols. */
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
84 /* Per-objfile data used for mapping symbols. */
85 static const struct objfile_data *arm_objfile_data_key;
/* One ARM/Thumb mapping symbol: a section-relative VALUE plus a TYPE
character.  NOTE(review): member declarations are not all visible
here; usage in arm_find_mapping_symbol shows at least `value' and
`type' fields, with `value' used for ordered searching.  */
87 struct arm_mapping_symbol
92 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
93 DEF_VEC_O(arm_mapping_symbol_s);
/* Per-objfile container: one sorted vector of mapping symbols for each
BFD section, indexed by the section's index (see
arm_find_mapping_symbol).  */
95 struct arm_per_objfile
97 VEC(arm_mapping_symbol_s) **section_maps;
100 /* The list of available "set arm ..." and "show arm ..." commands. */
101 static struct cmd_list_element *setarmcmdlist = NULL;
102 static struct cmd_list_element *showarmcmdlist = NULL;
104 /* The type of floating-point to use. Keep this in sync with enum
105 arm_float_model, and the help string in _initialize_arm_tdep. */
106 static const char *const fp_model_strings[] =
116 /* A variable that can be configured by the user. */
117 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
118 static const char *current_fp_model = "auto";
120 /* The ABI to use. Keep this in sync with arm_abi_kind. */
121 static const char *const arm_abi_strings[] =
129 /* A variable that can be configured by the user. */
130 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
131 static const char *arm_abi_string = "auto";
133 /* The execution mode to assume. */
134 static const char *const arm_mode_strings[] =
/* User-settable mode overrides: the fallback is used only when no other
evidence (symbols, mapping symbols) decides ARM vs. Thumb; the force
string overrides the symbol table entirely (see arm_pc_is_thumb).  */
142 static const char *arm_fallback_mode_string = "auto";
143 static const char *arm_force_mode_string = "auto";
145 /* Internal override of the execution mode. -1 means no override,
146 0 means override to ARM mode, 1 means override to Thumb mode.
147 The effect is the same as if arm_force_mode has been set by the
148 user (except the internal override has precedence over a user's
149 arm_force_mode override). */
150 static int arm_override_mode = -1;
152 /* Number of different reg name sets (options). */
153 static int num_disassembly_options;
155 /* The standard register names, and all the valid aliases for them. Note
156 that `fp', `sp' and `pc' are not added in this alias list, because they
157 have been added as builtin user registers in
158 std-regs.c:_initialize_frame_reg. */
163 } arm_register_aliases[] = {
164 /* Basic register numbers. */
181 /* Synonyms (argument and variable registers). */
194 /* Other platform-specific names for r9. */
200 /* Names used by GCC (not listed in the ARM EABI). */
202 /* A special name from the older ATPCS. */
/* Canonical names for the 26 core/FPA registers, indexed by GDB
register number (see the trailing index comments): r0-r12, sp, lr,
pc, the eight FPA registers f0-f7, fps and cpsr.  */
206 static const char *const arm_register_names[] =
207 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
208 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
209 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
210 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
211 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
212 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
213 "fps", "cpsr" }; /* 24 25 */
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
221 /* This is used to keep the bfd arch_info in sync with the disassembly
style (a "set arm disassembler" sfunc plus its helper).  */
223 static void set_disassembly_style_sfunc(char *, int,
224 struct cmd_list_element *);
225 static void set_disassembly_style (void);
/* Converters between the target's extended (FPA) float format and the
host representation.  */
227 static void convert_from_extended (const struct floatformat *, const void *,
229 static void convert_to_extended (const struct floatformat *, void *,
/* Pseudo-register read/write helpers for NEON quad (Q) registers,
which are composed from pairs of double (D) registers.  */
232 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, gdb_byte *buf);
235 static void arm_neon_quad_write (struct gdbarch *gdbarch,
236 struct regcache *regcache,
237 int regnum, const gdb_byte *buf);
/* Cached results of analyzing one function prologue, shared by the
ARM and Thumb prologue scanners.  */
239 struct arm_prologue_cache
241 /* The stack pointer at the time this frame was created; i.e. the
242 caller's stack pointer when this function was called. It is used
243 to identify this frame. */
246 /* The frame base for this frame is just prev_sp - frame size.
247 FRAMESIZE is the distance from the frame pointer to the
248 initial stack pointer. */
252 /* The register used to hold the frame pointer for this frame. */
255 /* Saved register offsets. */
256 struct trad_frame_saved_reg *saved_regs;
259 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
260 CORE_ADDR prologue_start,
261 CORE_ADDR prologue_end,
262 struct arm_prologue_cache *cache);
264 /* Architecture version for displaced stepping. This affects the behaviour of
265 certain instructions, and really should not be hard-wired. */
267 #define DISPLACED_STEPPING_ARCH_VERSION 5
269 /* Set to true if the 32-bit mode is in use. */
273 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.
NOTE(review): the mask differs for M-profile (checked via the tdep
is_m flag below) versus A/R-profile CPSR — confirm the two return
values against XPSR_T / CPSR_T definitions.  */
276 arm_psr_thumb_bit (struct gdbarch *gdbarch)
278 if (gdbarch_tdep (gdbarch)->is_m)
284 /* Determine if FRAME is executing in Thumb mode.  Returns nonzero if
the unwound status register has its Thumb bit set.  */
287 arm_frame_is_thumb (struct frame_info *frame)
/* Pick the correct T-bit mask for this frame's architecture.  */
290 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
292 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
293 directly (from a signal frame or dummy frame) or by interpreting
294 the saved LR (from a prologue or DWARF frame). So consult it and
295 trust the unwinders. */
296 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
298 return (cpsr & t_bit) != 0;
301 /* Callback for VEC_lower_bound.  Orders mapping symbols by their
section-relative address, so the vector can be binary-searched.  */
304 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
305 const struct arm_mapping_symbol *rhs)
307 return lhs->value < rhs->value;
310 /* Search for the mapping symbol covering MEMADDR. If one is found,
311 return its type. Otherwise, return 0. If START is non-NULL,
312 set *START to the location of the mapping symbol.
Mapping symbols are the ELF-for-ARM markers distinguishing ARM code,
Thumb code and data within a section — presumably `$a'/`$t'/`$d';
verify against the objfile reader that populates section_maps.  */
315 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
317 struct obj_section *sec;
319 /* If there are mapping symbols, consult them. */
320 sec = find_pc_section (memaddr);
323 struct arm_per_objfile *data;
324 VEC(arm_mapping_symbol_s) *map;
/* Search key is the section-relative offset of MEMADDR.  */
325 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
329 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
330 arm_objfile_data_key);
/* One sorted vector of mapping symbols per BFD section.  */
333 map = data->section_maps[sec->the_bfd_section->index];
334 if (!VEC_empty (arm_mapping_symbol_s, map))
336 struct arm_mapping_symbol *map_sym;
338 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
339 arm_compare_mapping_symbols);
341 /* VEC_lower_bound finds the earliest ordered insertion
342 point. If the following symbol starts at this exact
343 address, we use that; otherwise, the preceding
344 mapping symbol covers this address. */
345 if (idx < VEC_length (arm_mapping_symbol_s, map))
347 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
348 if (map_sym->value == map_key.value)
/* Report the absolute address of the mapping symbol.  */
351 *start = map_sym->value + obj_section_addr (sec);
352 return map_sym->type;
/* Fall back to the preceding symbol, which covers MEMADDR.  */
358 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
360 *start = map_sym->value + obj_section_addr (sec);
361 return map_sym->type;
370 /* Determine if the program counter specified in MEMADDR is in a Thumb
371 function. This function should be called for addresses unrelated to
372 any executing frame; otherwise, prefer arm_frame_is_thumb.
The checks below are ordered from strongest to weakest evidence:
displaced-step origin, low address bit, internal override, user
force-mode, M-profile, mapping symbols, minimal-symbol "special"
bit, user fallback mode, live CPSR, and finally assume ARM.  */
375 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
377 struct bound_minimal_symbol sym;
379 struct displaced_step_closure* dsc
380 = get_displaced_step_closure_by_addr(memaddr);
382 /* If checking the mode of displaced instruction in copy area, the mode
383 should be determined by instruction on the original address. */
387 fprintf_unfiltered (gdb_stdlog,
388 "displaced: check mode of %.8lx instead of %.8lx\n",
389 (unsigned long) dsc->insn_addr,
390 (unsigned long) memaddr);
391 memaddr = dsc->insn_addr;
394 /* If bit 0 of the address is set, assume this is a Thumb address. */
395 if (IS_THUMB_ADDR (memaddr))
398 /* Respect internal mode override if active. */
399 if (arm_override_mode != -1)
400 return arm_override_mode;
402 /* If the user wants to override the symbol table, let him. */
403 if (strcmp (arm_force_mode_string, "arm") == 0)
405 if (strcmp (arm_force_mode_string, "thumb") == 0)
408 /* ARM v6-M and v7-M are always in Thumb mode. */
409 if (gdbarch_tdep (gdbarch)->is_m)
412 /* If there are mapping symbols, consult them. */
413 type = arm_find_mapping_symbol (memaddr, NULL);
417 /* Thumb functions have a "special" bit set in minimal symbols. */
418 sym = lookup_minimal_symbol_by_pc (memaddr);
420 return (MSYMBOL_IS_SPECIAL (sym.minsym));
422 /* If the user wants to override the fallback mode, let them. */
423 if (strcmp (arm_fallback_mode_string, "arm") == 0)
425 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
428 /* If we couldn't find any symbol, but we're talking to a running
429 target, then trust the current value of $cpsr. This lets
430 "display/i $pc" always show the correct mode (though if there is
431 a symbol table we will not reach here, so it still may not be
432 displayed in the mode it will be executed). */
433 if (target_has_registers)
434 return arm_frame_is_thumb (get_current_frame ());
436 /* Otherwise we're out of luck; we assume ARM. */
440 /* Remove useless bits from addresses in a running program.  Strips
the Thumb bit; on A/R profiles also masks to a word-aligned 26-bit
address space (the 0x03fffffc mask below).  */
442 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
444 /* On M-profile devices, do not strip the low bit from EXC_RETURN
445 (the magic exception return address). */
446 if (gdbarch_tdep (gdbarch)->is_m
447 && (val & 0xfffffff0) == 0xfffffff0)
451 return UNMAKE_THUMB_ADDR (val);
453 return (val & 0x03fffffc);
456 /* Return 1 if PC is the start of a compiler helper function which
457 can be safely ignored during prologue skipping. IS_THUMB is true
458 if the function is known to be a Thumb function due to the way it
is being called (used when matching raw code bytes below).  */
461 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
463 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
464 struct bound_minimal_symbol msym;
/* First, try to recognize the helper by name; this requires PC to be
exactly the start address of a named minimal symbol.  */
466 msym = lookup_minimal_symbol_by_pc (pc);
467 if (msym.minsym != NULL
468 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
469 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
471 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
473 /* The GNU linker's Thumb call stub to foo is named
475 if (strstr (name, "_from_thumb") != NULL)
478 /* On soft-float targets, __truncdfsf2 is called to convert promoted
479 arguments to their argument types in non-prototyped
481 if (startswith (name, "__truncdfsf2"))
483 if (startswith (name, "__aeabi_d2f"))
486 /* Internal functions related to thread-local storage. */
487 if (startswith (name, "__tls_get_addr"))
489 if (startswith (name, "__aeabi_read_tp"))
494 /* If we run against a stripped glibc, we may be unable to identify
495 special functions by name. Check for one important case,
496 __aeabi_read_tp, by comparing the *code* against the default
497 implementation (this is hand-written ARM assembler in glibc). */
500 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
501 == 0xe3e00a0f /* mov r0, #0xffff0fff */
502 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
503 == 0xe240f01f) /* sub pc, r0, #31 */
510 /* Support routines for instruction parsing.
submask(x)       mask covering bits 0..x inclusive.
bit(obj,st)      bit ST of OBJ.
bits(obj,st,fn)  bit-field ST..FN (inclusive) of OBJ, right-justified.
sbits(obj,st,fn) like bits, but sign-extended from bit FN.
BranchDest       ARM-mode branch target: pipeline PC (addr + 8) plus
the signed 24-bit offset scaled by 4.
NOTE(review): submask (31) evaluates 1L << 32, which is undefined
when long is 32 bits — callers here only use smaller widths, but
confirm before reusing these macros with a 32-bit field.  */
511 #define submask(x) ((1L << ((x) + 1)) - 1)
512 #define bit(obj,st) (((obj) >> (st)) & 1)
513 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
514 #define sbits(obj,st,fn) \
515 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
516 #define BranchDest(addr,instr) \
517 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
519 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
520 the first 16-bit of instruction, and INSN2 is the second 16-bit of
the instruction.  The 16-bit immediate is scattered across both
halfwords: imm4 (insn1[3:0]), i (insn1[10]), imm3 (insn2[14:12])
and imm8 (insn2[7:0]).  */
522 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
523 ((bits ((insn1), 0, 3) << 12) \
524 | (bits ((insn1), 10, 10) << 11) \
525 | (bits ((insn2), 12, 14) << 8) \
526 | bits ((insn2), 0, 7))
528 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
529 the 32-bit instruction.  imm4 is in bits 19:16, imm12 in bits 11:0. */
530 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
531 ((bits ((insn), 16, 19) << 12) \
532 | bits ((insn), 0, 11))
534 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op
from the ARM ARM: a 12-bit modified immediate is expanded into a
32-bit value either by replicating the low byte in various byte
lanes, or (COUNT >= 8) by rotating 0x80|imm[6:0] into position.  */
537 thumb_expand_immediate (unsigned int imm)
539 unsigned int count = imm >> 7;
/* Replication forms, selected by imm[11:8] (dispatch lines not all
visible here): 0x00XY00XY ...  */
547 return (imm & 0xff) | ((imm & 0xff) << 16);
/* ... 0xXY00XY00 ...  */
549 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
/* ... and 0xXYXYXYXY.  */
551 return (imm & 0xff) | ((imm & 0xff) << 8)
552 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
/* Rotated-constant form: 0x80|imm[6:0], rotated right by COUNT.  */
555 return (0x80 | (imm & 0x7f)) << (32 - count);
558 /* Return 1 if the 16-bit Thumb instruction INST might change
559 control flow, 0 otherwise.  Each test below matches one encoding
group that can write the PC.  */
562 thumb_instruction_changes_pc (unsigned short inst)
564 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
567 if ((inst & 0xf000) == 0xd000) /* conditional branch */
570 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
573 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
576 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
579 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
585 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
586 might change control flow, 0 otherwise.  INST1 is the first
halfword, INST2 the second; each clause matches one encoding group
that can write the PC.  */
589 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
591 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
593 /* Branches and miscellaneous control instructions. */
595 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
/* B, BL, BLX.  */
600 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
602 /* SUBS PC, LR, #imm8. */
605 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
607 /* Conditional branch. */
614 if ((inst1 & 0xfe50) == 0xe810)
616 /* Load multiple or RFE.  The four cases below split on bits 7
(W/P-style addressing) and 8 of INST1.  */
618 if (bit (inst1, 7) && !bit (inst1, 8))
624 else if (!bit (inst1, 7) && bit (inst1, 8))
630 else if (bit (inst1, 7) && bit (inst1, 8))
635 else if (!bit (inst1, 7) && !bit (inst1, 8))
644 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
646 /* MOV PC or MOVS PC. */
650 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
/* LDR with PC as destination; literal (Rn == PC) and the
register/immediate addressing sub-forms are checked below.  */
653 if (bits (inst1, 0, 3) == 15)
659 if ((inst2 & 0x0fc0) == 0x0000)
665 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
/* TBB.  */
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
/* TBH.  */
680 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
681 epilogue, 0 otherwise.  Used by the prologue scanner to stop
before running into epilogue code.  */
684 thumb_instruction_restores_sp (unsigned short insn)
686 return (insn == 0x46bd /* mov sp, r7 */
687 || (insn & 0xff80) == 0xb000 /* add sp, imm */
688 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint.
The scan symbolically executes instructions between START and LIMIT
over abstract "prologue values" (pv_t): REGS tracks each core
register, STACK tracks stores.  If CACHE is non-NULL, the frame
register, frame size and saved-register offsets found are recorded
in it.  */
698 thumb_analyze_prologue (struct gdbarch *gdbarch,
699 CORE_ADDR start, CORE_ADDR limit,
700 struct arm_prologue_cache *cache)
702 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
703 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
706 struct pv_area *stack;
707 struct cleanup *back_to;
709 CORE_ADDR unrecognized_pc = 0;
/* Start from a clean symbolic state: every register holds its own
initial value.  */
711 for (i = 0; i < 16; i++)
712 regs[i] = pv_register (i, 0);
713 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
714 back_to = make_cleanup_free_pv_area (stack);
716 while (start < limit)
720 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
722 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
727 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
730 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
731 whether to save LR (R14). */
732 mask = (insn & 0xff) | ((insn & 0x100) << 6);
734 /* Calculate offsets of saved R0-R7 and LR. */
735 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
736 if (mask & (1 << regno))
738 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
740 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
743 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
745 offset = (insn & 0x7f) << 2; /* get scaled offset */
746 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
749 else if (thumb_instruction_restores_sp (insn))
751 /* Don't scan past the epilogue. */
754 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
755 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
757 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
758 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
759 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
761 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
762 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
763 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
765 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
766 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
767 && pv_is_constant (regs[bits (insn, 3, 5)]))
768 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
769 regs[bits (insn, 6, 8)]);
770 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
771 && pv_is_constant (regs[bits (insn, 3, 6)]))
773 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
774 int rm = bits (insn, 3, 6);
775 regs[rd] = pv_add (regs[rd], regs[rm]);
777 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
779 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
780 int src_reg = (insn & 0x78) >> 3;
781 regs[dst_reg] = regs[src_reg];
783 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
785 /* Handle stores to the stack. Normally pushes are used,
786 but with GCC -mtpcs-frame, there may be other stores
787 in the prologue to create the frame. */
788 int regno = (insn >> 8) & 0x7;
791 offset = (insn & 0xff) << 2;
792 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
794 if (pv_area_store_would_trash (stack, addr))
797 pv_area_store (stack, addr, 4, regs[regno]);
799 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
801 int rd = bits (insn, 0, 2);
802 int rn = bits (insn, 3, 5);
805 offset = bits (insn, 6, 10) << 2;
806 addr = pv_add_constant (regs[rn], offset);
808 if (pv_area_store_would_trash (stack, addr))
811 pv_area_store (stack, addr, 4, regs[rd]);
813 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
814 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
815 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
816 /* Ignore stores of argument registers to the stack. */
818 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
819 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
820 /* Ignore block loads from the stack, potentially copying
821 parameters from memory. */
823 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
824 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
825 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
826 /* Similarly ignore single loads from the stack. */
828 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
829 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
830 /* Skip register copies, i.e. saves to another register
831 instead of the stack. */
833 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
834 /* Recognize constant loads; even with small stacks these are necessary
836 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
837 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
839 /* Constant pool loads, for the same reason. */
840 unsigned int constant;
843 loc = start + 4 + bits (insn, 0, 7) * 4;
844 constant = read_memory_unsigned_integer (loc, 4, byte_order);
845 regs[bits (insn, 8, 10)] = pv_constant (constant);
847 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
849 unsigned short inst2;
851 inst2 = read_memory_unsigned_integer (start + 2, 2,
852 byte_order_for_code);
854 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
856 /* BL, BLX. Allow some special function calls when
857 skipping the prologue; GCC generates these before
858 storing arguments to the stack. */
860 int j1, j2, imm1, imm2;
862 imm1 = sbits (insn, 0, 10);
863 imm2 = bits (inst2, 0, 10);
864 j1 = bit (inst2, 13);
865 j2 = bit (inst2, 11);
/* Reassemble the branch offset; J1/J2 are inverted relative
to the sign bit per the T2 BL/BLX encoding.  */
867 offset = ((imm1 << 12) + (imm2 << 1));
868 offset ^= ((!j2) << 22) | ((!j1) << 23);
870 nextpc = start + 4 + offset;
871 /* For BLX make sure to clear the low bits. */
872 if (bit (inst2, 12) == 0)
873 nextpc = nextpc & 0xfffffffc;
875 if (!skip_prologue_function (gdbarch, nextpc,
876 bit (inst2, 12) != 0))
880 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
884 pv_t addr = regs[bits (insn, 0, 3)];
887 if (pv_area_store_would_trash (stack, addr))
890 /* Calculate offsets of saved registers. */
891 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
892 if (inst2 & (1 << regno))
894 addr = pv_add_constant (addr, -4);
895 pv_area_store (stack, addr, 4, regs[regno]);
899 regs[bits (insn, 0, 3)] = addr;
902 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
904 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
906 int regno1 = bits (inst2, 12, 15);
907 int regno2 = bits (inst2, 8, 11);
908 pv_t addr = regs[bits (insn, 0, 3)];
910 offset = inst2 & 0xff;
912 addr = pv_add_constant (addr, offset);
914 addr = pv_add_constant (addr, -offset);
916 if (pv_area_store_would_trash (stack, addr))
919 pv_area_store (stack, addr, 4, regs[regno1]);
920 pv_area_store (stack, pv_add_constant (addr, 4),
924 regs[bits (insn, 0, 3)] = addr;
927 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
928 && (inst2 & 0x0c00) == 0x0c00
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 int regno = bits (inst2, 12, 15);
932 pv_t addr = regs[bits (insn, 0, 3)];
934 offset = inst2 & 0xff;
936 addr = pv_add_constant (addr, offset);
938 addr = pv_add_constant (addr, -offset);
940 if (pv_area_store_would_trash (stack, addr))
943 pv_area_store (stack, addr, 4, regs[regno]);
946 regs[bits (insn, 0, 3)] = addr;
949 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
950 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
952 int regno = bits (inst2, 12, 15);
955 offset = inst2 & 0xfff;
956 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
958 if (pv_area_store_would_trash (stack, addr))
961 pv_area_store (stack, addr, 4, regs[regno]);
964 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
965 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
966 /* Ignore stores of argument registers to the stack. */
969 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
970 && (inst2 & 0x0d00) == 0x0c00
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
977 && (inst2 & 0x8000) == 0x0000
978 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
979 /* Ignore block loads from the stack, potentially copying
980 parameters from memory. */
983 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
985 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
986 /* Similarly ignore dual loads from the stack. */
989 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
990 && (inst2 & 0x0d00) == 0x0c00
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore single loads from the stack. */
995 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
996 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
997 /* Similarly ignore single loads from the stack. */
1000 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1001 && (inst2 & 0x8000) == 0x0000)
1003 unsigned int imm = ((bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1007 regs[bits (inst2, 8, 11)]
1008 = pv_add_constant (regs[bits (insn, 0, 3)],
1009 thumb_expand_immediate (imm));
1012 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1013 && (inst2 & 0x8000) == 0x0000)
1015 unsigned int imm = ((bits (insn, 10, 10) << 11)
1016 | (bits (inst2, 12, 14) << 8)
1017 | bits (inst2, 0, 7));
1019 regs[bits (inst2, 8, 11)]
1020 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1023 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1024 && (inst2 & 0x8000) == 0x0000)
1026 unsigned int imm = ((bits (insn, 10, 10) << 11)
1027 | (bits (inst2, 12, 14) << 8)
1028 | bits (inst2, 0, 7));
1030 regs[bits (inst2, 8, 11)]
1031 = pv_add_constant (regs[bits (insn, 0, 3)],
1032 - (CORE_ADDR) thumb_expand_immediate (imm));
1035 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1036 && (inst2 & 0x8000) == 0x0000)
1038 unsigned int imm = ((bits (insn, 10, 10) << 11)
1039 | (bits (inst2, 12, 14) << 8)
1040 | bits (inst2, 0, 7));
1042 regs[bits (inst2, 8, 11)]
1043 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1046 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1048 unsigned int imm = ((bits (insn, 10, 10) << 11)
1049 | (bits (inst2, 12, 14) << 8)
1050 | bits (inst2, 0, 7));
1052 regs[bits (inst2, 8, 11)]
1053 = pv_constant (thumb_expand_immediate (imm));
1056 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1059 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1061 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1064 else if (insn == 0xea5f /* mov.w Rd,Rm */
1065 && (inst2 & 0xf0f0) == 0)
1067 int dst_reg = (inst2 & 0x0f00) >> 8;
1068 int src_reg = inst2 & 0xf;
1069 regs[dst_reg] = regs[src_reg];
1072 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1074 /* Constant pool loads. */
1075 unsigned int constant;
1078 offset = bits (inst2, 0, 11);
1080 loc = start + 4 + offset;
1082 loc = start + 4 - offset;
1084 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1085 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1088 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1090 /* Constant pool loads. */
1091 unsigned int constant;
1094 offset = bits (inst2, 0, 7) << 2;
1096 loc = start + 4 + offset;
1098 loc = start + 4 - offset;
1100 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1101 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1103 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1104 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1107 else if (thumb2_instruction_changes_pc (insn, inst2))
1109 /* Don't scan past anything that might change control flow. */
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc = start;
1121 else if (thumb_instruction_changes_pc (insn))
1123 /* Don't scan past anything that might change control flow. */
1128 /* The optimizer might shove anything into the prologue,
1129 so we just skip what we don't recognize. */
1130 unrecognized_pc = start;
1137 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1138 paddress (gdbarch, start));
1140 if (unrecognized_pc == 0)
1141 unrecognized_pc = start;
/* Without a CACHE to fill in, the scan result itself is the answer.  */
1145 do_cleanups (back_to);
1146 return unrecognized_pc;
/* Decide which register acts as the frame pointer; the frame size is
the (negated) constant offset it holds relative to the entry SP.  */
1149 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1151 /* Frame pointer is fp. Frame size is constant. */
1152 cache->framereg = ARM_FP_REGNUM;
1153 cache->framesize = -regs[ARM_FP_REGNUM].k;
1155 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1157 /* Frame pointer is r7. Frame size is constant. */
1158 cache->framereg = THUMB_FP_REGNUM;
1159 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1163 /* Try the stack pointer... this is a bit desperate. */
1164 cache->framereg = ARM_SP_REGNUM;
1165 cache->framesize = -regs[ARM_SP_REGNUM].k;
/* Record where each core register was saved on the stack.  */
1168 for (i = 0; i < 16; i++)
1169 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1170 cache->saved_regs[i].addr = offset;
1172 do_cleanups (back_to);
1173 return unrecognized_pc;
1177 /* Try to analyze the instructions starting from PC, which load symbol
1178 __stack_chk_guard. Return the address of instruction after loading this
1179 symbol, set the dest register number to *BASEREG, and set the size of
1180 instructions for loading symbol in OFFSET. Return 0 if instructions are
not recognized.  Handles both Thumb (2- and 4-byte encodings) and
ARM encodings of the two idioms: a PC-relative literal load, and a
movw/movt pair.  */
1184 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1185 unsigned int *destreg, int *offset)
1187 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1188 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1189 unsigned int low, high, address;
/* Thumb encodings.  */
1194 unsigned short insn1
1195 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1197 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1199 *destreg = bits (insn1, 8, 10);
/* Literal pool address: word-aligned PC + 4 + imm8*4.  */
1201 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1202 address = read_memory_unsigned_integer (address, 4,
1203 byte_order_for_code);
1205 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1207 unsigned short insn2
1208 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1210 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
/* Read the following instruction, expected to be movt.  */
1213 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1215 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1217 /* movt Rd, #const */
1218 if ((insn1 & 0xfbc0) == 0xf2c0)
1220 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1221 *destreg = bits (insn2, 8, 11);
1223 address = (high << 16 | low);
/* ARM encodings.  */
1230 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1232 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
/* Literal pool address: PC + 8 (ARM pipeline) + imm12.  */
1234 address = bits (insn, 0, 11) + pc + 8;
1235 address = read_memory_unsigned_integer (address, 4,
1236 byte_order_for_code);
1238 *destreg = bits (insn, 12, 15);
1241 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1243 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
/* Read the following instruction, expected to be movt.  */
1246 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1248 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1250 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1251 *destreg = bits (insn, 12, 15);
1253 address = (high << 16 | low);
1261 /* Try to skip a sequence of instructions used for stack protector. If PC
1262 points to the first instruction of this sequence, return the address of
1263 first instruction after this sequence, otherwise, return original PC.
1265 On arm, this sequence of instructions is composed of mainly three steps,
1266 Step 1: load symbol __stack_chk_guard,
1267 Step 2: load from address of __stack_chk_guard,
1268 Step 3: store it to somewhere else.
1270 Usually, instructions on step 2 and step 3 are the same on various ARM
1271 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1272 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1273 instructions in step 1 vary from different ARM architectures. On ARMv7,
1276 movw Rn, #:lower16:__stack_chk_guard
1277 movt Rn, #:upper16:__stack_chk_guard
1284 .word __stack_chk_guard
1286 Since ldr/str is a very popular instruction, we can't use them as
1287 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1288 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1289 stripped, as the 'fingerprint' of a stack protector code sequence. */
/* If PC points at the stack-protector sequence described in the comment
   above, return the address just past that sequence; otherwise return PC
   unchanged.  The check has two parts: (1) the instructions at PC must
   load the address of a minimal symbol named __stack_chk_guard, and
   (2) the following ldr/str pair must read through that base register
   and store the loaded value.  */
1292 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1294 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1295 unsigned int basereg;
1296 struct bound_minimal_symbol stack_chk_guard;
1298 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1301 /* Try to parse the instructions in Step 1. */
1302 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1307 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1308 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1309 Otherwise, this sequence cannot be for stack protector. */
1310 if (stack_chk_guard.minsym == NULL
1311 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
/* Thumb mode: steps 2 and 3 are two 16-bit (encoding T1) instructions.  */
1316 unsigned int destreg;
1318 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1320 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1321 if ((insn & 0xf800) != 0x6800)
/* The load must use the base register found in step 1.  */
1323 if (bits (insn, 3, 5) != basereg)
1325 destreg = bits (insn, 0, 2);
1327 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1328 byte_order_for_code);
1329 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1330 if ((insn & 0xf800) != 0x6000)
/* The stored register must be the one loaded in step 2.  */
1332 if (destreg != bits (insn, 0, 2))
/* ARM mode: steps 2 and 3 are two 32-bit (encoding A1) instructions.  */
1337 unsigned int destreg;
1339 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1341 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1342 if ((insn & 0x0e500000) != 0x04100000)
1344 if (bits (insn, 16, 19) != basereg)
1346 destreg = bits (insn, 12, 15);
1347 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1348 insn = read_memory_unsigned_integer (pc + offset + 4,
1349 4, byte_order_for_code);
1350 if ((insn & 0x0e500000) != 0x04000000)
1352 if (bits (insn, 12, 15) != destreg)
1355 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
/* Thumb-2: ldr/str pair occupies 4 bytes past OFFSET.  */
1358 return pc + offset + 4;
/* ARM: ldr/str pair occupies 8 bytes past OFFSET.  */
1360 return pc + offset + 8;
1363 /* Advance the PC across any function entry prologue instructions to
1364 reach some "real" code.
1366 The APCS (ARM Procedure Call Standard) defines the following
1370 [stmfd sp!, {a1,a2,a3,a4}]
1371 stmfd sp!, {...,fp,ip,lr,pc}
1372 [stfe f7, [sp, #-12]!]
1373 [stfe f6, [sp, #-12]!]
1374 [stfe f5, [sp, #-12]!]
1375 [stfe f4, [sp, #-12]!]
1376 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
/* gdbarch skip_prologue implementation: advance PC past the function
   entry prologue.  Prefers line-table (SAL) information, validated by
   instruction analysis; falls back to scanning at most 64 bytes of
   instructions when no symbol information is available.  */
1379 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1381 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1383 CORE_ADDR func_addr, limit_pc;
1385 /* See if we can determine the end of the prologue via the symbol table.
1386 If so, then return either PC, or the PC after the prologue, whichever
1388 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1390 CORE_ADDR post_prologue_pc
1391 = skip_prologue_using_sal (gdbarch, func_addr);
1392 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
/* A stack-protector sequence may immediately follow the prologue
   proper; skip that too.  */
1394 if (post_prologue_pc)
1396 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1399 /* GCC always emits a line note before the prologue and another
1400 one after, even if the two are at the same address or on the
1401 same line. Take advantage of this so that we do not need to
1402 know every instruction that might appear in the prologue. We
1403 will have producer information for most binaries; if it is
1404 missing (e.g. for -gstabs), assume the GNU tools. */
1405 if (post_prologue_pc
1407 || COMPUNIT_PRODUCER (cust) == NULL
1408 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1409 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1410 return post_prologue_pc;
1412 if (post_prologue_pc != 0)
1414 CORE_ADDR analyzed_limit;
1416 /* For non-GCC compilers, make sure the entire line is an
1417 acceptable prologue; GDB will round this function's
1418 return value up to the end of the following line so we
1419 can not skip just part of a line (and we do not want to).
1421 RealView does not treat the prologue specially, but does
1422 associate prologue code with the opening brace; so this
1423 lets us skip the first line if we think it is the opening
1425 if (arm_pc_is_thumb (gdbarch, func_addr))
1426 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1427 post_prologue_pc, NULL);
1429 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1430 post_prologue_pc, NULL);
/* Only trust the SAL-derived endpoint if instruction analysis
   agrees that everything up to it is prologue.  */
1432 if (analyzed_limit != post_prologue_pc)
1435 return post_prologue_pc;
1439 /* Can't determine prologue from the symbol table, need to examine
1442 /* Find an upper limit on the function prologue using the debug
1443 information. If the debug information could not be used to provide
1444 that bound, then use an arbitrary large number as the upper bound. */
1445 /* Like arm_scan_prologue, stop no later than pc + 64. */
1446 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1448 limit_pc = pc + 64; /* Magic. */
1451 /* Check if this is Thumb code. */
1452 if (arm_pc_is_thumb (gdbarch, pc))
1453 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1455 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1459 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1460 This function decodes a Thumb function prologue to determine:
1461 1) the size of the stack frame
1462 2) which registers are saved on it
1463 3) the offsets of saved regs
1464 4) the offset from the stack pointer to the frame pointer
1466 A typical Thumb function prologue would create this stack frame
1467 (offsets relative to FP)
1468 old SP -> 24 stack parameters
1471 R7 -> 0 local variables (16 bytes)
1472 SP -> -12 additional stack space (12 bytes)
1473 The frame size would thus be 36 bytes, and the frame offset would be
1474 12 bytes. The frame register is R7.
1476 The comments for thumb_skip_prolog() describe the algorithm we use
1477 to detect the end of the prolog. */
/* Scan the Thumb prologue of the function containing BLOCK_ADDR, not
   scanning past PREV_PC, and fill CACHE with frame-size / saved-register
   information via thumb_analyze_prologue.  */
1481 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1482 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1484 CORE_ADDR prologue_start;
1485 CORE_ADDR prologue_end;
1487 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1490 /* See comment in arm_scan_prologue for an explanation of
/* Cap the scan at 64 bytes from the function start.  */
1492 if (prologue_end > prologue_start + 64)
1494 prologue_end = prologue_start + 64;
1498 /* We're in the boondocks: we have no idea where the start of the
/* Never scan past the frame's current PC.  */
1502 prologue_end = min (prologue_end, prev_pc);
1504 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1507 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
/* Return 1 if the 32-bit ARM instruction THIS_INSTR might change control
   flow (write the PC), 0 otherwise.  Decodes by condition field and the
   major opcode groups in bits 25-27.  */
1510 arm_instruction_changes_pc (uint32_t this_instr)
1512 if (bits (this_instr, 28, 31) == INST_NV)
1513 /* Unconditional instructions. */
1514 switch (bits (this_instr, 24, 27))
1518 /* Branch with Link and change to Thumb. */
1523 /* Coprocessor register transfer. */
1524 if (bits (this_instr, 12, 15) == 15)
1525 error (_("Invalid update to pc in instruction"));
/* Conditional instructions: dispatch on the major opcode group.  */
1531 switch (bits (this_instr, 25, 27))
1534 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1536 /* Multiplies and extra load/stores. */
1537 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1538 /* Neither multiplies nor extension load/stores are allowed
1542 /* Otherwise, miscellaneous instructions. */
1544 /* BX <reg>, BXJ <reg>, BLX <reg> */
1545 if (bits (this_instr, 4, 27) == 0x12fff1
1546 || bits (this_instr, 4, 27) == 0x12fff2
1547 || bits (this_instr, 4, 27) == 0x12fff3)
1550 /* Other miscellaneous instructions are unpredictable if they
1554 /* Data processing instruction. Fall through. */
/* Data processing with Rd == PC changes control flow.  */
1557 if (bits (this_instr, 12, 15) == 15)
1564 /* Media instructions and architecturally undefined instructions. */
1565 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
/* Loads (L bit set) into the PC change control flow; stores do not.  */
1569 if (bit (this_instr, 20) == 0)
1573 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1579 /* Load/store multiple. */
1580 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1586 /* Branch and branch with link. */
1591 /* Coprocessor transfers or SWIs can not affect PC. */
/* All opcode groups are handled above; reaching here is a bug.  */
1595 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1599 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
/* Return nonzero if the ARM instruction INSN is one of the forms that
   restores SP in an epilogue (ADD/SUB/MOV to SP, LDM including SP, or a
   single-register POP).  Unconditional (NV) encodings are excluded.  */
1603 arm_instruction_restores_sp (unsigned int insn)
1605 if (bits (insn, 28, 31) != INST_NV)
1607 if ((insn & 0x0df0f000) == 0x0080d000
1608 /* ADD SP (register or immediate). */
1609 || (insn & 0x0df0f000) == 0x0040d000
1610 /* SUB SP (register or immediate). */
1611 || (insn & 0x0ffffff0) == 0x01a0d000
1613 || (insn & 0x0fff0000) == 0x08bd0000
1615 || (insn & 0x0fff0000) == 0x049d0000)
1616 /* POP of a single register. */
1623 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1624 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1625 fill it in. Return the first address not recognized as a prologue
1628 We recognize all the instructions typically found in ARM prologues,
1629 plus harmless instructions which can be skipped (either for analysis
1630 purposes, or a more restrictive set that can be skipped when finding
1631 the end of the prologue). */
/* Analyze an ARM-mode prologue in [PROLOGUE_START, PROLOGUE_END).
   Symbolically executes recognized prologue instructions over a
   prologue-value (pv) register file and stack area.  If CACHE is
   non-NULL, records frame register, frame size and saved-register
   offsets there.  Returns the first address not recognized as
   prologue.  */
1634 arm_analyze_prologue (struct gdbarch *gdbarch,
1635 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1636 struct arm_prologue_cache *cache)
1638 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1639 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1641 CORE_ADDR offset, current_pc;
1642 pv_t regs[ARM_FPS_REGNUM];
1643 struct pv_area *stack;
1644 struct cleanup *back_to;
1645 CORE_ADDR unrecognized_pc = 0;
1647 /* Search the prologue looking for instructions that set up the
1648 frame pointer, adjust the stack pointer, and save registers.
1650 Be careful, however, and if it doesn't look like a prologue,
1651 don't try to scan it. If, for instance, a frameless function
1652 begins with stmfd sp!, then we will tell ourselves there is
1653 a frame, which will confuse stack traceback, as well as "finish"
1654 and other operations that rely on a knowledge of the stack
/* Start with every register holding its symbolic entry value.  */
1657 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1658 regs[regno] = pv_register (regno, 0);
1659 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1660 back_to = make_cleanup_free_pv_area (stack);
1662 for (current_pc = prologue_start;
1663 current_pc < prologue_end;
1667 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1669 if (insn == 0xe1a0c00d) /* mov ip, sp */
1671 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1674 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1677 unsigned imm = insn & 0xff; /* immediate value */
1678 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1679 int rd = bits (insn, 12, 15);
/* Decode the A32 modified-immediate: 8-bit value rotated right
   by twice the 4-bit rotate field.  */
1680 imm = (imm >> rot) | (imm << (32 - rot));
1681 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1684 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1687 unsigned imm = insn & 0xff; /* immediate value */
1688 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1689 int rd = bits (insn, 12, 15);
1690 imm = (imm >> rot) | (imm << (32 - rot));
1691 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1694 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
/* Push of a single register with writeback: pre-decrement SP by 4
   and record the stored value in the pv stack area.  */
1697 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1699 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1700 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1701 regs[bits (insn, 12, 15)]);
1704 else if ((insn & 0xffff0000) == 0xe92d0000)
1705 /* stmfd sp!, {..., fp, ip, lr, pc}
1707 stmfd sp!, {a1, a2, a3, a4} */
1709 int mask = insn & 0xffff;
1711 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1714 /* Calculate offsets of saved registers. */
/* STM stores lowest-numbered register at the lowest address, so
   walk the mask from PC downwards while decrementing SP.  */
1715 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1716 if (mask & (1 << regno))
1719 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1720 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1723 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1724 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1725 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1727 /* No need to add this to saved_regs -- it's just an arg reg. */
1730 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1731 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1732 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1734 /* No need to add this to saved_regs -- it's just an arg reg. */
1737 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1739 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1741 /* No need to add this to saved_regs -- it's just arg regs. */
1744 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1746 unsigned imm = insn & 0xff; /* immediate value */
1747 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1748 imm = (imm >> rot) | (imm << (32 - rot));
1749 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1751 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1753 unsigned imm = insn & 0xff; /* immediate value */
1754 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1755 imm = (imm >> rot) | (imm << (32 - rot));
1756 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1758 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
/* FPA store with pre-decrement; only when the target actually has
   FPA registers.  Each FPA register occupies 12 bytes.  */
1760 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1762 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1765 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1766 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1767 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1769 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1771 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1773 int n_saved_fp_regs;
1774 unsigned int fp_start_reg, fp_bound_reg;
1776 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* The register count is encoded in the N0/N1 bits.  */
1779 if ((insn & 0x800) == 0x800) /* N0 is set */
1781 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1782 n_saved_fp_regs = 3;
1784 n_saved_fp_regs = 1;
1788 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1789 n_saved_fp_regs = 2;
1791 n_saved_fp_regs = 4;
1794 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1795 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1796 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1798 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1799 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1800 regs[fp_start_reg++]);
1803 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1805 /* Allow some special function calls when skipping the
1806 prologue; GCC generates these before storing arguments to
1808 CORE_ADDR dest = BranchDest (current_pc, insn);
1810 if (skip_prologue_function (gdbarch, dest, 0))
1815 else if ((insn & 0xf0000000) != 0xe0000000)
1816 break; /* Condition not true, exit early. */
1817 else if (arm_instruction_changes_pc (insn))
1818 /* Don't scan past anything that might change control flow. */
1820 else if (arm_instruction_restores_sp (insn))
1822 /* Don't scan past the epilogue. */
1825 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1826 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1827 /* Ignore block loads from the stack, potentially copying
1828 parameters from memory. */
1830 else if ((insn & 0xfc500000) == 0xe4100000
1831 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1832 /* Similarly ignore single loads from the stack. */
1834 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1835 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1836 register instead of the stack. */
1840 /* The optimizer might shove anything into the prologue, if
1841 we build up cache (cache != NULL) from scanning prologue,
1842 we just skip what we don't recognize and scan further to
1843 make cache as complete as possible. However, if we skip
1844 prologue, we'll stop immediately on unrecognized
1846 unrecognized_pc = current_pc;
1854 if (unrecognized_pc == 0)
1855 unrecognized_pc = current_pc;
/* When building a cache, derive the frame layout from the final
   symbolic register values.  */
1859 int framereg, framesize;
1861 /* The frame size is just the distance from the frame register
1862 to the original stack pointer. */
1863 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1865 /* Frame pointer is fp. */
1866 framereg = ARM_FP_REGNUM;
1867 framesize = -regs[ARM_FP_REGNUM].k;
1871 /* Try the stack pointer... this is a bit desperate. */
1872 framereg = ARM_SP_REGNUM;
1873 framesize = -regs[ARM_SP_REGNUM].k;
1876 cache->framereg = framereg;
1877 cache->framesize = framesize;
/* Record the stack offset of every register the prologue saved.  */
1879 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1880 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1881 cache->saved_regs[regno].addr = offset;
1885 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1886 paddress (gdbarch, unrecognized_pc));
1888 do_cleanups (back_to);
1889 return unrecognized_pc;
/* Scan the prologue of the function containing THIS_FRAME's PC and fill
   CACHE.  Dispatches to thumb_scan_prologue for Thumb frames; otherwise
   finds prologue bounds from symbols (or, lacking those, from the saved
   return address found through FP) and runs arm_analyze_prologue.  */
1893 arm_scan_prologue (struct frame_info *this_frame,
1894 struct arm_prologue_cache *cache)
1896 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1897 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1899 CORE_ADDR prologue_start, prologue_end, current_pc;
1900 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1901 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1902 pv_t regs[ARM_FPS_REGNUM];
1903 struct pv_area *stack;
1904 struct cleanup *back_to;
1907 /* Assume there is no frame until proven otherwise. */
1908 cache->framereg = ARM_SP_REGNUM;
1909 cache->framesize = 0;
1911 /* Check for Thumb prologue. */
1912 if (arm_frame_is_thumb (this_frame))
1914 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1918 /* Find the function prologue. If we can't find the function in
1919 the symbol table, peek in the stack frame to find the PC. */
1920 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1923 /* One way to find the end of the prologue (which works well
1924 for unoptimized code) is to do the following:
1926 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1929 prologue_end = prev_pc;
1930 else if (sal.end < prologue_end)
1931 prologue_end = sal.end;
1933 This mechanism is very accurate so long as the optimizer
1934 doesn't move any instructions from the function body into the
1935 prologue. If this happens, sal.end will be the last
1936 instruction in the first hunk of prologue code just before
1937 the first instruction that the scheduler has moved from
1938 the body to the prologue.
1940 In order to make sure that we scan all of the prologue
1941 instructions, we use a slightly less accurate mechanism which
1942 may scan more than necessary. To help compensate for this
1943 lack of accuracy, the prologue scanning loop below contains
1944 several clauses which'll cause the loop to terminate early if
1945 an implausible prologue instruction is encountered.
1951 is a suitable endpoint since it accounts for the largest
1952 possible prologue plus up to five instructions inserted by
1955 if (prologue_end > prologue_start + 64)
1957 prologue_end = prologue_start + 64; /* See above. */
1962 /* We have no symbol information. Our only option is to assume this
1963 function has a standard stack frame and the normal frame register.
1964 Then, we can find the value of our frame pointer on entrance to
1965 the callee (or at the present moment if this is the innermost frame).
1966 The value stored there should be the address of the stmfd + 8. */
1967 CORE_ADDR frame_loc;
1968 LONGEST return_value;
1970 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
/* If FP points at unreadable memory, give up silently.  */
1971 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
/* The saved value is stmfd's address + 8; back up to the stmfd.  */
1975 prologue_start = gdbarch_addr_bits_remove
1976 (gdbarch, return_value) - 8;
1977 prologue_end = prologue_start + 64; /* See above. */
/* Never scan past the frame's current PC.  */
1981 if (prev_pc < prologue_end)
1982 prologue_end = prev_pc;
1984 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Allocate and populate an arm_prologue_cache for THIS_FRAME: scan the
   prologue, reconstruct the previous SP from the frame register plus
   frame size, and convert saved-register offsets into absolute
   addresses.  */
1987 static struct arm_prologue_cache *
1988 arm_make_prologue_cache (struct frame_info *this_frame)
1991 struct arm_prologue_cache *cache;
1992 CORE_ADDR unwound_fp;
1994 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1995 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1997 arm_scan_prologue (this_frame, cache);
1999 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
/* A zero frame register means we cannot reconstruct the caller's SP.  */
2000 if (unwound_fp == 0)
2003 cache->prev_sp = unwound_fp + cache->framesize;
2005 /* Calculate actual addresses of saved registers using offsets
2006 determined by arm_scan_prologue. */
2007 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2008 if (trad_frame_addr_p (cache->saved_regs, reg))
2009 cache->saved_regs[reg].addr += cache->prev_sp;
2014 /* Implementation of the stop_reason hook for arm_prologue frames. */
/* frame_unwind stop_reason callback for the prologue unwinder: report
   UNWIND_OUTERMOST at or below the target's lowest text address
   (halting the backtrace at "_start") or when the caller's SP could not
   be reconstructed.  */
2016 static enum unwind_stop_reason
2017 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2020 struct arm_prologue_cache *cache;
/* Lazily build and memoize the prologue cache for this frame.  */
2023 if (*this_cache == NULL)
2024 *this_cache = arm_make_prologue_cache (this_frame);
2025 cache = (struct arm_prologue_cache *) *this_cache;
2027 /* This is meant to halt the backtrace at "_start". */
2028 pc = get_frame_pc (this_frame);
2029 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2030 return UNWIND_OUTERMOST;
2032 /* If we've hit a wall, stop. */
2033 if (cache->prev_sp == 0)
2034 return UNWIND_OUTERMOST;
2036 return UNWIND_NO_REASON;
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
/* frame_unwind this_id callback: build the frame ID from the cached
   previous SP and the function start address.  */
2043 arm_prologue_this_id (struct frame_info *this_frame,
2045 struct frame_id *this_id)
2047 struct arm_prologue_cache *cache;
/* Lazily build and memoize the prologue cache for this frame.  */
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame)
2053 cache = (struct arm_prologue_cache *) *this_cache;
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 pc = get_frame_pc (this_frame);
2059 func = get_frame_func (this_frame);
2063 id = frame_id_build (cache->prev_sp, func);
/* frame_unwind prev_register callback: return the value PREV_REGNUM had
   in the previous (caller's) frame.  PC, SP and PS are synthesized
   specially; everything else comes from the saved-register table.  */
2067 static struct value *
2068 arm_prologue_prev_register (struct frame_info *this_frame,
2072 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2073 struct arm_prologue_cache *cache;
2075 if (*this_cache == NULL)
2076 *this_cache = arm_make_prologue_cache (this_frame);
2077 cache = (struct arm_prologue_cache *) *this_cache;
2079 /* If we are asked to unwind the PC, then we need to return the LR
2080 instead. The prologue may save PC, but it will point into this
2081 frame's prologue, not the next frame's resume location. Also
2082 strip the saved T bit. A valid LR may have the low bit set, but
2083 a valid PC never does. */
2084 if (prev_regnum == ARM_PC_REGNUM)
2088 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2089 return frame_unwind_got_constant (this_frame, prev_regnum,
2090 arm_addr_bits_remove (gdbarch, lr));
2093 /* SP is generally not saved to the stack, but this frame is
2094 identified by the next frame's stack pointer at the time of the call.
2095 The value was already reconstructed into PREV_SP. */
2096 if (prev_regnum == ARM_SP_REGNUM)
2097 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2099 /* The CPSR may have been changed by the call instruction and by the
2100 called function. The only bit we can reconstruct is the T bit,
2101 by checking the low bit of LR as of the call. This is a reliable
2102 indicator of Thumb-ness except for some ARM v4T pre-interworking
2103 Thumb code, which could get away with a clear low bit as long as
2104 the called function did not use bx. Guess that all other
2105 bits are unchanged; the condition flags are presumably lost,
2106 but the processor status is likely valid. */
2107 if (prev_regnum == ARM_PS_REGNUM)
2110 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2112 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2113 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2114 if (IS_THUMB_ADDR (lr))
2118 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
/* Default: fetch from the address recorded by the prologue scan.  */
2121 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
/* The prologue-analysis unwinder, wired to the callbacks above; used as
   a fallback when no debug/exception unwind info applies.  */
2125 struct frame_unwind arm_prologue_unwind = {
2127 arm_prologue_unwind_stop_reason,
2128 arm_prologue_this_id,
2129 arm_prologue_prev_register,
2131 default_frame_sniffer
2134 /* Maintain a list of ARM exception table entries per objfile, similar to the
2135 list of mapping symbols. We only cache entries for standard ARM-defined
2136 personality routines; the cache will contain only the frame unwinding
2137 instructions associated with the entry (not the descriptors). */
2139 static const struct objfile_data *arm_exidx_data_key;
2141 struct arm_exidx_entry
2146 typedef struct arm_exidx_entry arm_exidx_entry_s;
2147 DEF_VEC_O(arm_exidx_entry_s);
2149 struct arm_exidx_data
2151 VEC(arm_exidx_entry_s) **section_maps;
/* Per-objfile destructor for the exception-table cache: free the entry
   vector of every BFD section.  The arm_exidx_data object itself is
   obstack-allocated, so only the vectors need explicit freeing.  */
2155 arm_exidx_data_free (struct objfile *objfile, void *arg)
2157 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2160 for (i = 0; i < objfile->obfd->section_count; i++)
2161 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2165 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2166 const struct arm_exidx_entry *rhs)
2168 return lhs->addr < rhs->addr;
/* Return the allocated (SEC_ALLOC) section of OBJFILE whose VMA range
   contains VMA.  Iterates all object sections, comparing VMA against
   each section's [start, start + size) interval.  */
2171 static struct obj_section *
2172 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2174 struct obj_section *osect;
2176 ALL_OBJFILE_OSECTIONS (objfile, osect)
2177 if (bfd_get_section_flags (objfile->obfd,
2178 osect->the_bfd_section) & SEC_ALLOC)
2180 bfd_vma start, size;
2181 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2182 size = bfd_get_section_size (osect->the_bfd_section);
2184 if (start <= vma && vma < start + size)
2191 /* Parse contents of exception table and exception index sections
2192 of OBJFILE, and fill in the exception table entry cache.
2194 For each entry that refers to a standard ARM-defined personality
2195 routine, extract the frame unwinding instructions (from either
2196 the index or the table section). The unwinding instructions
2198 - extracting them from the rest of the table data
2199 - converting to host endianness
2200 - appending the implicit 0xb0 ("Finish") code
2202 The extracted and normalized instructions are stored for later
2203 retrieval by the arm_find_exidx_entry routine. */
/* new_objfile observer: parse OBJFILE's .ARM.exidx index and .ARM.extab
   table sections and populate the per-section exception-entry cache
   (see the comment above).  Runs once per objfile; entries are pushed
   in increasing address order, as required by arm_find_exidx_entry.  */
2206 arm_exidx_new_objfile (struct objfile *objfile)
2208 struct cleanup *cleanups;
2209 struct arm_exidx_data *data;
2210 asection *exidx, *extab;
2211 bfd_vma exidx_vma = 0, extab_vma = 0;
2212 bfd_size_type exidx_size = 0, extab_size = 0;
2213 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2216 /* If we've already touched this file, do nothing. */
2217 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2219 cleanups = make_cleanup (null_cleanup, NULL);
2221 /* Read contents of exception table and index. */
2222 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2225 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2226 exidx_size = bfd_get_section_size (exidx);
2227 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2228 make_cleanup (xfree, exidx_data);
2230 if (!bfd_get_section_contents (objfile->obfd, exidx,
2231 exidx_data, 0, exidx_size))
2233 do_cleanups (cleanups);
2238 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2241 extab_vma = bfd_section_vma (objfile->obfd, extab);
2242 extab_size = bfd_get_section_size (extab);
2243 extab_data = (gdb_byte *) xmalloc (extab_size);
2244 make_cleanup (xfree, extab_data);
2246 if (!bfd_get_section_contents (objfile->obfd, extab,
2247 extab_data, 0, extab_size))
2249 do_cleanups (cleanups);
2254 /* Allocate exception table data structure. */
2255 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2256 set_objfile_data (objfile, arm_exidx_data_key, data);
2257 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2258 objfile->obfd->section_count,
2259 VEC(arm_exidx_entry_s) *);
2261 /* Fill in exception table. */
/* Each index entry is 8 bytes: a function-address word and a value
   word, both read with target byte order via bfd_h_get_32.  */
2262 for (i = 0; i < exidx_size / 8; i++)
2264 struct arm_exidx_entry new_exidx_entry;
2265 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2266 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2267 bfd_vma addr = 0, word = 0;
2268 int n_bytes = 0, n_words = 0;
2269 struct obj_section *sec;
2270 gdb_byte *entry = NULL;
2272 /* Extract address of start of function. */
/* Sign-extend the 31-bit PREL31 offset, then make it absolute by
   adding the location of the index entry itself.  */
2273 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2274 idx += exidx_vma + i * 8;
2276 /* Find section containing function and compute section offset. */
2277 sec = arm_obj_section_from_vma (objfile, idx);
2280 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2282 /* Determine address of exception table entry. */
2285 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2287 else if ((val & 0xff000000) == 0x80000000)
2289 /* Exception table entry embedded in .ARM.exidx
2290 -- must be short form. */
2294 else if (!(val & 0x80000000))
2296 /* Exception table entry in .ARM.extab. */
/* Again a PREL31 offset, relative to the value word.  */
2297 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2298 addr += exidx_vma + i * 8 + 4;
2300 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2302 word = bfd_h_get_32 (objfile->obfd,
2303 extab_data + addr - extab_vma);
2306 if ((word & 0xff000000) == 0x80000000)
2311 else if ((word & 0xff000000) == 0x81000000
2312 || (word & 0xff000000) == 0x82000000)
/* Long-form table entry: extra word count in bits 16-23.  */
2316 n_words = ((word >> 16) & 0xff);
2318 else if (!(word & 0x80000000))
2321 struct obj_section *pers_sec;
2322 int gnu_personality = 0;
2324 /* Custom personality routine. */
2325 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2326 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2328 /* Check whether we've got one of the variants of the
2329 GNU personality routines. */
2330 pers_sec = arm_obj_section_from_vma (objfile, pers);
2333 static const char *personality[] =
2335 "__gcc_personality_v0",
2336 "__gxx_personality_v0",
2337 "__gcj_personality_v0",
2338 "__gnu_objc_personality_v0",
2342 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2345 for (k = 0; personality[k]; k++)
2346 if (lookup_minimal_symbol_by_pc_name
2347 (pc, personality[k], objfile))
2349 gnu_personality = 1;
2354 /* If so, the next word contains a word count in the high
2355 byte, followed by the same unwind instructions as the
2356 pre-defined forms. */
2358 && addr + 4 <= extab_vma + extab_size)
2360 word = bfd_h_get_32 (objfile->obfd,
2361 extab_data + addr - extab_vma)
2364 n_words = ((word >> 24) & 0xff);
2370 /* Sanity check address. */
2372 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2373 n_words = n_bytes = 0;
2375 /* The unwind instructions reside in WORD (only the N_BYTES least
2376 significant bytes are valid), followed by N_WORDS words in the
2377 extab section starting at ADDR. */
2378 if (n_bytes || n_words)
/* Normalized copy: N_BYTES + 4*N_WORDS unwind opcodes plus one
   trailing terminator byte, allocated on the objfile obstack.  */
2381 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2382 n_bytes + n_words * 4 + 1);
2385 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2389 word = bfd_h_get_32 (objfile->obfd,
2390 extab_data + addr - extab_vma);
2393 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2394 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2395 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2396 *p++ = (gdb_byte) (word & 0xff);
2399 /* Implied "Finish" to terminate the list. */
2403 /* Push entry onto vector. They are guaranteed to always
2404 appear in order of increasing addresses. */
2405 new_exidx_entry.addr = idx;
2406 new_exidx_entry.entry = entry;
2407 VEC_safe_push (arm_exidx_entry_s,
2408 data->section_maps[sec->the_bfd_section->index],
2412 do_cleanups (cleanups);
2415 /* Search for the exception table entry covering MEMADDR.  If one is found,
2416 return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
2417 set *START to the start of the region covered by this entry. */
2420 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2422 struct obj_section *sec;
2424 sec = find_pc_section (memaddr);
/* The key is section-relative: the per-section map stores addresses
   relative to the section start.  */
2427 struct arm_exidx_data *data;
2428 VEC(arm_exidx_entry_s) *map;
2429 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2432 data = ((struct arm_exidx_data *)
2433 objfile_data (sec->objfile, arm_exidx_data_key));
2436 map = data->section_maps[sec->the_bfd_section->index];
2437 if (!VEC_empty (arm_exidx_entry_s, map))
2439 struct arm_exidx_entry *map_sym;
/* Binary search; the vector is built in increasing address order
   (see the push in the table builder above).  */
2441 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2442 arm_compare_exidx_entries);
2444 /* VEC_lower_bound finds the earliest ordered insertion
2445 point.  If the following symbol starts at this exact
2446 address, we use that; otherwise, the preceding
2447 exception table entry covers this address. */
2448 if (idx < VEC_length (arm_exidx_entry_s, map))
2450 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2451 if (map_sym->addr == map_key.addr)
/* Convert back to an absolute address for the caller.  */
2454 *start = map_sym->addr + obj_section_addr (sec);
2455 return map_sym->entry;
2461 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2463 *start = map_sym->addr + obj_section_addr (sec);
2464 return map_sym->entry;
2473 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2474 instruction list from the ARM exception table entry ENTRY, allocate and
2475 return a prologue cache structure describing how to unwind this frame.
2477 Return NULL if the unwinding instruction list contains a "spare",
2478 "reserved" or "refuse to unwind" instruction as defined in section
2479 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2480 for the ARM Architecture" document. */
2482 static struct arm_prologue_cache *
2483 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2488 struct arm_prologue_cache *cache;
2489 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2490 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
/* vsp is the "virtual stack pointer" of the EHABI unwinding model; each
   decoded opcode below adjusts it and/or records register save slots.  */
2496 /* Whenever we reload SP, we actually have to retrieve its
2497 actual value in the current frame. */
2500 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2502 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2503 vsp = get_frame_register_unsigned (this_frame, reg);
2507 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2508 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2514 /* Decode next unwind instruction. */
/* 00xxxxxx: vsp = vsp + (xxxxxx << 2) + 4.  */
2517 if ((insn & 0xc0) == 0)
2519 int offset = insn & 0x3f;
2520 vsp += (offset << 2) + 4;
/* 01xxxxxx: vsp = vsp - (xxxxxx << 2) - 4.  */
2522 else if ((insn & 0xc0) == 0x40)
2524 int offset = insn & 0x3f;
2525 vsp -= (offset << 2) + 4;
/* 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
2527 else if ((insn & 0xf0) == 0x80)
2529 int mask = ((insn & 0xf) << 8) | *entry++;
2532 /* The special case of an all-zero mask identifies
2533 "Refuse to unwind".  We return NULL to fall back
2534 to the prologue analyzer. */
2538 /* Pop registers r4..r15 under mask. */
2539 for (i = 0; i < 12; i++)
2540 if (mask & (1 << i))
2542 cache->saved_regs[4 + i].addr = vsp;
2546 /* Special-case popping SP -- we need to reload vsp. */
2547 if (mask & (1 << (ARM_SP_REGNUM - 4)))
/* 1001nnnn: set vsp = r[nnnn] (nnnn != 13, 15).  */
2550 else if ((insn & 0xf0) == 0x90)
2552 int reg = insn & 0xf;
2554 /* Reserved cases. */
2555 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2558 /* Set SP from another register and mark VSP for reload. */
2559 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
/* 1010xnnn: pop r4-r[4+nnn], optionally LR if x is set.  */
2562 else if ((insn & 0xf0) == 0xa0)
2564 int count = insn & 0x7;
2565 int pop_lr = (insn & 0x8) != 0;
2568 /* Pop r4..r[4+count]. */
2569 for (i = 0; i <= count; i++)
2571 cache->saved_regs[4 + i].addr = vsp;
2575 /* If indicated by flag, pop LR as well. */
2578 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
/* 10110000: "Finish" -- end of unwind instructions.  */
2582 else if (insn == 0xb0)
2584 /* We could only have updated PC by popping into it; if so, it
2585 will show up as address.  Otherwise, copy LR into PC. */
2586 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2587 cache->saved_regs[ARM_PC_REGNUM]
2588 = cache->saved_regs[ARM_LR_REGNUM];
/* 10110001 0000iiii: pop r0-r3 under mask.  */
2593 else if (insn == 0xb1)
2595 int mask = *entry++;
2598 /* All-zero mask and mask >= 16 is "spare". */
2599 if (mask == 0 || mask >= 16)
2602 /* Pop r0..r3 under mask. */
2603 for (i = 0; i < 4; i++)
2604 if (mask & (1 << i))
2606 cache->saved_regs[i].addr = vsp;
/* 10110010 uleb128: vsp = vsp + 0x204 + (uleb128 << 2).  */
2610 else if (insn == 0xb2)
2612 ULONGEST offset = 0;
/* Decode the ULEB128 operand, 7 bits per byte, high bit = continue.  */
2617 offset |= (*entry & 0x7f) << shift;
2620 while (*entry++ & 0x80);
2622 vsp += 0x204 + (offset << 2);
/* 10110011 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (FSTMFDX form).  */
2624 else if (insn == 0xb3)
2626 int start = *entry >> 4;
2627 int count = (*entry++) & 0xf;
2630 /* Only registers D0..D15 are valid here. */
2631 if (start + count >= 16)
2634 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2635 for (i = 0; i <= count; i++)
2637 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2641 /* Add an extra 4 bytes for FSTMFDX-style stack. */
/* 10111nnn: pop VFP D[8]-D[8+nnn] (FSTMFDX form).  */
2644 else if ((insn & 0xf8) == 0xb8)
2646 int count = insn & 0x7;
2649 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2650 for (i = 0; i <= count; i++)
2652 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2656 /* Add an extra 4 bytes for FSTMFDX-style stack. */
/* 11000110 sssscccc: pop iWMMXt WR[ssss]-WR[ssss+cccc].  */
2659 else if (insn == 0xc6)
2661 int start = *entry >> 4;
2662 int count = (*entry++) & 0xf;
2665 /* Only registers WR0..WR15 are valid. */
2666 if (start + count >= 16)
2669 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2670 for (i = 0; i <= count; i++)
2672 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
/* 11000111 0000iiii: pop iWMMXt WCGR0-WCGR3 under mask.  */
2676 else if (insn == 0xc7)
2678 int mask = *entry++;
2681 /* All-zero mask and mask >= 16 is "spare". */
2682 if (mask == 0 || mask >= 16)
2685 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2686 for (i = 0; i < 4; i++)
2687 if (mask & (1 << i))
2689 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
/* 11000nnn (nnn != 6, 7): pop iWMMXt WR[10]-WR[10+nnn].  */
2693 else if ((insn & 0xf8) == 0xc0)
2695 int count = insn & 0x7;
2698 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2699 for (i = 0; i <= count; i++)
2701 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
/* 11001000 sssscccc: pop VFP D[16+ssss]-D[16+ssss+cccc].  */
2705 else if (insn == 0xc8)
2707 int start = *entry >> 4;
2708 int count = (*entry++) & 0xf;
2711 /* Only registers D0..D31 are valid. */
2712 if (start + count >= 16)
2715 /* Pop VFP double-precision registers
2716 D[16+start]..D[16+start+count]. */
2717 for (i = 0; i <= count; i++)
2719 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
/* 11001001 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (FSTMFDD form).  */
2723 else if (insn == 0xc9)
2725 int start = *entry >> 4;
2726 int count = (*entry++) & 0xf;
2729 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2730 for (i = 0; i <= count; i++)
2732 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
/* 11010nnn: pop VFP D[8]-D[8+nnn] (FSTMFDD form).  */
2736 else if ((insn & 0xf8) == 0xd0)
2738 int count = insn & 0x7;
2741 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2742 for (i = 0; i <= count; i++)
2744 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2750 /* Everything else is "spare". */
2755 /* If we restore SP from a register, assume this was the frame register.
2756 Otherwise just fall back to SP as frame register. */
2757 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2758 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2760 cache->framereg = ARM_SP_REGNUM;
2762 /* Determine offset to previous frame. */
2764 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2766 /* We already got the previous SP. */
2767 cache->prev_sp = vsp;
2772 /* Unwinding via ARM exception table entries.  Note that the sniffer
2773 already computes a filled-in prologue cache, which is then used
2774 with the same arm_prologue_this_id and arm_prologue_prev_register
2775 routines also used for prologue-parsing based unwinding. */
2778 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2779 struct frame_info *this_frame,
2780 void **this_prologue_cache)
2782 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2783 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2784 CORE_ADDR addr_in_block, exidx_region, func_start;
2785 struct arm_prologue_cache *cache;
2788 /* See if we have an ARM exception table entry covering this address. */
2789 addr_in_block = get_frame_address_in_block (this_frame);
2790 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2794 /* The ARM exception table does not describe unwind information
2795 for arbitrary PC values, but is guaranteed to be correct only
2796 at call sites.  We have to decide here whether we want to use
2797 ARM exception table information for this frame, or fall back
2798 to using prologue parsing.  (Note that if we have DWARF CFI,
2799 this sniffer isn't even called -- CFI is always preferred.)
2801 Before we make this decision, however, we check whether we
2802 actually have *symbol* information for the current frame.
2803 If not, prologue parsing would not work anyway, so we might
2804 as well use the exception table and hope for the best. */
2805 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2809 /* If the next frame is "normal", we are at a call site in this
2810 frame, so exception information is guaranteed to be valid. */
2811 if (get_next_frame (this_frame)
2812 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2815 /* We also assume exception information is valid if we're currently
2816 blocked in a system call.  The system library is supposed to
2817 ensure this, so that e.g. pthread cancellation works. */
2818 if (arm_frame_is_thumb (this_frame))
/* Thumb: check for a 16-bit SVC immediately preceding the PC.  */
2822 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2823 byte_order_for_code, &insn)
2824 && (insn & 0xff00) == 0xdf00 /* svc */)
/* ARM: check for a 32-bit SVC immediately preceding the PC.  */
2831 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2832 byte_order_for_code, &insn)
2833 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2837 /* Bail out if we don't know that exception information is valid. */
2841 /* The ARM exception index does not mark the *end* of the region
2842 covered by the entry, and some functions will not have any entry.
2843 To correctly recognize the end of the covered region, the linker
2844 should have inserted dummy records with a CANTUNWIND marker.
2846 Unfortunately, current versions of GNU ld do not reliably do
2847 this, and thus we may have found an incorrect entry above.
2848 As a (temporary) sanity check, we only use the entry if it
2849 lies *within* the bounds of the function.  Note that this check
2850 might reject perfectly valid entries that just happen to cover
2851 multiple functions; therefore this check ought to be removed
2852 once the linker is fixed. */
2853 if (func_start > exidx_region)
2857 /* Decode the list of unwinding instructions into a prologue cache.
2858 Note that this may fail due to e.g. a "refuse to unwind" code. */
2859 cache = arm_exidx_fill_cache (this_frame, entry);
2863 *this_prologue_cache = cache;
/* Unwinder descriptor for ARM exception-table (exidx) based unwinding;
   the id/prev_register hooks are shared with the prologue unwinder.  */
2867 struct frame_unwind arm_exidx_unwind = {
2869 default_frame_unwind_stop_reason,
2870 arm_prologue_this_id,
2871 arm_prologue_prev_register,
2873 arm_exidx_unwind_sniffer
2876 /* Recognize GCC's trampoline for thumb call-indirect.  If we are in a
2877 trampoline, return the target PC.  Otherwise return 0.
2879 void call0a (char c, short s, int i, long l) {}
2883 (*pointer_to_call0a) (c, s, i, l);
2886 Instead of calling a stub library function _call_via_xx (xx is
2887 the register name), GCC may inline the trampoline in the object
2888 file as below (register r2 has the address of call0a).
2891 .type main, %function
2900 The trampoline 'bx r2' doesn't belong to main. */
2903 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2905 /* The heuristics of recognizing such trampoline is that FRAME is
2906 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2907 if (arm_frame_is_thumb (frame))
2911 if (target_read_memory (pc, buf, 2) == 0)
2913 struct gdbarch *gdbarch = get_frame_arch (frame);
2914 enum bfd_endian byte_order_for_code
2915 = gdbarch_byte_order_for_code (gdbarch);
2917 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2919 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
/* Rm is encoded in bits 3..6 of the Thumb 'bx' instruction.  */
2922 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2924 /* Clear the LSB so that gdb core sets step-resume
2925 breakpoint at the right address. */
2926 return UNMAKE_THUMB_ADDR (dest);
/* Allocate a minimal prologue cache for a stub frame: the previous SP
   is simply the current SP (stubs do not set up a frame).  */
2934 static struct arm_prologue_cache *
2935 arm_make_stub_cache (struct frame_info *this_frame)
2937 struct arm_prologue_cache *cache;
2939 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2940 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2942 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2947 /* Our frame ID for a stub frame is the current SP and LR. */
2950 arm_stub_this_id (struct frame_info *this_frame,
2952 struct frame_id *this_id)
2954 struct arm_prologue_cache *cache;
/* Build the cache lazily on first use.  */
2956 if (*this_cache == NULL)
2957 *this_cache = arm_make_stub_cache (this_frame);
2958 cache = (struct arm_prologue_cache *) *this_cache;
2960 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
/* Accept this unwinder for PLT stubs, for unreadable code, and for the
   inlined 'bx Rm' trampoline recognized by arm_skip_bx_reg.  */
2964 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2965 struct frame_info *this_frame,
2966 void **this_prologue_cache)
2968 CORE_ADDR addr_in_block;
2970 CORE_ADDR pc, start_addr;
2973 addr_in_block = get_frame_address_in_block (this_frame);
2974 pc = get_frame_pc (this_frame);
2975 if (in_plt_section (addr_in_block)
2976 /* We also use the stub winder if the target memory is unreadable
2977 to avoid having the prologue unwinder trying to read it. */
2978 || target_read_memory (pc, dummy, 4) != 0)
/* No symbol covers PC, but it looks like a 'bx Rm' trampoline.  */
2981 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2982 && arm_skip_bx_reg (this_frame, pc) != 0)
/* Unwinder descriptor for stub frames (PLT entries and the like).  */
2988 struct frame_unwind arm_stub_unwind = {
2990 default_frame_unwind_stop_reason,
2992 arm_prologue_prev_register,
2994 arm_stub_unwind_sniffer
2997 /* Put here the code to store, into CACHE->saved_regs, the addresses
2998 of the saved registers of frame described by THIS_FRAME.  CACHE is
3001 static struct arm_prologue_cache *
3002 arm_m_exception_cache (struct frame_info *this_frame)
3004 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3005 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3006 struct arm_prologue_cache *cache;
3007 CORE_ADDR unwound_sp;
3010 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3011 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3013 unwound_sp = get_frame_register_unsigned (this_frame,
3016 /* The hardware saves eight 32-bit words, comprising xPSR,
3017 ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
3018 "B1.5.6 Exception entry behavior" in
3019 "ARMv7-M Architecture Reference Manual". */
3020 cache->saved_regs[0].addr = unwound_sp;
3021 cache->saved_regs[1].addr = unwound_sp + 4;
3022 cache->saved_regs[2].addr = unwound_sp + 8;
3023 cache->saved_regs[3].addr = unwound_sp + 12;
3024 cache->saved_regs[12].addr = unwound_sp + 16;
3025 cache->saved_regs[14].addr = unwound_sp + 20;
3026 cache->saved_regs[15].addr = unwound_sp + 24;
3027 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3029 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3030 aligner between the top of the 32-byte stack frame and the
3031 previous context's stack pointer. */
3032 cache->prev_sp = unwound_sp + 32;
3033 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3034 && (xpsr & (1 << 9)) != 0)
3035 cache->prev_sp += 4;
3040 /* Implementation of function hook 'this_id' in
3041 'struct frame_uwnind'. */
3044 arm_m_exception_this_id (struct frame_info *this_frame,
3046 struct frame_id *this_id)
3048 struct arm_prologue_cache *cache;
/* Build the cache lazily on first use.  */
3050 if (*this_cache == NULL)
3051 *this_cache = arm_m_exception_cache (this_frame);
3052 cache = (struct arm_prologue_cache *) *this_cache;
3054 /* Our frame ID for a stub frame is the current SP and LR. */
3055 *this_id = frame_id_build (cache->prev_sp,
3056 get_frame_pc (this_frame));
3059 /* Implementation of function hook 'prev_register' in
3060 'struct frame_uwnind'. */
3062 static struct value *
3063 arm_m_exception_prev_register (struct frame_info *this_frame,
3067 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3068 struct arm_prologue_cache *cache;
3070 if (*this_cache == NULL)
3071 *this_cache = arm_m_exception_cache (this_frame);
3072 cache = (struct arm_prologue_cache *) *this_cache;
3074 /* The value was already reconstructed into PREV_SP. */
3075 if (prev_regnum == ARM_SP_REGNUM)
3076 return frame_unwind_got_constant (this_frame, prev_regnum,
/* All other registers come from the saved-register slots recorded
   by arm_m_exception_cache.  */
3079 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3083 /* Implementation of function hook 'sniffer' in
3084 'struct frame_uwnind'. */
3087 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3088 struct frame_info *this_frame,
3089 void **this_prologue_cache)
3091 CORE_ADDR this_pc = get_frame_pc (this_frame);
3093 /* No need to check is_m; this sniffer is only registered for
3094 M-profile architectures. */
3096 /* Exception frames return to one of these magic PCs.  Other values
3097 are not defined as of v7-M.  See details in "B1.5.8 Exception
3098 return behavior" in "ARMv7-M Architecture Reference Manual". */
3099 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3100 || this_pc == 0xfffffffd)
3106 /* Frame unwinder for M-profile exceptions. */
3108 struct frame_unwind arm_m_exception_unwind =
3111 default_frame_unwind_stop_reason,
3112 arm_m_exception_this_id,
3113 arm_m_exception_prev_register,
3115 arm_m_exception_unwind_sniffer
/* frame_base callback: the frame base is the previous SP minus the
   frame size recorded in the prologue cache.  */
3119 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3121 struct arm_prologue_cache *cache;
3123 if (*this_cache == NULL)
3124 *this_cache = arm_make_prologue_cache (this_frame);
3125 cache = (struct arm_prologue_cache *) *this_cache;
3127 return cache->prev_sp - cache->framesize;
/* frame_base descriptor: use arm_normal_frame_base for the base,
   args and locals addresses alike.  */
3130 struct frame_base arm_normal_base = {
3131 &arm_prologue_unwind,
3132 arm_normal_frame_base,
3133 arm_normal_frame_base,
3134 arm_normal_frame_base
3137 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3138 dummy frame.  The frame ID's base needs to match the TOS value
3139 saved by save_dummy_frame_tos() and returned from
3140 arm_push_dummy_call, and the PC needs to match the dummy frame's
3143 static struct frame_id
3144 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3146 return frame_id_build (get_frame_register_unsigned (this_frame,
3148 get_frame_pc (this_frame));
3151 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3152 be used to construct the previous frame's ID, after looking up the
3153 containing function). */
3156 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3159 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
/* Strip the Thumb/mode bits from the unwound PC.  */
3160 return arm_addr_bits_remove (gdbarch, pc);
/* gdbarch unwind_sp hook: previous frame's SP is the unwound SP register.  */
3164 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3166 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
/* DWARF2 prev_register hook for registers needing special treatment:
   reconstruct PC from LR (stripping the Thumb bit) and rebuild the
   T bit of CPSR from the LR's Thumb bit.  */
3169 static struct value *
3170 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3173 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3175 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3180 /* The PC is normally copied from the return column, which
3181 describes saves of LR.  However, that version may have an
3182 extra bit set to indicate Thumb state.  The bit is not
3184 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3185 return frame_unwind_got_constant (this_frame, regnum,
3186 arm_addr_bits_remove (gdbarch, lr));
3189 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3190 cpsr = get_frame_register_unsigned (this_frame, regnum);
3191 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3192 if (IS_THUMB_ADDR (lr))
3196 return frame_unwind_got_constant (this_frame, regnum, cpsr);
/* Any other register number is a caller bug.  */
3199 internal_error (__FILE__, __LINE__,
3200 _("Unexpected register %d"), regnum);
/* Initialize the DWARF2 frame-state rule for REGNUM: PC and PS are
   computed by arm_dwarf2_prev_register; SP follows the CFA.  */
3205 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3206 struct dwarf2_frame_state_reg *reg,
3207 struct frame_info *this_frame)
3213 reg->how = DWARF2_FRAME_REG_FN;
3214 reg->loc.fn = arm_dwarf2_prev_register;
3217 reg->how = DWARF2_FRAME_REG_CFA;
3222 /* Implement the stack_frame_destroyed_p gdbarch method. */
3225 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3227 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3228 unsigned int insn, insn2;
3229 int found_return = 0, found_stack_adjust = 0;
3230 CORE_ADDR func_start, func_end;
/* Without function bounds we cannot scan the epilogue.  */
3234 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3237 /* The epilogue is a sequence of instructions along the following lines:
3239 - add stack frame size to SP or FP
3240 - [if frame pointer used] restore SP from FP
3241 - restore registers from SP [may include PC]
3242 - a return-type instruction [if PC wasn't already restored]
3244 In a first pass, we scan forward from the current PC and verify the
3245 instructions we find as compatible with this sequence, ending in a
3248 However, this is not sufficient to distinguish indirect function calls
3249 within a function from indirect tail calls in the epilogue in some cases.
3250 Therefore, if we didn't already find any SP-changing instruction during
3251 forward scan, we add a backward scanning heuristic to ensure we actually
3252 are in the epilogue. */
3255 while (scan_pc < func_end && !found_return)
3257 if (target_read_memory (scan_pc, buf, 2))
3261 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3263 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3265 else if (insn == 0x46f7) /* mov pc, lr */
3267 else if (thumb_instruction_restores_sp (insn))
3269 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3272 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
/* Read the second halfword of the 32-bit instruction.  */
3274 if (target_read_memory (scan_pc, buf, 2))
3278 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3280 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3282 if (insn2 & 0x8000) /* <registers> include PC. */
3285 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3286 && (insn2 & 0x0fff) == 0x0b04)
3288 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3291 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3292 && (insn2 & 0x0e00) == 0x0a00)
3304 /* Since any instruction in the epilogue sequence, with the possible
3305 exception of return itself, updates the stack pointer, we need to
3306 scan backwards for at most one instruction.  Try either a 16-bit or
3307 a 32-bit instruction.  This is just a heuristic, so we do not worry
3308 too much about false positives. */
3310 if (pc - 4 < func_start)
3312 if (target_read_memory (pc - 4, buf, 4))
3315 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3316 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
/* Accept any of the known SP-restoring forms in the preceding slot.  */
3318 if (thumb_instruction_restores_sp (insn2))
3319 found_stack_adjust = 1;
3320 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3321 found_stack_adjust = 1;
3322 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3323 && (insn2 & 0x0fff) == 0x0b04)
3324 found_stack_adjust = 1;
3325 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3326 && (insn2 & 0x0e00) == 0x0a00)
3327 found_stack_adjust = 1;
3329 return found_stack_adjust;
3332 /* Implement the stack_frame_destroyed_p gdbarch method. */
3335 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3337 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3340 CORE_ADDR func_start, func_end;
/* Thumb code has its own epilogue recognizer.  */
3342 if (arm_pc_is_thumb (gdbarch, pc))
3343 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3345 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3348 /* We are in the epilogue if the previous instruction was a stack
3349 adjustment and the next instruction is a possible return (bx, mov
3350 pc, or pop).  We could have to scan backwards to find the stack
3351 adjustment, or forwards to find the return, but this is a decent
3352 approximation.  First scan forwards. */
3355 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3356 if (bits (insn, 28, 31) != INST_NV)
/* bx <Rm>  */
3358 if ((insn & 0x0ffffff0) == 0x012fff10)
/* mov pc, <Rm>  */
3361 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3364 else if ((insn & 0x0fff0000) == 0x08bd0000
3365 && (insn & 0x0000c000) != 0)
3366 /* POP (LDMIA), including PC or LR. */
3373 /* Scan backwards.  This is just a heuristic, so do not worry about
3374 false positives from mode changes. */
3376 if (pc < func_start + 4)
3379 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3380 if (arm_instruction_restores_sp (insn))
3387 /* When arguments must be pushed onto the stack, they go on in reverse
3388 order.  The code below implements a FILO (stack) to do this. */
/* Singly-linked node; PREV points toward the bottom of the stack.  */
3393 struct stack_item *prev;
/* Push a copy of CONTENTS (LEN bytes) onto the stack rooted at PREV;
   return the new top-of-stack node.  The node and its data are
   heap-allocated and released by pop_stack_item.  */
3397 static struct stack_item *
3398 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3400 struct stack_item *si;
3401 si = XNEW (struct stack_item);
3402 si->data = (gdb_byte *) xmalloc (len);
3405 memcpy (si->data, contents, len);
/* Pop the top node SI off the stack, freeing it, and return the new top.  */
3409 static struct stack_item *
3410 pop_stack_item (struct stack_item *si)
3412 struct stack_item *dead = si;
3420 /* Return the alignment (in bytes) of the given type. */
3423 arm_type_align (struct type *t)
3429 t = check_typedef (t);
3430 switch (TYPE_CODE (t))
3433 /* Should never happen. */
3434 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
/* Scalar types are naturally aligned to their size.  */
3438 case TYPE_CODE_ENUM:
3442 case TYPE_CODE_RANGE:
3444 case TYPE_CODE_CHAR:
3445 case TYPE_CODE_BOOL:
3446 return TYPE_LENGTH (t);
3448 case TYPE_CODE_ARRAY:
3449 if (TYPE_VECTOR (t))
3451 /* Use the natural alignment for vector types (the same for
3452 scalar type), but the maximum alignment is 64-bit. */
3453 if (TYPE_LENGTH (t) > 8)
3456 return TYPE_LENGTH (t);
/* Non-vector arrays align like their element type.  */
3459 return arm_type_align (TYPE_TARGET_TYPE (t));
3460 case TYPE_CODE_COMPLEX:
3461 return arm_type_align (TYPE_TARGET_TYPE (t));
3463 case TYPE_CODE_STRUCT:
3464 case TYPE_CODE_UNION:
/* Aggregates align to their most-aligned member.  */
3466 for (n = 0; n < TYPE_NFIELDS (t); n++)
3468 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3476 /* Possible base types for a candidate for passing and returning in
/* Enumerators (declared below, outside this view) classify a candidate
   as single, double, 64-bit vector or 128-bit vector.  */
3479 enum arm_vfp_cprc_base_type
3488 /* The length of one element of base type B. */
3491 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3495 case VFP_CPRC_SINGLE:
3497 case VFP_CPRC_DOUBLE:
3499 case VFP_CPRC_VEC64:
3501 case VFP_CPRC_VEC128:
/* Any other classification is a caller bug.  */
3504 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3509 /* The character ('s', 'd' or 'q') for the type of VFP register used
3510 for passing base type B. */
3513 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3517 case VFP_CPRC_SINGLE:
3519 case VFP_CPRC_DOUBLE:
3521 case VFP_CPRC_VEC64:
3523 case VFP_CPRC_VEC128:
/* Any other classification is a caller bug.  */
3526 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3531 /* Determine whether T may be part of a candidate for passing and
3532 returning in VFP registers, ignoring the limit on the total number
3533 of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3534 classification of the first valid component found; if it is not
3535 VFP_CPRC_UNKNOWN, all components must have the same classification
3536 as *BASE_TYPE.  If it is found that T contains a type not permitted
3537 for passing and returning in VFP registers, a type differently
3538 classified from *BASE_TYPE, or two types differently classified
3539 from each other, return -1, otherwise return the total number of
3540 base-type elements found (possibly 0 in an empty structure or
3541 array).  Vector types are not currently supported, matching the
3542 generic AAPCS support. */
3545 arm_vfp_cprc_sub_candidate (struct type *t,
3546 enum arm_vfp_cprc_base_type *base_type)
3548 t = check_typedef (t);
3549 switch (TYPE_CODE (t))
/* Floating-point scalar: classify by length (4 = single, 8 = double).  */
3552 switch (TYPE_LENGTH (t))
3555 if (*base_type == VFP_CPRC_UNKNOWN)
3556 *base_type = VFP_CPRC_SINGLE;
3557 else if (*base_type != VFP_CPRC_SINGLE)
3562 if (*base_type == VFP_CPRC_UNKNOWN)
3563 *base_type = VFP_CPRC_DOUBLE;
3564 else if (*base_type != VFP_CPRC_DOUBLE)
3573 case TYPE_CODE_COMPLEX:
3574 /* Arguments of complex T where T is one of the types float or
3575 double get treated as if they are implemented as:
/* A complex counts as two elements of the component type.  */
3584 switch (TYPE_LENGTH (t))
3587 if (*base_type == VFP_CPRC_UNKNOWN)
3588 *base_type = VFP_CPRC_SINGLE;
3589 else if (*base_type != VFP_CPRC_SINGLE)
3594 if (*base_type == VFP_CPRC_UNKNOWN)
3595 *base_type = VFP_CPRC_DOUBLE;
3596 else if (*base_type != VFP_CPRC_DOUBLE)
3605 case TYPE_CODE_ARRAY:
3607 if (TYPE_VECTOR (t))
3609 /* A 64-bit or 128-bit containerized vector type are VFP
3611 switch (TYPE_LENGTH (t))
3614 if (*base_type == VFP_CPRC_UNKNOWN)
3615 *base_type = VFP_CPRC_VEC64;
3618 if (*base_type == VFP_CPRC_UNKNOWN)
3619 *base_type = VFP_CPRC_VEC128;
/* Ordinary array: classify the element type, then count how many
   base-type units the whole array spans.  */
3630 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3634 if (TYPE_LENGTH (t) == 0)
3636 gdb_assert (count == 0);
3639 else if (count == 0)
3641 unitlen = arm_vfp_cprc_unit_length (*base_type);
3642 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3643 return TYPE_LENGTH (t) / unitlen;
3648 case TYPE_CODE_STRUCT:
/* Struct: sum the element counts of all fields; all fields must
   classify identically, and the struct must be exactly packed.  */
3653 for (i = 0; i < TYPE_NFIELDS (t); i++)
3655 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3657 if (sub_count == -1)
3661 if (TYPE_LENGTH (t) == 0)
3663 gdb_assert (count == 0);
3666 else if (count == 0)
3668 unitlen = arm_vfp_cprc_unit_length (*base_type);
3669 if (TYPE_LENGTH (t) != unitlen * count)
3674 case TYPE_CODE_UNION:
/* Union: the element count is the maximum over all members.  */
3679 for (i = 0; i < TYPE_NFIELDS (t); i++)
3681 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3683 if (sub_count == -1)
3685 count = (count > sub_count ? count : sub_count);
3687 if (TYPE_LENGTH (t) == 0)
3689 gdb_assert (count == 0);
3692 else if (count == 0)
3694 unitlen = arm_vfp_cprc_unit_length (*base_type);
3695 if (TYPE_LENGTH (t) != unitlen * count)
3707 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3708 if passed to or returned from a non-variadic function with the VFP
3709 ABI in effect.  Return 1 if it is, 0 otherwise.  If it is, set
3710 *BASE_TYPE to the base type for T and *COUNT to the number of
3711 elements of that base type before returning. */
3714 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3717 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3718 int c = arm_vfp_cprc_sub_candidate (t, &b);
/* The AAPCS VFP variant limits a CPRC to at most 4 elements.  */
3719 if (c <= 0 || c > 4)
3726 /* Return 1 if the VFP ABI should be used for passing arguments to and
3727 returning values from a function of type FUNC_TYPE, 0
3731 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3733 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3734 /* Variadic functions always use the base ABI.  Assume that functions
3735 without debug info are not variadic. */
3736 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3738 /* The VFP ABI is only supported as a variant of AAPCS. */
3739 if (tdep->arm_abi != ARM_ABI_AAPCS)
/* Finally, require the VFP floating-point model to be selected.  */
3741 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3744 /* We currently only support passing parameters in integer registers, which
3745 conforms with GCC's default model, and VFP argument passing following
3746 the VFP variant of AAPCS. Several other variants exist and
3747 we should probably support some of them based on the selected ABI. */
3750 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3751 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3752 struct value **args, CORE_ADDR sp, int struct_return,
3753 CORE_ADDR struct_addr)
3755 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3759 struct stack_item *si = NULL;
3762 unsigned vfp_regs_free = (1 << 16) - 1;
3764 /* Determine the type of this function and whether the VFP ABI
3766 ftype = check_typedef (value_type (function));
3767 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3768 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3769 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3771 /* Set the return address. For the ARM, the return breakpoint is
3772 always at BP_ADDR. */
3773 if (arm_pc_is_thumb (gdbarch, bp_addr))
3775 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3777 /* Walk through the list of args and determine how large a temporary
3778 stack is required. Need to take care here as structs may be
3779 passed on the stack, and we have to push them. */
3782 argreg = ARM_A1_REGNUM;
3785 /* The struct_return pointer occupies the first parameter
3786 passing register. */
3790 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3791 gdbarch_register_name (gdbarch, argreg),
3792 paddress (gdbarch, struct_addr));
3793 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3797 for (argnum = 0; argnum < nargs; argnum++)
3800 struct type *arg_type;
3801 struct type *target_type;
3802 enum type_code typecode;
3803 const bfd_byte *val;
3805 enum arm_vfp_cprc_base_type vfp_base_type;
3807 int may_use_core_reg = 1;
3809 arg_type = check_typedef (value_type (args[argnum]));
3810 len = TYPE_LENGTH (arg_type);
3811 target_type = TYPE_TARGET_TYPE (arg_type);
3812 typecode = TYPE_CODE (arg_type);
3813 val = value_contents (args[argnum]);
3815 align = arm_type_align (arg_type);
3816 /* Round alignment up to a whole number of words. */
3817 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3818 /* Different ABIs have different maximum alignments. */
3819 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3821 /* The APCS ABI only requires word alignment. */
3822 align = INT_REGISTER_SIZE;
3826 /* The AAPCS requires at most doubleword alignment. */
3827 if (align > INT_REGISTER_SIZE * 2)
3828 align = INT_REGISTER_SIZE * 2;
3832 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3840 /* Because this is a CPRC it cannot go in a core register or
3841 cause a core register to be skipped for alignment.
3842 Either it goes in VFP registers and the rest of this loop
3843 iteration is skipped for this argument, or it goes on the
3844 stack (and the stack alignment code is correct for this
3846 may_use_core_reg = 0;
3848 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3849 shift = unit_length / 4;
3850 mask = (1 << (shift * vfp_base_count)) - 1;
3851 for (regno = 0; regno < 16; regno += shift)
3852 if (((vfp_regs_free >> regno) & mask) == mask)
3861 vfp_regs_free &= ~(mask << regno);
3862 reg_scaled = regno / shift;
3863 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3864 for (i = 0; i < vfp_base_count; i++)
3868 if (reg_char == 'q')
3869 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3870 val + i * unit_length);
3873 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3874 reg_char, reg_scaled + i);
3875 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3877 regcache_cooked_write (regcache, regnum,
3878 val + i * unit_length);
3885 /* This CPRC could not go in VFP registers, so all VFP
3886 registers are now marked as used. */
3891 /* Push stack padding for doubleword alignment. */
3892 if (nstack & (align - 1))
3894 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3895 nstack += INT_REGISTER_SIZE;
3898 /* Doubleword aligned quantities must go in even register pairs. */
3899 if (may_use_core_reg
3900 && argreg <= ARM_LAST_ARG_REGNUM
3901 && align > INT_REGISTER_SIZE
3905 /* If the argument is a pointer to a function, and it is a
3906 Thumb function, create a LOCAL copy of the value and set
3907 the THUMB bit in it. */
3908 if (TYPE_CODE_PTR == typecode
3909 && target_type != NULL
3910 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3912 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3913 if (arm_pc_is_thumb (gdbarch, regval))
3915 bfd_byte *copy = (bfd_byte *) alloca (len);
3916 store_unsigned_integer (copy, len, byte_order,
3917 MAKE_THUMB_ADDR (regval));
3922 /* Copy the argument to general registers or the stack in
3923 register-sized pieces. Large arguments are split between
3924 registers and stack. */
3927 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3929 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3931 /* The argument is being passed in a general purpose
3934 = extract_unsigned_integer (val, partial_len, byte_order);
3935 if (byte_order == BFD_ENDIAN_BIG)
3936 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3938 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3940 gdbarch_register_name
3942 phex (regval, INT_REGISTER_SIZE));
3943 regcache_cooked_write_unsigned (regcache, argreg, regval);
3948 /* Push the arguments onto the stack. */
3950 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3952 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3953 nstack += INT_REGISTER_SIZE;
3960 /* If we have an odd number of words to push, then decrement the stack
3961 by one word now, so first stack argument will be dword aligned. */
3968 write_memory (sp, si->data, si->len);
3969 si = pop_stack_item (si);
3972 /* Finally, update the SP register. */
3973 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3979 /* Always align the frame to an 8-byte boundary. This is required on
3980 some platforms and harmless on the rest. */
3983 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3985 /* Round SP down to the nearest 8-byte boundary. */
3986 return sp & ~ (CORE_ADDR) 7;
/* Print the FPU exception-flag bits found in the low five bits of
   FLAGS to FILE, one mnemonic per set bit, terminated by a newline.  */
3990 print_fpu_flags (struct ui_file *file, int flags)
3992 if (flags & (1 << 0))
3993 fputs_filtered ("IVO ", file); /* Invalid operation.  */
3994 if (flags & (1 << 1))
3995 fputs_filtered ("DVZ ", file); /* Divide by zero.  */
3996 if (flags & (1 << 2))
3997 fputs_filtered ("OFL ", file); /* Overflow.  */
3998 if (flags & (1 << 3))
3999 fputs_filtered ("UFL ", file); /* Underflow.  */
4000 if (flags & (1 << 4))
4001 fputs_filtered ("INX ", file); /* Inexact.  */
4002 fputc_filtered ('\n', file);
4005 /* Print interesting information about the floating point processor
4006 (if present) or emulator. */
4008 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4009 struct frame_info *frame, const char *args)
4011 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
/* Bits 24-30 of the FPS register encode the FPU type; bit 31 set
   indicates a hardware FPU, clear indicates a software emulator.  */
4014 type = (status >> 24) & 127;
4015 if (status & (1 << 31))
4016 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4018 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4019 /* i18n: [floating point unit] mask */
4020 fputs_filtered (_("mask: "), file);
/* The exception mask occupies bits 16-20; the sticky exception flags
   occupy bits 0-4 (print_fpu_flags only examines the low five bits).  */
4021 print_fpu_flags (file, status >> 16);
4022 /* i18n: [floating point unit] flags */
4023 fputs_filtered (_("flags: "), file);
4024 print_fpu_flags (file, status);
4027 /* Construct the ARM extended floating point type.  The type object is
     created lazily on first use and cached in the per-gdbarch tdep.  */
4028 static struct type *
4029 arm_ext_type (struct gdbarch *gdbarch)
4031 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4033 if (!tdep->arm_ext_type)
4035 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4036 floatformats_arm_ext)
4038 return tdep->arm_ext_type;
/* Return the GDB type used to present a 64-bit NEON "D" register: a
   lazily-built composite offering u8/u16/u32/u64 and f32/f64 views of
   the same 8 bytes, cached in the per-gdbarch tdep.  NOTE(review): the
   arch_composite_type type-code argument is on a line not visible in
   this excerpt — presumably TYPE_CODE_UNION; confirm in full source.  */
4041 static struct type *
4042 arm_neon_double_type (struct gdbarch *gdbarch)
4044 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4046 if (tdep->neon_double_type == NULL)
4048 struct type *t, *elem;
4050 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4052 elem = builtin_type (gdbarch)->builtin_uint8;
4053 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4054 elem = builtin_type (gdbarch)->builtin_uint16;
4055 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4056 elem = builtin_type (gdbarch)->builtin_uint32;
4057 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4058 elem = builtin_type (gdbarch)->builtin_uint64;
4059 append_composite_type_field (t, "u64", elem);
4060 elem = builtin_type (gdbarch)->builtin_float;
4061 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4062 elem = builtin_type (gdbarch)->builtin_double;
4063 append_composite_type_field (t, "f64", elem);
4065 TYPE_VECTOR (t) = 1;
4066 TYPE_NAME (t) = "neon_d";
4067 tdep->neon_double_type = t;
4070 return tdep->neon_double_type;
4073 /* FIXME: The vector types are not correctly ordered on big-endian
4074 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4075 bits of d0 - regardless of what unit size is being held in d0. So
4076 the offset of the first uint8 in d0 is 7, but the offset of the
4077 first float is 4. This code works as-is for little-endian
4080 targets.  */
/* Return the GDB type used to present a 128-bit NEON "Q" register;
   like arm_neon_double_type but with 16-byte vector views, lazily
   built and cached in the per-gdbarch tdep.  */
4081 static struct type *
4082 arm_neon_quad_type (struct gdbarch *gdbarch)
4083 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4085 if (tdep->neon_quad_type == NULL)
4087 struct type *t, *elem;
4089 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4091 elem = builtin_type (gdbarch)->builtin_uint8;
4092 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4093 elem = builtin_type (gdbarch)->builtin_uint16;
4094 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4095 elem = builtin_type (gdbarch)->builtin_uint32;
4096 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4097 elem = builtin_type (gdbarch)->builtin_uint64;
4098 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4099 elem = builtin_type (gdbarch)->builtin_float;
4100 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4101 elem = builtin_type (gdbarch)->builtin_double;
4102 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4104 TYPE_VECTOR (t) = 1;
4105 TYPE_NAME (t) = "neon_q";
4106 tdep->neon_quad_type = t;
4109 return tdep->neon_quad_type;
4112 /* Return the GDB type object for the "standard" data type of data in
     register REGNUM.  */
4115 static struct type *
4116 arm_register_type (struct gdbarch *gdbarch, int regnum)
4118 int num_regs = gdbarch_num_regs (gdbarch);
/* The first 32 pseudo registers are the single-precision VFP views
   (presumably s0-s31 — confirm against full source).  */
4120 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4121 && regnum >= num_regs && regnum < num_regs + 32)
4122 return builtin_type (gdbarch)->builtin_float;
/* The next 16 pseudo registers are the NEON quad views (presumably
   q0-q15).  */
4124 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4125 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4126 return arm_neon_quad_type (gdbarch);
4128 /* If the target description has register information, we are only
4129 in this function so that we can override the types of
4130 double-precision registers for NEON. */
4131 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4133 struct type *t = tdesc_register_type (gdbarch, regnum);
4135 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4136 && TYPE_CODE (t) == TYPE_CODE_FLT
4137 && gdbarch_tdep (gdbarch)->have_neon)
4138 return arm_neon_double_type (gdbarch);
/* FPA registers display as the 12-byte extended format when present,
   and as void when the target has no FPA.  */
4143 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4145 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4146 return builtin_type (gdbarch)->builtin_void;
4148 return arm_ext_type (gdbarch);
4150 else if (regnum == ARM_SP_REGNUM)
4151 return builtin_type (gdbarch)->builtin_data_ptr;
4152 else if (regnum == ARM_PC_REGNUM)
4153 return builtin_type (gdbarch)->builtin_func_ptr;
4154 else if (regnum >= ARRAY_SIZE (arm_register_names))
4155 /* These registers are only supported on targets which supply
4156 an XML description. */
4157 return builtin_type (gdbarch)->builtin_int0;
4159 return builtin_type (gdbarch)->builtin_uint32;
4162 /* Map a DWARF register REGNUM onto the appropriate GDB register
     number.  */
4166 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4168 /* Core integer regs. */
4169 if (reg >= 0 && reg <= 15)
4172 /* Legacy FPA encoding. These were once used in a way which
4173 overlapped with VFP register numbering, so their use is
4174 discouraged, but GDB doesn't support the ARM toolchain
4175 which used them for VFP. */
4176 if (reg >= 16 && reg <= 23)
4177 return ARM_F0_REGNUM + reg - 16;
4179 /* New assignments for the FPA registers. */
4180 if (reg >= 96 && reg <= 103)
4181 return ARM_F0_REGNUM + reg - 96;
4183 /* WMMX register assignments. */
4184 if (reg >= 104 && reg <= 111)
4185 return ARM_WCGR0_REGNUM + reg - 104;
4187 if (reg >= 112 && reg <= 127)
4188 return ARM_WR0_REGNUM + reg - 112;
4190 if (reg >= 192 && reg <= 199)
4191 return ARM_WC0_REGNUM + reg - 192;
4193 /* VFP v2 registers. A double precision value is actually
4194 in d1 rather than s2, but the ABI only defines numbering
4195 for the single precision registers. This will "just work"
4196 in GDB for little endian targets (we'll read eight bytes,
4197 starting in s0 and then progressing to s1), but will be
4198 reversed on big endian targets with VFP. This won't
4199 be a problem for the new Neon quad registers; you're supposed
4200 to use DW_OP_piece for those. */
4201 if (reg >= 64 && reg <= 95)
/* Look up "s<N>" by name so pseudo-register numbering is respected.  */
4205 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4206 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4210 /* VFP v3 / Neon registers. This range is also used for VFP v2
4211 registers, except that it now describes d0 instead of s0. */
4212 if (reg >= 256 && reg <= 287)
4216 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4217 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4224 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4226 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4229 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
/* iWMMXt banks map directly onto the simulator's coprocessor ranges.  */
4231 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4232 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4234 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4235 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4237 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4238 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
/* NOTE(review): REG is presumably REGNUM rebased after each register
   bank; its declaration and the decrements between these checks are on
   lines not visible in this excerpt — confirm against full source.  */
4240 if (reg < NUM_GREGS)
4241 return SIM_ARM_R0_REGNUM + reg;
4244 if (reg < NUM_FREGS)
4245 return SIM_ARM_FP0_REGNUM + reg;
4248 if (reg < NUM_SREGS)
4249 return SIM_ARM_FPS_REGNUM + reg;
4252 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4255 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4256 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4257 It is thought that this is the floating-point register format on
4258 little-endian systems. */
/* Convert an ARM extended-precision value at PTR into format FMT,
   storing the result at DBL.  ENDIANESS selects which extended layout
   the source uses.  */
4261 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4262 void *dbl, int endianess)
4266 if (endianess == BFD_ENDIAN_BIG)
4267 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4269 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4271 floatformat_from_doublest (fmt, &d, dbl);
/* Convert a value in format FMT at PTR into the ARM extended-precision
   register format, storing the result at DBL.  ENDIANESS selects which
   extended layout to produce (mirror image of convert_from_extended).  */
4275 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4280 floatformat_to_doublest (fmt, ptr, &d);
4281 if (endianess == BFD_ENDIAN_BIG)
4282 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4284 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
/* Return nonzero if ARM condition code COND is satisfied by the
   N/Z/C/V flag bits in STATUS_REG.  AL and NV are treated as always
   true.  (The switch case labels fall on lines not visible in this
   excerpt; the mnemonic each flag test implements is noted below.)  */
4289 condition_true (unsigned long cond, unsigned long status_reg)
4291 if (cond == INST_AL || cond == INST_NV)
4297 return ((status_reg & FLAG_Z) != 0); /* EQ */
4299 return ((status_reg & FLAG_Z) == 0); /* NE */
4301 return ((status_reg & FLAG_C) != 0); /* CS/HS */
4303 return ((status_reg & FLAG_C) == 0); /* CC/LO */
4305 return ((status_reg & FLAG_N) != 0); /* MI */
4307 return ((status_reg & FLAG_N) == 0); /* PL */
4309 return ((status_reg & FLAG_V) != 0); /* VS */
4311 return ((status_reg & FLAG_V) == 0); /* VC */
4313 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C); /* HI */
4315 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C); /* LS */
4317 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0)); /* GE */
4319 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0)); /* LT */
4321 return (((status_reg & FLAG_Z) == 0) /* GT */
4322 && (((status_reg & FLAG_N) == 0)
4323 == ((status_reg & FLAG_V) == 0)));
4325 return (((status_reg & FLAG_Z) != 0) /* LE */
4326 || (((status_reg & FLAG_N) == 0)
4327 != ((status_reg & FLAG_V) == 0)));
/* Evaluate the shifter operand of ARM data-processing instruction INST
   in the context of FRAME.  PC_VAL is the prefetch-adjusted PC value
   and CARRY the current carry flag (consumed by RRX).  Returns the
   32-bit operand value.  */
4332 static unsigned long
4333 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4334 unsigned long pc_val, unsigned long status_reg)
4336 unsigned long res, shift;
4337 int rm = bits (inst, 0, 3);
4338 unsigned long shifttype = bits (inst, 5, 6);
/* Register-specified shift: the amount comes from the low byte of Rs
   (PC reads as PC + 8).  */
4342 int rs = bits (inst, 8, 11);
4343 shift = (rs == 15 ? pc_val + 8
4344 : get_frame_register_unsigned (frame, rs)) & 0xFF;
/* Immediate shift amount.  */
4347 shift = bits (inst, 7, 11);
/* Rm as PC reads as PC + 12 for register-specified shifts (bit 4 set),
   PC + 8 otherwise.  */
4349 res = (rm == ARM_PC_REGNUM
4350 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4351 : get_frame_register_unsigned (frame, rm));
4356 res = shift >= 32 ? 0 : res << shift; /* LSL */
4360 res = shift >= 32 ? 0 : res >> shift; /* LSR */
4366 res = ((res & 0x80000000L) /* ASR: replicate the sign bit.  */
4367 ? ~((~res) >> shift) : res >> shift);
4370 case 3: /* ROR/RRX */
4373 res = (res >> 1) | (carry ? 0x80000000L : 0); /* RRX */
4375 res = (res >> shift) | (res << (32 - shift)); /* ROR */
4379 return res & 0xffffffff;
4382 /* Return number of 1-bits in VAL, using Kernighan's method: each
     iteration clears the lowest set bit, so the loop runs once per
     set bit.  */
4385 bitcount (unsigned long val)
4388 for (nbits = 0; val != 0; nbits++)
4389 val &= val - 1; /* Delete rightmost 1-bit in val. */
/* Advance the Thumb-2 ITSTATE value past one instruction and return
   the updated state (zero once the IT block is exhausted).  */
4394 thumb_advance_itstate (unsigned int itstate)
4396 /* Preserve IT[7:5], the first three bits of the condition. Shift
4397 the upcoming condition flags left by one bit. */
4398 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4400 /* If we have finished the IT block, clear the state. */
4401 if ((itstate & 0x0f) == 0)
4407 /* Find the next PC after the current instruction executes. In some
4408 cases we can not statically determine the answer (see the IT state
4409 handling in this function); in that case, a breakpoint may be
4410 inserted in addition to the returned PC, which will be used to set
4411 another breakpoint by our caller. */
4414 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4416 struct gdbarch *gdbarch = get_frame_arch (frame);
4417 struct address_space *aspace = get_frame_address_space (frame);
4418 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4419 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4420 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4421 unsigned short inst1;
4422 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4423 unsigned long offset;
4424 ULONGEST status, itstate;
4426 nextpc = MAKE_THUMB_ADDR (nextpc);
4427 pc_val = MAKE_THUMB_ADDR (pc_val);
4429 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4431 /* Thumb-2 conditional execution support. There are eight bits in
4432 the CPSR which describe conditional execution state. Once
4433 reconstructed (they're in a funny order), the low five bits
4434 describe the low bit of the condition for each instruction and
4435 how many instructions remain. The high three bits describe the
4436 base condition. One of the low four bits will be set if an IT
4437 block is active. These bits read as zero on earlier
4439 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4440 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4442 /* If-Then handling. On GNU/Linux, where this routine is used, we
4443 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4444 can disable execution of the undefined instruction. So we might
4445 miss the breakpoint if we set it on a skipped conditional
4446 instruction. Because conditional instructions can change the
4447 flags, affecting the execution of further instructions, we may
4448 need to set two breakpoints. */
4450 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4452 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4454 /* An IT instruction. Because this instruction does not
4455 modify the flags, we can accurately predict the next
4456 executed instruction. */
4457 itstate = inst1 & 0x00ff;
4458 pc += thumb_insn_size (inst1);
4460 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4462 inst1 = read_memory_unsigned_integer (pc, 2,
4463 byte_order_for_code);
4464 pc += thumb_insn_size (inst1);
4465 itstate = thumb_advance_itstate (itstate);
4468 return MAKE_THUMB_ADDR (pc);
4470 else if (itstate != 0)
4472 /* We are in a conditional block. Check the condition. */
4473 if (! condition_true (itstate >> 4, status))
4475 /* Advance to the next executed instruction. */
4476 pc += thumb_insn_size (inst1);
4477 itstate = thumb_advance_itstate (itstate);
4479 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4481 inst1 = read_memory_unsigned_integer (pc, 2,
4482 byte_order_for_code);
4483 pc += thumb_insn_size (inst1);
4484 itstate = thumb_advance_itstate (itstate);
4487 return MAKE_THUMB_ADDR (pc);
4489 else if ((itstate & 0x0f) == 0x08)
4491 /* This is the last instruction of the conditional
4492 block, and it is executed. We can handle it normally
4493 because the following instruction is not conditional,
4494 and we must handle it normally because it is
4495 permitted to branch. Fall through. */
4501 /* There are conditional instructions after this one.
4502 If this instruction modifies the flags, then we can
4503 not predict what the next executed instruction will
4504 be. Fortunately, this instruction is architecturally
4505 forbidden to branch; we know it will fall through.
4506 Start by skipping past it. */
4507 pc += thumb_insn_size (inst1);
4508 itstate = thumb_advance_itstate (itstate);
4510 /* Set a breakpoint on the following instruction. */
4511 gdb_assert ((itstate & 0x0f) != 0);
4512 arm_insert_single_step_breakpoint (gdbarch, aspace,
4513 MAKE_THUMB_ADDR (pc));
4514 cond_negated = (itstate >> 4) & 1;
4516 /* Skip all following instructions with the same
4517 condition. If there is a later instruction in the IT
4518 block with the opposite condition, set the other
4519 breakpoint there. If not, then set a breakpoint on
4520 the instruction after the IT block. */
4523 inst1 = read_memory_unsigned_integer (pc, 2,
4524 byte_order_for_code);
4525 pc += thumb_insn_size (inst1);
4526 itstate = thumb_advance_itstate (itstate);
4528 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4530 return MAKE_THUMB_ADDR (pc);
4534 else if (itstate & 0x0f)
4536 /* We are in a conditional block. Check the condition. */
4537 int cond = itstate >> 4;
4539 if (! condition_true (cond, status))
4540 /* Advance to the next instruction. All the 32-bit
4541 instructions share a common prefix. */
4542 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4544 /* Otherwise, handle the instruction normally. */
4547 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4551 /* Fetch the saved PC from the stack. It's stored above
4552 all of the other registers. */
4553 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4554 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4555 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4557 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4559 unsigned long cond = bits (inst1, 8, 11);
4560 if (cond == 0x0f) /* 0x0f = SWI */
4562 struct gdbarch_tdep *tdep;
4563 tdep = gdbarch_tdep (gdbarch);
4565 if (tdep->syscall_next_pc != NULL)
4566 nextpc = tdep->syscall_next_pc (frame);
4569 else if (cond != 0x0f && condition_true (cond, status))
4570 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4572 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4574 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4576 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4578 unsigned short inst2;
4579 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4581 /* Default to the next instruction. */
4583 nextpc = MAKE_THUMB_ADDR (nextpc);
4585 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4587 /* Branches and miscellaneous control instructions. */
4589 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4592 int j1, j2, imm1, imm2;
4594 imm1 = sbits (inst1, 0, 10);
4595 imm2 = bits (inst2, 0, 10);
4596 j1 = bit (inst2, 13);
4597 j2 = bit (inst2, 11);
4599 offset = ((imm1 << 12) + (imm2 << 1));
4600 offset ^= ((!j2) << 22) | ((!j1) << 23);
4602 nextpc = pc_val + offset;
4603 /* For BLX make sure to clear the low bits. */
4604 if (bit (inst2, 12) == 0)
4605 nextpc = nextpc & 0xfffffffc;
4607 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4609 /* SUBS PC, LR, #imm8. */
4610 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4611 nextpc -= inst2 & 0x00ff;
4613 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4615 /* Conditional branch. */
4616 if (condition_true (bits (inst1, 6, 9), status))
4618 int sign, j1, j2, imm1, imm2;
4620 sign = sbits (inst1, 10, 10);
4621 imm1 = bits (inst1, 0, 5);
4622 imm2 = bits (inst2, 0, 10);
4623 j1 = bit (inst2, 13);
4624 j2 = bit (inst2, 11);
4626 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4627 offset += (imm1 << 12) + (imm2 << 1);
4629 nextpc = pc_val + offset;
4633 else if ((inst1 & 0xfe50) == 0xe810)
4635 /* Load multiple or RFE. */
4636 int rn, offset, load_pc = 1;
4638 rn = bits (inst1, 0, 3);
4639 if (bit (inst1, 7) && !bit (inst1, 8))
4642 if (!bit (inst2, 15))
4644 offset = bitcount (inst2) * 4 - 4;
4646 else if (!bit (inst1, 7) && bit (inst1, 8))
4649 if (!bit (inst2, 15))
4653 else if (bit (inst1, 7) && bit (inst1, 8))
4658 else if (!bit (inst1, 7) && !bit (inst1, 8))
4668 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4669 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4672 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4674 /* MOV PC or MOVS PC. */
4675 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4676 nextpc = MAKE_THUMB_ADDR (nextpc);
4678 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4682 int rn, load_pc = 1;
4684 rn = bits (inst1, 0, 3);
4685 base = get_frame_register_unsigned (frame, rn);
4686 if (rn == ARM_PC_REGNUM)
4688 base = (base + 4) & ~(CORE_ADDR) 0x3;
4690 base += bits (inst2, 0, 11);
4692 base -= bits (inst2, 0, 11);
4694 else if (bit (inst1, 7))
4695 base += bits (inst2, 0, 11);
4696 else if (bit (inst2, 11))
4698 if (bit (inst2, 10))
4701 base += bits (inst2, 0, 7);
4703 base -= bits (inst2, 0, 7);
4706 else if ((inst2 & 0x0fc0) == 0x0000)
4708 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4709 base += get_frame_register_unsigned (frame, rm) << shift;
4716 nextpc = get_frame_memory_unsigned (frame, base, 4);
4718 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4721 CORE_ADDR tbl_reg, table, offset, length;
4723 tbl_reg = bits (inst1, 0, 3);
4724 if (tbl_reg == 0x0f)
4725 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4727 table = get_frame_register_unsigned (frame, tbl_reg);
4729 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4730 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4731 nextpc = pc_val + length;
4733 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4736 CORE_ADDR tbl_reg, table, offset, length;
4738 tbl_reg = bits (inst1, 0, 3);
4739 if (tbl_reg == 0x0f)
4740 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4742 table = get_frame_register_unsigned (frame, tbl_reg);
4744 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4745 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4746 nextpc = pc_val + length;
4749 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4751 if (bits (inst1, 3, 6) == 0x0f)
4752 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4754 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4756 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4758 if (bits (inst1, 3, 6) == 0x0f)
4761 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4763 nextpc = MAKE_THUMB_ADDR (nextpc);
4765 else if ((inst1 & 0xf500) == 0xb100)
4768 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4769 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4771 if (bit (inst1, 11) && reg != 0)
4772 nextpc = pc_val + imm;
4773 else if (!bit (inst1, 11) && reg == 0)
4774 nextpc = pc_val + imm;
4779 /* Get the raw next address. PC is the current program counter, in
4780 FRAME, which is assumed to be executing in ARM mode.
4782 The value returned has the execution state of the next instruction
4783 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4784 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4788 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4790 struct gdbarch *gdbarch = get_frame_arch (frame);
4791 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4792 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4793 unsigned long pc_val;
4794 unsigned long this_instr;
4795 unsigned long status;
4798 pc_val = (unsigned long) pc;
4799 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4801 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4802 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4804 if (bits (this_instr, 28, 31) == INST_NV)
4805 switch (bits (this_instr, 24, 27))
4810 /* Branch with Link and change to Thumb. */
4811 nextpc = BranchDest (pc, this_instr);
4812 nextpc |= bit (this_instr, 24) << 1;
4813 nextpc = MAKE_THUMB_ADDR (nextpc);
4819 /* Coprocessor register transfer. */
4820 if (bits (this_instr, 12, 15) == 15)
4821 error (_("Invalid update to pc in instruction"));
4824 else if (condition_true (bits (this_instr, 28, 31), status))
4826 switch (bits (this_instr, 24, 27))
4829 case 0x1: /* data processing */
4833 unsigned long operand1, operand2, result = 0;
4837 if (bits (this_instr, 12, 15) != 15)
4840 if (bits (this_instr, 22, 25) == 0
4841 && bits (this_instr, 4, 7) == 9) /* multiply */
4842 error (_("Invalid update to pc in instruction"));
4844 /* BX <reg>, BLX <reg> */
4845 if (bits (this_instr, 4, 27) == 0x12fff1
4846 || bits (this_instr, 4, 27) == 0x12fff3)
4848 rn = bits (this_instr, 0, 3);
4849 nextpc = ((rn == ARM_PC_REGNUM)
4851 : get_frame_register_unsigned (frame, rn));
4856 /* Multiply into PC. */
4857 c = (status & FLAG_C) ? 1 : 0;
4858 rn = bits (this_instr, 16, 19);
4859 operand1 = ((rn == ARM_PC_REGNUM)
4861 : get_frame_register_unsigned (frame, rn));
4863 if (bit (this_instr, 25))
4865 unsigned long immval = bits (this_instr, 0, 7);
4866 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4867 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4870 else /* operand 2 is a shifted register. */
4871 operand2 = shifted_reg_val (frame, this_instr, c,
4874 switch (bits (this_instr, 21, 24))
4877 result = operand1 & operand2;
4881 result = operand1 ^ operand2;
4885 result = operand1 - operand2;
4889 result = operand2 - operand1;
4893 result = operand1 + operand2;
4897 result = operand1 + operand2 + c;
4901 result = operand1 - operand2 + c;
4905 result = operand2 - operand1 + c;
4911 case 0xb: /* tst, teq, cmp, cmn */
4912 result = (unsigned long) nextpc;
4916 result = operand1 | operand2;
4920 /* Always step into a function. */
4925 result = operand1 & ~operand2;
4933 /* In 26-bit APCS the bottom two bits of the result are
4934 ignored, and we always end up in ARM state. */
4936 nextpc = arm_addr_bits_remove (gdbarch, result);
4944 case 0x5: /* data transfer */
4947 if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
4949 /* Media instructions and architecturally undefined
4954 if (bit (this_instr, 20))
4957 if (bits (this_instr, 12, 15) == 15)
4963 if (bit (this_instr, 22))
4964 error (_("Invalid update to pc in instruction"));
4966 /* byte write to PC */
4967 rn = bits (this_instr, 16, 19);
4968 base = ((rn == ARM_PC_REGNUM)
4970 : get_frame_register_unsigned (frame, rn));
4972 if (bit (this_instr, 24))
4975 int c = (status & FLAG_C) ? 1 : 0;
4976 unsigned long offset =
4977 (bit (this_instr, 25)
4978 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4979 : bits (this_instr, 0, 11));
4981 if (bit (this_instr, 23))
4987 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4994 case 0x9: /* block transfer */
4995 if (bit (this_instr, 20))
4998 if (bit (this_instr, 15))
5002 unsigned long rn_val
5003 = get_frame_register_unsigned (frame,
5004 bits (this_instr, 16, 19));
5006 if (bit (this_instr, 23))
5009 unsigned long reglist = bits (this_instr, 0, 14);
5010 offset = bitcount (reglist) * 4;
5011 if (bit (this_instr, 24)) /* pre */
5014 else if (bit (this_instr, 24))
5018 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5025 case 0xb: /* branch & link */
5026 case 0xa: /* branch */
5028 nextpc = BranchDest (pc, this_instr);
5034 case 0xe: /* coproc ops */
5038 struct gdbarch_tdep *tdep;
5039 tdep = gdbarch_tdep (gdbarch);
5041 if (tdep->syscall_next_pc != NULL)
5042 nextpc = tdep->syscall_next_pc (frame);
5048 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5056 /* Determine next PC after current instruction executes. Will call either
5057 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5058 loop is detected. */
5061 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
/* Dispatch on the frame's current execution mode (Thumb vs. ARM).  */
5065 if (arm_frame_is_thumb (frame))
5066 nextpc = thumb_get_next_pc_raw (frame, pc);
/* NOTE(review): the 'else' line and the final return are elided in this
   excerpt; the ARM-mode path decodes via arm_get_next_pc_raw.  */
5068 nextpc = arm_get_next_pc_raw (frame, pc);
5073 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5074 of the appropriate mode (as encoded in the PC value), even if this
5075 differs from what would be expected according to the symbol tables. */
5078 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5079 struct address_space *aspace,
/* Save arm_override_mode on the cleanup chain so it is restored below;
   while it is set, the Thumb bit taken from PC (IS_THUMB_ADDR), not the
   symbol table, decides which breakpoint flavor is used.  */
5082 struct cleanup *old_chain
5083 = make_cleanup_restore_integer (&arm_override_mode);
5085 arm_override_mode = IS_THUMB_ADDR (pc);
/* Strip the Thumb bit before actually placing the breakpoint.  */
5086 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5088 insert_single_step_breakpoint (gdbarch, aspace, pc);
5090 do_cleanups (old_chain);
5093 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5094 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5095 is found, attempt to step through it. A breakpoint is placed at the end of
5099 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5101 struct gdbarch *gdbarch = get_frame_arch (frame);
5102 struct address_space *aspace = get_frame_address_space (frame);
5103 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5104 CORE_ADDR pc = get_frame_pc (frame);
/* breaks[0] is the address just past the sequence; breaks[1] (optional)
   is the destination of a conditional branch found inside it.  */
5105 CORE_ADDR breaks[2] = {-1, -1};
5107 unsigned short insn1, insn2;
5110 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5111 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5112 ULONGEST status, itstate;
5114 /* We currently do not support atomic sequences within an IT block. */
5115 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
/* Reassemble ITSTATE from its two CPSR fields.  NOTE(review): the test
   that bails out when ITSTATE is active is elided in this excerpt.  */
5116 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5120 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5121 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
/* All LDREX variants are 32-bit Thumb encodings; a 16-bit insn here
   cannot start an atomic sequence.  */
5123 if (thumb_insn_size (insn1) != 4)
5126 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
/* Match the 32-bit LDREX family: 0xe85x (LDREX) or 0xe8dx with the
   second halfword selecting LDREXB/LDREXH/LDREXD.  */
5128 if (!((insn1 & 0xfff0) == 0xe850
5129 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5132 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5134 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5136 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5139 if (thumb_insn_size (insn1) != 4)
5141 /* Assume that there is at most one conditional branch in the
5142 atomic sequence. If a conditional branch is found, put a
5143 breakpoint in its destination address. */
/* 16-bit conditional branch B<cond> (excluding the SVC encoding,
   cond == 0xf).  */
5144 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5146 if (last_breakpoint > 0)
5147 return 0; /* More than one conditional branch found,
5148 fallback to the standard code. */
5150 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5154 /* We do not support atomic sequences that use any *other*
5155 instructions but conditional branches to change the PC.
5156 Fall back to standard code to avoid losing control of
5158 else if (thumb_instruction_changes_pc (insn1))
5163 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5166 /* Assume that there is at most one conditional branch in the
5167 atomic sequence. If a conditional branch is found, put a
5168 breakpoint in its destination address. */
/* 32-bit conditional branch, encoding T3 (cond field != 111x).  */
5169 if ((insn1 & 0xf800) == 0xf000
5170 && (insn2 & 0xd000) == 0x8000
5171 && (insn1 & 0x0380) != 0x0380)
5173 int sign, j1, j2, imm1, imm2;
5174 unsigned int offset;
5176 sign = sbits (insn1, 10, 10);
5177 imm1 = bits (insn1, 0, 5);
5178 imm2 = bits (insn2, 0, 10);
5179 j1 = bit (insn2, 13);
5180 j2 = bit (insn2, 11);
/* Reassemble the sign-extended T3 branch offset from S:J2:J1:imm6:imm11.  */
5182 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5183 offset += (imm1 << 12) + (imm2 << 1);
5185 if (last_breakpoint > 0)
5186 return 0; /* More than one conditional branch found,
5187 fallback to the standard code. */
5189 breaks[1] = loc + offset;
5193 /* We do not support atomic sequences that use any *other*
5194 instructions but conditional branches to change the PC.
5195 Fall back to standard code to avoid losing control of
5197 else if (thumb2_instruction_changes_pc (insn1, insn2))
5200 /* If we find a strex{,b,h,d}, we're done. */
5201 if ((insn1 & 0xfff0) == 0xe840
5202 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5207 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5208 if (insn_count == atomic_sequence_length)
5211 /* Insert a breakpoint right after the end of the atomic sequence. */
5214 /* Check for duplicated breakpoints. Check also for a breakpoint
5215 placed (branch instruction's destination) anywhere in sequence. */
5217 && (breaks[1] == breaks[0]
5218 || (breaks[1] >= pc && breaks[1] < loc)))
5219 last_breakpoint = 0;
5221 /* Effectively inserts the breakpoints. */
/* Thumb state must be encoded into the breakpoint addresses.  */
5222 for (index = 0; index <= last_breakpoint; index++)
5223 arm_insert_single_step_breakpoint (gdbarch, aspace,
5224 MAKE_THUMB_ADDR (breaks[index]));
/* ARM-mode counterpart of thumb_deal_with_atomic_sequence_raw: detect a
   LDREX..STREX sequence at the current PC and breakpoint past it (and at
   the target of at most one conditional branch inside it).  */
5230 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5232 struct gdbarch *gdbarch = get_frame_arch (frame);
5233 struct address_space *aspace = get_frame_address_space (frame);
5234 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5235 CORE_ADDR pc = get_frame_pc (frame);
/* breaks[0]: just past the sequence; breaks[1]: optional branch target.  */
5236 CORE_ADDR breaks[2] = {-1, -1};
5241 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5242 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5244 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5245 Note that we do not currently support conditionally executed atomic
5247 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
/* The mask pins cond == 0xe (AL), i.e. only unconditional LDREX forms.  */
5249 if ((insn & 0xff9000f0) != 0xe1900090)
5252 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5254 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5256 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5259 /* Assume that there is at most one conditional branch in the atomic
5260 sequence. If a conditional branch is found, put a breakpoint in
5261 its destination address. */
5262 if (bits (insn, 24, 27) == 0xa)
5264 if (last_breakpoint > 0)
5265 return 0; /* More than one conditional branch found, fallback
5266 to the standard single-step code. */
/* LOC has already been advanced past this insn, so rewind by 4 for
   the branch-base computation.  */
5268 breaks[1] = BranchDest (loc - 4, insn);
5272 /* We do not support atomic sequences that use any *other* instructions
5273 but conditional branches to change the PC. Fall back to standard
5274 code to avoid losing control of execution. */
5275 else if (arm_instruction_changes_pc (insn))
5278 /* If we find a strex{,b,h,d}, we're done. */
5279 if ((insn & 0xff9000f0) == 0xe1800090)
5283 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5284 if (insn_count == atomic_sequence_length)
5287 /* Insert a breakpoint right after the end of the atomic sequence. */
5290 /* Check for duplicated breakpoints. Check also for a breakpoint
5291 placed (branch instruction's destination) anywhere in sequence. */
5293 && (breaks[1] == breaks[0]
5294 || (breaks[1] >= pc && breaks[1] < loc)))
5295 last_breakpoint = 0;
5297 /* Effectively inserts the breakpoints. */
5298 for (index = 0; index <= last_breakpoint; index++)
5299 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
/* Dispatch atomic-sequence handling to the Thumb or ARM raw routine
   based on the frame's execution mode.  */
5305 arm_deal_with_atomic_sequence (struct frame_info *frame)
5307 if (arm_frame_is_thumb (frame))
5308 return thumb_deal_with_atomic_sequence_raw (frame);
5310 return arm_deal_with_atomic_sequence_raw (frame);
5313 /* single_step() is called just before we want to resume the inferior,
5314 if we want to single-step it but there is no hardware or kernel
5315 single-step support. We find the target of the coming instruction
5316 and breakpoint it. */
5319 arm_software_single_step (struct frame_info *frame)
5321 struct gdbarch *gdbarch = get_frame_arch (frame);
5322 struct address_space *aspace = get_frame_address_space (frame);
/* LDREX/STREX sequences must be stepped over as a unit; if one was
   found and handled, its breakpoints are already placed.  */
5325 if (arm_deal_with_atomic_sequence (frame))
/* Otherwise breakpoint the single computed successor of this insn.  */
5328 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5329 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5334 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5335 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5336 NULL if an error occurs. BUF is freed. */
5339 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5340 int old_len, int new_len)
5343 int bytes_to_read = new_len - old_len;
5345 new_buf = (gdb_byte *) xmalloc (new_len);
/* Old contents keep their alignment relative to ENDADDR: they move to
   the tail of the new buffer, and the newly-read earlier bytes fill
   the front.  */
5346 memcpy (new_buf + bytes_to_read, buf, old_len);
/* NOTE(review): the error path (free and return NULL) and the success
   return are elided in this excerpt; per the contract above, BUF is
   freed on both paths.  */
5348 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5356 /* An IT block is at most the 2-byte IT instruction followed by
5357 four 4-byte instructions. The furthest back we must search to
5358 find an IT block that affects the current instruction is thus
5359 2 + 3 * 4 == 14 bytes. */
5360 #define MAX_IT_BLOCK_PREFIX 14
5362 /* Use a quick scan if there are more than this many bytes of
/* Quick-scan window size in bytes; consumed by
   arm_adjust_breakpoint_address below.  */
5364 #define IT_SCAN_THRESHOLD 32
5366 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5367 A breakpoint in an IT block may not be hit, depending on the
5370 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5374 CORE_ADDR boundary, func_start;
5376 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5377 int i, any, last_it, last_it_count;
5379 /* If we are using BKPT breakpoints, none of this is necessary. */
5380 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5383 /* ARM mode does not have this problem. */
5384 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5387 /* We are setting a breakpoint in Thumb code that could potentially
5388 contain an IT block. The first step is to find how much Thumb
5389 code there is; we do not need to read outside of known Thumb
5391 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5393 /* Thumb-2 code must have mapping symbols to have a chance. */
5396 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
/* Never scan back past the start of the containing function.  */
5398 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5399 && func_start > boundary)
5400 boundary = func_start;
5402 /* Search for a candidate IT instruction. We have to do some fancy
5403 footwork to distinguish a real IT instruction from the second
5404 half of a 32-bit instruction, but there is no need for that if
5405 there's no candidate. */
5406 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5408 /* No room for an IT instruction. */
5411 buf = (gdb_byte *) xmalloc (buf_len);
5412 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
/* Cheap pre-pass: any halfword that even resembles an IT opcode
   (0xbfXY with a nonzero mask nibble)?  */
5415 for (i = 0; i < buf_len; i += 2)
5417 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5418 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5430 /* OK, the code bytes before this instruction contain at least one
5431 halfword which resembles an IT instruction. We know that it's
5432 Thumb code, but there are still two possibilities. Either the
5433 halfword really is an IT instruction, or it is the second half of
5434 a 32-bit Thumb instruction. The only way we can tell is to
5435 scan forwards from a known instruction boundary. */
5436 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5440 /* There's a lot of code before this instruction. Start with an
5441 optimistic search; it's easy to recognize halfwords that can
5442 not be the start of a 32-bit instruction, and use that to
5443 lock on to the instruction boundaries. */
5444 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5447 buf_len = IT_SCAN_THRESHOLD;
/* Stop while a full halfword remains readable.  The previous bound
   subtracted sizeof (buf) -- the size of a *pointer* (8 on LP64
   hosts), not of a halfword -- which made this scan stop several
   bytes early and behave differently per host word size.  */
5450 for (i = 0; i < buf_len - 2 && ! definite; i += 2)
5452 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5453 if (thumb_insn_size (inst1) == 2)
5460 /* At this point, if DEFINITE, BUF[I] is the first place we
5461 are sure that we know the instruction boundaries, and it is far
5462 enough from BPADDR that we could not miss an IT instruction
5463 affecting BPADDR. If ! DEFINITE, give up - start from a
5467 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5471 buf_len = bpaddr - boundary;
5477 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5480 buf_len = bpaddr - boundary;
5484 /* Scan forwards. Find the last IT instruction before BPADDR. */
5489 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5491 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
/* Decode the IT mask nibble to learn how many following insns the
   block covers (lowest set bit position encodes the count).  */
5496 else if (inst1 & 0x0002)
5498 else if (inst1 & 0x0004)
5503 i += thumb_insn_size (inst1);
5509 /* There wasn't really an IT instruction after all. */
5512 if (last_it_count < 1)
5513 /* It was too far away. */
5516 /* This really is a trouble spot. Move the breakpoint to the IT
5518 return bpaddr - buf_len + last_it;
5521 /* ARM displaced stepping support.
5523 Generally ARM displaced stepping works as follows:
5525 1. When an instruction is to be single-stepped, it is first decoded by
5526 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5527 Depending on the type of instruction, it is then copied to a scratch
5528 location, possibly in a modified form. The copy_* set of functions
5529 performs such modification, as necessary. A breakpoint is placed after
5530 the modified instruction in the scratch space to return control to GDB.
5531 Note in particular that instructions which modify the PC will no longer
5532 do so after modification.
5534 2. The instruction is single-stepped, by setting the PC to the scratch
5535 location address, and resuming. Control returns to GDB when the
5538 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5539 function used for the current instruction. This function's job is to
5540 put the CPU/memory state back to what it would have been if the
5541 instruction had been executed unmodified in its original location. */
5543 /* NOP instruction (mov r0, r0). */
5544 #define ARM_NOP 0xe1a00000
/* Thumb equivalent: also mov r0, r0.  Used as the scratch-space
   replacement for instructions whose effect is applied in cleanup.  */
5545 #define THUMB_NOP 0x4600
5547 /* Helper for register reads for displaced stepping. In particular, this
5548 returns the PC as it would be seen by the instruction at its original
5552 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5556 CORE_ADDR from = dsc->insn_addr;
5558 if (regno == ARM_PC_REGNUM)
5560 /* Compute pipeline offset:
5561 - When executing an ARM instruction, PC reads as the address of the
5562 current instruction plus 8.
5563 - When executing a Thumb instruction, PC reads as the address of the
5564 current instruction plus 4. */
/* NOTE(review): the lines adding 8 or 4 to FROM are elided in this
   excerpt; the returned value below is the adjusted FROM.  */
5571 if (debug_displaced)
5572 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5573 (unsigned long) from);
5574 return (ULONGEST) from;
/* Non-PC registers are read straight from the regcache.  */
5578 regcache_cooked_read_unsigned (regs, regno, &ret);
5579 if (debug_displaced)
5580 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5581 regno, (unsigned long) ret);
/* Return nonzero when the inferior's CPSR T bit is clear, i.e. it is
   currently executing in ARM (not Thumb) state.  */
5587 displaced_in_arm_mode (struct regcache *regs)
5590 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5592 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5594 return (ps & t_bit) == 0;
5597 /* Write to the PC as from a branch instruction. */
5600 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5604 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5605 architecture versions < 6. */
/* ARM branches force 4-byte alignment (clear bits 1:0)...  */
5606 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5607 val & ~(ULONGEST) 0x3)e;
/* ...Thumb branches force 2-byte alignment (clear bit 0 only).
   NOTE(review): the selecting if/else lines are elided here.  */
5609 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5610 val & ~(ULONGEST) 0x1);
5613 /* Write to the PC as from a branch-exchange instruction. */
5616 bx_write_pc (struct regcache *regs, ULONGEST val)
5619 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5621 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
/* Bit 0 set: switch to Thumb state and clear the address's bit 0.  */
5625 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5626 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
/* Bits 1:0 == 00: a word-aligned ARM destination.  */
5628 else if ((val & 2) == 0)
5630 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5631 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5635 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5636 mode, align dest to 4 bytes). */
5637 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5638 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5639 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5643 /* Write to the PC as if from a load instruction. */
5646 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
/* From ARMv5 onward a load to PC interworks (acts like BX); earlier
   architectures treat it as a plain branch.  */
5649 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5650 bx_write_pc (regs, val);
5652 branch_write_pc (regs, dsc, val);
5655 /* Write to the PC as if from an ALU instruction. */
5658 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
/* ARM-mode ALU writes to PC interwork from ARMv7 on; Thumb mode and
   older architectures branch without an exchange.  */
5661 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5662 bx_write_pc (regs, val);
5664 branch_write_pc (regs, dsc, val);
5667 /* Helper for writing to registers for displaced stepping. Writing to the PC
5668 has a varying effects depending on the instruction which does the write:
5669 this is controlled by the WRITE_PC argument. */
5672 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5673 int regno, ULONGEST val, enum pc_write_style write_pc)
5675 if (regno == ARM_PC_REGNUM)
5677 if (debug_displaced)
5678 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5679 (unsigned long) val);
/* Route the PC write through the helper matching the originating
   instruction's semantics.  */
5682 case BRANCH_WRITE_PC:
5683 branch_write_pc (regs, dsc, val);
5687 bx_write_pc (regs, val);
5691 load_write_pc (regs, dsc, val);
5695 alu_write_pc (regs, dsc, val);
5698 case CANNOT_WRITE_PC:
5699 warning (_("Instruction wrote to PC in an unexpected way when "
5700 "single-stepping"));
5704 internal_error (__FILE__, __LINE__,
5705 _("Invalid argument to displaced_write_reg"));
/* Record that the PC was written so the fixup phase does not also
   advance it past the scratch location.  */
5708 dsc->wrote_to_pc = 1;
5712 if (debug_displaced)
5713 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5714 regno, (unsigned long) val);
5715 regcache_cooked_write_unsigned (regs, regno, val);
5719 /* This function is used to concisely determine if an instruction INSN
5720 references PC. Register fields of interest in INSN should have the
5721 corresponding fields of BITMASK set to 0b1111. The function
5722 returns return 1 if any of these fields in INSN reference the PC
5723 (also 0b1111, r15), else it returns 0. */
5726 insn_references_pc (uint32_t insn, uint32_t bitmask)
5728 uint32_t lowbit = 1;
5730 while (bitmask != 0)
/* Advance LOWBIT to the least-significant set bit of BITMASK (the
   start of the next 4-bit register field).  */
5734 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
/* LOWBIT * 0xf spans the whole nibble; field == 0b1111 means r15.  */
5740 mask = lowbit * 0xf;
5742 if ((insn & mask) == mask)
5751 /* The simplest copy function. Many instructions have the same effect no
5752 matter what address they are executed at: in those cases, use this. */
5755 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5756 const char *iname, struct displaced_step_closure *dsc)
5758 if (debug_displaced)
5759 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5760 "opcode/class '%s' unmodified\n", (unsigned long) insn,
/* Copy the instruction verbatim into the scratch-space slot.  */
5763 dsc->modinsn[0] = insn;
/* Copy a 32-bit Thumb-2 instruction (two halfwords) to the scratch
   space unmodified; see arm_copy_unmodified for the rationale.  */
5769 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5770 uint16_t insn2, const char *iname,
5771 struct displaced_step_closure *dsc)
5773 if (debug_displaced)
5774 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5775 "opcode/class '%s' unmodified\n", insn1, insn2,
5778 dsc->modinsn[0] = insn1;
5779 dsc->modinsn[1] = insn2;
5785 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5788 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5790 struct displaced_step_closure *dsc)
5792 if (debug_displaced)
5793 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5794 "opcode/class '%s' unmodified\n", insn,
/* Single 16-bit instruction: one scratch slot only.  */
5797 dsc->modinsn[0] = insn;
5802 /* Preload instructions with immediate offset. */
/* Cleanup: restore the registers the copy phase borrowed — r0 always,
   and r1 too for the register-offset form (immed == 0).  */
5805 cleanup_preload (struct gdbarch *gdbarch,
5806 struct regcache *regs, struct displaced_step_closure *dsc)
5808 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5809 if (!dsc->u.preload.immed)
5810 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
/* Shared setup for immediate-offset PLD/PLI whose base register RN is
   the PC: stash r0, substitute the PC value into r0 so the rewritten
   insn can use [r0, #imm], and register cleanup_preload.  */
5814 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5815 struct displaced_step_closure *dsc, unsigned int rn)
5818 /* Preload instructions:
5820 {pli/pld} [rn, #+/-imm]
5822 {pli/pld} [r0, #+/-imm]. */
5824 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5825 rn_val = displaced_read_reg (regs, dsc, rn);
5826 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
/* Immediate form: only r0 needs restoring in cleanup.  */
5827 dsc->u.preload.immed = 1;
5829 dsc->cleanup = &cleanup_preload;
/* Copy an ARM-mode immediate-offset PLD/PLI for displaced stepping.
   Instructions that do not reference the PC go through unmodified.  */
5833 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5834 struct displaced_step_closure *dsc)
5836 unsigned int rn = bits (insn, 16, 19);
/* Only the Rn field (bits 16-19) can reference the PC here.  */
5838 if (!insn_references_pc (insn, 0x000f0000ul))
5839 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5841 if (debug_displaced)
5842 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5843 (unsigned long) insn);
/* Rewrite Rn to r0 (clear bits 16-19); install_preload sets r0 = PC.  */
5845 dsc->modinsn[0] = insn & 0xfff0ffff;
5847 install_preload (gdbarch, regs, dsc, rn);
/* Copy a Thumb-2 PLD/PLI for displaced stepping.  Non-PC-relative forms
   are copied unmodified; PC-literal forms are rewritten to use r0/r1.  */
5853 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5854 struct regcache *regs, struct displaced_step_closure *dsc)
5856 unsigned int rn = bits (insn1, 0, 3);
5857 unsigned int u_bit = bit (insn1, 7);
5858 int imm12 = bits (insn2, 0, 11);
5861 if (rn != ARM_PC_REGNUM)
5862 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5864 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5865 PLD (literal) Encoding T1. */
5866 if (debug_displaced)
5867 fprintf_unfiltered (gdb_stdlog,
5868 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5869 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5875 /* Rewrite instruction {pli/pld} PC imm12 into:
5876 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5880 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5882 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5883 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5885 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5887 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
/* The literal offset goes into r1 so the rewritten insn can add it.  */
5888 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
/* Register-offset form: cleanup must restore both r0 and r1.  */
5889 dsc->u.preload.immed = 0;
5891 /* {pli/pld} [r0, r1] */
5892 dsc->modinsn[0] = insn1 & 0xfff0;
5893 dsc->modinsn[1] = 0xf001;
5896 dsc->cleanup = &cleanup_preload;
5900 /* Preload instructions with register offset. */
/* Shared setup for register-offset PLD/PLI referencing the PC: stash
   r0/r1, substitute the base and offset register values, and register
   cleanup_preload to undo the borrowing.  */
5903 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5904 struct displaced_step_closure *dsc, unsigned int rn,
5907 ULONGEST rn_val, rm_val;
5909 /* Preload register-offset instructions:
5911 {pli/pld} [rn, rm {, shift}]
5913 {pli/pld} [r0, r1 {, shift}]. */
5915 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5916 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5917 rn_val = displaced_read_reg (regs, dsc, rn);
5918 rm_val = displaced_read_reg (regs, dsc, rm);
5919 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5920 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
/* Both r0 and r1 were borrowed; cleanup restores both.  */
5921 dsc->u.preload.immed = 0;
5923 dsc->cleanup = &cleanup_preload;
/* Copy an ARM-mode register-offset PLD/PLI for displaced stepping.  */
5927 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5928 struct regcache *regs,
5929 struct displaced_step_closure *dsc)
5931 unsigned int rn = bits (insn, 16, 19);
5932 unsigned int rm = bits (insn, 0, 3);
/* PC may appear in either Rn (bits 16-19) or Rm (bits 0-3).  */
5935 if (!insn_references_pc (insn, 0x000f000ful))
5936 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5938 if (debug_displaced)
5939 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5940 (unsigned long) insn);
/* Rewrite Rn -> r0 and Rm -> r1 (the | 0x1).  */
5942 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5944 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5948 /* Copy/cleanup coprocessor load and store instructions. */
5951 cleanup_copro_load_store (struct gdbarch *gdbarch,
5952 struct regcache *regs,
5953 struct displaced_step_closure *dsc)
/* r0 currently holds the (possibly written-back) base value.  */
5955 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5957 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
/* Propagate base writeback to the real Rn.  */
5959 if (dsc->u.ldst.writeback)
5960 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
/* Shared setup for LDC/STC-class instructions whose base is the PC:
   borrow r0 for the base value and register the cleanup.  */
5964 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5965 struct displaced_step_closure *dsc,
5966 int writeback, unsigned int rn)
5970 /* Coprocessor load/store instructions:
5972 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5974 {stc/stc2} [r0, #+/-imm].
5976 ldc/ldc2 are handled identically. */
5978 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5979 rn_val = displaced_read_reg (regs, dsc, rn);
5980 /* PC should be 4-byte aligned. */
5981 rn_val = rn_val & 0xfffffffc;
5982 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
/* Remember whether/where cleanup must write the base back.  */
5984 dsc->u.ldst.writeback = writeback;
5985 dsc->u.ldst.rn = rn;
5987 dsc->cleanup = &cleanup_copro_load_store;
/* Copy an ARM-mode coprocessor load/store (LDC/STC family) for
   displaced stepping.  */
5991 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5992 struct regcache *regs,
5993 struct displaced_step_closure *dsc)
5995 unsigned int rn = bits (insn, 16, 19);
/* Only the Rn field (bits 16-19) can reference the PC.  */
5997 if (!insn_references_pc (insn, 0x000f0000ul))
5998 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6000 if (debug_displaced)
6001 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6002 "load/store insn %.8lx\n", (unsigned long) insn);
/* Rewrite Rn to r0; bit 25 is the writeback (W) bit passed below.  */
6004 dsc->modinsn[0] = insn & 0xfff0ffff;
6006 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
/* Copy a Thumb-2 coprocessor load/store for displaced stepping.  Only
   PC-relative forms need rewriting.  */
6012 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6013 uint16_t insn2, struct regcache *regs,
6014 struct displaced_step_closure *dsc)
6016 unsigned int rn = bits (insn1, 0, 3);
6018 if (rn != ARM_PC_REGNUM)
6019 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6020 "copro load/store", dsc);
6022 if (debug_displaced)
6023 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6024 "load/store insn %.4x%.4x\n", insn1, insn2);
/* Rewrite Rn (low nibble of the first halfword) to r0.  */
6026 dsc->modinsn[0] = insn1 & 0xfff0;
6027 dsc->modinsn[1] = insn2;
6030 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6031 doesn't support writeback, so pass 0. */
6032 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6037 /* Clean up branch instructions (actually perform the branch, by setting
6041 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6042 struct displaced_step_closure *dsc)
6044 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
/* Re-evaluate the original condition against the current flags.  */
6045 int branch_taken = condition_true (dsc->u.branch.cond, status);
6046 enum pc_write_style write_pc = dsc->u.branch.exchange
6047 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6052 if (dsc->u.branch.link)
6054 /* The value of LR should be the next insn of current one. In order
6055 not to confuse logic handling later insn `bx lr', if current insn mode
6056 is Thumb, the bit 0 of LR value should be set to 1. */
6057 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6060 next_insn_addr |= 0x1;
6062 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
/* Finally perform the branch itself, BX-style when exchanging.  */
6066 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6069 /* Copy B/BL/BLX instructions with immediate destinations. */
6072 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6073 struct displaced_step_closure *dsc,
6074 unsigned int cond, int exchange, int link, long offset)
6076 /* Implement "BL<cond> <label>" as:
6078 Preparation: cond <- instruction condition
6079 Insn: mov r0, r0 (nop)
6080 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6082 B<cond> similar, but don't set r14 in cleanup. */
6084 dsc->u.branch.cond = cond;
6085 dsc->u.branch.link = link;
6086 dsc->u.branch.exchange = exchange;
/* Destination = original insn address + pipeline offset + OFFSET.  */
6088 dsc->u.branch.dest = dsc->insn_addr;
6089 if (link && exchange)
6090 /* For BLX, offset is computed from the Align (PC, 4). */
6091 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
/* Thumb PC reads as insn+4, ARM as insn+8.  */
6094 dsc->u.branch.dest += 4 + offset;
6096 dsc->u.branch.dest += 8 + offset;
6098 dsc->cleanup = &cleanup_branch;
/* Copy an ARM-mode B/BL/BLX (immediate) for displaced stepping: replace
   with a NOP and let cleanup_branch perform the branch.  */
6101 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6102 struct regcache *regs, struct displaced_step_closure *dsc)
6104 unsigned int cond = bits (insn, 28, 31);
/* cond == 0xf is the unconditional BLX(1) encoding.  */
6105 int exchange = (cond == 0xf);
6106 int link = exchange || bit (insn, 24);
6109 if (debug_displaced)
6110 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6111 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6112 (unsigned long) insn);
6114 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6115 then arrange the switch into Thumb mode. */
6116 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6118 offset = bits (insn, 0, 23) << 2;
/* Sign-extend the 26-bit offset (bit 25 is its sign bit).  */
6120 if (bit (offset, 25))
6121 offset = offset | ~0x3ffffff;
6123 dsc->modinsn[0] = ARM_NOP;
6125 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
/* Copy a 32-bit Thumb B/BL/BLX for displaced stepping: decode the
   offset (encodings T3/T4), replace with a NOP, branch in cleanup.  */
6130 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6131 uint16_t insn2, struct regcache *regs,
6132 struct displaced_step_closure *dsc)
6134 int link = bit (insn2, 14);
6135 int exchange = link && !bit (insn2, 12);
/* J1/J2 combine with S into I1/I2 per the Thumb-2 branch encoding.  */
6138 int j1 = bit (insn2, 13);
6139 int j2 = bit (insn2, 11);
6140 int s = sbits (insn1, 10, 10);
6141 int i1 = !(j1 ^ bit (insn1, 10));
6142 int i2 = !(j2 ^ bit (insn1, 10));
6144 if (!link && !exchange) /* B */
6146 offset = (bits (insn2, 0, 10) << 1);
6147 if (bit (insn2, 12)) /* Encoding T4 */
6149 offset |= (bits (insn1, 0, 9) << 12)
6155 else /* Encoding T3 */
6157 offset |= (bits (insn1, 0, 5) << 12)
/* T3 is the conditional encoding; extract its condition field.  */
6161 cond = bits (insn1, 6, 9);
6166 offset = (bits (insn1, 0, 9) << 12);
6167 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
/* BLX targets are word-aligned, hence the << 2 of imm10L.  */
6168 offset |= exchange ?
6169 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6172 if (debug_displaced)
6173 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6174 "%.4x %.4x with offset %.8lx\n",
6175 link ? (exchange) ? "blx" : "bl" : "b",
6176 insn1, insn2, offset);
6178 dsc->modinsn[0] = THUMB_NOP;
6180 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6184 /* Copy B Thumb instructions. */
6186 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6187 struct displaced_step_closure *dsc)
6189 unsigned int cond = 0;
6191 unsigned short bit_12_15 = bits (insn, 12, 15);
6192 CORE_ADDR from = dsc->insn_addr;
/* Encoding T1: conditional branch with 8-bit immediate.  */
6194 if (bit_12_15 == 0xd)
6196 /* offset = SignExtend (imm8:0, 32) */
6197 offset = sbits ((insn << 1), 0, 8);
6198 cond = bits (insn, 8, 11);
6200 else if (bit_12_15 == 0xe) /* Encoding T2 */
6202 offset = sbits ((insn << 1), 0, 11);
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog,
6208 "displaced: copying b immediate insn %.4x "
6209 "with offset %d\n", insn, offset);
6211 dsc->u.branch.cond = cond;
6212 dsc->u.branch.link = 0;
6213 dsc->u.branch.exchange = 0;
/* Thumb PC reads as the insn address + 4.  */
6214 dsc->u.branch.dest = from + 4 + offset;
6216 dsc->modinsn[0] = THUMB_NOP;
6218 dsc->cleanup = &cleanup_branch;
6223 /* Copy BX/BLX with register-specified destinations. */
6226 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6227 struct displaced_step_closure *dsc, int link,
6228 unsigned int cond, unsigned int rm)
6230 /* Implement {BX,BLX}<cond> <reg>" as:
6232 Preparation: cond <- instruction condition
6233 Insn: mov r0, r0 (nop)
6234 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6236 Don't set r14 in cleanup for BX. */
/* Capture RM now; cleanup_branch performs the (possibly exchanging)
   jump with this value later.  */
6238 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6240 dsc->u.branch.cond = cond;
6241 dsc->u.branch.link = link;
/* exchange == 1 makes cleanup_branch use BX_WRITE_PC semantics.  */
6243 dsc->u.branch.exchange = 1;
6245 dsc->cleanup = &cleanup_branch;
/* Displaced-step an ARM BX/BLX (register) instruction.  Decode the
   condition, link bit and RM, substitute a NOP, and let the common
   worker arrange the branch in cleanup.  */
6249 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6250 struct regcache *regs, struct displaced_step_closure *dsc)
6252 unsigned int cond = bits (insn, 28, 31);
/* Bit 5 distinguishes BLX (link) from BX.  */
6255 int link = bit (insn, 5);
6256 unsigned int rm = bits (insn, 0, 3);
6258 if (debug_displaced)
6259 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6260 (unsigned long) insn);
6262 dsc->modinsn[0] = ARM_NOP;
6264 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
/* Displaced-step a 16-bit Thumb BX/BLX (register) instruction.  Thumb
   encoding puts the link bit in bit 7 and RM in bits 6:3; Thumb insns
   outside an IT block are unconditional, hence INST_AL.  */
6269 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6270 struct regcache *regs,
6271 struct displaced_step_closure *dsc)
6273 int link = bit (insn, 7);
6274 unsigned int rm = bits (insn, 3, 6);
6276 if (debug_displaced)
6277 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6278 (unsigned short) insn)
6280 dsc->modinsn[0] = THUMB_NOP;
6282 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6288 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
/* Cleanup for an ALU-immediate insn rewritten to use r0/r1: read the
   result out of scratch r0, restore the saved r0/r1, then write the
   result to the real destination (an ALU write may target the PC).  */
6291 cleanup_alu_imm (struct gdbarch *gdbarch,
6292 struct regcache *regs, struct displaced_step_closure *dsc)
6294 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6295 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6296 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6297 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Displaced-step an ARM data-processing (immediate) instruction.  If
   neither Rd nor Rn is the PC the insn runs unmodified; otherwise the
   register operands are remapped onto scratch r0/r1 and fixed up in
   cleanup_alu_imm.  */
6301 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6302 struct displaced_step_closure *dsc)
6304 unsigned int rn = bits (insn, 16, 19);
6305 unsigned int rd = bits (insn, 12, 15);
6306 unsigned int op = bits (insn, 21, 24);
/* MOV (opcode 0xd) has no Rn operand.  */
6307 int is_mov = (op == 0xd);
6308 ULONGEST rd_val, rn_val;
/* Mask covers the Rd and Rn fields: only rewrite if one of them is PC.  */
6310 if (!insn_references_pc (insn, 0x000ff000ul))
6311 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6313 if (debug_displaced)
6314 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6315 "%.8lx\n", is_mov ? "move" : "ALU",
6316 (unsigned long) insn);
6318 /* Instruction is of form:
6320 <op><cond> rd, [rn,] #imm
6324 Preparation: tmp1, tmp2 <- r0, r1;
6326 Insn: <op><cond> r0, r1, #imm
6327 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
/* Save scratch registers and preload them with the original operand
   values (Rd into r0, Rn into r1).  */
6330 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6331 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6332 rn_val = displaced_read_reg (regs, dsc, rn);
6333 rd_val = displaced_read_reg (regs, dsc, rd);
6334 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6335 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* Rewrite Rd -> r0; for non-MOV also Rn -> r1 (the 0x10000 sets Rn=1).  */
6339 dsc->modinsn[0] = insn & 0xfff00fff;
6341 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6343 dsc->cleanup = &cleanup_alu_imm;
/* Displaced-step a Thumb-2 32-bit ALU-immediate instruction.  Only MOV
   (immediate) reaches here (asserted below); if PC is involved the
   destination is remapped to scratch r0/r1 and fixed in
   cleanup_alu_imm.  */
6349 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6350 uint16_t insn2, struct regcache *regs,
6351 struct displaced_step_closure *dsc)
6353 unsigned int op = bits (insn1, 5, 8);
6354 unsigned int rn, rm, rd;
6355 ULONGEST rd_val, rn_val;
6357 rn = bits (insn1, 0, 3); /* Rn */
6358 rm = bits (insn2, 0, 3); /* Rm */
6359 rd = bits (insn2, 8, 11); /* Rd */
6361 /* This routine is only called for instruction MOV. */
6362 gdb_assert (op == 0x2 && rn == 0xf);
6364 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6365 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6367 if (debug_displaced)
6368 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6369 "ALU", insn1, insn2);
6371 /* Instruction is of form:
6373 <op><cond> rd, [rn,] #imm
6377 Preparation: tmp1, tmp2 <- r0, r1;
6379 Insn: <op><cond> r0, r1, #imm
6380 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
/* Save r0/r1 and preload with the original Rd/Rn values.  */
6383 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6384 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6385 rn_val = displaced_read_reg (regs, dsc, rn);
6386 rd_val = displaced_read_reg (regs, dsc, rd);
6387 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6388 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* Rewrite the second halfword: Rd -> r0, Rm -> r1.  */
6391 dsc->modinsn[0] = insn1;
6392 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6395 dsc->cleanup = &cleanup_alu_imm;
6400 /* Copy/cleanup arithmetic/logic insns with register RHS. */
/* Cleanup for an ALU-register insn remapped onto r0..r2: fetch the
   result from scratch r0, restore r0-r2 from the saved copies, then
   write the result to the real destination register.  */
6403 cleanup_alu_reg (struct gdbarch *gdbarch,
6404 struct regcache *regs, struct displaced_step_closure *dsc)
6409 rd_val = displaced_read_reg (regs, dsc, 0);
6411 for (i = 0; i < 3; i++)
6412 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6414 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Common worker for ALU (register) insns: save r0-r2, load them with
   the values of the original RD/RN/RM operands, and register
   cleanup_alu_reg to move the result back and restore the scratch
   registers.  The rewritten insn itself is set by the callers.  */
6418 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6419 struct displaced_step_closure *dsc,
6420 unsigned int rd, unsigned int rn, unsigned int rm)
6422 ULONGEST rd_val, rn_val, rm_val;
6424 /* Instruction is of form:
6426 <op><cond> rd, [rn,] rm [, <shift>]
6430 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6431 r0, r1, r2 <- rd, rn, rm
6432 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6433 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6436 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6437 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6438 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6439 rd_val = displaced_read_reg (regs, dsc, rd);
6440 rn_val = displaced_read_reg (regs, dsc, rn);
6441 rm_val = displaced_read_reg (regs, dsc, rm);
6442 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6443 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6444 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6447 dsc->cleanup = &cleanup_alu_reg;
/* Displaced-step an ARM data-processing (register) instruction.  If no
   operand is PC the insn runs unmodified; otherwise remap Rd/Rn/Rm onto
   scratch r0/r1/r2.  */
6451 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6452 struct displaced_step_closure *dsc)
6454 unsigned int op = bits (insn, 21, 24);
6455 int is_mov = (op == 0xd);
/* Mask covers Rd, Rn and Rm fields.  */
6457 if (!insn_references_pc (insn, 0x000ff00ful))
6458 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6460 if (debug_displaced)
6461 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6462 is_mov ? "move" : "ALU", (unsigned long) insn);
/* Rewrite Rd -> r0, Rm -> r2; for non-MOV also Rn -> r1 (0x10000).  */
6465 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6467 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
/* NOTE(review): the trailing argument of this call (Rm = bits 0..3)
   appears to have been lost in extraction — compare with upstream.  */
6469 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
/* Displaced-step a 16-bit Thumb ALU (hi-register) instruction.  Rd is
   split across bit 7 (high bit) and bits 2:0; Rm is bits 6:3.  Only
   rewritten when PC is involved.  */
6475 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6476 struct regcache *regs,
6477 struct displaced_step_closure *dsc)
6481 rm = bits (insn, 3, 6);
6482 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6484 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6485 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6487 if (debug_displaced)
6488 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
6489 (unsigned short) insn);
/* Rewrite operands onto scratch regs: Rd -> r0 (bits cleared), Rm -> r2.  */
6491 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
/* This encoding has no separate Rn; Rd acts as both source and dest.  */
6493 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6498 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
/* Cleanup for an ALU register-shifted-register insn remapped onto
   r0..r3: fetch the result from scratch r0, restore r0-r3, then write
   the result to the real destination register.  */
6501 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6502 struct regcache *regs,
6503 struct displaced_step_closure *dsc)
6505 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6508 for (i = 0; i < 4; i++)
6509 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6511 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Common worker for ALU insns with a register-specified shift: save
   r0-r3, load them with the original RD/RN/RM/RS values, and register
   cleanup_alu_shifted_reg to undo the remapping afterwards.  */
6515 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6516 struct displaced_step_closure *dsc,
6517 unsigned int rd, unsigned int rn, unsigned int rm,
6521 ULONGEST rd_val, rn_val, rm_val, rs_val;
6523 /* Instruction is of form:
6525 <op><cond> rd, [rn,] rm, <shift> rs
6529 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6530 r0, r1, r2, r3 <- rd, rn, rm, rs
6531 Insn: <op><cond> r0, r1, r2, <shift> r3
6533 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6537 for (i = 0; i < 4; i++)
6538 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6540 rd_val = displaced_read_reg (regs, dsc, rd);
6541 rn_val = displaced_read_reg (regs, dsc, rn);
6542 rm_val = displaced_read_reg (regs, dsc, rm);
6543 rs_val = displaced_read_reg (regs, dsc, rs);
6544 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6545 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6546 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6547 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6549 dsc->cleanup = &cleanup_alu_shifted_reg;
/* Displaced-step an ARM data-processing insn with register-shifted
   register operand.  If none of Rd/Rn/Rm/Rs is PC the insn runs
   unmodified; otherwise remap all four onto r0..r3.  */
6553 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6554 struct regcache *regs,
6555 struct displaced_step_closure *dsc)
6557 unsigned int op = bits (insn, 21, 24);
6558 int is_mov = (op == 0xd);
6559 unsigned int rd, rn, rm, rs;
/* Mask covers the Rd, Rn, Rs and Rm fields.  */
6561 if (!insn_references_pc (insn, 0x000fff0ful))
6562 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6564 if (debug_displaced)
6565 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6566 "%.8lx\n", is_mov ? "move" : "ALU",
6567 (unsigned long) insn);
6569 rn = bits (insn, 16, 19);
6570 rm = bits (insn, 0, 3);
6571 rs = bits (insn, 8, 11);
6572 rd = bits (insn, 12, 15);
/* Rewrite Rd -> r0, Rs -> r3, Rm -> r2; for non-MOV also Rn -> r1.  */
6575 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6577 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6579 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6584 /* Clean up load instructions. */
/* Cleanup for a load remapped onto scratch registers: collect the
   loaded value(s) from r0 (and r1 for a doubleword), restore the
   scratch registers, apply base-register writeback, then store the
   result into the real Rt (possibly the PC).  */
6587 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6588 struct displaced_step_closure *dsc)
6590 ULONGEST rt_val, rt_val2 = 0, rn_val;
6592 rt_val = displaced_read_reg (regs, dsc, 0);
6593 if (dsc->u.ldst.xfersize == 8)
6594 rt_val2 = displaced_read_reg (regs, dsc, 1);
/* r2 held the (possibly written-back) base register value.  */
6595 rn_val = displaced_read_reg (regs, dsc, 2);
6597 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6598 if (dsc->u.ldst.xfersize > 4)
6599 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6600 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
/* r3 was only used (for Rm) in the register-offset form.  */
6601 if (!dsc->u.ldst.immed)
6602 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6604 /* Handle register writeback. */
6605 if (dsc->u.ldst.writeback)
6606 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6607 /* Put result in right place. */
6608 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6609 if (dsc->u.ldst.xfersize == 8)
6610 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6613 /* Clean up store instructions. */
/* Cleanup for a store remapped onto scratch registers: restore the
   scratch registers r0-r4 that install_load_store and friends saved,
   then apply base-register writeback from the value left in r2.  */
6616 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6617 struct displaced_step_closure *dsc)
6619 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6621 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6622 if (dsc->u.ldst.xfersize > 4)
6623 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6624 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
/* r3 was only used (for Rm) in the register-offset form.  */
6625 if (!dsc->u.ldst.immed)
6626 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
/* r4 is restored only when it was used as scratch (PC-store sequence in
   arm_copy_ldr_str_ldrb_strb sets restore_r4); the previous code tested
   !restore_r4, which both skipped the needed restore and wrote a value
   that was never saved in the common case.  */
6627 if (dsc->u.ldst.restore_r4)
6628 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
/* Handle register writeback.  */
6631 if (dsc->u.ldst.writeback)
6632 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6635 /* Copy "extra" load/store instructions. These are halfword/doubleword
6636 transfers, which have a different encoding to byte/word transfers. */
/* Displaced-step ARM "extra" load/store insns (halfword, doubleword,
   signed forms).  Operands referencing PC are remapped onto r0-r3; the
   load[]/bytesize[] tables are indexed by a combined opcode decoded
   from op1/op2.  NOTE(review): "unpriveleged" [sic] matches upstream
   GDB's spelling of the parameter and the debug string.  */
6639 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6640 struct regcache *regs, struct displaced_step_closure *dsc)
6642 unsigned int op1 = bits (insn, 20, 24);
6643 unsigned int op2 = bits (insn, 5, 6);
6644 unsigned int rt = bits (insn, 12, 15);
6645 unsigned int rn = bits (insn, 16, 19);
6646 unsigned int rm = bits (insn, 0, 3);
/* Per-opcode tables: whether the form loads, and its transfer size.  */
6647 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6648 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6649 int immed = (op1 & 0x4) != 0;
6651 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6653 if (!insn_references_pc (insn, 0x000ff00ful))
6654 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6656 if (debug_displaced)
6657 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6658 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6659 (unsigned long) insn);
/* Combine the opcode fields into an index for the tables above.  */
6661 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6664 internal_error (__FILE__, __LINE__,
6665 _("copy_extra_ld_st: instruction decode error"));
/* Save the scratch registers, then preload them with the operand
   values: r0/r1 <- Rt (and Rt+1 for doubleword), r2 <- Rn, r3 <- Rm.  */
6667 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6668 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6669 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6671 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6673 rt_val = displaced_read_reg (regs, dsc, rt);
6674 if (bytesize[opcode] == 8)
6675 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6676 rn_val = displaced_read_reg (regs, dsc, rn);
6678 rm_val = displaced_read_reg (regs, dsc, rm);
6680 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6681 if (bytesize[opcode] == 8)
6682 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6683 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6685 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6688 dsc->u.ldst.xfersize = bytesize[opcode];
6689 dsc->u.ldst.rn = rn;
6690 dsc->u.ldst.immed = immed;
/* Writeback for post-indexed (P==0) or pre-indexed with W set.  */
6691 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6692 dsc->u.ldst.restore_r4 = 0;
6695 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6697 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6698 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6700 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6702 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6703 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6705 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6710 /* Copy byte/half word/word loads and stores. */
/* Common worker for byte/halfword/word loads and stores: save the
   scratch registers, preload r0 <- Rt, r2 <- Rn (and r3 <- Rm for the
   register-offset form), record the transfer parameters, and register
   the appropriate cleanup.  The rewritten insn is set by the callers.  */
6713 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6714 struct displaced_step_closure *dsc, int load,
6715 int immed, int writeback, int size, int usermode,
6716 int rt, int rm, int rn)
6718 ULONGEST rt_val, rn_val, rm_val = 0;
6720 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6721 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6723 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
/* r4 is saved for the PC-store scratch sequence (see comment below).  */
6725 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6727 rt_val = displaced_read_reg (regs, dsc, rt);
6728 rn_val = displaced_read_reg (regs, dsc, rn);
6730 rm_val = displaced_read_reg (regs, dsc, rm);
6732 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6733 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6735 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6737 dsc->u.ldst.xfersize = size;
6738 dsc->u.ldst.rn = rn;
6739 dsc->u.ldst.immed = immed;
6740 dsc->u.ldst.writeback = writeback;
6742 /* To write PC we can do:
6744 Before this sequence of instructions:
6745 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6746 r2 is the Rn value got from displaced_read_reg.
6748 Insn1: push {pc} Write address of STR instruction + offset on stack
6749 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6750 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6751 = addr(Insn1) + offset - addr(Insn3) - 8
6753 Insn4: add r4, r4, #8 r4 = offset - 8
6754 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6756 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6758 Otherwise we don't know what value to write for PC, since the offset is
6759 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6760 of this can be found in Section "Saving from r15" in
6761 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6763 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
/* Displaced-step a Thumb-2 PC-relative (literal) load.  The literal
   base is materialized into scratch r2 (word-aligned PC) and the
   immediate into r3, and the insn is rewritten as a register-offset
   LDR into r0; cleanup_load moves the result to the real Rt.  */
6768 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6769 uint16_t insn2, struct regcache *regs,
6770 struct displaced_step_closure *dsc, int size)
6772 unsigned int u_bit = bit (insn1, 7);
6773 unsigned int rt = bits (insn2, 12, 15);
6774 int imm12 = bits (insn2, 0, 11);
6777 if (debug_displaced)
6778 fprintf_unfiltered (gdb_stdlog,
6779 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6780 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6786 /* Rewrite instruction LDR Rt imm12 into:
6788 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6792 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6795 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6796 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6797 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6799 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
/* Literal loads use the word-aligned PC as base (Align(PC, 4)).  */
6801 pc_val = pc_val & 0xfffffffc;
6803 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6804 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6808 dsc->u.ldst.xfersize = size;
6809 dsc->u.ldst.immed = 0;
6810 dsc->u.ldst.writeback = 0;
6811 dsc->u.ldst.restore_r4 = 0;
6813 /* LDR R0, R2, R3 */
6814 dsc->modinsn[0] = 0xf852;
6815 dsc->modinsn[1] = 0x3;
6818 dsc->cleanup = &cleanup_load;
/* Displaced-step a Thumb-2 LDR with register or immediate offset.  If
   neither Rt nor Rn is PC, run it unmodified; otherwise remap Rt -> r0,
   Rn -> r2 (and Rm -> r3 for the register form) via install_load_store.  */
6824 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6825 uint16_t insn2, struct regcache *regs,
6826 struct displaced_step_closure *dsc,
6827 int writeback, int immed)
6829 unsigned int rt = bits (insn2, 12, 15);
6830 unsigned int rn = bits (insn1, 0, 3);
6831 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6832 /* In LDR (register), there is also a register Rm, which is not allowed to
6833 be PC, so we don't have to check it. */
6835 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6836 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6839 if (debug_displaced)
6840 fprintf_unfiltered (gdb_stdlog,
6841 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6842 rt, rn, insn1, insn2);
6844 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
/* A Thumb-2 load never needs the r4 PC-store scratch sequence.  */
6847 dsc->u.ldst.restore_r4 = 0;
6850 /* ldr[b]<cond> rt, [rn, #imm], etc.
6852 ldr[b]<cond> r0, [r2, #imm]. */
6854 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6855 dsc->modinsn[1] = insn2 & 0x0fff;
6858 /* ldr[b]<cond> rt, [rn, rm], etc.
6860 ldr[b]<cond> r0, [r2, r3]. */
6862 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6863 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
/* Displaced-step ARM LDR/STR/LDRB/STRB.  PC-referencing operands are
   remapped onto r0/r2/r3; the special case STR of PC uses a 6-insn
   sequence with r4 as scratch to discover the architecture-dependent
   stored-PC offset (see the comment in install_load_store).  */
6873 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6874 struct regcache *regs,
6875 struct displaced_step_closure *dsc,
6876 int load, int size, int usermode)
6878 int immed = !bit (insn, 25);
/* Writeback for post-indexed (P==0) or pre-indexed with W set.  */
6879 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6880 unsigned int rt = bits (insn, 12, 15);
6881 unsigned int rn = bits (insn, 16, 19);
6882 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6884 if (!insn_references_pc (insn, 0x000ff00ful))
6885 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6887 if (debug_displaced)
6888 fprintf_unfiltered (gdb_stdlog,
6889 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6890 load ? (size == 1 ? "ldrb" : "ldr")
6891 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6893 (unsigned long) insn);
6895 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6896 usermode, rt, rm, rn);
/* Simple case: anything except a store of the PC.  */
6898 if (load || rt != ARM_PC_REGNUM)
6900 dsc->u.ldst.restore_r4 = 0;
6903 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6905 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6906 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6908 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6910 {ldr,str}[b]<cond> r0, [r2, r3]. */
6911 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6915 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6916 dsc->u.ldst.restore_r4 = 1;
6917 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6918 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6919 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6920 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6921 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
/* Finally the store itself, with operands remapped as above.  */
6925 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6927 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6932 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6937 /* Cleanup LDM instructions with fully-populated register list. This is an
6938 unfortunate corner case: it's impossible to implement correctly by modifying
6939 the instruction. The issue is as follows: we have an instruction,
6943 which we must rewrite to avoid loading PC. A possible solution would be to
6944 do the load in two halves, something like (with suitable cleanup
6948 ldm[id][ab] r8!, {r0-r7}
6950 ldm[id][ab] r8, {r7-r14}
6953 but at present there's no suitable place for <temp>, since the scratch space
6954 is overwritten before the cleanup routine is called. For now, we simply
6955 emulate the instruction. */
/* Emulate an LDM with a fully-populated register list (the out-of-line
   copy was a NOP; see the comment above).  Walks the register mask in
   transfer order, reading each word from memory and writing it to the
   corresponding register, then performs writeback manually.  */
6958 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6959 struct displaced_step_closure *dsc)
6961 int inc = dsc->u.block.increment;
/* Pre-indexed forms bump the address before each transfer,
   post-indexed forms after; direction follows the increment bit.  */
6962 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6963 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6964 uint32_t regmask = dsc->u.block.regmask;
/* Incrementing transfers start at r0, decrementing ones at r15.  */
6965 int regno = inc ? 0 : 15;
6966 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6967 int exception_return = dsc->u.block.load && dsc->u.block.user
6968 && (regmask & 0x8000) != 0;
6969 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6970 int do_transfer = condition_true (dsc->u.block.cond, status);
6971 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6976 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6977 sensible we can do here. Complain loudly. */
6978 if (exception_return)
6979 error (_("Cannot single-step exception return"));
6981 /* We don't handle any stores here for now. */
6982 gdb_assert (dsc->u.block.load != 0);
6984 if (debug_displaced)
6985 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6986 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6987 dsc->u.block.increment ? "inc" : "dec",
6988 dsc->u.block.before ? "before" : "after");
/* Advance to the next set bit in the mask, in transfer order.  */
6995 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6998 while (regno >= 0 && (regmask & (1 << regno)) == 0)
7001 xfer_addr += bump_before;
7003 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
7004 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
7006 xfer_addr += bump_after;
7008 regmask &= ~(1 << regno);
/* Emulate base-register writeback by hand.  */
7011 if (dsc->u.block.writeback)
7012 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
7016 /* Clean up an STM which included the PC in the register list. */
/* Cleanup for an STM that included the PC: the out-of-line copy stored
   the scratch-area PC, so compute where PC landed in the transfer,
   auto-detect the architecture-dependent stored-PC offset, and patch
   the memory word with the correct original-PC value.  */
7019 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7020 struct displaced_step_closure *dsc)
7022 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7023 int store_executed = condition_true (dsc->u.block.cond, status);
7024 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7025 CORE_ADDR stm_insn_addr;
7028 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7030 /* If condition code fails, there's nothing else to do. */
7031 if (!store_executed)
/* PC is the highest-numbered register, so it is stored at the top of
   the transferred block; the exact slot depends on the addressing mode.  */
7034 if (dsc->u.block.increment)
7036 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7038 if (dsc->u.block.before)
7043 pc_stored_at = dsc->u.block.xfer_addr;
7045 if (dsc->u.block.before)
/* The stored value is scratch-area PC; its delta from the scratch base
   is the architecture's store-PC offset (8 or 12).  */
7049 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7050 stm_insn_addr = dsc->scratch_base;
7051 offset = pc_val - stm_insn_addr;
7053 if (debug_displaced)
7054 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7055 "STM instruction\n", offset);
7057 /* Rewrite the stored PC to the proper value for the non-displaced original
7059 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7060 dsc->insn_addr + offset);
7063 /* Clean up an LDM which includes the PC in the register list. We clumped all
7064 the registers in the transferred list into a contiguous range r0...rX (to
7065 avoid loading PC directly and losing control of the debugged program), so we
7066 must undo that here. */
/* Cleanup for an LDM whose list included the PC: the copy loaded into
   a contiguous range r0..rX instead (see arm_copy_block_xfer), so move
   each loaded value from its temporary slot to its real register
   (highest first), restore any clobbered low registers from dsc->tmp[],
   and emulate writeback manually.  */
7069 cleanup_block_load_pc (struct gdbarch *gdbarch,
7070 struct regcache *regs,
7071 struct displaced_step_closure *dsc)
7073 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7074 int load_executed = condition_true (dsc->u.block.cond, status);
7075 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7076 unsigned int regs_loaded = bitcount (mask);
7077 unsigned int num_to_shuffle = regs_loaded, clobbered;
7079 /* The method employed here will fail if the register list is fully populated
7080 (we need to avoid loading PC directly). */
7081 gdb_assert (num_to_shuffle < 16);
/* Bits r0..r(N-1) were overwritten by the modified transfer.  */
7086 clobbered = (1 << num_to_shuffle) - 1;
/* Walk the original mask from PC downwards; the value destined for the
   highest remaining register sits in the highest remaining temp reg.  */
7088 while (num_to_shuffle > 0)
7090 if ((mask & (1 << write_reg)) != 0)
7092 unsigned int read_reg = num_to_shuffle - 1;
7094 if (read_reg != write_reg)
7096 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7097 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7098 if (debug_displaced)
7099 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7100 "loaded register r%d to r%d\n"), read_reg,
7103 else if (debug_displaced)
7104 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7105 "r%d already in the right place\n"),
/* This target register now holds its final value: not clobbered.  */
7108 clobbered &= ~(1 << write_reg);
7116 /* Restore any registers we scribbled over. */
7117 for (write_reg = 0; clobbered != 0; write_reg++)
7119 if ((clobbered & (1 << write_reg)) != 0)
7121 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7123 if (debug_displaced)
7124 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7125 "clobbered register r%d\n"), write_reg);
7126 clobbered &= ~(1 << write_reg);
7130 /* Perform register writeback manually. */
7131 if (dsc->u.block.writeback)
7133 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7135 if (dsc->u.block.increment)
7136 new_rn_val += regs_loaded * 4;
7138 new_rn_val -= regs_loaded * 4;
7140 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7145 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7146 in user-level code (in particular exception return, ldm rn, {...pc}^). */
/* Displaced-step an ARM LDM/STM.  Transfers not involving PC run
   unmodified.  An LDM including PC is rewritten to load a contiguous
   r0..rX list (fixed up in cleanup_block_load_pc); a full-list LDM is
   emulated entirely in cleanup_block_load_all; an STM including PC
   runs as-is and has its stored PC patched in cleanup_block_store_pc.  */
7149 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7150 struct regcache *regs,
7151 struct displaced_step_closure *dsc)
7153 int load = bit (insn, 20);
7154 int user = bit (insn, 22);
7155 int increment = bit (insn, 23);
7156 int before = bit (insn, 24);
7157 int writeback = bit (insn, 21);
7158 int rn = bits (insn, 16, 19);
7160 /* Block transfers which don't mention PC can be run directly
7162 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7163 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
/* PC as the base register is architecturally unpredictable.  */
7165 if (rn == ARM_PC_REGNUM)
7167 warning (_("displaced: Unpredictable LDM or STM with "
7168 "base register r15"));
7169 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7172 if (debug_displaced)
7173 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7174 "%.8lx\n", (unsigned long) insn);
7176 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7177 dsc->u.block.rn = rn;
7179 dsc->u.block.load = load;
7180 dsc->u.block.user = user;
7181 dsc->u.block.increment = increment;
7182 dsc->u.block.before = before;
7183 dsc->u.block.writeback = writeback;
7184 dsc->u.block.cond = bits (insn, 28, 31);
7186 dsc->u.block.regmask = insn & 0xffff;
7190 if ((insn & 0xffff) == 0xffff)
7192 /* LDM with a fully-populated register list. This case is
7193 particularly tricky. Implement for now by fully emulating the
7194 instruction (which might not behave perfectly in all cases, but
7195 these instructions should be rare enough for that not to matter
7197 dsc->modinsn[0] = ARM_NOP;
7199 dsc->cleanup = &cleanup_block_load_all;
7203 /* LDM of a list of registers which includes PC. Implement by
7204 rewriting the list of registers to be transferred into a
7205 contiguous chunk r0...rX before doing the transfer, then shuffling
7206 registers into the correct places in the cleanup routine. */
7207 unsigned int regmask = insn & 0xffff;
7208 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7209 unsigned int to = 0, from = 0, i, new_rn;
/* Save the low registers the contiguous transfer will clobber.  */
7211 for (i = 0; i < num_in_list; i++)
7212 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7214 /* Writeback makes things complicated. We need to avoid clobbering
7215 the base register with one of the registers in our modified
7216 register list, but just using a different register can't work in
7219 ldm r14!, {r0-r13,pc}
7221 which would need to be rewritten as:
7225 but that can't work, because there's no free register for N.
7227 Solve this by turning off the writeback bit, and emulating
7228 writeback manually in the cleanup routine. */
/* Replace the list with a contiguous run of the same length.  */
7233 new_regmask = (1 << num_in_list) - 1;
7235 if (debug_displaced)
7236 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7237 "{..., pc}: original reg list %.4x, modified "
7238 "list %.4x\n"), rn, writeback ? "!" : "",
7239 (int) insn & 0xffff, new_regmask);
7241 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7243 dsc->cleanup = &cleanup_block_load_pc;
7248 /* STM of a list of registers which includes PC. Run the instruction
7249 as-is, but out of line: this will store the wrong value for the PC,
7250 so we must manually fix up the memory in the cleanup routine.
7251 Doing things this way has the advantage that we can auto-detect
7252 the offset of the PC write (which is architecture-dependent) in
7253 the cleanup routine. */
7254 dsc->modinsn[0] = insn;
7256 dsc->cleanup = &cleanup_block_store_pc;
/* Displaced-step a Thumb-2 32-bit LDM/STM; same strategy as the ARM
   version, except Thumb-2 encodings are unconditional (INST_AL) and
   cannot produce a full 16-register list.  */
7263 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7264 struct regcache *regs,
7265 struct displaced_step_closure *dsc)
7267 int rn = bits (insn1, 0, 3);
7268 int load = bit (insn1, 4);
7269 int writeback = bit (insn1, 5);
7271 /* Block transfers which don't mention PC can be run directly
7273 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7274 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
/* PC as base register is unpredictable.  */
7276 if (rn == ARM_PC_REGNUM)
7278 warning (_("displaced: Unpredictable LDM or STM with "
7279 "base register r15"));
7280 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7281 "unpredictable ldm/stm", dsc);
7284 if (debug_displaced)
7285 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7286 "%.4x%.4x\n", insn1, insn2);
7288 /* Clear bit 13, since it should be always zero. */
7289 dsc->u.block.regmask = (insn2 & 0xdfff);
7290 dsc->u.block.rn = rn;
7292 dsc->u.block.load = load;
7293 dsc->u.block.user = 0;
7294 dsc->u.block.increment = bit (insn1, 7);
7295 dsc->u.block.before = bit (insn1, 8);
7296 dsc->u.block.writeback = writeback;
7297 dsc->u.block.cond = INST_AL;
7298 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7302 if (dsc->u.block.regmask == 0xffff)
/* Bit 13 was cleared above, so a full 16-bit mask cannot occur.  */
7304 /* This branch is impossible to happen. */
7309 unsigned int regmask = dsc->u.block.regmask;
7310 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7311 unsigned int to = 0, from = 0, i, new_rn;
/* Save the low registers the contiguous transfer will clobber.  */
7313 for (i = 0; i < num_in_list; i++)
7314 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
/* Transfer into a contiguous r0..rX run; shuffled back in cleanup.  */
7319 new_regmask = (1 << num_in_list) - 1;
7321 if (debug_displaced)
7322 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7323 "{..., pc}: original reg list %.4x, modified "
7324 "list %.4x\n"), rn, writeback ? "!" : "",
7325 (int) dsc->u.block.regmask, new_regmask);
7327 dsc->modinsn[0] = insn1;
7328 dsc->modinsn[1] = (new_regmask & 0xffff);
7331 dsc->cleanup = &cleanup_block_load_pc;
/* STM with PC in the list: run as-is, patch memory in cleanup.  */
7336 dsc->modinsn[0] = insn1;
7337 dsc->modinsn[1] = insn2;
7339 dsc->cleanup = &cleanup_block_store_pc;
7344 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7345 for Linux, where some SVC instructions must be treated specially. */
/* Cleanup for SVC (SWI): resume execution at the instruction following
   the original SVC (insn_addr + insn_size), since the copied SVC ran
   out of line in the scratch area.  */
7348 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7349 struct displaced_step_closure *dsc)
7351 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7353 if (debug_displaced)
7354 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7355 "%.8lx\n", (unsigned long) resume_addr);
7357 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7361 /* Common copy routine for svc instruciton. */
7364 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7365 struct displaced_step_closure *dsc)
7367 /* Preparation: none.
7368 Insn: unmodified svc.
7369 Cleanup: pc <- insn_addr + insn_size. */
7371 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7373 dsc->wrote_to_pc = 1;
7375 /* Allow OS-specific code to override SVC handling. */
7376 if (dsc->u.svc.copy_svc_os)
7377 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7380 dsc->cleanup = &cleanup_svc;
/* Copy an ARM-mode SVC for displaced stepping: the insn is executed
   unmodified; install_svc sets up the cleanup/OS hook.  */
7386 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7387 struct regcache *regs, struct displaced_step_closure *dsc)
7390 if (debug_displaced)
7391 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7392 (unsigned long) insn);
7394 dsc->modinsn[0] = insn;
7396 return install_svc (gdbarch, regs, dsc);
/* Thumb-mode counterpart of arm_copy_svc: 16-bit SVC copied unmodified.  */
7400 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7401 struct regcache *regs, struct displaced_step_closure *dsc)
7404 if (debug_displaced)
7405 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7408 dsc->modinsn[0] = insn;
7410 return install_svc (gdbarch, regs, dsc);
7413 /* Copy undefined instructions. */
/* Undefined insns are copied unmodified: executing the copy raises the
   same undefined-instruction exception the original would have.  */
7416 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7417 struct displaced_step_closure *dsc)
7419 if (debug_displaced)
7420 fprintf_unfiltered (gdb_stdlog,
7421 "displaced: copying undefined insn %.8lx\n",
7422 (unsigned long) insn);
7424 dsc->modinsn[0] = insn;
/* 32-bit Thumb variant: both halfwords copied unmodified.  */
7430 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7431 struct displaced_step_closure *dsc)
7434 if (debug_displaced)
7435 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7436 "%.4x %.4x\n", (unsigned short) insn1,
7437 (unsigned short) insn2);
7439 dsc->modinsn[0] = insn1;
7440 dsc->modinsn[1] = insn2;
7446 /* Copy unpredictable instructions. */
/* Unpredictable insns are likewise copied unmodified; whatever the core
   does with them, it does in the scratch pad.  */
7449 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7450 struct displaced_step_closure *dsc)
7452 if (debug_displaced)
7453 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7454 "%.8lx\n", (unsigned long) insn);
7456 dsc->modinsn[0] = insn;
7461 /* The decode_* functions are instruction decoding helpers. They mostly follow
7462 the presentation in the ARM ARM. */
/* Decode the ARM "miscellaneous, memory hints and Advanced SIMD" space
   (unconditional insns with bit 27 clear).  op1 is bits 20-26, op2 is
   bits 4-7, per the ARM ARM decode tables.  Dispatches each case to the
   appropriate copy_* helper.  */
7465 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7466 struct regcache *regs,
7467 struct displaced_step_closure *dsc)
7469 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7470 unsigned int rn = bits (insn, 16, 19);
7472 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7473 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7474 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7475 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7476 else if ((op1 & 0x60) == 0x20)
7477 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7478 else if ((op1 & 0x71) == 0x40)
7479 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7481 else if ((op1 & 0x77) == 0x41)
7482 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7483 else if ((op1 & 0x77) == 0x45)
7484 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7485 else if ((op1 & 0x77) == 0x51)
7488 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7490 return arm_copy_unpred (gdbarch, insn, dsc);
7492 else if ((op1 & 0x77) == 0x55)
7493 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7494 else if (op1 == 0x57)
/* Barriers and clrex, selected by op2.  */
7497 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7498 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7499 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7500 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7501 default: return arm_copy_unpred (gdbarch, insn, dsc);
7503 else if ((op1 & 0x63) == 0x43)
7504 return arm_copy_unpred (gdbarch, insn, dsc);
7505 else if ((op2 & 0x1) == 0x0)
/* Register-form preloads/hints; bit 7 of op1 ignored in the switch.  */
7506 switch (op1 & ~0x80)
7509 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7511 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7512 case 0x71: case 0x75:
/* pld/pldw (register).  */
7514 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7515 case 0x63: case 0x67: case 0x73: case 0x77:
7516 return arm_copy_unpred (gdbarch, insn, dsc);
7518 return arm_copy_undef (gdbarch, insn, dsc);
7521 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
/* Decode the ARM unconditional instruction space (cond field == 0b1111).
   Bit 27 clear goes to the misc/memhint/neon decoder; otherwise dispatch
   on bits 26-24 and 20, per the ARM ARM.  */
7525 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7526 struct regcache *regs,
7527 struct displaced_step_closure *dsc)
7529 if (bit (insn, 27) == 0)
7530 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7531 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7532 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7535 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7538 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7540 case 0x4: case 0x5: case 0x6: case 0x7:
/* BLX (immediate): PC-relative branch needs fixing up.  */
7541 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7544 switch ((insn & 0xe00000) >> 21)
7546 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
/* stc/stc2.  */
7548 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7551 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7554 return arm_copy_undef (gdbarch, insn, dsc);
7559 int rn_f = (bits (insn, 16, 19) == 0xf);
7560 switch ((insn & 0xe00000) >> 21)
7563 /* ldc/ldc2 imm (undefined for rn == pc). */
7564 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7565 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7568 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7570 case 0x4: case 0x5: case 0x6: case 0x7:
7571 /* ldc/ldc2 lit (undefined for rn != pc). */
7572 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7573 : arm_copy_undef (gdbarch, insn, dsc);
7576 return arm_copy_undef (gdbarch, insn, dsc);
7581 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7584 if (bits (insn, 16, 19) == 0xf)
/* ldc/ldc2 literal: PC-relative coprocessor load.  */
7586 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7588 return arm_copy_undef (gdbarch, insn, dsc);
7592 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7594 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7598 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7600 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7603 return arm_copy_undef (gdbarch, insn, dsc);
7607 /* Decode miscellaneous instructions in dp/misc encoding space. */
/* op = bits 21-22, op1 = bits 16-19, op2 = bits 4-6, per the ARM ARM
   "miscellaneous instructions" table.  BX/BLX get the register-branch
   copier; most others execute unmodified.  */
7610 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7611 struct regcache *regs,
7612 struct displaced_step_closure *dsc)
7614 unsigned int op2 = bits (insn, 4, 6);
7615 unsigned int op = bits (insn, 21, 22);
7616 unsigned int op1 = bits (insn, 16, 19);
7621 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7624 if (op == 0x1) /* bx. */
7625 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7627 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7629 return arm_copy_undef (gdbarch, insn, dsc);
7633 /* Not really supported. */
7634 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7636 return arm_copy_undef (gdbarch, insn, dsc);
7640 return arm_copy_bx_blx_reg (gdbarch, insn,
7641 regs, dsc); /* blx register. */
7643 return arm_copy_undef (gdbarch, insn, dsc);
7646 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7650 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7652 /* Not really supported. */
7653 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7656 return arm_copy_undef (gdbarch, insn, dsc);
/* Decode the ARM data-processing / miscellaneous space (op bits 25-27 ==
   0b00x).  Immediate forms dispatch on bits 20-24; register forms further
   dispatch on op2 (bits 4-7) per the ARM ARM.  */
7661 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7662 struct regcache *regs,
7663 struct displaced_step_closure *dsc)
7666 switch (bits (insn, 20, 24))
7669 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7672 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7674 case 0x12: case 0x16:
7675 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
/* ALU with immediate: may read PC as an operand, needs fixup.  */
7678 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7682 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7684 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7685 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7686 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7687 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7688 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7689 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7690 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7691 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7692 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7693 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7694 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7695 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7696 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7697 /* 2nd arg means "unprivileged". */
7698 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7702 /* Should be unreachable. */
/* Decode ARM load/store word and unsigned byte.  a = bit 25 (register
   offset form), b = bit 4, op1 = bits 20-24.  Each arm selects
   (load?, size, usermode?) for arm_copy_ldr_str_ldrb_strb: the trailing
   three args are (load, byte/word size, unprivileged).  */
7707 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7708 struct regcache *regs,
7709 struct displaced_step_closure *dsc)
7711 int a = bit (insn, 25), b = bit (insn, 4);
7712 uint32_t op1 = bits (insn, 20, 24);
7713 int rn_f = bits (insn, 16, 19) == 0xf;
/* NOTE(review): rn_f is computed but not used in any visible branch —
   presumably consumed by elided code or vestigial; confirm upstream.  */
7715 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7716 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7717 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7718 else if ((!a && (op1 & 0x17) == 0x02)
7719 || (a && (op1 & 0x17) == 0x02 && !b))
7720 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7721 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7722 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7723 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7724 else if ((!a && (op1 & 0x17) == 0x03)
7725 || (a && (op1 & 0x17) == 0x03 && !b))
7726 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7727 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7728 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7729 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7730 else if ((!a && (op1 & 0x17) == 0x06)
7731 || (a && (op1 & 0x17) == 0x06 && !b))
7732 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7733 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7734 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7735 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7736 else if ((!a && (op1 & 0x17) == 0x07)
7737 || (a && (op1 & 0x17) == 0x07 && !b))
7738 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7740 /* Should be unreachable. */
/* Decode the ARM "media" instruction space (bits 25-27 == 0b011, bit 4
   set).  Dispatch on op1 (bits 20-24); the bitfield insns (sbfx/bfc/bfi/
   ubfx) are further disambiguated by op2 (bits 5-6).  None of these read
   the PC in ways needing fixup, so everything is unmodified or undef.  */
7745 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7746 struct displaced_step_closure *dsc)
7748 switch (bits (insn, 20, 24))
7750 case 0x00: case 0x01: case 0x02: case 0x03:
7751 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7753 case 0x04: case 0x05: case 0x06: case 0x07:
7754 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7756 case 0x08: case 0x09: case 0x0a: case 0x0b:
7757 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7758 return arm_copy_unmodified (gdbarch, insn,
7759 "decode/pack/unpack/saturate/reverse", dsc);
7762 if (bits (insn, 5, 7) == 0) /* op2. */
/* Rd == 0xf distinguishes usad8 from usada8.  */
7764 if (bits (insn, 12, 15) == 0xf)
7765 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7767 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7770 return arm_copy_undef (gdbarch, insn, dsc);
7772 case 0x1a: case 0x1b:
7773 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7774 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7776 return arm_copy_undef (gdbarch, insn, dsc);
7778 case 0x1c: case 0x1d:
7779 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
/* Rn == 0xf distinguishes bfc from bfi.  */
7781 if (bits (insn, 0, 3) == 0xf)
7782 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7784 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7787 return arm_copy_undef (gdbarch, insn, dsc);
7789 case 0x1e: case 0x1f:
7790 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7791 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7793 return arm_copy_undef (gdbarch, insn, dsc);
7796 /* Should be unreachable. */
/* Decode branch / block transfer space: B/BL (PC-relative, need fixup)
   vs LDM/STM (regmask may contain PC).  */
7801 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7802 struct regcache *regs,
7803 struct displaced_step_closure *dsc)
7806 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7808 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
/* Decode VFP/Neon extension register load/store (opcode = bits 20-24).
   vldr/vstr use the generic coprocessor load/store copier; the multiple
   forms (vstm/vldm/vpush/vpop) cannot address the PC and run
   unmodified.  */
7812 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7813 struct regcache *regs,
7814 struct displaced_step_closure *dsc)
7816 unsigned int opcode = bits (insn, 20, 24);
7820 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7821 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7823 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7824 case 0x12: case 0x16:
7825 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7827 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7828 case 0x13: case 0x17:
7829 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7831 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7832 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7833 /* Note: no writeback for these instructions. Bit 25 will always be
7834 zero though (via caller), so the following works OK. */
7835 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7838 /* Should be unreachable. */
7842 /* Decode shifted register instructions. */
/* Thumb-2 data-processing (shifted register): only MOV (op == 2 with
   Rn == 0xf) can involve the PC, and it is handled by the ALU-immediate
   copier; everything else runs unmodified.  */
7845 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7846 uint16_t insn2, struct regcache *regs,
7847 struct displaced_step_closure *dsc)
7849 /* PC is only allowed to be used in instruction MOV. */
7851 unsigned int op = bits (insn1, 5, 8);
7852 unsigned int rn = bits (insn1, 0, 3);
7854 if (op == 0x2 && rn == 0xf) /* MOV */
7855 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7857 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7858 "dp (shift reg)", dsc);
7862 /* Decode extension register load/store. Exactly the same as
7863 arm_decode_ext_reg_ld_st. */
/* Thumb-2 counterpart; opcode layout matches the ARM encoding, with
   opcode taken from bits 4-8 of the first halfword.  */
7866 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7867 uint16_t insn2, struct regcache *regs,
7868 struct displaced_step_closure *dsc)
7870 unsigned int opcode = bits (insn1, 4, 8);
7874 case 0x04: case 0x05:
7875 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7876 "vfp/neon vmov", dsc);
7878 case 0x08: case 0x0c: /* 01x00 */
7879 case 0x0a: case 0x0e: /* 01x10 */
7880 case 0x12: case 0x16: /* 10x10 */
7881 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7882 "vfp/neon vstm/vpush", dsc);
7884 case 0x09: case 0x0d: /* 01x01 */
7885 case 0x0b: case 0x0f: /* 01x11 */
7886 case 0x13: case 0x17: /* 10x11 */
7887 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7888 "vfp/neon vldm/vpop", dsc);
7890 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7891 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7893 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7894 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7897 /* Should be unreachable. */
/* Decode the ARM coprocessor / supervisor-call space (bits 26-27 == 0b11).
   op1 = bits 20-25; coproc = bits 8-11 (101x means VFP/Neon); op1 with
   bits 20-21 == 0b11 is SVC.  TO is unused here — presumably kept for
   signature compatibility with other decoders; confirm upstream.  */
7902 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7903 struct regcache *regs, struct displaced_step_closure *dsc)
7905 unsigned int op1 = bits (insn, 20, 25);
7906 int op = bit (insn, 4);
7907 unsigned int coproc = bits (insn, 8, 11);
7908 unsigned int rn = bits (insn, 16, 19);
7910 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7911 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7912 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7913 && (coproc & 0xe) != 0xa)
/* stc/stc2.  */
7915 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7916 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7917 && (coproc & 0xe) != 0xa)
7918 /* ldc/ldc2 imm/lit. */
7919 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7920 else if ((op1 & 0x3e) == 0x00)
7921 return arm_copy_undef (gdbarch, insn, dsc);
7922 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7923 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7924 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7925 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7926 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7927 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7928 else if ((op1 & 0x30) == 0x20 && !op)
7930 if ((coproc & 0xe) == 0xa)
7931 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7933 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7935 else if ((op1 & 0x30) == 0x20 && op)
7936 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7937 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7938 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7939 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7940 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7941 else if ((op1 & 0x30) == 0x30)
7942 return arm_copy_svc (gdbarch, insn, regs, dsc);
7944 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
/* Thumb-2 coprocessor / SIMD decode.  coproc comes from the second
   halfword (bits 8-11); op1 and its sub-fields from the first.  Routes
   VFP/Neon ext-reg load/store to its decoder, LDC to the coprocessor
   load/store copier, and everything else through unmodified.  */
7948 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7949 uint16_t insn2, struct regcache *regs,
7950 struct displaced_step_closure *dsc)
7952 unsigned int coproc = bits (insn2, 8, 11);
7953 unsigned int op1 = bits (insn1, 4, 9);
7954 unsigned int bit_5_8 = bits (insn1, 5, 8);
7955 unsigned int bit_9 = bit (insn1, 9);
7956 unsigned int bit_4 = bit (insn1, 4);
7957 unsigned int rn = bits (insn1, 0, 3);
7962 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7963 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7965 else if (bit_5_8 == 0) /* UNDEFINED. */
7966 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7969 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7970 if ((coproc & 0xe) == 0xa)
7971 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7973 else /* coproc is not 101x. */
7975 if (bit_4 == 0) /* STC/STC2. */
7976 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7978 else /* LDC/LDC2 {literal, immediate}. */
7979 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7985 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
/* Common setup for PC-relative address generation (ADR and friends):
   preload Rd with the PC value so the modified insn computes the same
   address from the scratch pad as the original would have in place.  */
7991 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7992 struct displaced_step_closure *dsc, int rd)
7998 Preparation: Rd <- PC
8004 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8005 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
/* 16-bit Thumb ADR: rewritten as "ADDS Rd, #imm" (encoding T2) with Rd
   preloaded to the original PC by install_pc_relative.  */
8009 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8010 struct displaced_step_closure *dsc,
8011 int rd, unsigned int imm)
8014 /* Encoding T2: ADDS Rd, #imm */
8015 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8017 install_pc_relative (gdbarch, regs, dsc, rd);
/* Extract Rd and imm8 from a 16-bit ADR and delegate to the copier.  */
8023 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8024 struct regcache *regs,
8025 struct displaced_step_closure *dsc)
8027 unsigned int rd = bits (insn, 8, 10);
8028 unsigned int imm8 = bits (insn, 0, 7);
8030 if (debug_displaced)
8031 fprintf_unfiltered (gdb_stdlog,
8032 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8035 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
/* 32-bit Thumb ADR (ADD/SUB Rd, PC, #imm): rewritten as ADD/SUB
   Rd, Rd, #imm with Rd preloaded to PC.  Bit 7 of insn1 selects the
   subtracting form.  NOTE(review): the "Encoding T2"/"Encoding T3"
   comments below are inconsistent (both arms say T3 inside) — the
   generated opcodes are what matter; verify labels against the ARM ARM.  */
8039 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8040 uint16_t insn2, struct regcache *regs,
8041 struct displaced_step_closure *dsc)
8043 unsigned int rd = bits (insn2, 8, 11);
8044 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8045 extract raw immediate encoding rather than computing immediate. When
8046 generating ADD or SUB instruction, we can simply perform OR operation to
8047 set immediate into ADD. */
8048 unsigned int imm_3_8 = insn2 & 0x70ff;
8049 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8051 if (debug_displaced)
8052 fprintf_unfiltered (gdb_stdlog,
8053 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8054 rd, imm_i, imm_3_8, insn1, insn2);
8056 if (bit (insn1, 7)) /* Encoding T2 */
8058 /* Encoding T3: SUB Rd, Rd, #imm */
8059 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8060 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8062 else /* Encoding T3 */
8064 /* Encoding T3: ADD Rd, Rd, #imm */
8065 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8066 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8070 install_pc_relative (gdbarch, regs, dsc, rd);
/* Copy 16-bit Thumb "LDR Rt, [PC, #imm8]" (literal load).  The 16-bit
   encoding cannot express an arbitrary base register, so it is rewritten
   as "LDR r0, [r2, r3]" with r2 = Align(PC,4) and r3 = offset; r0/r2/r3
   are saved to dsc->tmp[] and restored by cleanup_load, which also moves
   the loaded value into the real Rt.  */
8076 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8077 struct regcache *regs,
8078 struct displaced_step_closure *dsc)
8080 unsigned int rt = bits (insn1, 8, 10);
8082 int imm8 = (bits (insn1, 0, 7) << 2);
8083 CORE_ADDR from = dsc->insn_addr;
8089 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8091 Insn: LDR R0, [R2, R3];
8092 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8094 if (debug_displaced)
8095 fprintf_unfiltered (gdb_stdlog,
8096 "displaced: copying thumb ldr r%d [pc #%d]\n"
8099 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8100 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8101 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8102 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8103 /* The assembler calculates the required value of the offset from the
8104 Align(PC,4) value of this instruction to the label. */
8105 pc = pc & 0xfffffffc;
8107 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8108 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
/* Describe the transfer for cleanup_load: 4-byte load into rt.  */
8111 dsc->u.ldst.xfersize = 4;
8113 dsc->u.ldst.immed = 0;
8114 dsc->u.ldst.writeback = 0;
8115 dsc->u.ldst.restore_r4 = 0;
8117 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8119 dsc->cleanup = &cleanup_load;
8124 /* Copy Thumb cbnz/cbz instruction. */
/* CBZ/CBNZ compare a low register against zero and branch forward.  The
   condition is evaluated here, at copy time (the register value cannot
   change before the single displaced insn runs): the copied insn is a
   NOP and cleanup_branch performs the branch — dest = from+4+imm5 if
   taken, from+2 (next insn) if not.  */
8127 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8128 struct regcache *regs,
8129 struct displaced_step_closure *dsc)
8131 int non_zero = bit (insn1, 11);
8132 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8133 CORE_ADDR from = dsc->insn_addr;
8134 int rn = bits (insn1, 0, 2);
8135 int rn_val = displaced_read_reg (regs, dsc, rn);
8137 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8138 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8139 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8140 condition is false, let it be, cleanup_branch will do nothing. */
8141 if (dsc->u.branch.cond)
8143 dsc->u.branch.cond = INST_AL;
8144 dsc->u.branch.dest = from + 4 + imm5;
8147 dsc->u.branch.dest = from + 2;
8149 dsc->u.branch.link = 0;
8150 dsc->u.branch.exchange = 0;
8152 if (debug_displaced)
8153 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8154 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8155 rn, rn_val, insn1, dsc->u.branch.dest);
8157 dsc->modinsn[0] = THUMB_NOP;
8159 dsc->cleanup = &cleanup_branch;
8163 /* Copy Table Branch Byte/Halfword */
/* TBB/TBH: read the branch-table entry from target memory here at copy
   time (return values of target_read_memory are not checked — a failed
   read leaves BUF unread; NOTE(review): confirm this matches upstream),
   then let cleanup_branch jump to insn_addr + 4 + 2*entry.  The copied
   insn itself is presumably replaced by a NOP in elided code.  */
8165 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8166 uint16_t insn2, struct regcache *regs,
8167 struct displaced_step_closure *dsc)
8169 ULONGEST rn_val, rm_val;
8170 int is_tbh = bit (insn2, 4);
8171 CORE_ADDR halfwords = 0;
8172 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8174 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8175 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
/* TBH: table of halfwords, indexed by Rm scaled by 2.  */
8181 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8182 halfwords = extract_unsigned_integer (buf, 2, byte_order);
/* TBB: table of bytes, indexed by Rm.  */
8188 target_read_memory (rn_val + rm_val, buf, 1);
8189 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8192 if (debug_displaced)
8193 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8194 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8195 (unsigned int) rn_val, (unsigned int) rm_val,
8196 (unsigned int) halfwords);
8198 dsc->u.branch.cond = INST_AL;
8199 dsc->u.branch.link = 0;
8200 dsc->u.branch.exchange = 0;
8201 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8203 dsc->cleanup = &cleanup_branch;
/* Cleanup for the full-register-list POP rewrite below: r7 holds the
   popped PC value (write it with BX semantics), r8 holds the original r7,
   and tmp[0] holds the original r8.  Restore in that order.  */
8209 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8210 struct displaced_step_closure *dsc)
8213 int val = displaced_read_reg (regs, dsc, 7);
8214 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8217 val = displaced_read_reg (regs, dsc, 8);
8218 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8221 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
/* Copy 16-bit Thumb "POP {..., PC}".  Two strategies depending on
   whether the low-register list is already full — see the comment
   below.  */
8226 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8227 struct regcache *regs,
8228 struct displaced_step_closure *dsc)
8230 dsc->u.block.regmask = insn1 & 0x00ff;
8232 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8235 (1) register list is full, that is, r0-r7 are used.
8236 Prepare: tmp[0] <- r8
8238 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8239 MOV r8, r7; Move value of r7 to r8;
8240 POP {r7}; Store PC value into r7.
8242 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8244 (2) register list is not full, supposing there are N registers in
8245 register list (except PC, 0 <= N <= 7).
8246 Prepare: for each i, 0 - N, tmp[i] <- ri.
8248 POP {r0, r1, ...., rN};
8250 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8251 from tmp[] properly.
8253 if (debug_displaced)
8254 fprintf_unfiltered (gdb_stdlog,
8255 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8256 dsc->u.block.regmask, insn1);
8258 if (dsc->u.block.regmask == 0xff)
8260 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8262 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8263 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8264 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8267 dsc->cleanup = &cleanup_pop_pc_16bit_all;
/* Case (2): pop into a contiguous run r0..rN instead; cleanup_block_load_pc
   scatters the values back to the original registers.  */
8271 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8272 unsigned int new_regmask, bit = 1;
8273 unsigned int to = 0, from = 0, i, new_rn;
8275 for (i = 0; i < num_in_list + 1; i++)
8276 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8278 new_regmask = (1 << (num_in_list + 1)) - 1;
8280 if (debug_displaced)
8281 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8282 "{..., pc}: original reg list %.4x,"
8283 " modified list %.4x\n"),
8284 (int) dsc->u.block.regmask, new_regmask);
8286 dsc->u.block.regmask |= 0x8000;
8287 dsc->u.block.writeback = 0;
8288 dsc->u.block.cond = INST_AL;
8290 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8292 dsc->cleanup = &cleanup_block_load_pc;
/* Top-level dispatcher for displaced-stepping a 16-bit Thumb insn.
   Selects a copy_* helper from the top opcode bits (12-15, refined by
   10-11); any helper returning an error funnels into the internal_error
   at the bottom.  Most insns that cannot touch the PC are copied
   unmodified.  */
8299 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8300 struct regcache *regs,
8301 struct displaced_step_closure *dsc)
8303 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8304 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8307 /* 16-bit thumb instructions. */
8308 switch (op_bit_12_15)
8310 /* Shift (imme), add, subtract, move and compare. */
8311 case 0: case 1: case 2: case 3:
8312 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8313 "shift/add/sub/mov/cmp",
8317 switch (op_bit_10_11)
8319 case 0: /* Data-processing */
8320 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8324 case 1: /* Special data instructions and branch and exchange. */
8326 unsigned short op = bits (insn1, 7, 9);
8327 if (op == 6 || op == 7) /* BX or BLX */
8328 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8329 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8330 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8332 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8336 default: /* LDR (literal) */
8337 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8340 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8341 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8344 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8345 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8346 else /* Generate SP-relative address */
8347 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8349 case 11: /* Misc 16-bit instructions */
8351 switch (bits (insn1, 8, 11))
8353 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8354 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8356 case 12: case 13: /* POP */
8357 if (bit (insn1, 8)) /* PC is in register list. */
8358 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8360 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8362 case 15: /* If-Then, and hints */
8363 if (bits (insn1, 0, 3))
8364 /* If-Then makes up to four following instructions conditional.
8365 IT instruction itself is not conditional, so handle it as a
8366 common unmodified instruction. */
8367 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8370 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8373 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
/* Case 12: LDM/STM (multiple registers).  */
8378 if (op_bit_10_11 < 2) /* Store multiple registers */
8379 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8380 else /* Load multiple registers */
8381 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8383 case 13: /* Conditional branch and supervisor call */
8384 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8385 err = thumb_copy_b (gdbarch, insn1, dsc);
8387 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8389 case 14: /* Unconditional branch */
8390 err = thumb_copy_b (gdbarch, insn1, dsc);
8397 internal_error (__FILE__, __LINE__,
8398 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
/* Decode 32-bit Thumb load / memory-hint insns.  Size selected by
   bits 5-6 of the first halfword (0 = byte, 1 = halfword, 2 = word);
   only the PC-relative (Rn == 0xf literal) and PC-destination forms need
   special copiers, the rest run unmodified.  */
8402 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8403 uint16_t insn1, uint16_t insn2,
8404 struct regcache *regs,
8405 struct displaced_step_closure *dsc)
8407 int rt = bits (insn2, 12, 15);
8408 int rn = bits (insn1, 0, 3);
8409 int op1 = bits (insn1, 7, 8);
8412 switch (bits (insn1, 5, 6))
8414 case 0: /* Load byte and memory hints */
8415 if (rt == 0xf) /* PLD/PLI */
8418 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8419 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8421 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
/* Non-hint byte loads.  */
8426 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8427 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8430 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8431 "ldrb{reg, immediate}/ldrbt",
8436 case 1: /* Load halfword and memory hints. */
8437 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8438 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8439 "pld/unalloc memhint", dsc);
/* LDRH/LDRSH literal when Rn == pc.  */
8443 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8446 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8450 case 2: /* Load word */
8452 int insn2_bit_8_11 = bits (insn2, 8, 11);
/* LDR (literal) when Rn == pc.  */
8455 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8456 else if (op1 == 0x1) /* Encoding T3 */
8457 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8459 else /* op1 == 0x0 */
8461 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8462 /* LDR (immediate) */
8463 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8464 dsc, bit (insn2, 8), 1);
8465 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8466 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8469 /* LDR (register) */
8470 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8476 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
/* Decode a 32-bit Thumb-2 instruction INSN1:INSN2 for displaced
   stepping: dispatch on the major opcode fields of the first halfword
   to the matching thumb2_copy_* / thumb_copy_* routine, which records
   the (possibly modified) instruction and any cleanup callback in DSC.
   Calls internal_error if no decoder accepts the encoding.  */
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);
  switch (bits (insn1, 9, 10))
      /* Load/store {dual, exclusive}, table branch.  */
      if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
	  && bits (insn2, 5, 7) == 0)
	err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
      /* PC is not allowed to be used in load/store {dual, exclusive}
	err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					   "load/store dual/ex", dsc);
      else /* load/store multiple */
	  switch (bits (insn1, 7, 8))
	    case 0: case 3: /* SRS, RFE */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
	    case 1: case 2: /* LDM/STM/PUSH/POP */
	      err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
      /* Data-processing (shift register).  */
      err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
    default: /* Coprocessor instructions.  */
      err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12)  /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7))  /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR/ADDW/SUBW with Rn == PC are PC-relative and need
		 rewriting; everything else can run unmodified.  */
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	      case 0: case 1: /* Data processing (register) */
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
	      case 2: /* Multiply and absolute difference */
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "mul/mua/diff", dsc);
	      case 3: /* Long multiply and divide */
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
  /* Unrecognized encoding: nothing sensible we can single-step.  */
  internal_error (__FILE__, __LINE__,
	_("thumb_process_displaced_32bit_insn: Instruction decode error"));
/* Displaced-stepping entry point for Thumb mode: read the first
   halfword at FROM, determine whether this is a 16-bit or a 32-bit
   Thumb instruction, and hand it to the matching decoder.  */
thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			      CORE_ADDR to, struct regcache *regs,
			      struct displaced_step_closure *dsc)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
    = read_memory_unsigned_integer (from, 2, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
			"at %.8lx\n", insn1, (unsigned long) from);
  /* Record the original instruction's size so the fixup phase knows
     how far to advance the PC.  */
  dsc->insn_size = thumb_insn_size (insn1);
  if (thumb_insn_size (insn1) == 4)
      /* 32-bit Thumb-2 instruction: fetch the second halfword too.  */
      = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
      thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
    thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
/* Displaced-stepping entry point: initialize DSC and then decode the
   instruction at FROM (delegating to thumb_process_displaced_insn when
   the inferior is in Thumb mode), preparing a copy for execution at
   scratch address TO.  */
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    struct displaced_step_closure *dsc)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;
  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);
  /* Dispatch on bits <27:25> plus bit <4> of the ARM encoding; the
     all-ones condition field selects the unconditional space.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      err = arm_decode_media (gdbarch, insn, dsc);
    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
  /* Unrecognized encoding: nothing sensible we can single-step.  */
  internal_error (__FILE__, __LINE__,
	_("arm_process_displaced_insn: Instruction decode error"));
/* Actually set up the scratch space for a displaced instruction: poke
   the modified instruction(s) recorded in DSC to address TO, then
   append a breakpoint of the appropriate (ARM or Thumb) flavour so GDB
   regains control once the displaced instruction has executed.  */
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct displaced_step_closure *dsc)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Scratch slots are halfwords for Thumb, words for ARM.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
      if (debug_displaced)
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  fprintf_unfiltered (gdb_stdlog, "%.8lx",
	  fprintf_unfiltered (gdb_stdlog, "%.4x",
			      (unsigned short)dsc->modinsn[i]);
	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
  /* Choose the correct breakpoint instruction.  */
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
8749 /* Entry point for copying an instruction into scratch space for displaced
8752 struct displaced_step_closure *
8753 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8754 CORE_ADDR from, CORE_ADDR to,
8755 struct regcache *regs)
8757 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
8759 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8760 arm_displaced_init_closure (gdbarch, from, to, dsc);
8765 /* Entry point for cleaning things up after a displaced instruction has been
8769 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8770 struct displaced_step_closure *dsc,
8771 CORE_ADDR from, CORE_ADDR to,
8772 struct regcache *regs)
8775 dsc->cleanup (gdbarch, regs, dsc);
8777 if (!dsc->wrote_to_pc)
8778 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8779 dsc->insn_addr + dsc->insn_size);
8783 #include "bfd-in2.h"
8784 #include "libcoff.h"
/* Disassemble one instruction at MEMADDR for GDB's disassembly
   commands, selecting ARM vs. Thumb decoding and the big- vs.
   little-endian opcodes printer.  */
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
  struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
  if (arm_pc_is_thumb (gdbarch, memaddr))
      /* These are deliberately static: the fake symbol vector is built
	 once and reused for every subsequent Thumb disassembly.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;
      if (csym.native == NULL)
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */
	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    info->symbols = NULL;
  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  return print_insn_little_arm (memaddr, info);
8828 /* The following define instruction sequences that will cause ARM
8829 cpu's to take an undefined instruction trap. These are used to
8830 signal a breakpoint to GDB.
8832 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8833 modes. A different instruction is required for each mode. The ARM
8834 cpu's can also be big or little endian. Thus four different
8835 instructions are needed to support all cases.
8837 Note: ARMv4 defines several new instructions that will take the
8838 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8839 not in fact add the new instructions. The new undefined
8840 instructions in ARMv4 are all instructions that had no defined
8841 behaviour in earlier chips. There is no guarantee that they will
8842 raise an exception, but may be treated as NOP's. In practice, it
may only be safe to rely on instructions matching:
8845 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8846 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8847 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
Even this may only be true if the condition predicate is true.  The
8850 following use a condition predicate of ALWAYS so it is always TRUE.
8852 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8853 and NetBSD all use a software interrupt rather than an undefined
instruction to force a trap.  This can be handled by the
8855 abi-specific code during establishment of the gdbarch vector. */
/* Undefined-instruction encodings used as breakpoints, one per
   ARM/Thumb mode and byte order (see the comment above).  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
/* Default breakpoint byte sequences, installed into the tdep at
   gdbarch initialization time.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8867 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8868 the program counter value to determine whether a 16-bit or 32-bit
8869 breakpoint should be used. It returns a pointer to a string of
8870 bytes that encode a breakpoint instruction, stores the length of
8871 the string to *lenptr, and adjusts the program counter (if
8872 necessary) to point to the actual memory location where the
8873 breakpoint should be inserted. */
static const unsigned char *
arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  if (arm_pc_is_thumb (gdbarch, *pcptr))
      /* Clear the Thumb bit so the breakpoint lands on the real
	 instruction address.  */
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
	 check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
	  if (target_read_memory (*pcptr, buf, 2) == 0)
	      unsigned short inst1;
	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
	      if (thumb_insn_size (inst1) == 4)
		  *lenptr = tdep->thumb2_breakpoint_size;
		  return tdep->thumb2_breakpoint;
      /* Default: 16-bit Thumb breakpoint.  */
      *lenptr = tdep->thumb_breakpoint_size;
      return tdep->thumb_breakpoint;
  /* ARM-mode breakpoint.  */
  *lenptr = tdep->arm_breakpoint_size;
  return tdep->arm_breakpoint;
/* Like arm_breakpoint_from_pc, but yields the breakpoint "kind"
   (length) used by the remote protocol's Z packets via *KINDPTR.  */
arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
  arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
  if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
    /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
       that this is not confused with a 32-bit ARM breakpoint.  */
8924 /* Extract from an array REGBUF containing the (raw) register state a
8925 function return value of type TYPE, and copy that, in virtual
8926 format, into VALBUF. */
/* Extract a function return value of type TYPE from REGS into VALBUF,
   following the floating-point model / APCS rules in effect.  */
arm_extract_return_value (struct type *type, struct regcache *regs,
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  if (TYPE_CODE_FLT == TYPE_CODE (type))
      switch (gdbarch_tdep (gdbarch)->fp_model)
	  /* The value is in register F0 in internal format.  We need to
	     extract the raw value and then convert it to the desired
	  bfd_byte tmpbuf[FP_REGISTER_SIZE];
	  regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
	  convert_from_extended (floatformat_from_type (type), tmpbuf,
				 valbuf, gdbarch_byte_order (gdbarch));
	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
				  valbuf + INT_REGISTER_SIZE);
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  regcache_cooked_read (regs, regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
9018 /* Will a function return an aggregate type in memory or in a
9019 register? Return 0 if an aggregate type can be returned in a
9020 register, 1 if it must be returned in memory. */
/* Decide whether a value of TYPE is returned in memory (1) or in
   registers (0), per the AAPCS or the legacy APCS "integer-like"
   rules depending on the selected ABI.  */
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
  enum type_code code;
  type = check_typedef (type);
  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
      /* The AAPCS says all aggregates not larger than a word are returned
      if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
  /* All aggregate types that won't fit in a register must be returned
  if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
  /* In the ARM ABI, "integer" like aggregate types are returned in
     registers.  For an aggregate type to be integer like, its size
     must be less than or equal to INT_REGISTER_SIZE and the
     offset of each addressable subfield must be zero.  Note that bit
     fields are not addressable, and all addressable subfields of
     unions always start at offset zero.
     This function is based on the behaviour of GCC 2.95.1.
     See: gcc/arm.c: arm_return_in_memory() for details.
     Note: All versions of GCC before GCC 2.95.2 do not set up the
     parameters correctly for a function returning the following
     structure: struct { float f;}; This should be returned in memory,
     not a register.  Richard Earnshaw sent me a patch, but I do not
     know of any way to detect if a function like the above has been
     compiled with the correct calling convention.  */
  /* Assume all other aggregate types can be returned in a register.
     Run a check for structures, unions and arrays.  */
  if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
      /* Need to check if this struct/union is "integer" like.  For
	 this to be true, its size must be less than or equal to
	 INT_REGISTER_SIZE and the offset of each addressable
	 subfield must be zero.  Note that bit fields are not
	 addressable, and unions always start at offset zero.  If any
	 of the subfields is a floating point type, the struct/union
	 cannot be an integer type.  */
      /* For each field in the object, check:
	 1) Is it FP? --> yes, nRc = 1;
	 2) Is it addressable (bitpos != 0) and
	 not packed (bitsize == 0)?
      for (i = 0; i < TYPE_NFIELDS (type); i++)
	  enum type_code field_type_code;
	    = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
	  /* Is it a floating point type field? */
	  if (field_type_code == TYPE_CODE_FLT)
	  /* If bitpos != 0, then we have to care about it.  */
	  if (TYPE_FIELD_BITPOS (type, i) != 0)
	      /* Bitfields are not addressable.  If the field bitsize is
		 zero, then the field is not packed.  Hence it cannot be
		 a bitfield or any other packed type.  */
	      if (TYPE_FIELD_BITSIZE (type, i) == 0)
9134 /* Write into appropriate registers a function return value of type
9135 TYPE, given in virtual format. */
/* Store a function return value of type TYPE, taken from VALBUF, into
   the appropriate registers of REGS (the inverse of
   arm_extract_return_value).  */
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  if (TYPE_CODE (type) == TYPE_CODE_FLT)
      gdb_byte buf[MAX_REGISTER_SIZE];
      switch (gdbarch_tdep (gdbarch)->fp_model)
	  /* FPA hardware: convert to extended format and place in F0.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
      if (TYPE_LENGTH (type) <= 4)
	  /* Values of one word or less are zero/sign-extended and
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);
	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;
	  regcache_cooked_write (regs, regno++, valbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
9229 /* Handle function return values. */
/* gdbarch return_value hook: handle VFP "co-processor register
   candidate" returns first, then fall back to struct-convention or
   register-convention handling.  READBUF/WRITEBUF may be NULL when the
   caller only wants the convention.  */
static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      for (i = 0; i < vfp_base_count; i++)
	  if (reg_char == 'q')
	      /* Quad registers are accessed via their two D halves.  */
	      arm_neon_quad_write (gdbarch, regcache, i,
				   writebuf + i * unit_length);
	      arm_neon_quad_read (gdbarch, regcache, i,
				  readbuf + i * unit_length);
	      /* s/d registers are looked up by user-visible name.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
      return RETURN_VALUE_REGISTER_CONVENTION;
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    arm_store_return_value (valtype, regcache, writebuf);
    arm_extract_return_value (valtype, regcache, readbuf);
  return RETURN_VALUE_REGISTER_CONVENTION;
/* For "longjmp" breakpoints: read the saved PC out of the jmp_buf
   whose address is in the first argument register, storing it in *PC.
   Returns nonzero on success.  */
arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  gdb_byte buf[INT_REGISTER_SIZE];
  /* r0 holds the jmp_buf pointer at the longjmp call site.  */
  jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
  if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
  *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9321 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9322 return the target PC. Otherwise return 0. */
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
  CORE_ADDR start_addr;
  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
      /* Use the name suffix to determine which register contains the
      static char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      /* The last two characters of the stub name are the register
	 name; index into TABLE with them.  */
      int offset = strlen (name) - 2;
      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
	target_len -= strlen ("_from_arm");
      /* Recover "foo" from "__foo_from_{arm,thumb}" and look it up,
	 preferring symbols from the stub's own objfile.  */
      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
  return 0;			/* not a stub */
9403 set_arm_command (char *args, int from_tty)
9405 printf_unfiltered (_("\
9406 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9407 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
/* Top-level handler for "show arm": display the values of all
   "show arm" subcommands.  */
show_arm_command (char *args, int from_tty)
  cmd_show_list (showarmcmdlist, from_tty, "");
9417 arm_update_current_architecture (void)
9419 struct gdbarch_info info;
9421 /* If the current architecture is not ARM, we have nothing to do. */
9422 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9425 /* Update the architecture. */
9426 gdbarch_info_init (&info);
9428 if (!gdbarch_update_p (info))
9429 internal_error (__FILE__, __LINE__, _("could not update architecture"));
/* Sfunc for "set arm fp-model": translate the chosen string back into
   an enum arm_float_model value and rebuild the architecture.  */
set_fp_model_sfunc (char *args, int from_tty,
		    struct cmd_list_element *c)
  for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
    if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
	arm_fp_model = (enum arm_float_model) fp_model;
  /* The command machinery only accepts listed values, so reaching the
     end of the table indicates an internal inconsistency.  */
  if (fp_model == ARM_FLOAT_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
  arm_update_current_architecture ();
/* Handler for "show arm fp-model": report either the explicit model,
   or "auto" plus the model actually in use for the current target.  */
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
/* Sfunc for "set arm abi": translate ARM_ABI_STRING back into an
   enum arm_abi_kind value and rebuild the architecture.  */
arm_set_abi (char *args, int from_tty,
	     struct cmd_list_element *c)
  for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
    if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
	arm_abi_global = (enum arm_abi_kind) arm_abi;
  /* The command machinery only accepts listed values, so reaching the
     end of the table indicates an internal inconsistency.  */
  if (arm_abi == ARM_ABI_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
  arm_update_current_architecture ();
/* Handler for "show arm abi": report either the explicit ABI, or
   "auto" plus the ABI actually selected for the current target.  */
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
/* Handler for "show arm fallback-mode".  */
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
/* Handler for "show arm force-mode".  */
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
9527 /* If the user changes the register disassembly style used for info
9528 register and other commands, we have to also switch the style used
9529 in opcodes for disassembly output. This function is run in the "set
9530 arm disassembly" command, and does that. */
/* Sfunc for "set arm disassembler": propagate the newly chosen
   register-name style down into the opcodes disassembler.  */
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
  set_disassembly_style ();
9539 /* Return the ARM register name corresponding to register I. */
arm_register_name (struct gdbarch *gdbarch, int i)
  const int num_regs = gdbarch_num_regs (gdbarch);
  /* Pseudo registers s0..s31 follow the raw registers when VFP
     pseudos are available.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && i >= num_regs && i < num_regs + 32)
      static const char *const vfp_pseudo_names[] = {
	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
      return vfp_pseudo_names[i - num_regs];
  /* NEON pseudo registers q0..q15 follow the 32 VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && i >= num_regs + 32 && i < num_regs + 32 + 16)
      static const char *const neon_pseudo_names[] = {
	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
      return neon_pseudo_names[i - num_regs - 32];
  if (i >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
  return arm_register_names[i];
/* Push the user-selected register-name style into the opcodes
   disassembler by index into valid_disassembly_styles.  */
set_disassembly_style (void)
  /* Find the style that the user wants.  */
  for (current = 0; current < num_disassembly_options; current++)
    if (disassembly_style == valid_disassembly_styles[current])
  /* The setting machinery guarantees the value is one of ours.  */
  gdb_assert (current < num_disassembly_options);
  /* Synchronize the disassembler.  */
  set_arm_regname_option (current);
9592 /* Test whether the coff symbol specific value corresponds to a Thumb
9596 coff_sym_is_thumb (int val)
9598 return (val == C_THUMBEXT
9599 || val == C_THUMBSTAT
9600 || val == C_THUMBEXTFUNC
9601 || val == C_THUMBSTATFUNC
9602 || val == C_THUMBLABEL);
9605 /* arm_coff_make_msymbol_special()
9606 arm_elf_make_msymbol_special()
9608 These functions test whether the COFF or ELF symbol corresponds to
9609 an address in thumb code, and set a "special" bit in a minimal
9610 symbol to indicate that it does. */
9613 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9615 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9616 == ST_BRANCH_TO_THUMB)
9617 MSYMBOL_SET_SPECIAL (msym);
/* COFF flavour: set the Thumb "special" bit on MSYM when its COFF
   storage class VAL is one of the Thumb classes.  */
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
9628 arm_objfile_data_free (struct objfile *objfile, void *arg)
9630 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
9633 for (i = 0; i < objfile->obfd->section_count; i++)
9634 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
/* Record an ARM ELF mapping symbol ($a/$t/$d) in the per-objfile,
   per-section vectors used by arm_pc_is_thumb, creating the
   per-objfile data on first use.  */
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;
  gdb_assert (name[0] == '$');
  /* Only $a (ARM), $t (Thumb) and $d (data) matter here.  */
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
      /* First mapping symbol for this objfile: allocate the container
	 and one vector slot per BFD section.  */
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
  map_p = &data->section_maps[bfd_get_section (sym)->index];
  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];
  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
      struct arm_mapping_symbol *prev_map_sym;
      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	  /* Out-of-order symbol: binary-search for its slot.  */
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
/* gdbarch write_pc hook: store PC into the regcache and keep the
   CPSR Thumb (T) bit consistent with the destination address, so
   resuming in the correct instruction set.  */
9688 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9690 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9691 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9693 /* If necessary, set the T bit. */
9696 ULONGEST val, t_bit;
9697 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
/* The position of the T bit differs between CPSR and M-profile XPSR;
   arm_psr_thumb_bit returns the right mask for this gdbarch.  */
9698 t_bit = arm_psr_thumb_bit (gdbarch);
9699 if (arm_pc_is_thumb (gdbarch, pc))
/* Set the T bit when PC is Thumb code...  */
9700 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
/* ...otherwise clear it (the masking expressions are elided in this
   chunk; NOTE(review): confirm set/clear operands upstream).  */
9703 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9708 /* Read the contents of a NEON quad register, by reading from two
9709 double registers. This is used to implement the quad pseudo
9710 registers, and for argument passing in case the quad registers are
9711 missing; vectors are passed in quad registers when using the VFP
9712 ABI, even if a NEON unit is not present. REGNUM is the index of
9713 the quad register, in [0, 15]. */
9715 static enum register_status
9716 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9717 int regnum, gdb_byte *buf)
9720 gdb_byte reg_buf[8];
9721 int offset, double_regnum;
9722 enum register_status status;
/* Quad register qN aliases double registers d(2N) and d(2N+1); map
   the even double's name back to its raw register number.  */
9724 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9725 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9728 /* d0 is always the least significant half of q0. */
/* Choose which half of BUF each double lands in based on target byte
   order (the offset assignments are elided in this chunk).  */
9729 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
/* Read the two underlying doubles, propagating any failure status.  */
9734 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9735 if (status != REG_VALID)
9737 memcpy (buf + offset, reg_buf, 8);
9739 offset = 8 - offset;
9740 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9741 if (status != REG_VALID)
9743 memcpy (buf + offset, reg_buf, 8);
/* gdbarch pseudo_register_read hook.  Pseudo registers are the NEON
   quads (q0-q15) and the VFP singles (s0-s31), both synthesized from
   the raw double registers.  NOTE(review): the rebasing of REGNUM to
   a pseudo-relative index (regnum -= num_regs upstream) is elided in
   this chunk; the comparisons below assume the rebased value.  */
9748 static enum register_status
9749 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9750 int regnum, gdb_byte *buf)
9752 const int num_regs = gdbarch_num_regs (gdbarch);
9754 gdb_byte reg_buf[8];
9755 int offset, double_regnum;
/* Pseudo register numbers start after the raw registers.  */
9757 gdb_assert (regnum >= num_regs);
/* Rebased pseudo indices 32..47 are the quad registers q0-q15.  */
9760 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9761 /* Quad-precision register. */
9762 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9765 enum register_status status;
9767 /* Single-precision register. */
9768 gdb_assert (regnum < 32);
9770 /* s0 is always the least significant half of d0. */
/* Pick which 4-byte half of the double holds this single, honoring
   the target byte order.  */
9771 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9772 offset = (regnum & 1) ? 0 : 4;
9774 offset = (regnum & 1) ? 4 : 0;
/* sN lives in double register d(N/2).  */
9776 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9777 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9780 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9781 if (status == REG_VALID)
9782 memcpy (buf, reg_buf + offset, 4);
9787 /* Store the contents of BUF to a NEON quad register, by writing to
9788 two double registers. This is used to implement the quad pseudo
9789 registers, and for argument passing in case the quad registers are
9790 missing; vectors are passed in quad registers when using the VFP
9791 ABI, even if a NEON unit is not present. REGNUM is the index
9792 of the quad register, in [0, 15]. */
9795 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9796 int regnum, const gdb_byte *buf)
9799 int offset, double_regnum;
/* Mirror of arm_neon_quad_read: qN maps to doubles d(2N)/d(2N+1).  */
9801 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9802 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9805 /* d0 is always the least significant half of q0. */
/* Byte-order-dependent offset selection (assignments elided here).  */
9806 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
/* Write each 8-byte half of BUF into its underlying double.  */
9811 regcache_raw_write (regcache, double_regnum, buf + offset);
9812 offset = 8 - offset;
9813 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
/* gdbarch pseudo_register_write hook; the store-side counterpart of
   arm_pseudo_read.  NOTE(review): as in arm_pseudo_read, the rebasing
   of REGNUM past the raw registers is elided in this chunk.  */
9817 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9818 int regnum, const gdb_byte *buf)
9820 const int num_regs = gdbarch_num_regs (gdbarch);
9822 gdb_byte reg_buf[8];
9823 int offset, double_regnum;
9825 gdb_assert (regnum >= num_regs);
/* Rebased indices 32..47 are the quad registers q0-q15.  */
9828 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9829 /* Quad-precision register. */
9830 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9833 /* Single-precision register. */
9834 gdb_assert (regnum < 32);
9836 /* s0 is always the least significant half of d0. */
9837 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9838 offset = (regnum & 1) ? 0 : 4;
9840 offset = (regnum & 1) ? 4 : 0;
9842 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9843 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
/* Read-modify-write: update only the 4-byte half belonging to this
   single, preserving its sibling within the double register.  */
9846 regcache_raw_read (regcache, double_regnum, reg_buf);
9847 memcpy (reg_buf + offset, buf, 4);
9848 regcache_raw_write (regcache, double_regnum, reg_buf);
/* Baton-style accessor for the user-register aliases: BATON points at
   the raw register number to read from FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  return value_of_register (*(const int *) baton, frame);
}
/* OS ABI sniffer for ARM ELF binaries.  Returns the detected
   gdb_osabi, or GDB_OSABI_UNKNOWN to let the generic ELF sniffer
   decide.  */
9859 static enum gdb_osabi
9860 arm_elf_osabi_sniffer (bfd *abfd)
9862 unsigned int elfosabi;
9863 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9865 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
/* ELFOSABI_ARM is ambiguous; scan .note sections for an ABI tag to
   refine the answer.  */
9867 if (elfosabi == ELFOSABI_ARM)
9868 /* GNU tools use this value. Check note sections in this case,
9870 bfd_map_over_sections (abfd,
9871 generic_elf_osabi_sniff_abi_tag_sections,
9874 /* Anything else will be handled by the generic ELF sniffer. */
/* gdbarch register_reggroup_p hook: decide whether REGNUM belongs to
   GROUP.  Only FPS needs special-casing; everything else follows the
   default type-based classification.  */
9879 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9880 struct reggroup *group)
9882 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9883 this, FPS register belongs to save_regroup, restore_reggroup, and
9884 all_reggroup, of course. */
9885 if (regnum == ARM_FPS_REGNUM)
9886 return (group == float_reggroup
9887 || group == save_reggroup
9888 || group == restore_reggroup
9889 || group == all_reggroup);
9891 return default_register_reggroup_p (gdbarch, regnum, group);
9895 /* For backward-compatibility we allow two 'g' packet lengths with
9896 the remote protocol depending on whether FPA registers are
9897 supplied. M-profile targets do not have FPA registers, but some
9898 stubs already exist in the wild which use a 'g' packet which
9899 supplies them albeit with dummy values. The packet format which
9900 includes FPA registers should be considered deprecated for
9901 M-profile targets. */
/* Register candidate 'g'-packet sizes (and the target description
   each implies) so the remote layer can recognize M-profile stubs by
   packet length alone.  */
9904 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9906 if (gdbarch_tdep (gdbarch)->is_m)
9908 /* If we know from the executable this is an M-profile target,
9909 cater for remote targets whose register set layout is the
9910 same as the FPA layout. */
9911 register_remote_g_packet_guess (gdbarch,
9912 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9913 (16 * INT_REGISTER_SIZE)
9914 + (8 * FP_REGISTER_SIZE)
9915 + (2 * INT_REGISTER_SIZE),
9916 tdesc_arm_with_m_fpa_layout);
9918 /* The regular M-profile layout. */
9919 register_remote_g_packet_guess (gdbarch,
9920 /* r0-r12,sp,lr,pc; xpsr */
9921 (16 * INT_REGISTER_SIZE)
9922 + INT_REGISTER_SIZE,
9925 /* M-profile plus M4F VFP. */
9926 register_remote_g_packet_guess (gdbarch,
9927 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9928 (16 * INT_REGISTER_SIZE)
9929 + (16 * VFP_REGISTER_SIZE)
9930 + (2 * INT_REGISTER_SIZE),
9931 tdesc_arm_with_m_vfp_d16);
9934 /* Otherwise we don't have a useful guess. */
9938 /* Initialize the current architecture based on INFO. If possible,
9939 re-use an architecture from ARCHES, which is a list of
9940 architectures already created during this debugging session.
9942 Called e.g. at program startup, when reading a core file, and when
9943 reading a binary file. */
9945 static struct gdbarch *
9946 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9948 struct gdbarch_tdep *tdep;
9949 struct gdbarch *gdbarch;
9950 struct gdbarch_list *best_arch;
9951 enum arm_abi_kind arm_abi = arm_abi_global;
9952 enum arm_float_model fp_model = arm_fp_model;
9953 struct tdesc_arch_data *tdesc_data = NULL;
9955 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9956 int have_wmmx_registers = 0;
9958 int have_fpa_registers = 1;
9959 const struct target_desc *tdesc = info.target_desc;
/* Phase 1: deduce ABI and FP model from the BFD, unless the user
   forced them ("set arm abi" / "set arm fpu").  */
9961 /* If we have an object to base this architecture on, try to determine
9964 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9966 int ei_osabi, e_flags;
9968 switch (bfd_get_flavour (info.abfd))
9970 case bfd_target_aout_flavour:
9971 /* Assume it's an old APCS-style ABI. */
9972 arm_abi = ARM_ABI_APCS;
9975 case bfd_target_coff_flavour:
9976 /* Assume it's an old APCS-style ABI. */
9978 arm_abi = ARM_ABI_APCS;
9981 case bfd_target_elf_flavour:
9982 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9983 e_flags = elf_elfheader (info.abfd)->e_flags;
9985 if (ei_osabi == ELFOSABI_ARM)
9987 /* GNU tools used to use this value, but do not for EABI
9988 objects. There's nowhere to tag an EABI version
9989 anyway, so assume APCS. */
9990 arm_abi = ARM_ABI_APCS;
9992 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9994 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9995 int attr_arch, attr_profile;
9999 case EF_ARM_EABI_UNKNOWN:
10000 /* Assume GNU tools. */
10001 arm_abi = ARM_ABI_APCS;
10004 case EF_ARM_EABI_VER4:
10005 case EF_ARM_EABI_VER5:
10006 arm_abi = ARM_ABI_AAPCS;
10007 /* EABI binaries default to VFP float ordering.
10008 They may also contain build attributes that can
10009 be used to identify if the VFP argument-passing
10011 if (fp_model == ARM_FLOAT_AUTO)
/* Consult the Tag_ABI_VFP_args build attribute to pick between the
   base (softvfp) and VFP-register calling variants.  */
10014 switch (bfd_elf_get_obj_attr_int (info.abfd,
10018 case AEABI_VFP_args_base:
10019 /* "The user intended FP parameter/result
10020 passing to conform to AAPCS, base
10022 fp_model = ARM_FLOAT_SOFT_VFP;
10024 case AEABI_VFP_args_vfp:
10025 /* "The user intended FP parameter/result
10026 passing to conform to AAPCS, VFP
10028 fp_model = ARM_FLOAT_VFP;
10030 case AEABI_VFP_args_toolchain:
10031 /* "The user intended FP parameter/result
10032 passing to conform to tool chain-specific
10033 conventions" - we don't know any such
10034 conventions, so leave it as "auto". */
10036 case AEABI_VFP_args_compatible:
10037 /* "Code is compatible with both the base
10038 and VFP variants; the user did not permit
10039 non-variadic functions to pass FP
10040 parameters/results" - leave it as
10044 /* Attribute value not mentioned in the
10045 November 2012 ABI, so leave it as
10050 fp_model = ARM_FLOAT_SOFT_VFP;
10056 /* Leave it as "auto". */
10057 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10062 /* Detect M-profile programs. This only works if the
10063 executable file includes build attributes; GCC does
10064 copy them to the executable, but e.g. RealView does
10066 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10068 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10070 Tag_CPU_arch_profile);
10071 /* GCC specifies the profile for v6-M; RealView only
10072 specifies the profile for architectures starting with
10073 V7 (as opposed to architectures with a tag
10074 numerically greater than TAG_CPU_ARCH_V7). */
10075 if (!tdesc_has_registers (tdesc)
10076 && (attr_arch == TAG_CPU_ARCH_V6_M
10077 || attr_arch == TAG_CPU_ARCH_V6S_M
10078 || attr_profile == 'M'))
/* Fall back to the legacy e_flags float bits when no build attribute
   fixed the FP model above.  */
10083 if (fp_model == ARM_FLOAT_AUTO)
10085 int e_flags = elf_elfheader (info.abfd)->e_flags;
10087 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10090 /* Leave it as "auto". Strictly speaking this case
10091 means FPA, but almost nobody uses that now, and
10092 many toolchains fail to set the appropriate bits
10093 for the floating-point model they use. */
10095 case EF_ARM_SOFT_FLOAT:
10096 fp_model = ARM_FLOAT_SOFT_FPA;
10098 case EF_ARM_VFP_FLOAT:
10099 fp_model = ARM_FLOAT_VFP;
10101 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10102 fp_model = ARM_FLOAT_SOFT_VFP;
/* BE8 binaries: data is big-endian but instructions stay
   little-endian.  */
10107 if (e_flags & EF_ARM_BE8)
10108 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10113 /* Leave it as "auto". */
/* Phase 2: validate the target description (if any) and note which
   optional register sets (FPA, iWMMXt, VFP, NEON) it provides.  */
10118 /* Check any target description for validity. */
10119 if (tdesc_has_registers (tdesc))
10121 /* For most registers we require GDB's default names; but also allow
10122 the numeric names for sp / lr / pc, as a convenience. */
10123 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10124 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10125 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10127 const struct tdesc_feature *feature;
10130 feature = tdesc_find_feature (tdesc,
10131 "org.gnu.gdb.arm.core");
10132 if (feature == NULL)
10134 feature = tdesc_find_feature (tdesc,
10135 "org.gnu.gdb.arm.m-profile");
10136 if (feature == NULL)
10142 tdesc_data = tdesc_data_alloc ();
10145 for (i = 0; i < ARM_SP_REGNUM; i++)
10146 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10147 arm_register_names[i]);
10148 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10151 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10154 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
/* M-profile exposes "xpsr" where A/R-profile has "cpsr".  */
10158 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10159 ARM_PS_REGNUM, "xpsr");
10161 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10162 ARM_PS_REGNUM, "cpsr");
10166 tdesc_data_cleanup (tdesc_data);
10170 feature = tdesc_find_feature (tdesc,
10171 "org.gnu.gdb.arm.fpa");
10172 if (feature != NULL)
10175 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10176 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10177 arm_register_names[i]);
10180 tdesc_data_cleanup (tdesc_data);
10185 have_fpa_registers = 0;
10187 feature = tdesc_find_feature (tdesc,
10188 "org.gnu.gdb.xscale.iwmmxt");
10189 if (feature != NULL)
10191 static const char *const iwmmxt_names[] = {
10192 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10193 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10194 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10195 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10199 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10201 &= tdesc_numbered_register (feature, tdesc_data, i,
10202 iwmmxt_names[i - ARM_WR0_REGNUM]);
10204 /* Check for the control registers, but do not fail if they
10206 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10207 tdesc_numbered_register (feature, tdesc_data, i,
10208 iwmmxt_names[i - ARM_WR0_REGNUM]);
10210 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10212 &= tdesc_numbered_register (feature, tdesc_data, i,
10213 iwmmxt_names[i - ARM_WR0_REGNUM]);
10217 tdesc_data_cleanup (tdesc_data);
10221 have_wmmx_registers = 1;
10224 /* If we have a VFP unit, check whether the single precision registers
10225 are present. If not, then we will synthesize them as pseudo
10227 feature = tdesc_find_feature (tdesc,
10228 "org.gnu.gdb.arm.vfp");
10229 if (feature != NULL)
10231 static const char *const vfp_double_names[] = {
10232 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10233 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10234 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10235 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10238 /* Require the double precision registers. There must be either
10241 for (i = 0; i < 32; i++)
10243 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10245 vfp_double_names[i]);
/* A D16 unit supplying only d0-d15 is acceptable.  */
10249 if (!valid_p && i == 16)
10252 /* Also require FPSCR. */
10253 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10254 ARM_FPSCR_REGNUM, "fpscr");
10257 tdesc_data_cleanup (tdesc_data);
/* If the stub does not supply s0-s31 itself, synthesize them as
   pseudo registers backed by the doubles.  */
10261 if (tdesc_unnumbered_register (feature, "s0") == 0)
10262 have_vfp_pseudos = 1;
10264 vfp_register_count = i;
10266 /* If we have VFP, also check for NEON. The architecture allows
10267 NEON without VFP (integer vector operations only), but GDB
10268 does not support that. */
10269 feature = tdesc_find_feature (tdesc,
10270 "org.gnu.gdb.arm.neon");
10271 if (feature != NULL)
10273 /* NEON requires 32 double-precision registers. */
10276 tdesc_data_cleanup (tdesc_data);
10280 /* If there are quad registers defined by the stub, use
10281 their type; otherwise (normally) provide them with
10282 the default type. */
10283 if (tdesc_unnumbered_register (feature, "q0") == 0)
10284 have_neon_pseudos = 1;
/* Phase 3: reuse a previously-created gdbarch whose discriminators
   (ABI, FP model, M-profile flag) all match.  */
10291 /* If there is already a candidate, use it. */
10292 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10294 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10296 if (arm_abi != ARM_ABI_AUTO
10297 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10300 if (fp_model != ARM_FLOAT_AUTO
10301 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10304 /* There are various other properties in tdep that we do not
10305 need to check here: those derived from a target description,
10306 since gdbarches with a different target description are
10307 automatically disqualified. */
10309 /* Do check is_m, though, since it might come from the binary. */
10310 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10313 /* Found a match. */
10317 if (best_arch != NULL)
10319 if (tdesc_data != NULL)
10320 tdesc_data_cleanup (tdesc_data);
10321 return best_arch->gdbarch;
/* Phase 4: no reusable candidate; build a fresh gdbarch.  */
10324 tdep = XCNEW (struct gdbarch_tdep);
10325 gdbarch = gdbarch_alloc (&info, tdep);
10327 /* Record additional information about the architecture we are defining.
10328 These are gdbarch discriminators, like the OSABI. */
10329 tdep->arm_abi = arm_abi;
10330 tdep->fp_model = fp_model;
10332 tdep->have_fpa_registers = have_fpa_registers;
10333 tdep->have_wmmx_registers = have_wmmx_registers;
10334 gdb_assert (vfp_register_count == 0
10335 || vfp_register_count == 16
10336 || vfp_register_count == 32);
10337 tdep->vfp_register_count = vfp_register_count;
10338 tdep->have_vfp_pseudos = have_vfp_pseudos;
10339 tdep->have_neon_pseudos = have_neon_pseudos;
10340 tdep->have_neon = have_neon;
10342 arm_register_g_packet_guesses (gdbarch);
/* Select breakpoint instruction encodings by code byte order.  */
10345 switch (info.byte_order_for_code)
10347 case BFD_ENDIAN_BIG:
10348 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10349 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10350 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10351 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10355 case BFD_ENDIAN_LITTLE:
10356 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10357 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10358 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10359 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10364 internal_error (__FILE__, __LINE__,
10365 _("arm_gdbarch_init: bad byte order for float format"));
10368 /* On ARM targets char defaults to unsigned. */
10369 set_gdbarch_char_signed (gdbarch, 0);
10371 /* Note: for displaced stepping, this includes the breakpoint, and one word
10372 of additional scratch space. This setting isn't used for anything beside
10373 displaced stepping at present. */
10374 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10376 /* This should be low enough for everything. */
10377 tdep->lowest_pc = 0x20;
10378 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10380 /* The default, for both APCS and AAPCS, is to return small
10381 structures in registers. */
10382 tdep->struct_return = reg_struct_return;
10384 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10385 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10387 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10389 /* Frame handling. */
10390 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10391 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10392 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10394 frame_base_set_default (gdbarch, &arm_normal_base);
10396 /* Address manipulation. */
10397 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10399 /* Advance PC across function entry code. */
10400 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10402 /* Detect whether PC is at a point where the stack has been destroyed. */
10403 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10405 /* Skip trampolines. */
10406 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10408 /* The stack grows downward. */
10409 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10411 /* Breakpoint manipulation. */
10412 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10413 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10414 arm_remote_breakpoint_from_pc);
10416 /* Information about registers, etc. */
10417 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10418 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10419 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10420 set_gdbarch_register_type (gdbarch, arm_register_type);
10421 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10423 /* This "info float" is FPA-specific. Use the generic version if we
10424 do not have FPA. */
10425 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10426 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10428 /* Internal <-> external register number maps. */
10429 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10430 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10432 set_gdbarch_register_name (gdbarch, arm_register_name);
10434 /* Returning results. */
10435 set_gdbarch_return_value (gdbarch, arm_return_value);
10438 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10440 /* Minsymbol frobbing. */
10441 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10442 set_gdbarch_coff_make_msymbol_special (gdbarch,
10443 arm_coff_make_msymbol_special);
10444 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10446 /* Thumb-2 IT block support. */
10447 set_gdbarch_adjust_breakpoint_address (gdbarch,
10448 arm_adjust_breakpoint_address);
10450 /* Virtual tables. */
10451 set_gdbarch_vbit_in_delta (gdbarch, 1);
10453 /* Hook in the ABI-specific overrides, if they have been registered. */
10454 gdbarch_init_osabi (info, gdbarch);
10456 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
/* Unwinder priority: M-profile exception frames and stub frames first,
   then DWARF CFI, then exidx tables, then prologue analysis.  */
10458 /* Add some default predicates. */
10460 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10461 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10462 dwarf2_append_unwinders (gdbarch);
10463 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10464 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10466 /* Now we have tuned the configuration, set a few final things,
10467 based on what the OS ABI has told us. */
10469 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10470 binaries are always marked. */
10471 if (tdep->arm_abi == ARM_ABI_AUTO)
10472 tdep->arm_abi = ARM_ABI_APCS;
10474 /* Watchpoints are not steppable. */
10475 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10477 /* We used to default to FPA for generic ARM, but almost nobody
10478 uses that now, and we now provide a way for the user to force
10479 the model. So default to the most useful variant. */
10480 if (tdep->fp_model == ARM_FLOAT_AUTO)
10481 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10483 if (tdep->jb_pc >= 0)
10484 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
/* FPA-era soft-float stores doubles as two little-endian words in
   big-word order; plain IEEE otherwise.  */
10486 /* Floating point sizes and format. */
10487 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10488 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10490 set_gdbarch_double_format
10491 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10492 set_gdbarch_long_double_format
10493 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10497 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10498 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10501 if (have_vfp_pseudos)
10503 /* NOTE: These are the only pseudo registers used by
10504 the ARM target at the moment. If more are added, a
10505 little more care in numbering will be needed. */
10507 int num_pseudos = 32;
10508 if (have_neon_pseudos)
10510 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10511 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10512 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10517 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10519 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10521 /* Override tdesc_register_type to adjust the types of VFP
10522 registers for NEON. */
10523 set_gdbarch_register_type (gdbarch, arm_register_type);
10526 /* Add standard register aliases. We add aliases even for those
10527 names which are used by the current architecture - it's simpler,
10528 and does no harm, since nothing ever lists user registers. */
10529 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10530 user_reg_add (gdbarch, arm_register_aliases[i].name,
10531 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
/* gdbarch dump_tdep hook ("maint print architecture"): print the
   ARM-specific tdep fields to FILE.  Only lowest_pc is shown here;
   other fields may be printed in lines elided from this chunk.  */
10537 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10539 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10544 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10545 (unsigned long) tdep->lowest_pc);
10548 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
/* Module initializer: register the ARM gdbarch, objfile data keys,
   the ELF OS ABI sniffer, the standard target descriptions, and all
   "set/show arm ..." user commands.  */
10551 _initialize_arm_tdep (void)
10553 struct ui_file *stb;
10555 struct cmd_list_element *new_set, *new_show;
10556 const char *setname;
10557 const char *setdesc;
10558 const char *const *regnames;
10560 static char *helptext;
10561 char regdesc[1024], *rdptr = regdesc;
10562 size_t rest = sizeof (regdesc);
10564 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10566 arm_objfile_data_key
10567 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10569 /* Add ourselves to objfile event chain. */
10570 observer_attach_new_objfile (arm_exidx_new_objfile);
10572 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10574 /* Register an ELF OS ABI sniffer for ARM binaries. */
10575 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10576 bfd_target_elf_flavour,
10577 arm_elf_osabi_sniffer);
10579 /* Initialize the standard target descriptions. */
10580 initialize_tdesc_arm_with_m ();
10581 initialize_tdesc_arm_with_m_fpa_layout ();
10582 initialize_tdesc_arm_with_m_vfp_d16 ();
10583 initialize_tdesc_arm_with_iwmmxt ();
10584 initialize_tdesc_arm_with_vfpv2 ();
10585 initialize_tdesc_arm_with_vfpv3 ();
10586 initialize_tdesc_arm_with_neon ();
10588 /* Get the number of possible sets of register names defined in opcodes. */
10589 num_disassembly_options = get_arm_regname_num_options ();
10591 /* Add root prefix command for all "set arm"/"show arm" commands. */
10592 add_prefix_cmd ("arm", no_class, set_arm_command,
10593 _("Various ARM-specific commands."),
10594 &setarmcmdlist, "set arm ", 0, &setlist);
10596 add_prefix_cmd ("arm", no_class, show_arm_command,
10597 _("Various ARM-specific commands."),
10598 &showarmcmdlist, "show arm ", 0, &showlist);
10600 /* Sync the opcode insn printer with our register viewer. */
10601 parse_arm_disassembler_option ("reg-names-std");
10603 /* Initialize the array that will be passed to
10604 add_setshow_enum_cmd(). */
10605 valid_disassembly_styles = XNEWVEC (const char *,
10606 num_disassembly_options + 1);
/* Build the style table and the human-readable description buffer.  */
10607 for (i = 0; i < num_disassembly_options; i++)
/* NOTE(review): "®names" is a mis-encoding (HTML entity "&reg;") of
   "&regnames" — restore before compiling.  */
10609 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10610 valid_disassembly_styles[i] = setname;
10611 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10614 /* When we find the default names, tell the disassembler to use
10616 if (!strcmp (setname, "std"))
10618 disassembly_style = setname;
10619 set_arm_regname_option (i);
10622 /* Mark the end of valid options. */
10623 valid_disassembly_styles[num_disassembly_options] = NULL;
10625 /* Create the help text. */
10626 stb = mem_fileopen ();
10627 fprintf_unfiltered (stb, "%s%s%s",
10628 _("The valid values are:\n"),
10630 _("The default is \"std\"."));
10631 helptext = ui_file_xstrdup (stb, NULL);
10632 ui_file_delete (stb);
10634 add_setshow_enum_cmd("disassembler", no_class,
10635 valid_disassembly_styles, &disassembly_style,
10636 _("Set the disassembly style."),
10637 _("Show the disassembly style."),
10639 set_disassembly_style_sfunc,
10640 NULL, /* FIXME: i18n: The disassembly style is
10642 &setarmcmdlist, &showarmcmdlist);
10644 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10645 _("Set usage of ARM 32-bit mode."),
10646 _("Show usage of ARM 32-bit mode."),
10647 _("When off, a 26-bit PC will be used."),
10649 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10651 &setarmcmdlist, &showarmcmdlist);
10653 /* Add a command to allow the user to force the FPU model. */
/* NOTE(review): "¤t_fp_model" is a mis-encoding (HTML entity
   "&curren;") of "&current_fp_model" — restore before compiling.
   Also, the help string below reads "typefrom" (missing space);
   fixing that user-visible string is a code change, flagged here.  */
10654 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10655 _("Set the floating point type."),
10656 _("Show the floating point type."),
10657 _("auto - Determine the FP typefrom the OS-ABI.\n\
10658 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10659 fpa - FPA co-processor (GCC compiled).\n\
10660 softvfp - Software FP with pure-endian doubles.\n\
10661 vfp - VFP co-processor."),
10662 set_fp_model_sfunc, show_fp_model,
10663 &setarmcmdlist, &showarmcmdlist);
10665 /* Add a command to allow the user to force the ABI. */
10666 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10668 _("Show the ABI."),
10669 NULL, arm_set_abi, arm_show_abi,
10670 &setarmcmdlist, &showarmcmdlist);
10672 /* Add two commands to allow the user to force the assumed
10674 add_setshow_enum_cmd ("fallback-mode", class_support,
10675 arm_mode_strings, &arm_fallback_mode_string,
10676 _("Set the mode assumed when symbols are unavailable."),
10677 _("Show the mode assumed when symbols are unavailable."),
10678 NULL, NULL, arm_show_fallback_mode,
10679 &setarmcmdlist, &showarmcmdlist);
10680 add_setshow_enum_cmd ("force-mode", class_support,
10681 arm_mode_strings, &arm_force_mode_string,
10682 _("Set the mode assumed even when symbols are available."),
10683 _("Show the mode assumed even when symbols are available."),
10684 NULL, NULL, arm_show_force_mode,
10685 &setarmcmdlist, &showarmcmdlist);
10687 /* Debugging flag. */
10688 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10689 _("Set ARM debugging."),
10690 _("Show ARM debugging."),
10691 _("When on, arm-specific debugging is enabled."),
10693 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10694 &setdebuglist, &showdebuglist);
/* ARM-reversible process record data structures.  */

/* Instruction widths used by the record/replay decoder.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4

/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers out of RECORD_BUF into a freshly
   allocated array assigned to REGS.  No-op when LENGTH is zero.
   (Fixes the mis-encoded "&REGS[0]" — it had been mangled into the
   HTML entity form "®S[0]".)  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH arm_mem_r records out of RECORD_BUF into a freshly
   allocated array assigned to MEMS.  No-op when LENGTH is zero.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy (&MEMS->len, &RECORD_BUF[0], \
                        sizeof (struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10737 /* ARM memory record structure. */
10740 uint32_t len; /* Record length. */
10741 uint32_t addr; /* Memory address. */
10744 /* ARM instruction record contains opcode of current insn
10745 and execution state (before entry to decode_insn()),
10746 contains list of to-be-modified registers and
10747 memory blocks (on return from decode_insn()). */
10749 typedef struct insn_decode_record_t
10751 struct gdbarch *gdbarch;
10752 struct regcache *regcache;
10753 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10754 uint32_t arm_insn; /* Should accommodate thumb. */
10755 uint32_t cond; /* Condition code. */
10756 uint32_t opcode; /* Insn opcode. */
10757 uint32_t decode; /* Insn decode bits. */
10758 uint32_t mem_rec_count; /* No of mem records. */
10759 uint32_t reg_rec_count; /* No of reg records. */
10760 uint32_t *arm_regs; /* Registers to be saved for this record. */
10761 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10762 } insn_decode_record;
/* Checks ARM SBZ and SBO mandatory fields.  INSN is the instruction
   word, BIT_NUM is the 1-based position of the field's lowest bit and
   LEN its width in bits.  If SBO is non-zero the field must read as
   all ones ("should be one"); otherwise it must read as all zeros
   ("should be zero").  Returns 1 when the field is valid, else 0.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  /* An empty field is trivially valid.  */
  if (!len)
    return 1;

  if (sbo)
    {
      /* Every one of the LEN bits must be set.  */
      uint32_t mask = (len >= 32) ? 0xffffffffu : ((1u << len) - 1);

      return ones == mask;
    }
  else
    {
      /* Every one of the LEN bits must be clear.  */
      return ones == 0;
    }
}

/* Overall result of an ARM record operation.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Store variants handled by arm_record_strx.  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10810 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10811 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10814 struct regcache *reg_cache = arm_insn_r->regcache;
10815 ULONGEST u_regval[2]= {0};
10817 uint32_t reg_src1 = 0, reg_src2 = 0;
10818 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10819 uint32_t opcode1 = 0;
10821 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10822 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10823 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10826 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10828 /* 1) Handle misc store, immediate offset. */
10829 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10830 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10831 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10832 regcache_raw_read_unsigned (reg_cache, reg_src1,
10834 if (ARM_PC_REGNUM == reg_src1)
10836 /* If R15 was used as Rn, hence current PC+8. */
10837 u_regval[0] = u_regval[0] + 8;
10839 offset_8 = (immed_high << 4) | immed_low;
10840 /* Calculate target store address. */
10841 if (14 == arm_insn_r->opcode)
10843 tgt_mem_addr = u_regval[0] + offset_8;
10847 tgt_mem_addr = u_regval[0] - offset_8;
10849 if (ARM_RECORD_STRH == str_type)
10851 record_buf_mem[0] = 2;
10852 record_buf_mem[1] = tgt_mem_addr;
10853 arm_insn_r->mem_rec_count = 1;
10855 else if (ARM_RECORD_STRD == str_type)
10857 record_buf_mem[0] = 4;
10858 record_buf_mem[1] = tgt_mem_addr;
10859 record_buf_mem[2] = 4;
10860 record_buf_mem[3] = tgt_mem_addr + 4;
10861 arm_insn_r->mem_rec_count = 2;
10864 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10866 /* 2) Store, register offset. */
10868 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10870 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10871 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10872 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10873 if (15 == reg_src2)
10875 /* If R15 was used as Rn, hence current PC+8. */
10876 u_regval[0] = u_regval[0] + 8;
10878 /* Calculate target store address, Rn +/- Rm, register offset. */
10879 if (12 == arm_insn_r->opcode)
10881 tgt_mem_addr = u_regval[0] + u_regval[1];
10885 tgt_mem_addr = u_regval[1] - u_regval[0];
10887 if (ARM_RECORD_STRH == str_type)
10889 record_buf_mem[0] = 2;
10890 record_buf_mem[1] = tgt_mem_addr;
10891 arm_insn_r->mem_rec_count = 1;
10893 else if (ARM_RECORD_STRD == str_type)
10895 record_buf_mem[0] = 4;
10896 record_buf_mem[1] = tgt_mem_addr;
10897 record_buf_mem[2] = 4;
10898 record_buf_mem[3] = tgt_mem_addr + 4;
10899 arm_insn_r->mem_rec_count = 2;
10902 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10903 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10905 /* 3) Store, immediate pre-indexed. */
10906 /* 5) Store, immediate post-indexed. */
10907 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10908 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10909 offset_8 = (immed_high << 4) | immed_low;
10910 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10911 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10912 /* Calculate target store address, Rn +/- Rm, register offset. */
10913 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10915 tgt_mem_addr = u_regval[0] + offset_8;
10919 tgt_mem_addr = u_regval[0] - offset_8;
10921 if (ARM_RECORD_STRH == str_type)
10923 record_buf_mem[0] = 2;
10924 record_buf_mem[1] = tgt_mem_addr;
10925 arm_insn_r->mem_rec_count = 1;
10927 else if (ARM_RECORD_STRD == str_type)
10929 record_buf_mem[0] = 4;
10930 record_buf_mem[1] = tgt_mem_addr;
10931 record_buf_mem[2] = 4;
10932 record_buf_mem[3] = tgt_mem_addr + 4;
10933 arm_insn_r->mem_rec_count = 2;
10935 /* Record Rn also as it changes. */
10936 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10937 arm_insn_r->reg_rec_count = 1;
10939 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10940 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10942 /* 4) Store, register pre-indexed. */
10943 /* 6) Store, register post -indexed. */
10944 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10945 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10946 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10947 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10948 /* Calculate target store address, Rn +/- Rm, register offset. */
10949 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10951 tgt_mem_addr = u_regval[0] + u_regval[1];
10955 tgt_mem_addr = u_regval[1] - u_regval[0];
10957 if (ARM_RECORD_STRH == str_type)
10959 record_buf_mem[0] = 2;
10960 record_buf_mem[1] = tgt_mem_addr;
10961 arm_insn_r->mem_rec_count = 1;
10963 else if (ARM_RECORD_STRD == str_type)
10965 record_buf_mem[0] = 4;
10966 record_buf_mem[1] = tgt_mem_addr;
10967 record_buf_mem[2] = 4;
10968 record_buf_mem[3] = tgt_mem_addr + 4;
10969 arm_insn_r->mem_rec_count = 2;
10971 /* Record Rn also as it changes. */
10972 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10973 arm_insn_r->reg_rec_count = 1;
10978 /* Handling ARM extension space insns. */
10981 arm_record_extension_space (insn_decode_record *arm_insn_r)
10983 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10984 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10985 uint32_t record_buf[8], record_buf_mem[8];
10986 uint32_t reg_src1 = 0;
10987 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10988 struct regcache *reg_cache = arm_insn_r->regcache;
10989 ULONGEST u_regval = 0;
10991 gdb_assert (!INSN_RECORDED(arm_insn_r));
10992 /* Handle unconditional insn extension space. */
10994 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10995 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10996 if (arm_insn_r->cond)
10998 /* PLD has no affect on architectural state, it just affects
11000 if (5 == ((opcode1 & 0xE0) >> 5))
11003 record_buf[0] = ARM_PS_REGNUM;
11004 record_buf[1] = ARM_LR_REGNUM;
11005 arm_insn_r->reg_rec_count = 2;
11007 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11011 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11012 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11015 /* Undefined instruction on ARM V5; need to handle if later
11016 versions define it. */
11019 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11020 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11021 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11023 /* Handle arithmetic insn extension space. */
11024 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11025 && !INSN_RECORDED(arm_insn_r))
11027 /* Handle MLA(S) and MUL(S). */
11028 if (0 <= insn_op1 && 3 >= insn_op1)
11030 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11031 record_buf[1] = ARM_PS_REGNUM;
11032 arm_insn_r->reg_rec_count = 2;
11034 else if (4 <= insn_op1 && 15 >= insn_op1)
11036 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11037 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11038 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11039 record_buf[2] = ARM_PS_REGNUM;
11040 arm_insn_r->reg_rec_count = 3;
11044 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11045 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11046 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11048 /* Handle control insn extension space. */
11050 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11051 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11053 if (!bit (arm_insn_r->arm_insn,25))
11055 if (!bits (arm_insn_r->arm_insn, 4, 7))
11057 if ((0 == insn_op1) || (2 == insn_op1))
11060 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11061 arm_insn_r->reg_rec_count = 1;
11063 else if (1 == insn_op1)
11065 /* CSPR is going to be changed. */
11066 record_buf[0] = ARM_PS_REGNUM;
11067 arm_insn_r->reg_rec_count = 1;
11069 else if (3 == insn_op1)
11071 /* SPSR is going to be changed. */
11072 /* We need to get SPSR value, which is yet to be done. */
11073 printf_unfiltered (_("Process record does not support "
11074 "instruction 0x%0x at address %s.\n"),
11075 arm_insn_r->arm_insn,
11076 paddress (arm_insn_r->gdbarch,
11077 arm_insn_r->this_addr));
11081 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11086 record_buf[0] = ARM_PS_REGNUM;
11087 arm_insn_r->reg_rec_count = 1;
11089 else if (3 == insn_op1)
11092 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11093 arm_insn_r->reg_rec_count = 1;
11096 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11099 record_buf[0] = ARM_PS_REGNUM;
11100 record_buf[1] = ARM_LR_REGNUM;
11101 arm_insn_r->reg_rec_count = 2;
11103 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11105 /* QADD, QSUB, QDADD, QDSUB */
11106 record_buf[0] = ARM_PS_REGNUM;
11107 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11108 arm_insn_r->reg_rec_count = 2;
11110 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11113 record_buf[0] = ARM_PS_REGNUM;
11114 record_buf[1] = ARM_LR_REGNUM;
11115 arm_insn_r->reg_rec_count = 2;
11117 /* Save SPSR also;how? */
11118 printf_unfiltered (_("Process record does not support "
11119 "instruction 0x%0x at address %s.\n"),
11120 arm_insn_r->arm_insn,
11121 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11124 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11125 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11126 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11127 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11130 if (0 == insn_op1 || 1 == insn_op1)
11132 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11133 /* We dont do optimization for SMULW<y> where we
11135 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11136 record_buf[1] = ARM_PS_REGNUM;
11137 arm_insn_r->reg_rec_count = 2;
11139 else if (2 == insn_op1)
11142 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11143 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11144 arm_insn_r->reg_rec_count = 2;
11146 else if (3 == insn_op1)
11149 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11150 arm_insn_r->reg_rec_count = 1;
11156 /* MSR : immediate form. */
11159 /* CSPR is going to be changed. */
11160 record_buf[0] = ARM_PS_REGNUM;
11161 arm_insn_r->reg_rec_count = 1;
11163 else if (3 == insn_op1)
11165 /* SPSR is going to be changed. */
11166 /* we need to get SPSR value, which is yet to be done */
11167 printf_unfiltered (_("Process record does not support "
11168 "instruction 0x%0x at address %s.\n"),
11169 arm_insn_r->arm_insn,
11170 paddress (arm_insn_r->gdbarch,
11171 arm_insn_r->this_addr));
11177 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11178 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11179 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11181 /* Handle load/store insn extension space. */
11183 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11184 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11185 && !INSN_RECORDED(arm_insn_r))
11190 /* These insn, changes register and memory as well. */
11191 /* SWP or SWPB insn. */
11192 /* Get memory address given by Rn. */
11193 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11194 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11195 /* SWP insn ?, swaps word. */
11196 if (8 == arm_insn_r->opcode)
11198 record_buf_mem[0] = 4;
11202 /* SWPB insn, swaps only byte. */
11203 record_buf_mem[0] = 1;
11205 record_buf_mem[1] = u_regval;
11206 arm_insn_r->mem_rec_count = 1;
11207 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11208 arm_insn_r->reg_rec_count = 1;
11210 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11213 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11216 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11219 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11220 record_buf[1] = record_buf[0] + 1;
11221 arm_insn_r->reg_rec_count = 2;
11223 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11226 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11229 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11231 /* LDRH, LDRSB, LDRSH. */
11232 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11233 arm_insn_r->reg_rec_count = 1;
11238 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11239 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11240 && !INSN_RECORDED(arm_insn_r))
11243 /* Handle coprocessor insn extension space. */
11246 /* To be done for ARMv5 and later; as of now we return -1. */
11248 printf_unfiltered (_("Process record does not support instruction x%0x "
11249 "at address %s.\n"),arm_insn_r->arm_insn,
11250 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11253 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11254 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11259 /* Handling opcode 000 insns. */
11262 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11264 struct regcache *reg_cache = arm_insn_r->regcache;
11265 uint32_t record_buf[8], record_buf_mem[8];
11266 ULONGEST u_regval[2] = {0};
11268 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11269 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11270 uint32_t opcode1 = 0;
11272 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11273 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11274 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11276 /* Data processing insn /multiply insn. */
11277 if (9 == arm_insn_r->decode
11278 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11279 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11281 /* Handle multiply instructions. */
11282 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11283 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11285 /* Handle MLA and MUL. */
11286 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11287 record_buf[1] = ARM_PS_REGNUM;
11288 arm_insn_r->reg_rec_count = 2;
11290 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11292 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11293 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11294 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11295 record_buf[2] = ARM_PS_REGNUM;
11296 arm_insn_r->reg_rec_count = 3;
11299 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11300 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11302 /* Handle misc load insns, as 20th bit (L = 1). */
11303 /* LDR insn has a capability to do branching, if
11304 MOV LR, PC is precceded by LDR insn having Rn as R15
11305 in that case, it emulates branch and link insn, and hence we
11306 need to save CSPR and PC as well. I am not sure this is right
11307 place; as opcode = 010 LDR insn make this happen, if R15 was
11309 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11310 if (15 != reg_dest)
11312 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11313 arm_insn_r->reg_rec_count = 1;
11317 record_buf[0] = reg_dest;
11318 record_buf[1] = ARM_PS_REGNUM;
11319 arm_insn_r->reg_rec_count = 2;
11322 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11323 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11324 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11325 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11327 /* Handle MSR insn. */
11328 if (9 == arm_insn_r->opcode)
11330 /* CSPR is going to be changed. */
11331 record_buf[0] = ARM_PS_REGNUM;
11332 arm_insn_r->reg_rec_count = 1;
11336 /* SPSR is going to be changed. */
11337 /* How to read SPSR value? */
11338 printf_unfiltered (_("Process record does not support instruction "
11339 "0x%0x at address %s.\n"),
11340 arm_insn_r->arm_insn,
11341 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11345 else if (9 == arm_insn_r->decode
11346 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11347 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11349 /* Handling SWP, SWPB. */
11350 /* These insn, changes register and memory as well. */
11351 /* SWP or SWPB insn. */
11353 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11354 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11355 /* SWP insn ?, swaps word. */
11356 if (8 == arm_insn_r->opcode)
11358 record_buf_mem[0] = 4;
11362 /* SWPB insn, swaps only byte. */
11363 record_buf_mem[0] = 1;
11365 record_buf_mem[1] = u_regval[0];
11366 arm_insn_r->mem_rec_count = 1;
11367 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11368 arm_insn_r->reg_rec_count = 1;
11370 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11371 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11373 /* Handle BLX, branch and link/exchange. */
11374 if (9 == arm_insn_r->opcode)
11376 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11377 and R14 stores the return address. */
11378 record_buf[0] = ARM_PS_REGNUM;
11379 record_buf[1] = ARM_LR_REGNUM;
11380 arm_insn_r->reg_rec_count = 2;
11383 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11385 /* Handle enhanced software breakpoint insn, BKPT. */
11386 /* CPSR is changed to be executed in ARM state, disabling normal
11387 interrupts, entering abort mode. */
11388 /* According to high vector configuration PC is set. */
11389 /* user hit breakpoint and type reverse, in
11390 that case, we need to go back with previous CPSR and
11391 Program Counter. */
11392 record_buf[0] = ARM_PS_REGNUM;
11393 record_buf[1] = ARM_LR_REGNUM;
11394 arm_insn_r->reg_rec_count = 2;
11396 /* Save SPSR also; how? */
11397 printf_unfiltered (_("Process record does not support instruction "
11398 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11399 paddress (arm_insn_r->gdbarch,
11400 arm_insn_r->this_addr));
11403 else if (11 == arm_insn_r->decode
11404 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11406 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11408 /* Handle str(x) insn */
11409 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11412 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11413 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11415 /* Handle BX, branch and link/exchange. */
11416 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11417 record_buf[0] = ARM_PS_REGNUM;
11418 arm_insn_r->reg_rec_count = 1;
11420 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11421 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11422 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11424 /* Count leading zeros: CLZ. */
11425 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11426 arm_insn_r->reg_rec_count = 1;
11428 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11429 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11430 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11431 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11434 /* Handle MRS insn. */
11435 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11436 arm_insn_r->reg_rec_count = 1;
11438 else if (arm_insn_r->opcode <= 15)
11440 /* Normal data processing insns. */
11441 /* Out of 11 shifter operands mode, all the insn modifies destination
11442 register, which is specified by 13-16 decode. */
11443 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11444 record_buf[1] = ARM_PS_REGNUM;
11445 arm_insn_r->reg_rec_count = 2;
11452 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11453 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11457 /* Handling opcode 001 insns. */
11460 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11462 uint32_t record_buf[8], record_buf_mem[8];
11464 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11465 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11467 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11468 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11469 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11472 /* Handle MSR insn. */
11473 if (9 == arm_insn_r->opcode)
11475 /* CSPR is going to be changed. */
11476 record_buf[0] = ARM_PS_REGNUM;
11477 arm_insn_r->reg_rec_count = 1;
11481 /* SPSR is going to be changed. */
11484 else if (arm_insn_r->opcode <= 15)
11486 /* Normal data processing insns. */
11487 /* Out of 11 shifter operands mode, all the insn modifies destination
11488 register, which is specified by 13-16 decode. */
11489 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11490 record_buf[1] = ARM_PS_REGNUM;
11491 arm_insn_r->reg_rec_count = 2;
11498 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11499 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11503 /* Handle ARM mode instructions with opcode 010. */
11506 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11508 struct regcache *reg_cache = arm_insn_r->regcache;
11510 uint32_t reg_base , reg_dest;
11511 uint32_t offset_12, tgt_mem_addr;
11512 uint32_t record_buf[8], record_buf_mem[8];
11513 unsigned char wback;
11516 /* Calculate wback. */
11517 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11518 || (bit (arm_insn_r->arm_insn, 21) == 1);
11520 arm_insn_r->reg_rec_count = 0;
11521 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11523 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11525 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11528 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11529 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11531 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11532 preceeds a LDR instruction having R15 as reg_base, it
11533 emulates a branch and link instruction, and hence we need to save
11534 CPSR and PC as well. */
11535 if (ARM_PC_REGNUM == reg_dest)
11536 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11538 /* If wback is true, also save the base register, which is going to be
11541 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11545 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11547 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11548 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11550 /* Handle bit U. */
11551 if (bit (arm_insn_r->arm_insn, 23))
11553 /* U == 1: Add the offset. */
11554 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11558 /* U == 0: subtract the offset. */
11559 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11562 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11564 if (bit (arm_insn_r->arm_insn, 22))
11566 /* STRB and STRBT: 1 byte. */
11567 record_buf_mem[0] = 1;
11571 /* STR and STRT: 4 bytes. */
11572 record_buf_mem[0] = 4;
11575 /* Handle bit P. */
11576 if (bit (arm_insn_r->arm_insn, 24))
11577 record_buf_mem[1] = tgt_mem_addr;
11579 record_buf_mem[1] = (uint32_t) u_regval;
11581 arm_insn_r->mem_rec_count = 1;
11583 /* If wback is true, also save the base register, which is going to be
11586 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11589 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11590 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11594 /* Handling opcode 011 insns. */
11597 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11599 struct regcache *reg_cache = arm_insn_r->regcache;
11601 uint32_t shift_imm = 0;
11602 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11603 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11604 uint32_t record_buf[8], record_buf_mem[8];
11607 ULONGEST u_regval[2];
11609 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11610 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11612 /* Handle enhanced store insns and LDRD DSP insn,
11613 order begins according to addressing modes for store insns
11617 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11619 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11620 /* LDR insn has a capability to do branching, if
11621 MOV LR, PC is precedded by LDR insn having Rn as R15
11622 in that case, it emulates branch and link insn, and hence we
11623 need to save CSPR and PC as well. */
11624 if (15 != reg_dest)
11626 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11627 arm_insn_r->reg_rec_count = 1;
11631 record_buf[0] = reg_dest;
11632 record_buf[1] = ARM_PS_REGNUM;
11633 arm_insn_r->reg_rec_count = 2;
11638 if (! bits (arm_insn_r->arm_insn, 4, 11))
11640 /* Store insn, register offset and register pre-indexed,
11641 register post-indexed. */
11643 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11645 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11646 regcache_raw_read_unsigned (reg_cache, reg_src1
11648 regcache_raw_read_unsigned (reg_cache, reg_src2
11650 if (15 == reg_src2)
11652 /* If R15 was used as Rn, hence current PC+8. */
11653 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11654 u_regval[0] = u_regval[0] + 8;
11656 /* Calculate target store address, Rn +/- Rm, register offset. */
11658 if (bit (arm_insn_r->arm_insn, 23))
11660 tgt_mem_addr = u_regval[0] + u_regval[1];
11664 tgt_mem_addr = u_regval[1] - u_regval[0];
11667 switch (arm_insn_r->opcode)
11681 record_buf_mem[0] = 4;
11696 record_buf_mem[0] = 1;
11700 gdb_assert_not_reached ("no decoding pattern found");
11703 record_buf_mem[1] = tgt_mem_addr;
11704 arm_insn_r->mem_rec_count = 1;
11706 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11707 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11708 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11709 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11710 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11711 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11714 /* Rn is going to be changed in pre-indexed mode and
11715 post-indexed mode as well. */
11716 record_buf[0] = reg_src2;
11717 arm_insn_r->reg_rec_count = 1;
11722 /* Store insn, scaled register offset; scaled pre-indexed. */
11723 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11725 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11727 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11728 /* Get shift_imm. */
11729 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11730 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11731 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11732 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11733 /* Offset_12 used as shift. */
11737 /* Offset_12 used as index. */
11738 offset_12 = u_regval[0] << shift_imm;
11742 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11748 if (bit (u_regval[0], 31))
11750 offset_12 = 0xFFFFFFFF;
11759 /* This is arithmetic shift. */
11760 offset_12 = s_word >> shift_imm;
11767 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11769 /* Get C flag value and shift it by 31. */
11770 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11771 | (u_regval[0]) >> 1);
11775 offset_12 = (u_regval[0] >> shift_imm) \
11777 (sizeof(uint32_t) - shift_imm));
11782 gdb_assert_not_reached ("no decoding pattern found");
11786 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11788 if (bit (arm_insn_r->arm_insn, 23))
11790 tgt_mem_addr = u_regval[1] + offset_12;
11794 tgt_mem_addr = u_regval[1] - offset_12;
11797 switch (arm_insn_r->opcode)
11811 record_buf_mem[0] = 4;
11826 record_buf_mem[0] = 1;
11830 gdb_assert_not_reached ("no decoding pattern found");
11833 record_buf_mem[1] = tgt_mem_addr;
11834 arm_insn_r->mem_rec_count = 1;
11836 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11837 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11838 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11839 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11840 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11841 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11844 /* Rn is going to be changed in register scaled pre-indexed
11845 mode,and scaled post indexed mode. */
11846 record_buf[0] = reg_src2;
11847 arm_insn_r->reg_rec_count = 1;
11852 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11853 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11857 /* Handle ARM mode instructions with opcode 100. */
11860 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11862 struct regcache *reg_cache = arm_insn_r->regcache;
11863 uint32_t register_count = 0, register_bits;
11864 uint32_t reg_base, addr_mode;
11865 uint32_t record_buf[24], record_buf_mem[48];
11869 /* Fetch the list of registers. */
11870 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11871 arm_insn_r->reg_rec_count = 0;
11873 /* Fetch the base register that contains the address we are loading data
11875 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11877 /* Calculate wback. */
11878 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11880 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11882 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11884 /* Find out which registers are going to be loaded from memory. */
11885 while (register_bits)
11887 if (register_bits & 0x00000001)
11888 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11889 register_bits = register_bits >> 1;
11894 /* If wback is true, also save the base register, which is going to be
11897 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11899 /* Save the CPSR register. */
11900 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11904 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11906 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11908 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11910 /* Find out how many registers are going to be stored to memory. */
11911 while (register_bits)
11913 if (register_bits & 0x00000001)
11915 register_bits = register_bits >> 1;
11920 /* STMDA (STMED): Decrement after. */
11922 record_buf_mem[1] = (uint32_t) u_regval
11923 - register_count * INT_REGISTER_SIZE + 4;
11925 /* STM (STMIA, STMEA): Increment after. */
11927 record_buf_mem[1] = (uint32_t) u_regval;
11929 /* STMDB (STMFD): Decrement before. */
11931 record_buf_mem[1] = (uint32_t) u_regval
11932 - register_count * INT_REGISTER_SIZE;
11934 /* STMIB (STMFA): Increment before. */
11936 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11939 gdb_assert_not_reached ("no decoding pattern found");
11943 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11944 arm_insn_r->mem_rec_count = 1;
11946 /* If wback is true, also save the base register, which is going to be
11949 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11952 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11953 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11957 /* Handling opcode 101 insns. */
/* Record handler for ARM B/BL.  Only BL modifies a register (it clobbers
   LR); plain B changes no register state recorded here.  */
11960 arm_record_b_bl (insn_decode_record *arm_insn_r)
11962 uint32_t record_buf[8];
11964 /* Handle B, BL, BLX(1) insns. */
11965 /* B simply branches so we do nothing here. */
11966 /* Note: BLX(1) doesnt fall here but instead it falls into
11967 extension space. */
/* Bit 24 distinguishes BL (link) from B: BL writes the link register.  */
11968 if (bit (arm_insn_r->arm_insn, 24))
11970 record_buf[0] = ARM_LR_REGNUM;
11971 arm_insn_r->reg_rec_count = 1;
11974 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11979 /* Handling opcode 110 insns. */
/* Fallback record handler: report an instruction that process record
   cannot replay, printing its raw encoding and address.  */
11982 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11984 printf_unfiltered (_("Process record does not support instruction "
11985 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11986 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11991 /* Record handler for vector data transfer instructions. */
/* Records the destination registers of VMOV/VMRS/VMSR/VDUP transfers
   between the ARM core registers and the VFP/NEON extension registers.
   The (bit_l, bit_c) pair selects the transfer direction and class.  */
11994 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11996 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11997 uint32_t record_buf[4];
11999 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12000 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
/* NOTE(review): reg_v and bits_a both read bits 21-23 — reg_v is used
   below as a vector register index, bits_a as the opcode-A selector.
   Confirm against the ARM ARM encoding tables that both really come
   from the same field.  */
12001 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12002 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12003 bit_l = bit (arm_insn_r->arm_insn, 20);
12004 bit_c = bit (arm_insn_r->arm_insn, 8);
12006 /* Handle VMOV instruction. */
12007 if (bit_l && bit_c)
12009 record_buf[0] = reg_t;
12010 arm_insn_r->reg_rec_count = 1;
12012 else if (bit_l && !bit_c)
12014 /* Handle VMOV instruction. */
12015 if (bits_a == 0x00)
12017 if (bit (arm_insn_r->arm_insn, 20))
12018 record_buf[0] = reg_t;
/* num_regs offsets into the pseudo-register range — presumably the
   single-precision S registers; TODO confirm against the tdesc.  */
12020 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12023 arm_insn_r->reg_rec_count = 1;
12025 /* Handle VMRS instruction. */
12026 else if (bits_a == 0x07)
/* VMRS with Rt == 15 writes the APSR flags, so record CPSR.  */
12029 reg_t = ARM_PS_REGNUM;
12031 record_buf[0] = reg_t;
12032 arm_insn_r->reg_rec_count = 1;
12035 else if (!bit_l && !bit_c)
12037 /* Handle VMOV instruction. */
12038 if (bits_a == 0x00)
12040 if (bit (arm_insn_r->arm_insn, 20))
12041 record_buf[0] = reg_t;
12043 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12046 arm_insn_r->reg_rec_count = 1;
12048 /* Handle VMSR instruction. */
12049 else if (bits_a == 0x07)
12051 record_buf[0] = ARM_FPSCR_REGNUM;
12052 arm_insn_r->reg_rec_count = 1;
12055 else if (!bit_l && bit_c)
12057 /* Handle VMOV instruction. */
12058 if (!(bits_a & 0x04))
12060 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12062 arm_insn_r->reg_rec_count = 1;
12064 /* Handle VDUP instruction. */
/* Bit 21 is the Q bit: a quadword VDUP touches a D-register pair.  */
12067 if (bit (arm_insn_r->arm_insn, 21))
12069 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12070 record_buf[0] = reg_v + ARM_D0_REGNUM;
12071 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12072 arm_insn_r->reg_rec_count = 2;
12076 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12077 record_buf[0] = reg_v + ARM_D0_REGNUM;
12078 arm_insn_r->reg_rec_count = 1;
12083 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12087 /* Record handler for extension register load/store instructions. */
/* Covers VMOV (core <-> extension register transfers), VSTM/VPUSH,
   VLDM, VSTR and VLDR.  Stores record the affected memory words in
   record_buf_mem as (address, length) pairs; loads record the
   destination registers in record_buf.  */
12090 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12092 uint32_t opcode, single_reg;
12093 uint8_t op_vldm_vstm;
12094 uint32_t record_buf[8], record_buf_mem[128];
12095 ULONGEST u_regval = 0;
12097 struct regcache *reg_cache = arm_insn_r->regcache;
12098 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12100 opcode = bits (arm_insn_r->arm_insn, 20, 24);
/* single_reg: bit 8 distinguishes single- from double-precision forms.  */
12101 single_reg = bit (arm_insn_r->arm_insn, 8);
12102 op_vldm_vstm = opcode & 0x1b;
12104 /* Handle VMOV instructions. */
12105 if ((opcode & 0x1e) == 0x04)
12107 if (bit (arm_insn_r->arm_insn, 4))
/* Transfer to two ARM core registers: record Rt and Rt2.  */
12109 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12110 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12111 arm_insn_r->reg_rec_count = 2;
12115 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12116 | bit (arm_insn_r->arm_insn, 5);
12120 record_buf[0] = num_regs + reg_m;
12121 record_buf[1] = num_regs + reg_m + 1;
12122 arm_insn_r->reg_rec_count = 2;
12126 record_buf[0] = reg_m + ARM_D0_REGNUM;
12127 arm_insn_r->reg_rec_count = 1;
12131 /* Handle VSTM and VPUSH instructions. */
12132 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12133 || op_vldm_vstm == 0x12)
12135 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12136 uint32_t memory_index = 0;
12138 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12139 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12140 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
/* NOTE(review): imm_off8 is a register/word count; a byte offset would
   be imm_off8 << 2.  "<< 24" looks wrong (the same defect existed in
   upstream GDB of this vintage and was later fixed) — verify before
   relying on the recorded start addresses.  */
12141 imm_off32 = imm_off8 << 24;
12142 memory_count = imm_off8;
12144 if (bit (arm_insn_r->arm_insn, 23))
12145 start_address = u_regval;
12147 start_address = u_regval - imm_off32;
/* Bit 21 = writeback: the base register Rn itself is modified.  */
12149 if (bit (arm_insn_r->arm_insn, 21))
12151 record_buf[0] = reg_rn;
12152 arm_insn_r->reg_rec_count = 1;
12155 while (memory_count > 0)
/* Single-precision: one 4-byte word per register.  */
12159 record_buf_mem[memory_index] = start_address;
12160 record_buf_mem[memory_index + 1] = 4;
12161 start_address = start_address + 4;
12162 memory_index = memory_index + 2;
/* Double-precision: two 4-byte words per register.  */
12166 record_buf_mem[memory_index] = start_address;
12167 record_buf_mem[memory_index + 1] = 4;
12168 record_buf_mem[memory_index + 2] = start_address + 4;
12169 record_buf_mem[memory_index + 3] = 4;
12170 start_address = start_address + 8;
12171 memory_index = memory_index + 4;
12175 arm_insn_r->mem_rec_count = (memory_index >> 1);
12177 /* Handle VLDM instructions. */
12178 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12179 || op_vldm_vstm == 0x13)
12181 uint32_t reg_count, reg_vd;
12182 uint32_t reg_index = 0;
12184 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12185 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
/* D bit (22) extends the register number: high bit for D regs,
   low bit for S regs.  */
12188 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12190 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12192 if (bit (arm_insn_r->arm_insn, 21))
12193 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12195 while (reg_count > 0)
12198 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12200 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12204 arm_insn_r->reg_rec_count = reg_index;
12206 /* VSTR Vector store register. */
12207 else if ((opcode & 0x13) == 0x10)
12209 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12210 uint32_t memory_index = 0;
12212 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12213 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12214 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
/* NOTE(review): same "<< 24" suspect scaling as in the VSTM path
   above — a byte offset would be imm_off8 << 2; verify.  */
12215 imm_off32 = imm_off8 << 24;
12216 memory_count = imm_off8;
12218 if (bit (arm_insn_r->arm_insn, 23))
12219 start_address = u_regval + imm_off32;
12221 start_address = u_regval - imm_off32;
/* Single-precision VSTR: one word.  */
12225 record_buf_mem[memory_index] = start_address;
12226 record_buf_mem[memory_index + 1] = 4;
12227 arm_insn_r->mem_rec_count = 1;
/* Double-precision VSTR: two consecutive words.  */
12231 record_buf_mem[memory_index] = start_address;
12232 record_buf_mem[memory_index + 1] = 4;
12233 record_buf_mem[memory_index + 2] = start_address + 4;
12234 record_buf_mem[memory_index + 3] = 4;
12235 arm_insn_r->mem_rec_count = 2;
12238 /* VLDR Vector load register. */
12239 else if ((opcode & 0x13) == 0x11)
12241 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12245 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12246 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12250 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12251 record_buf[0] = num_regs + reg_vd;
12253 arm_insn_r->reg_rec_count = 1;
12256 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12257 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12261 /* Record handler for arm/thumb mode VFP data processing instructions. */
/* Classifies the instruction into one of four record templates:
   INSN_T0 = D-register pair, INSN_T1 = one D register,
   INSN_T2 = one S register, INSN_T3 = FPSCR only (compares), then
   records the corresponding destination register(s) at the end.  */
12264 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12266 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12267 uint32_t record_buf[4];
12268 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12269 enum insn_types curr_insn_type = INSN_INV;
12271 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12272 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12273 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12274 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12275 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12276 bit_d = bit (arm_insn_r->arm_insn, 22);
/* NOTE(review): "& 0x04" reduces opc1 to {0, 4}, which would make the
   == 0x01 / 0x02 / 0x03 / 0x0b comparisons below unreachable.  Upstream
   GDB masks with ~0x04 (dropping the D-bit contribution) — this looks
   like a transcription defect; verify against upstream arm-tdep.c.  */
12277 opc1 = opc1 & 0x04;
12279 /* Handle VMLA, VMLS. */
12282 if (bit (arm_insn_r->arm_insn, 10))
12284 if (bit (arm_insn_r->arm_insn, 6))
12285 curr_insn_type = INSN_T0;
12287 curr_insn_type = INSN_T1;
12292 curr_insn_type = INSN_T1;
12294 curr_insn_type = INSN_T2;
12297 /* Handle VNMLA, VNMLS, VNMUL. */
12298 else if (opc1 == 0x01)
12301 curr_insn_type = INSN_T1;
12303 curr_insn_type = INSN_T2;
12306 else if (opc1 == 0x02 && !(opc3 & 0x01))
12308 if (bit (arm_insn_r->arm_insn, 10))
12310 if (bit (arm_insn_r->arm_insn, 6))
12311 curr_insn_type = INSN_T0;
12313 curr_insn_type = INSN_T1;
12318 curr_insn_type = INSN_T1;
12320 curr_insn_type = INSN_T2;
12323 /* Handle VADD, VSUB. */
12324 else if (opc1 == 0x03)
12326 if (!bit (arm_insn_r->arm_insn, 9))
12328 if (bit (arm_insn_r->arm_insn, 6))
12329 curr_insn_type = INSN_T0;
12331 curr_insn_type = INSN_T1;
12336 curr_insn_type = INSN_T1;
12338 curr_insn_type = INSN_T2;
12342 else if (opc1 == 0x0b)
12345 curr_insn_type = INSN_T1;
12347 curr_insn_type = INSN_T2;
12349 /* Handle all other vfp data processing instructions. */
/* NOTE(review): duplicate condition — identical to the branch above, so
   this arm can never be taken as written; verify the intended opcode.  */
12350 else if (opc1 == 0x0b)
12353 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12355 if (bit (arm_insn_r->arm_insn, 4))
12357 if (bit (arm_insn_r->arm_insn, 6))
12358 curr_insn_type = INSN_T0;
12360 curr_insn_type = INSN_T1;
12365 curr_insn_type = INSN_T1;
12367 curr_insn_type = INSN_T2;
12370 /* Handle VNEG and VABS. */
12371 else if ((opc2 == 0x01 && opc3 == 0x01)
12372 || (opc2 == 0x00 && opc3 == 0x03))
12374 if (!bit (arm_insn_r->arm_insn, 11))
12376 if (bit (arm_insn_r->arm_insn, 6))
12377 curr_insn_type = INSN_T0;
12379 curr_insn_type = INSN_T1;
12384 curr_insn_type = INSN_T1;
12386 curr_insn_type = INSN_T2;
12389 /* Handle VSQRT. */
12390 else if (opc2 == 0x01 && opc3 == 0x03)
12393 curr_insn_type = INSN_T1;
12395 curr_insn_type = INSN_T2;
12398 else if (opc2 == 0x07 && opc3 == 0x03)
12401 curr_insn_type = INSN_T1;
12403 curr_insn_type = INSN_T2;
12405 else if (opc3 & 0x01)
12408 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12410 if (!bit (arm_insn_r->arm_insn, 18))
12411 curr_insn_type = INSN_T2;
12415 curr_insn_type = INSN_T1;
12417 curr_insn_type = INSN_T2;
12421 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12424 curr_insn_type = INSN_T1;
12426 curr_insn_type = INSN_T2;
12428 /* Handle VCVTB, VCVTT. */
12429 else if ((opc2 & 0x0e) == 0x02)
12430 curr_insn_type = INSN_T2;
12431 /* Handle VCMP, VCMPE. */
12432 else if ((opc2 & 0x0e) == 0x04)
12433 curr_insn_type = INSN_T3;
/* Record the destination according to the template picked above.  */
12437 switch (curr_insn_type)
12440 reg_vd = reg_vd | (bit_d << 4);
12441 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12442 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12443 arm_insn_r->reg_rec_count = 2;
12447 reg_vd = reg_vd | (bit_d << 4);
12448 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12449 arm_insn_r->reg_rec_count = 1;
12453 reg_vd = (reg_vd << 1) | bit_d;
12454 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12455 arm_insn_r->reg_rec_count = 1;
/* Compares only update the FPSCR status flags.  */
12459 record_buf[0] = ARM_FPSCR_REGNUM;
12460 arm_insn_r->reg_rec_count = 1;
12464 gdb_assert_not_reached ("no decoding pattern found");
12468 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12472 /* Handling opcode 110 insns. */
/* Dispatcher for the coprocessor load/store space: coprocessor numbers
   0b101x are VFP/Advanced-SIMD (extension register ld/st and 64-bit core
   <-> extension transfers); everything else is a generic coprocessor,
   for which only MRRC-style two-register reads are recorded.  */
12475 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12477 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12479 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12480 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12481 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12483 if ((coproc & 0x0e) == 0x0a)
12485 /* Handle extension register ld/st instructions. */
12487 return arm_record_exreg_ld_st_insn (arm_insn_r);
12489 /* 64-bit transfers between arm core and extension registers. */
12490 if ((op1 & 0x3e) == 0x04)
12491 return arm_record_exreg_ld_st_insn (arm_insn_r);
12495 /* Handle coprocessor ld/st instructions. */
12500 return arm_record_unsupported_insn (arm_insn_r);
12503 return arm_record_unsupported_insn (arm_insn_r);
12506 /* Move to coprocessor from two arm core registers. */
12508 return arm_record_unsupported_insn (arm_insn_r);
12510 /* Move to two arm core registers from coprocessor. */
/* MRRC: both destination core registers Rt (12-15) and Rt2 (16-19)
   are modified, so record them.  */
12515 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12516 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12517 arm_insn_r->reg_rec_count = 2;
12519 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12523 return arm_record_unsupported_insn (arm_insn_r);
12526 /* Handling opcode 111 insns. */
/* Opcode 111 space: SWI/SVC system calls, VFP data processing,
   Advanced SIMD/VFP transfers, and generic coprocessor operations.  */
12529 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12531 uint32_t op, op1_sbit, op1_ebit, coproc;
12532 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12533 struct regcache *reg_cache = arm_insn_r->regcache;
12534 ULONGEST u_regval = 0;
12536 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12537 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12538 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12539 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12540 op = bit (arm_insn_r->arm_insn, 4);
12542 /* Handle arm SWI/SVC system call instructions. */
12545 if (tdep->arm_syscall_record != NULL)
12547 ULONGEST svc_operand, svc_number;
12549 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
/* Old ABI encodes the syscall number in the SVC immediate as
   0x900000 + N; EABI uses SVC 0 and passes the number in r7.  */
12551 if (svc_operand) /* OABI. */
12552 svc_number = svc_operand - 0x900000;
12554 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
/* Delegate to the per-OS syscall recorder.  */
12556 return tdep->arm_syscall_record (reg_cache, svc_number);
12560 printf_unfiltered (_("no syscall record support\n"));
12565 if ((coproc & 0x0e) == 0x0a)
12567 /* VFP data-processing instructions. */
12568 if (!op1_sbit && !op)
12569 return arm_record_vfp_data_proc_insn (arm_insn_r);
12571 /* Advanced SIMD, VFP instructions. */
12572 if (!op1_sbit && op)
12573 return arm_record_vdata_transfer_insn (arm_insn_r);
12577 /* Coprocessor data operations. */
12578 if (!op1_sbit && !op)
12579 return arm_record_unsupported_insn (arm_insn_r);
12581 /* Move to Coprocessor from ARM core register. */
12582 if (!op1_sbit && !op1_ebit && op)
12583 return arm_record_unsupported_insn (arm_insn_r);
12585 /* Move to arm core register from coprocessor. */
12586 if (!op1_sbit && op1_ebit && op)
12588 uint32_t record_buf[1];
12590 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
/* MRC with Rt == 15 writes the APSR flags, not PC: record CPSR.  */
12591 if (record_buf[0] == 15)
12592 record_buf[0] = ARM_PS_REGNUM;
12594 arm_insn_r->reg_rec_count = 1;
12595 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12601 return arm_record_unsupported_insn (arm_insn_r);
12604 /* Handling opcode 000 insns. */
/* Thumb shift-by-immediate / add / subtract: the destination register
   (bits 0-2) and the condition flags are modified.  */
12607 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12609 uint32_t record_buf[8];
12610 uint32_t reg_src1 = 0;
12612 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12614 record_buf[0] = ARM_PS_REGNUM;
12615 record_buf[1] = reg_src1;
12616 thumb_insn_r->reg_rec_count = 2;
12618 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12624 /* Handling opcode 001 insns. */
/* Thumb MOV/CMP/ADD/SUB with 8-bit immediate: the destination register
   (bits 8-10) and the condition flags are modified.  */
12627 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12629 uint32_t record_buf[8];
12630 uint32_t reg_src1 = 0;
12632 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12634 record_buf[0] = ARM_PS_REGNUM;
12635 record_buf[1] = reg_src1;
12636 thumb_insn_r->reg_rec_count = 2;
12638 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12643 /* Handling opcode 010 insns. */
/* Thumb register-offset load/store, PC-relative load, and the special
   data-processing / BX group.  Loads record the destination register;
   stores record the target memory (Rn + Rm, size per sub-opcode).  */
12646 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12648 struct regcache *reg_cache = thumb_insn_r->regcache;
12649 uint32_t record_buf[8], record_buf_mem[8];
12651 uint32_t reg_src1 = 0, reg_src2 = 0;
12652 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12654 ULONGEST u_regval[2] = {0};
12656 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12658 if (bit (thumb_insn_r->arm_insn, 12))
12660 /* Handle load/store register offset. */
/* NOTE(review): bits 9-10 yield only 0-3, yet the comparisons below
   test for 12-15 and 8-10 — they can never match as written.  Later
   GDB reads a wider field here; likely a defect, verify upstream.  */
12661 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12662 if (opcode2 >= 12 && opcode2 <= 15)
12664 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12665 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12666 record_buf[0] = reg_src1;
12667 thumb_insn_r->reg_rec_count = 1;
12669 else if (opcode2 >= 8 && opcode2 <= 10)
12671 /* STR(2), STRB(2), STRH(2) . */
12672 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12673 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12674 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12675 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
/* record_buf_mem[0] = access size in bytes, [1] = effective address.  */
12677 record_buf_mem[0] = 4; /* STR (2). */
12678 else if (10 == opcode2)
12679 record_buf_mem[0] = 1; /* STRB (2). */
12680 else if (9 == opcode2)
12681 record_buf_mem[0] = 2; /* STRH (2). */
12682 record_buf_mem[1] = u_regval[0] + u_regval[1];
12683 thumb_insn_r->mem_rec_count = 1;
12686 else if (bit (thumb_insn_r->arm_insn, 11))
12688 /* Handle load from literal pool. */
12690 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12691 record_buf[0] = reg_src1;
12692 thumb_insn_r->reg_rec_count = 1;
12696 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12697 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12698 if ((3 == opcode2) && (!opcode3))
12700 /* Branch with exchange. */
12701 record_buf[0] = ARM_PS_REGNUM;
12702 thumb_insn_r->reg_rec_count = 1;
12706 /* Format 8; special data processing insns. */
12707 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12708 record_buf[0] = ARM_PS_REGNUM;
12709 record_buf[1] = reg_src1;
12710 thumb_insn_r->reg_rec_count = 2;
12715 /* Format 5; data processing insns. */
12716 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
/* Bit 7 = H1: destination is a high register (r8-r15).  */
12717 if (bit (thumb_insn_r->arm_insn, 7))
12719 reg_src1 = reg_src1 + 8;
12721 record_buf[0] = ARM_PS_REGNUM;
12722 record_buf[1] = reg_src1;
12723 thumb_insn_r->reg_rec_count = 2;
12726 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12727 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12733 /* Handling opcode 001 insns. */
/* Thumb word load/store with 5-bit immediate offset: loads record the
   destination register (bits 0-2); stores record the 4-byte word at
   Rn + imm5*4.  */
12736 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12738 struct regcache *reg_cache = thumb_insn_r->regcache;
12739 uint32_t record_buf[8], record_buf_mem[8];
12741 uint32_t reg_src1 = 0;
12742 uint32_t opcode = 0, immed_5 = 0;
12744 ULONGEST u_regval = 0;
12746 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
/* Load: the destination register is modified.  */
12751 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12752 record_buf[0] = reg_src1;
12753 thumb_insn_r->reg_rec_count = 1;
/* Store: record 4 bytes at base Rn (bits 3-5) plus scaled imm5.  */
12758 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12759 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12760 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12761 record_buf_mem[0] = 4;
12762 record_buf_mem[1] = u_regval + (immed_5 * 4);
12763 thumb_insn_r->mem_rec_count = 1;
12766 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12767 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12773 /* Handling opcode 100 insns. */
/* Thumb halfword and SP-relative load/store.  Loads record the
   destination register; stores record the written memory: 4 bytes at
   SP + imm8*4 (SP-relative) or 2 bytes at Rn + imm5*2 (halfword).  */
12776 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12778 struct regcache *reg_cache = thumb_insn_r->regcache;
12779 uint32_t record_buf[8], record_buf_mem[8];
12781 uint32_t reg_src1 = 0;
12782 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12784 ULONGEST u_regval = 0;
12786 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
/* Load, destination in bits 8-10.  */
12791 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12792 record_buf[0] = reg_src1;
12793 thumb_insn_r->reg_rec_count = 1;
12795 else if (1 == opcode)
/* Load, destination in bits 0-2.  */
12798 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12799 record_buf[0] = reg_src1;
12800 thumb_insn_r->reg_rec_count = 1;
12802 else if (2 == opcode)
/* SP-relative word store.  */
12805 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12806 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12807 record_buf_mem[0] = 4;
12808 record_buf_mem[1] = u_regval + (immed_8 * 4);
12809 thumb_insn_r->mem_rec_count = 1;
12811 else if (0 == opcode)
/* Halfword store at Rn + imm5*2.  */
12814 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12815 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12816 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12817 record_buf_mem[0] = 2;
12818 record_buf_mem[1] = u_regval + (immed_5 * 2);
12819 thumb_insn_r->mem_rec_count = 1;
12822 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12823 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12829 /* Handling opcode 101 insns. */
/* Miscellaneous 16-bit Thumb instructions: POP, PUSH, BKPT,
   ADD(5)/ADD(6) (PC/SP-relative add), and ADD(7)/SUB(4) (adjust SP).  */
12832 thumb_record_misc (insn_decode_record *thumb_insn_r)
12834 struct regcache *reg_cache = thumb_insn_r->regcache;
12836 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12837 uint32_t register_bits = 0, register_count = 0;
/* NOTE(review): register_list appears unused in the visible code;
   elided lines may or may not use it — verify before removing.  */
12838 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12839 uint32_t record_buf[24], record_buf_mem[48];
12842 ULONGEST u_regval = 0;
12844 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12845 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12846 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
/* POP: every register in the 8-bit list is loaded; CPSR and SP
   (writeback) are also modified.  */
12851 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12852 while (register_bits)
12854 if (register_bits & 0x00000001)
12855 record_buf[index++] = register_count;
12856 register_bits = register_bits >> 1;
12859 record_buf[index++] = ARM_PS_REGNUM;
12860 record_buf[index++] = ARM_SP_REGNUM;
12861 thumb_insn_r->reg_rec_count = index;
12863 else if (10 == opcode2)
/* PUSH: count the listed registers (bit 8 adds LR) and record the
   pushed words just below the current SP, plus SP writeback.  */
12866 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12867 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12868 while (register_bits)
12870 if (register_bits & 0x00000001)
12872 register_bits = register_bits >> 1;
12874 start_address = u_regval - \
12875 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12876 thumb_insn_r->mem_rec_count = register_count;
/* Memory records are stored backwards as (length, address) pairs:
   even slot = 4-byte length, odd slot = address.  */
12877 while (register_count)
12879 record_buf_mem[(register_count * 2) - 1] = start_address;
12880 record_buf_mem[(register_count * 2) - 2] = 4;
12881 start_address = start_address + 4;
12884 record_buf[0] = ARM_SP_REGNUM;
12885 thumb_insn_r->reg_rec_count = 1;
12887 else if (0x1E == opcode1)
12890 /* Handle enhanced software breakpoint insn, BKPT. */
12891 /* CPSR is changed to be executed in ARM state, disabling normal
12892 interrupts, entering abort mode. */
12893 /* According to high vector configuration PC is set. */
12894 /* User hits breakpoint and type reverse, in that case, we need to go back with
12895 previous CPSR and Program Counter. */
12896 record_buf[0] = ARM_PS_REGNUM;
12897 record_buf[1] = ARM_LR_REGNUM;
12898 thumb_insn_r->reg_rec_count = 2;
12899 /* We need to save SPSR value, which is not yet done. */
12900 printf_unfiltered (_("Process record does not support instruction "
12901 "0x%0x at address %s.\n"),
12902 thumb_insn_r->arm_insn,
12903 paddress (thumb_insn_r->gdbarch,
12904 thumb_insn_r->this_addr));
12907 else if ((0 == opcode) || (1 == opcode))
12909 /* ADD(5), ADD(6). */
12910 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12911 record_buf[0] = reg_src1;
12912 thumb_insn_r->reg_rec_count = 1;
12914 else if (2 == opcode)
12916 /* ADD(7), SUB(4). */
12917 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12918 record_buf[0] = ARM_SP_REGNUM;
12919 thumb_insn_r->reg_rec_count = 1;
12922 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12923 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12929 /* Handling opcode 110 insns. */
/* Thumb LDMIA/STMIA and SWI.  LDMIA records the listed registers plus
   the base register; STMIA records the stored memory words starting at
   the base register's value; SWI is delegated to the per-OS syscall
   recorder with the number taken from r7.  */
12932 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12934 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12935 struct regcache *reg_cache = thumb_insn_r->regcache;
12937 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12938 uint32_t reg_src1 = 0;
12939 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
/* NOTE(review): register_list appears unused in the visible code;
   elided lines may or may not use it — verify before removing.  */
12940 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12941 uint32_t record_buf[24], record_buf_mem[48];
12943 ULONGEST u_regval = 0;
12945 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12946 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
/* LDMIA: every register in the 8-bit list plus the base (writeback)
   register Rn (bits 8-10) is modified.  */
12952 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12954 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12955 while (register_bits)
12957 if (register_bits & 0x00000001)
12958 record_buf[index++] = register_count;
12959 register_bits = register_bits >> 1;
12962 record_buf[index++] = reg_src1;
12963 thumb_insn_r->reg_rec_count = index;
12965 else if (0 == opcode2)
12967 /* It handles both STMIA. */
12968 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12970 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12971 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12972 while (register_bits)
12974 if (register_bits & 0x00000001)
12976 register_bits = register_bits >> 1;
/* Record one 4-byte word per stored register, ascending from Rn.
   Pairs are written backwards: even slot = length, odd = address.  */
12978 start_address = u_regval;
12979 thumb_insn_r->mem_rec_count = register_count;
12980 while (register_count)
12982 record_buf_mem[(register_count * 2) - 1] = start_address;
12983 record_buf_mem[(register_count * 2) - 2] = 4;
12984 start_address = start_address + 4;
12988 else if (0x1F == opcode1)
12990 /* Handle arm syscall insn. */
12991 if (tdep->arm_syscall_record != NULL)
12993 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12994 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12998 printf_unfiltered (_("no syscall record support\n"));
13003 /* B (1), conditional branch is automatically taken care in process_record,
13004 as PC is saved there. */
13006 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13007 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13013 /* Handling opcode 111 insns. */
/* Thumb branch encodings selected by bits 11-12: values 2/3 record LR
   (BL/BLX halves), value 1 records CPSR and LR; unconditional B needs
   no register record since PC is saved by the caller.  */
13016 thumb_record_branch (insn_decode_record *thumb_insn_r)
13018 uint32_t record_buf[8];
13019 uint32_t bits_h = 0;
13021 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13023 if (2 == bits_h || 3 == bits_h)
/* BL / BLX: the link register is clobbered.  */
13026 record_buf[0] = ARM_LR_REGNUM;
13027 thumb_insn_r->reg_rec_count = 1;
13029 else if (1 == bits_h)
13032 record_buf[0] = ARM_PS_REGNUM;
13033 record_buf[1] = ARM_LR_REGNUM;
13034 thumb_insn_r->reg_rec_count = 2;
13037 /* B(2) is automatically taken care in process_record, as PC is
13040 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13045 /* Handler for thumb2 load/store multiple instructions. */
/* op (bits 23-24): 0/3 select RFE/SRS, 1/2 select LDM/STM variants.
   LDM records the listed registers plus base and CPSR; STM records the
   stored memory words (increment-after or decrement-before addressing)
   plus the writeback base and CPSR.  */
13048 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
13050 struct regcache *reg_cache = thumb2_insn_r->regcache;
13052 uint32_t reg_rn, op;
13053 uint32_t register_bits = 0, register_count = 0;
13054 uint32_t index = 0, start_address = 0;
13055 uint32_t record_buf[24], record_buf_mem[48];
13057 ULONGEST u_regval = 0;
13059 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13060 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13062 if (0 == op || 3 == op)
13064 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13066 /* Handle RFE instruction. */
13067 record_buf[0] = ARM_PS_REGNUM;
13068 thumb2_insn_r->reg_rec_count = 1;
13072 /* Handle SRS instruction after reading banked SP. */
13073 return arm_record_unsupported_insn (thumb2_insn_r);
13076 else if (1 == op || 2 == op)
13078 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13080 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13081 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13082 while (register_bits)
13084 if (register_bits & 0x00000001)
13085 record_buf[index++] = register_count;
13088 register_bits = register_bits >> 1;
/* The writeback base and CPSR are also modified.  */
13090 record_buf[index++] = reg_rn;
13091 record_buf[index++] = ARM_PS_REGNUM;
13092 thumb2_insn_r->reg_rec_count = index;
13096 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13097 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13098 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13099 while (register_bits)
13101 if (register_bits & 0x00000001)
13104 register_bits = register_bits >> 1;
/* Increment-after: stores begin at Rn.  */
13109 /* Start address calculation for LDMDB/LDMEA. */
13110 start_address = u_regval;
/* Decrement-before: stores begin register_count words below Rn.  */
13114 /* Start address calculation for LDMDB/LDMEA. */
13115 start_address = u_regval - register_count * 4;
13118 thumb2_insn_r->mem_rec_count = register_count;
13119 while (register_count)
13121 record_buf_mem[register_count * 2 - 1] = start_address;
13122 record_buf_mem[register_count * 2 - 2] = 4;
13123 start_address = start_address + 4;
13126 record_buf[0] = reg_rn;
13127 record_buf[1] = ARM_PS_REGNUM;
13128 thumb2_insn_r->reg_rec_count = 2;
13132 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13134 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13136 return ARM_RECORD_SUCCESS;
13139 /* Handler for thumb2 load/store (dual/exclusive) and table branch
/* Loads record the destination register(s) plus CPSR; exclusive stores
   (STREX/STREXB/STREXH/STREXD) record the written memory and the status
   register; dual stores record two consecutive words plus the writeback
   base register.  */
13143 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13145 struct regcache *reg_cache = thumb2_insn_r->regcache;
13147 uint32_t reg_rd, reg_rn, offset_imm;
13148 uint32_t reg_dest1, reg_dest2;
13149 uint32_t address, offset_addr;
13150 uint32_t record_buf[8], record_buf_mem[8];
13151 uint32_t op1, op2, op3;
13154 ULONGEST u_regval[2];
13156 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13157 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13158 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13160 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
/* Load forms (L bit set): record destination Rt and CPSR.  */
13162 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13164 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13165 record_buf[0] = reg_dest1;
13166 record_buf[1] = ARM_PS_REGNUM;
13167 thumb2_insn_r->reg_rec_count = 2;
/* Dual/exclusive-double loads also write a second destination.  */
13170 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13172 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13173 record_buf[2] = reg_dest2;
13174 thumb2_insn_r->reg_rec_count = 3;
/* Store forms: compute the effective address from the base Rn.  */
13179 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13180 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13182 if (0 == op1 && 0 == op2)
13184 /* Handle STREX. */
13185 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13186 address = u_regval[0] + (offset_imm * 4);
13187 record_buf_mem[0] = 4;
13188 record_buf_mem[1] = address;
13189 thumb2_insn_r->mem_rec_count = 1;
/* The exclusive-store status register Rd (bits 0-3) is written too.  */
13190 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13191 record_buf[0] = reg_rd;
13192 thumb2_insn_r->reg_rec_count = 1;
13194 else if (1 == op1 && 0 == op2)
13196 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13197 record_buf[0] = reg_rd;
13198 thumb2_insn_r->reg_rec_count = 1;
13199 address = u_regval[0];
13200 record_buf_mem[1] = address;
13204 /* Handle STREXB. */
13205 record_buf_mem[0] = 1;
13206 thumb2_insn_r->mem_rec_count = 1;
13210 /* Handle STREXH. */
13211 record_buf_mem[0] = 2 ;
13212 thumb2_insn_r->mem_rec_count = 1;
13216 /* Handle STREXD. */
13217 address = u_regval[0];
13218 record_buf_mem[0] = 4;
13219 record_buf_mem[2] = 4;
13220 record_buf_mem[3] = address + 4;
13221 thumb2_insn_r->mem_rec_count = 2;
/* STRD: pre-/post-indexed addressing with a scaled 8-bit offset.  */
13226 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13228 if (bit (thumb2_insn_r->arm_insn, 24))
13230 if (bit (thumb2_insn_r->arm_insn, 23))
13231 offset_addr = u_regval[0] + (offset_imm * 4);
13233 offset_addr = u_regval[0] - (offset_imm * 4);
13235 address = offset_addr;
13238 address = u_regval[0];
13240 record_buf_mem[0] = 4;
13241 record_buf_mem[1] = address;
13242 record_buf_mem[2] = 4;
13243 record_buf_mem[3] = address + 4;
13244 thumb2_insn_r->mem_rec_count = 2;
/* Writeback modifies the base register.  */
13245 record_buf[0] = reg_rn;
13246 thumb2_insn_r->reg_rec_count = 1;
13250 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13252 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13254 return ARM_RECORD_SUCCESS;
13257 /* Handler for thumb2 data processing (shift register and modified immediate)
/* With Rd == 15 and op in {0, 4, 8, 13} — the flag-setting test/compare
   forms — only CPSR changes; otherwise both Rd and CPSR are recorded.  */
13261 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13263 uint32_t reg_rd, op;
13264 uint32_t record_buf[8];
13266 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13267 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13269 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13271 record_buf[0] = ARM_PS_REGNUM;
13272 thumb2_insn_r->reg_rec_count = 1;
13276 record_buf[0] = reg_rd;
13277 record_buf[1] = ARM_PS_REGNUM;
13278 thumb2_insn_r->reg_rec_count = 2;
13281 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13283 return ARM_RECORD_SUCCESS;
/* Generic handler for thumb2 instructions which effect destination and PS
thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
  uint32_t record_buf[8];

  /* Destination register is in bits 8-11 for every instruction class
     routed here; record it together with the status register.  */
  reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);

  record_buf[0] = reg_rd;
  record_buf[1] = ARM_PS_REGNUM;
  thumb2_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
  return ARM_RECORD_SUCCESS;
/* Handler for thumb2 branch and miscellaneous control instructions. */
thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
  uint32_t op, op1, op2;
  uint32_t record_buf[8];

  /* op = bits 20-26, op1 = bits 12-14, op2 = bits 8-11 of the
     halfword-swapped encoding.  NOTE(review): op2 is not used in the
     visible code — presumably consumed by branches elided here; verify.  */
  op = bits (thumb2_insn_r->arm_insn, 20, 26);
  op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
  op2 = bits (thumb2_insn_r->arm_insn, 8, 11);

  /* Handle MSR insn. */
  if (!(op1 & 0x2) && 0x38 == op)
      /* CPSR is going to be changed. */
      record_buf[0] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 1;

      /* MSR forms targeting banked/special registers are not modelled.  */
      arm_record_unsupported_insn(thumb2_insn_r);

  /* BL and BLX (immediate): the link register receives the return
     address; the flags are recorded conservatively as well.  */
  else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      thumb2_insn_r->reg_rec_count = 2;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
  return ARM_RECORD_SUCCESS;
/* Handler for thumb2 store single data item instructions. */
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
  struct regcache *reg_cache = thumb2_insn_r->regcache;
  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2];

  /* op1 (bits 21-23) selects byte/halfword/word; op2 (bits 6-11)
     separates register-offset from immediate forms.  Rn is the base.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  /* Bit 23 set: 12-bit immediate form, always an added offset.  */
  if (bit (thumb2_insn_r->arm_insn, 23))
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;

      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
          /* Handle STRB (register). */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          /* Effective address is Rn + (Rm << imm2).  */
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;

          /* 8-bit immediate form: bit 10 is the index (P) bit and
             bit 9 the add (U) bit.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
                offset_addr = u_regval[0] - offset_imm;
              address = offset_addr;
              /* Post-indexed: memory at the unmodified base is written.  */
              address = u_regval[0];

      /* Store byte instructions. */
          record_buf_mem[0] = 1;
      /* Store half word instructions. */
          record_buf_mem[0] = 2;
      /* Store word instructions. */
          record_buf_mem[0] = 4;

          gdb_assert_not_reached ("no decoding pattern found");

  /* One memory range (length in [0], address in [1]); Rn is recorded
     because the indexed forms may write it back.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
  return ARM_RECORD_SUCCESS;
/* Handler for thumb2 load memory hints instructions. */
thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
  uint32_t record_buf[8];
  uint32_t reg_rt, reg_rn;

  reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);

  /* Rt == 0b1111 marks the hint encodings (PLD/PLI); those fall through
     to the failure return below and are treated as unsupported.  */
  if (ARM_PC_REGNUM != reg_rt)
      /* Record the destination, the base register (write-back forms
         modify it; recording it for other forms is harmless) and the
         status register.  */
      record_buf[0] = reg_rt;
      record_buf[1] = reg_rn;
      record_buf[2] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 3;

      REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
      return ARM_RECORD_SUCCESS;

  return ARM_RECORD_FAILURE;
13462 /* Handler for thumb2 load word instructions. */
13465 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13467 uint32_t opcode1 = 0, opcode2 = 0;
13468 uint32_t record_buf[8];
13470 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13471 record_buf[1] = ARM_PS_REGNUM;
13472 thumb2_insn_r->reg_rec_count = 2;
13474 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13476 return ARM_RECORD_SUCCESS;
13479 /* Handler for thumb2 long multiply, long multiply accumulate, and
13480 divide instructions. */
13483 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13485 uint32_t opcode1 = 0, opcode2 = 0;
13486 uint32_t record_buf[8];
13487 uint32_t reg_src1 = 0;
13489 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13490 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13492 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13494 /* Handle SMULL, UMULL, SMULAL. */
13495 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13496 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13497 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13498 record_buf[2] = ARM_PS_REGNUM;
13499 thumb2_insn_r->reg_rec_count = 3;
13501 else if (1 == opcode1 || 3 == opcode2)
13503 /* Handle SDIV and UDIV. */
13504 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13505 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13506 record_buf[2] = ARM_PS_REGNUM;
13507 thumb2_insn_r->reg_rec_count = 3;
13510 return ARM_RECORD_FAILURE;
13512 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13514 return ARM_RECORD_SUCCESS;
13517 /* Record handler for thumb32 coprocessor instructions. */
13520 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13522 if (bit (thumb2_insn_r->arm_insn, 25))
13523 return arm_record_coproc_data_proc (thumb2_insn_r);
13525 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
/* Record handler for advance SIMD structure load/store instructions. */
thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
  struct regcache *reg_cache = thumb2_insn_r->regcache;
  uint32_t l_bit, a_bit, b_bits;
  uint32_t record_buf[128], record_buf_mem[128];
  uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
  uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;

  /* Field decode: L (bit 21) = load vs store, A (bit 23) = multiple vs
     single element, B (bits 8-11) = sub-type, Rn = base register, and
     D:Vd (bit 22, bits 12-15) = first SIMD register.  */
  l_bit = bit (thumb2_insn_r->arm_insn, 21);
  a_bit = bit (thumb2_insn_r->arm_insn, 23);
  b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
  reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
  /* Element size in bytes, bits, and elements per 64-bit D register.  */
  f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
  f_esize = 8 * f_ebytes;
  f_elem = 8 / f_ebytes;

    /* The base register holds the starting memory address.  */
    ULONGEST u_regval = 0;
    regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
    address = u_regval;

    /* VST1 (multiple single elements): B selects 1, 2, 3 or 4 registers;
       record one contiguous range per element.  */
    if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
        if (b_bits == 0x07)
        else if (b_bits == 0x0a)
        else if (b_bits == 0x06)
        else if (b_bits == 0x02)

        for (index_r = 0; index_r < bf_regs; index_r++)
            for (index_e = 0; index_e < f_elem; index_e++)
                record_buf_mem[index_m++] = f_ebytes;
                record_buf_mem[index_m++] = address;
                address = address + f_ebytes;
                thumb2_insn_r->mem_rec_count += 1;

    /* VST2: pairs of elements are interleaved, so each iteration writes
       two element-sized ranges and advances by two elements.  */
    else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
        if (b_bits == 0x09 || b_bits == 0x08)
        else if (b_bits == 0x03)

        for (index_r = 0; index_r < bf_regs; index_r++)
          for (index_e = 0; index_e < f_elem; index_e++)
              for (loop_t = 0; loop_t < 2; loop_t++)
                  record_buf_mem[index_m++] = f_ebytes;
                  record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
                  thumb2_insn_r->mem_rec_count += 1;
              address = address + (2 * f_ebytes);

    /* VST3: three-way interleave.  */
    else if ((b_bits & 0x0e) == 0x04)
        for (index_e = 0; index_e < f_elem; index_e++)
            for (loop_t = 0; loop_t < 3; loop_t++)
                record_buf_mem[index_m++] = f_ebytes;
                record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
                thumb2_insn_r->mem_rec_count += 1;
            address = address + (3 * f_ebytes);

    /* VST4: four-way interleave.  */
    else if (!(b_bits & 0x0e))
        for (index_e = 0; index_e < f_elem; index_e++)
            for (loop_t = 0; loop_t < 4; loop_t++)
                record_buf_mem[index_m++] = f_ebytes;
                record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
                thumb2_insn_r->mem_rec_count += 1;
            address = address + (4 * f_ebytes);

        /* Single-element store: element size comes from bits 10-11
           rather than bits 6-7 here.  */
        uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);

        if (bft_size == 0x00)
        else if (bft_size == 0x01)
        else if (bft_size == 0x02)

        if (!(b_bits & 0x0b) || b_bits == 0x08)
          thumb2_insn_r->mem_rec_count = 1;

        else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
          thumb2_insn_r->mem_rec_count = 2;

        else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
          thumb2_insn_r->mem_rec_count = 3;

        else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
          thumb2_insn_r->mem_rec_count = 4;

        for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
            record_buf_mem[index_m] = f_ebytes;
            /* NOTE(review): this store reuses the same index as the
               length written just above, so the length entry is
               immediately overwritten by the address.  The multiple-
               element paths above use the record_buf_mem[index_m++]
               length/address pattern — this looks like a bug; verify
               against the MEM_ALLOC consumer.  */
            record_buf_mem[index_m] = address + (index_m * f_ebytes);

    /* Loads: record the destination SIMD registers instead of memory.
       Multiple-element forms first...  */
    if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
      thumb2_insn_r->reg_rec_count = 1;

    else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
      thumb2_insn_r->reg_rec_count = 2;

    else if ((b_bits & 0x0e) == 0x04)
      thumb2_insn_r->reg_rec_count = 3;

    else if (!(b_bits & 0x0e))
      thumb2_insn_r->reg_rec_count = 4;

      /* ...then single-element and all-lanes forms.  */
      if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
        thumb2_insn_r->reg_rec_count = 1;

      else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
        thumb2_insn_r->reg_rec_count = 2;

      else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
        thumb2_insn_r->reg_rec_count = 3;

      else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
        thumb2_insn_r->reg_rec_count = 4;

      for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
        record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;

    /* Rm != 0b1111 means the base register is written back.  */
    if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
        record_buf[index_r] = reg_rn;
        thumb2_insn_r->reg_rec_count += 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
/* Decodes thumb2 instruction type and invokes its record handler. */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
  uint32_t op, op1, op2;

  /* arm_insn already holds the halfword-swapped 32-bit encoding:
     op1 = bits 27-28 (top-level class), op2 = bits 20-26, op = bit 15.
     NOTE(review): op is not used in the visible code — presumably
     consumed by decode branches elided here; verify.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

      if (!(op2 & 0x64 ))
          /* Load/store multiple instruction. */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);

      else if (!((op2 & 0x64) ^ 0x04))
          /* Load/store (dual/exclusive) and table branch instruction. */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);

      else if (!((op2 & 0x20) ^ 0x20))
          /* Data-processing (shifted register). */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);

      else if (op2 & 0x40)
          /* Co-processor instructions. */
          return thumb2_record_coproc_insn (thumb2_insn_r);

  else if (op1 == 0x02)
          /* Branches and miscellaneous control instructions. */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);

      else if (op2 & 0x20)
          /* Data-processing (plain binary immediate) instruction. */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);

          /* Data-processing (modified immediate). */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);

  else if (op1 == 0x03)
      if (!(op2 & 0x71 ))
          /* Store single data item. */
          return thumb2_record_str_single_data (thumb2_insn_r);

      else if (!((op2 & 0x71) ^ 0x10))
          /* Advanced SIMD or structure load/store instructions. */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);

      else if (!((op2 & 0x67) ^ 0x01))
          /* Load byte, memory hints instruction. */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);

      else if (!((op2 & 0x67) ^ 0x03))
          /* Load halfword, memory hints instruction. */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);

      else if (!((op2 & 0x67) ^ 0x05))
          /* Load word instruction. */
          return thumb2_record_ld_word (thumb2_insn_r);

      else if (!((op2 & 0x70) ^ 0x20))
          /* Data-processing (register) instruction. */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);

      else if (!((op2 & 0x78) ^ 0x30))
          /* Multiply, multiply accumulate, abs diff instruction. */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);

      else if (!((op2 & 0x78) ^ 0x38))
          /* Long multiply, long multiply accumulate, and divide. */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);

      else if (op2 & 0x40)
          /* Co-processor instructions. */
          return thumb2_record_coproc_insn (thumb2_insn_r);
/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
   and positive val on failure. */
extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
  /* VLA sized by the caller: 2 bytes for a Thumb halfword, 4 bytes for
     an ARM or Thumb-2 instruction.  */
  gdb_byte buf[insn_size];

  memset (&buf[0], 0, insn_size);
  if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
  /* Decode using the byte order for code, which can differ from the
     data byte order on some ARM configurations.  */
  insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
      gdbarch_byte_order_for_code (insn_record->gdbarch));
/* Signature shared by all per-class record handlers.  */
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);

/* Decode arm/thumb insn depending on condition codes and opcodes; and
decode_insn (insn_decode_record *arm_record, record_type_t record_type,
             uint32_t insn_size)

  /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
  static const sti_arm_hdl_fp_t arm_handle_insn[8] =
    arm_record_data_proc_misc_ld_str,   /* 000. */
    arm_record_data_proc_imm,           /* 001. */
    arm_record_ld_st_imm_offset,        /* 010. */
    arm_record_ld_st_reg_offset,        /* 011. */
    arm_record_ld_st_multiple,          /* 100. */
    arm_record_b_bl,                    /* 101. */
    arm_record_asimd_vfp_coproc,        /* 110. */
    arm_record_coproc_data_proc         /* 111. */

  /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
  static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
    thumb_record_shift_add_sub,        /* 000. */
    thumb_record_add_sub_cmp_mov,      /* 001. */
    thumb_record_ld_st_reg_offset,     /* 010. */
    thumb_record_ld_st_imm_offset,     /* 011. */
    thumb_record_ld_st_stack,          /* 100. */
    thumb_record_misc,                 /* 101. */
    thumb_record_ldm_stm_swi,          /* 110. */
    thumb_record_branch                /* 111. */

  /* NOTE(review): ret is declared uint32_t although the comment (and the
     -1 comparison below) treat it as signed; handler return values of -1
     wrap to UINT32_MAX here.  The comparison still works because -1
     converts the same way, but a signed type would be clearer.  */
  uint32_t ret = 0;                /* return value: negative:failure   0:success. */
  uint32_t insn_id = 0;

  if (extract_arm_insn (arm_record, insn_size))
        /* Memory read failed: report the address that could not be
           fetched.  */
        printf_unfiltered (_("Process record: error reading memory at "
                             "addr %s len = %d.\n"),
                           paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
  else if (ARM_RECORD == record_type)
      arm_record->cond = bits (arm_record->arm_insn, 28, 31);
      insn_id = bits (arm_record->arm_insn, 25, 27);
      ret = arm_record_extension_space (arm_record);
      /* If this insn has fallen into extension space
         then we need not decode it anymore.  */
      if (ret != -1 && !INSN_RECORDED(arm_record))
          ret = arm_handle_insn[insn_id] (arm_record);
  else if (THUMB_RECORD == record_type)
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;
      insn_id = bits (arm_record->arm_insn, 13, 15);
      ret = thumb_handle_insn[insn_id] (arm_record);
  else if (THUMB2_RECORD == record_type)
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;

      /* Swap first half of 32bit thumb instruction with second half.  */
      arm_record->arm_insn
        = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);

      insn_id = thumb2_record_decode_insn_handler (arm_record);

      if (insn_id != ARM_RECORD_SUCCESS)
          arm_record_unsupported_insn (arm_record);

      /* Throw assertion.  */
      gdb_assert_not_reached ("not a valid instruction, could not decode");
/* Cleans up local record registers and memory allocations. */
deallocate_reg_mem (insn_decode_record *record)
  /* xfree is NULL-safe, so lists never populated by the decoder need
     no guard here.  */
  xfree (record->arm_regs);
  xfree (record->arm_mems);
13944 /* Parse the current instruction and record the values of the registers and
13945 memory that will be changed in current instruction to record_arch_list".
13946 Return -1 if something is wrong. */
13949 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13950 CORE_ADDR insn_addr)
13953 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13954 uint32_t no_of_rec = 0;
13955 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13956 ULONGEST t_bit = 0, insn_id = 0;
13958 ULONGEST u_regval = 0;
13960 insn_decode_record arm_record;
13962 memset (&arm_record, 0, sizeof (insn_decode_record));
13963 arm_record.regcache = regcache;
13964 arm_record.this_addr = insn_addr;
13965 arm_record.gdbarch = gdbarch;
13968 if (record_debug > 1)
13970 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13972 paddress (gdbarch, arm_record.this_addr));
13975 if (extract_arm_insn (&arm_record, 2))
13979 printf_unfiltered (_("Process record: error reading memory at "
13980 "addr %s len = %d.\n"),
13981 paddress (arm_record.gdbarch,
13982 arm_record.this_addr), 2);
13987 /* Check the insn, whether it is thumb or arm one. */
13989 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13990 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13993 if (!(u_regval & t_bit))
13995 /* We are decoding arm insn. */
13996 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14000 insn_id = bits (arm_record.arm_insn, 11, 15);
14001 /* is it thumb2 insn? */
14002 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14004 ret = decode_insn (&arm_record, THUMB2_RECORD,
14005 THUMB2_INSN_SIZE_BYTES);
14009 /* We are decoding thumb insn. */
14010 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
14016 /* Record registers. */
14017 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14018 if (arm_record.arm_regs)
14020 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14022 if (record_full_arch_list_add_reg
14023 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14027 /* Record memories. */
14028 if (arm_record.arm_mems)
14030 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14032 if (record_full_arch_list_add_mem
14033 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14034 arm_record.arm_mems[no_of_rec].len))
14039 if (record_full_arch_list_add_end ())
14044 deallocate_reg_mem (&arm_record);