1 /* Common code for ARM software single stepping support.
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20 #include "common-defs.h"
22 #include "common-regcache.h"
24 #include "arm-get-next-pcs.h"
26 /* See arm-get-next-pcs.h. */
29 arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
30 struct arm_get_next_pcs_ops *ops,
32 int byte_order_for_code,
33 int has_thumb2_breakpoint,
34 struct regcache *regcache)
37 self->byte_order = byte_order;
38 self->byte_order_for_code = byte_order_for_code;
39 self->has_thumb2_breakpoint = has_thumb2_breakpoint;
40 self->regcache = regcache;
43 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
44 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
45 is found, attempt to step through it. The end of the sequence address is
46 added to the next_pcs list. */
48 static VEC (CORE_ADDR) *
49 thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
/* NOTE(review): this chunk is an extraction with the original line
   numbers fused into the text and lines dropped (gaps in the embedded
   numbering: the opening brace, the "loc" initialization and advances,
   several early returns and closing braces are missing).  Code is left
   byte-identical; only comments are added.  */
51 int byte_order_for_code = self->byte_order_for_code;
52 CORE_ADDR breaks[2] = {-1, -1};
53 CORE_ADDR pc = regcache_read_pc (self->regcache);
55 unsigned short insn1, insn2;
58 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
59 const int atomic_sequence_length = 16; /* Instruction sequence length. */
60 ULONGEST status, itstate;
61 VEC (CORE_ADDR) *next_pcs = NULL;
63 /* We currently do not support atomic sequences within an IT block. */
64 status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
/* Reassemble the CPSR's split IT bits ([15:10] and [26:25]) into the
   contiguous ITSTATE[7:0] layout; nonzero means an IT block is active
   (the bail-out test itself is among the dropped lines).  */
65 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
69 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
70 insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
73 if (thumb_insn_size (insn1) != 4)
76 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
/* First halfword 0xe850 matches the word-sized LDREX; 0xe8d0 with op
   bits 0x0040 in the second halfword covers the byte/halfword/
   doubleword exclusive loads -- presumably; confirm against the
   ARM ARM Thumb-2 encodings.  */
79 if (!((insn1 & 0xfff0) == 0xe850
80 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
83 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
85 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
87 insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code);
90 if (thumb_insn_size (insn1) != 4)
92 /* Assume that there is at most one conditional branch in the
93 atomic sequence. If a conditional branch is found, put a
94 breakpoint in its destination address. */
/* 16-bit conditional branch B<cond> (0xDxxx); cond == 0xF is excluded
   since that encoding is SVC, not a branch.  */
95 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
97 if (last_breakpoint > 0)
98 return NULL; /* More than one conditional branch found,
99 fallback to the standard code. */
101 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
105 /* We do not support atomic sequences that use any *other*
106 instructions but conditional branches to change the PC.
107 Fall back to standard code to avoid losing control of
109 else if (thumb_instruction_changes_pc (insn1))
114 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
118 /* Assume that there is at most one conditional branch in the
119 atomic sequence. If a conditional branch is found, put a
120 breakpoint in its destination address. */
121 if ((insn1 & 0xf800) == 0xf000
122 && (insn2 & 0xd000) == 0x8000
123 && (insn1 & 0x0380) != 0x0380)
125 int sign, j1, j2, imm1, imm2;
128 sign = sbits (insn1, 10, 10);
129 imm1 = bits (insn1, 0, 5);
130 imm2 = bits (insn2, 0, 10);
131 j1 = bit (insn2, 13);
132 j2 = bit (insn2, 11);
134 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
135 offset += (imm1 << 12) + (imm2 << 1);
137 if (last_breakpoint > 0)
/* NOTE(review): "return 0" here vs "return NULL" in the 16-bit branch
   case above -- same pointer value, but the spelling is inconsistent;
   NULL would match the rest of the function.  */
138 return 0; /* More than one conditional branch found,
139 fallback to the standard code. */
141 breaks[1] = loc + offset;
145 /* We do not support atomic sequences that use any *other*
146 instructions but conditional branches to change the PC.
147 Fall back to standard code to avoid losing control of
149 else if (thumb2_instruction_changes_pc (insn1, insn2))
152 /* If we find a strex{,b,h,d}, we're done. */
153 if ((insn1 & 0xfff0) == 0xe840
154 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
159 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
160 if (insn_count == atomic_sequence_length)
163 /* Insert a breakpoint right after the end of the atomic sequence. */
166 /* Check for duplicated breakpoints. Check also for a breakpoint
167 placed (branch instruction's destination) anywhere in sequence. */
/* (The head of this condition was dropped in extraction; this tail
   discards the branch-target breakpoint when it duplicates breaks[0]
   or lands inside the stepped-over sequence.)  */
169 && (breaks[1] == breaks[0]
170 || (breaks[1] >= pc && breaks[1] < loc)))
173 /* Adds the breakpoints to the list to be inserted. */
174 for (index = 0; index <= last_breakpoint; index++)
/* Addresses are tagged as Thumb so the caller chooses the Thumb
   breakpoint flavor.  */
175 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (breaks[index]));
180 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
181 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
182 is found, attempt to step through it. The end of the sequence address is
183 added to the next_pcs list. */
185 static VEC (CORE_ADDR) *
186 arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
/* NOTE(review): extraction damage here too -- fused line numbers and
   dropped lines (opening brace, "insn"/"loc"/"insn_count"/"index"
   declarations, loc advances, early returns, closing braces).  Code is
   left byte-identical; only comments are added.  */
188 int byte_order_for_code = self->byte_order_for_code;
189 CORE_ADDR breaks[2] = {-1, -1};
190 CORE_ADDR pc = regcache_read_pc (self->regcache);
195 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
196 const int atomic_sequence_length = 16; /* Instruction sequence length. */
197 VEC (CORE_ADDR) *next_pcs = NULL;
199 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
200 Note that we do not currently support conditionally executed atomic
202 insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);
/* 0xe1900090 under mask 0xff9000f0 matches the ARM-state LDREX family
   (the relaxed bits select the B/H/D size variants) with condition AL
   -- presumably; confirm against the ARM ARM A32 encodings.  */
205 if ((insn & 0xff9000f0) != 0xe1900090)
208 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
210 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
212 insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);
216 /* Assume that there is at most one conditional branch in the atomic
217 sequence. If a conditional branch is found, put a breakpoint in
218 its destination address. */
/* Opcode field 0xa is B<cond> (branch without link).  */
219 if (bits (insn, 24, 27) == 0xa)
221 if (last_breakpoint > 0)
222 return NULL; /* More than one conditional branch found, fallback
223 to the standard single-step code. */
/* loc has already been advanced past this insn, hence "loc - 4" as
   the branch's own address for BranchDest.  */
225 breaks[1] = BranchDest (loc - 4, insn);
229 /* We do not support atomic sequences that use any *other* instructions
230 but conditional branches to change the PC. Fall back to standard
231 code to avoid losing control of execution. */
232 else if (arm_instruction_changes_pc (insn))
235 /* If we find a strex{,b,h,d}, we're done. */
236 if ((insn & 0xff9000f0) == 0xe1800090)
240 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
241 if (insn_count == atomic_sequence_length)
244 /* Insert a breakpoint right after the end of the atomic sequence. */
247 /* Check for duplicated breakpoints. Check also for a breakpoint
248 placed (branch instruction's destination) anywhere in sequence. */
/* (Condition head dropped in extraction; tail suppresses a duplicate
   or in-sequence branch-target breakpoint.)  */
250 && (breaks[1] == breaks[0]
251 || (breaks[1] >= pc && breaks[1] < loc)))
254 /* Adds the breakpoints to the list to be inserted. */
255 for (index = 0; index <= last_breakpoint; index++)
/* ARM-state addresses: no Thumb tagging here, unlike the Thumb variant
   above.  */
256 VEC_safe_push (CORE_ADDR, next_pcs, breaks[index]);
261 /* Find the next possible PCs for thumb mode. */
263 static VEC (CORE_ADDR) *
264 thumb_get_next_pcs_raw (struct arm_get_next_pcs *self)
/* NOTE(review): extraction damage throughout this function -- original
   line numbers fused into the text, and many structural lines dropped
   (braces, else arms, some declarations such as "cond_negated", "sp",
   "rn", "base", and parts of multi-line statements).  Code is left
   byte-identical; only comments are added.  */
266 int byte_order = self->byte_order;
267 int byte_order_for_code = self->byte_order_for_code;
268 CORE_ADDR pc = regcache_read_pc (self->regcache);
269 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
270 unsigned short inst1;
271 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
272 unsigned long offset;
273 ULONGEST status, itstate;
274 struct regcache *regcache = self->regcache;
275 VEC (CORE_ADDR) * next_pcs = NULL;
277 nextpc = MAKE_THUMB_ADDR (nextpc);
278 pc_val = MAKE_THUMB_ADDR (pc_val);
280 inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
282 /* Thumb-2 conditional execution support. There are eight bits in
283 the CPSR which describe conditional execution state. Once
284 reconstructed (they're in a funny order), the low five bits
285 describe the low bit of the condition for each instruction and
286 how many instructions remain. The high three bits describe the
287 base condition. One of the low four bits will be set if an IT
288 block is active. These bits read as zero on earlier
290 status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
/* Reassemble CPSR IT bits [15:10] and [26:25] into ITSTATE[7:0].  */
291 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
293 /* If-Then handling. On GNU/Linux, where this routine is used, we
294 use an undefined instruction as a breakpoint. Unlike BKPT, IT
295 can disable execution of the undefined instruction. So we might
296 miss the breakpoint if we set it on a skipped conditional
297 instruction. Because conditional instructions can change the
298 flags, affecting the execution of further instructions, we may
299 need to set two breakpoints. */
301 if (self->has_thumb2_breakpoint)
/* 0xBFxx with a nonzero low nibble is the IT instruction itself.  */
303 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
305 /* An IT instruction. Because this instruction does not
306 modify the flags, we can accurately predict the next
307 executed instruction. */
308 itstate = inst1 & 0x00ff;
309 pc += thumb_insn_size (inst1);
311 while (itstate != 0 && ! condition_true (itstate >> 4, status))
313 inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code);
314 pc += thumb_insn_size (inst1);
315 itstate = thumb_advance_itstate (itstate);
318 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
321 else if (itstate != 0)
323 /* We are in a conditional block. Check the condition. */
324 if (! condition_true (itstate >> 4, status))
326 /* Advance to the next executed instruction. */
327 pc += thumb_insn_size (inst1);
328 itstate = thumb_advance_itstate (itstate);
330 while (itstate != 0 && ! condition_true (itstate >> 4, status))
332 inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
334 pc += thumb_insn_size (inst1);
335 itstate = thumb_advance_itstate (itstate);
338 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
341 else if ((itstate & 0x0f) == 0x08)
343 /* This is the last instruction of the conditional
344 block, and it is executed. We can handle it normally
345 because the following instruction is not conditional,
346 and we must handle it normally because it is
347 permitted to branch. Fall through. */
353 /* There are conditional instructions after this one.
354 If this instruction modifies the flags, then we can
355 not predict what the next executed instruction will
356 be. Fortunately, this instruction is architecturally
357 forbidden to branch; we know it will fall through.
358 Start by skipping past it. */
359 pc += thumb_insn_size (inst1);
360 itstate = thumb_advance_itstate (itstate);
362 /* Set a breakpoint on the following instruction. */
363 gdb_assert ((itstate & 0x0f) != 0);
364 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
366 cond_negated = (itstate >> 4) & 1;
368 /* Skip all following instructions with the same
369 condition. If there is a later instruction in the IT
370 block with the opposite condition, set the other
371 breakpoint there. If not, then set a breakpoint on
372 the instruction after the IT block. */
375 inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
376 pc += thumb_insn_size (inst1);
377 itstate = thumb_advance_itstate (itstate);
379 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
381 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
/* Without a Thumb-2 breakpoint instruction, a skipped conditional
   instruction would still trap on BKPT, so only the simple
   condition check below is needed.  */
387 else if (itstate & 0x0f)
389 /* We are in a conditional block. Check the condition. */
390 int cond = itstate >> 4;
392 if (! condition_true (cond, status))
394 /* Advance to the next instruction. All the 32-bit
395 instructions share a common prefix. */
396 VEC_safe_push (CORE_ADDR, next_pcs,
397 MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
402 /* Otherwise, handle the instruction normally. */
405 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
409 /* Fetch the saved PC from the stack. It's stored above
410 all of the other registers. */
411 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
412 sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
413 nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
415 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
417 unsigned long cond = bits (inst1, 8, 11);
418 if (cond == 0x0f) /* 0x0f = SWI */
/* System call: let the target-specific hook compute the PC after
   the kernel returns (e.g. sigreturn handling).  */
420 nextpc = self->ops->syscall_next_pc (self, pc);
422 else if (cond != 0x0f && condition_true (cond, status))
423 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
425 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
427 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
429 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
431 unsigned short inst2;
432 inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);
434 /* Default to the next instruction. */
436 nextpc = MAKE_THUMB_ADDR (nextpc);
438 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
440 /* Branches and miscellaneous control instructions. */
/* B (T4) / BL / BLX immediate forms.  */
442 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
445 int j1, j2, imm1, imm2;
447 imm1 = sbits (inst1, 0, 10);
448 imm2 = bits (inst2, 0, 10);
449 j1 = bit (inst2, 13);
450 j2 = bit (inst2, 11);
452 offset = ((imm1 << 12) + (imm2 << 1));
/* I1/I2 = NOT(J1/J2 EOR S): since imm1 was sign-extended, XORing
   the inverted J bits into bits 22-23 reconstructs the full
   offset -- the standard Thumb-2 BL/BLX trick; confirm against
   the ARM ARM encoding description.  */
453 offset ^= ((!j2) << 22) | ((!j1) << 23);
455 nextpc = pc_val + offset;
456 /* For BLX make sure to clear the low bits. */
457 if (bit (inst2, 12) == 0)
458 nextpc = nextpc & 0xfffffffc;
460 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
462 /* SUBS PC, LR, #imm8. */
463 nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
464 nextpc -= inst2 & 0x00ff;
466 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
468 /* Conditional branch. */
469 if (condition_true (bits (inst1, 6, 9), status))
471 int sign, j1, j2, imm1, imm2;
473 sign = sbits (inst1, 10, 10);
474 imm1 = bits (inst1, 0, 5);
475 imm2 = bits (inst2, 0, 10);
476 j1 = bit (inst2, 13);
477 j2 = bit (inst2, 11);
479 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
480 offset += (imm1 << 12) + (imm2 << 1);
482 nextpc = pc_val + offset;
486 else if ((inst1 & 0xfe50) == 0xe810)
488 /* Load multiple or RFE. */
489 int rn, offset, load_pc = 1;
491 rn = bits (inst1, 0, 3);
/* The bit(7)/bit(8) pairs distinguish the LDMIA/LDMDB and
   RFEIA/RFEDB addressing variants; several arms were lost in
   extraction.  */
492 if (bit (inst1, 7) && !bit (inst1, 8))
495 if (!bit (inst2, 15))
497 offset = bitcount (inst2) * 4 - 4;
499 else if (!bit (inst1, 7) && bit (inst1, 8))
502 if (!bit (inst2, 15))
506 else if (bit (inst1, 7) && bit (inst1, 8))
511 else if (!bit (inst1, 7) && !bit (inst1, 8))
521 CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
522 nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
525 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
527 /* MOV PC or MOVS PC. */
528 nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
529 nextpc = MAKE_THUMB_ADDR (nextpc);
531 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
/* LDR PC, [...]: compute the effective address from Rn plus the
   immediate/register offset forms below, then load the new PC.  */
537 rn = bits (inst1, 0, 3);
538 base = regcache_raw_get_unsigned (regcache, rn);
539 if (rn == ARM_PC_REGNUM)
541 base = (base + 4) & ~(CORE_ADDR) 0x3;
543 base += bits (inst2, 0, 11);
545 base -= bits (inst2, 0, 11);
547 else if (bit (inst1, 7))
548 base += bits (inst2, 0, 11);
549 else if (bit (inst2, 11))
554 base += bits (inst2, 0, 7);
556 base -= bits (inst2, 0, 7);
559 else if ((inst2 & 0x0fc0) == 0x0000)
561 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
562 base += regcache_raw_get_unsigned (regcache, rm) << shift;
570 = self->ops->read_mem_uint (base, 4, byte_order);
/* TBB [Rn, Rm]: table of byte offsets, doubled to halfword units.  */
572 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
575 CORE_ADDR tbl_reg, table, offset, length;
577 tbl_reg = bits (inst1, 0, 3);
579 table = pc + 4; /* Regcache copy of PC isn't right yet. */
581 table = regcache_raw_get_unsigned (regcache, tbl_reg);
583 offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
584 length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
585 nextpc = pc_val + length;
/* TBH [Rn, Rm, LSL #1]: table of halfword offsets.  */
587 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
590 CORE_ADDR tbl_reg, table, offset, length;
592 tbl_reg = bits (inst1, 0, 3);
594 table = pc + 4; /* Regcache copy of PC isn't right yet. */
596 table = regcache_raw_get_unsigned (regcache, tbl_reg);
598 offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
599 length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
600 nextpc = pc_val + length;
603 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
605 if (bits (inst1, 3, 6) == 0x0f)
/* BX PC: interworking back to ARM state, so strip the Thumb bit.  */
606 nextpc = UNMAKE_THUMB_ADDR (pc_val);
608 nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
610 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
612 if (bits (inst1, 3, 6) == 0x0f)
615 nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
617 nextpc = MAKE_THUMB_ADDR (nextpc);
/* CBZ/CBNZ Rn, <label>.  */
619 else if ((inst1 & 0xf500) == 0xb100)
622 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
623 ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));
/* bit 11 set means CBNZ (branch if nonzero); clear means CBZ.  */
625 if (bit (inst1, 11) && reg != 0)
626 nextpc = pc_val + imm;
627 else if (!bit (inst1, 11) && reg == 0)
628 nextpc = pc_val + imm;
631 VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
636 /* Get the raw next possible addresses. PC in next_pcs is the current program
637 counter, which is assumed to be executing in ARM mode.
639 The values returned have the execution state of the next instruction
640 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
641 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
642 address in GDB and arm_addr_bits_remove in GDBServer. */
644 static VEC (CORE_ADDR) *
645 arm_get_next_pcs_raw (struct arm_get_next_pcs *self)
/* NOTE(review): extraction damage -- fused line numbers and dropped
   lines (braces, "nextpc"/"rn"/"c"/"base"/"offset"/"rn_val"
   declarations, switch case labels and break statements, else arms).
   Code is left byte-identical; only comments are added.  */
647 int byte_order = self->byte_order;
648 int byte_order_for_code = self->byte_order_for_code;
649 unsigned long pc_val;
650 unsigned long this_instr = 0;
651 unsigned long status;
653 struct regcache *regcache = self->regcache;
654 CORE_ADDR pc = regcache_read_pc (self->regcache);
655 VEC (CORE_ADDR) *next_pcs = NULL;
657 pc_val = (unsigned long) pc;
658 this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);
660 status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
661 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
/* Condition field 0xF (INST_NV): unconditional space, which holds
   BLX (immediate) among others.  */
663 if (bits (this_instr, 28, 31) == INST_NV)
664 switch (bits (this_instr, 24, 27))
669 /* Branch with Link and change to Thumb. */
670 nextpc = BranchDest (pc, this_instr);
/* The H bit (24) supplies the halfword offset of the Thumb target.  */
671 nextpc |= bit (this_instr, 24) << 1;
672 nextpc = MAKE_THUMB_ADDR (nextpc);
678 /* Coprocessor register transfer. */
679 if (bits (this_instr, 12, 15) == 15)
680 error (_("Invalid update to pc in instruction"));
683 else if (condition_true (bits (this_instr, 28, 31), status))
685 switch (bits (this_instr, 24, 27))
688 case 0x1: /* data processing */
692 unsigned long operand1, operand2, result = 0;
/* Rd != PC: the instruction cannot change the PC; keep the
   fall-through nextpc.  */
696 if (bits (this_instr, 12, 15) != 15)
699 if (bits (this_instr, 22, 25) == 0
700 && bits (this_instr, 4, 7) == 9) /* multiply */
701 error (_("Invalid update to pc in instruction"));
703 /* BX <reg>, BLX <reg> */
704 if (bits (this_instr, 4, 27) == 0x12fff1
705 || bits (this_instr, 4, 27) == 0x12fff3)
707 rn = bits (this_instr, 0, 3);
708 nextpc = ((rn == ARM_PC_REGNUM)
710 : regcache_raw_get_unsigned (regcache, rn));
712 VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
716 /* Multiply into PC. */
717 c = (status & FLAG_C) ? 1 : 0;
718 rn = bits (this_instr, 16, 19);
719 operand1 = ((rn == ARM_PC_REGNUM)
721 : regcache_raw_get_unsigned (regcache, rn));
/* Bit 25 set: operand 2 is a rotated 8-bit immediate.  */
723 if (bit (this_instr, 25))
725 unsigned long immval = bits (this_instr, 0, 7);
726 unsigned long rotate = 2 * bits (this_instr, 8, 11);
727 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
730 else /* operand 2 is a shifted register. */
731 operand2 = shifted_reg_val (regcache, this_instr, c,
/* Emulate the ALU opcode to compute the value written to PC
   (case labels were dropped in extraction; arms follow the
   AND/EOR/SUB/RSB/ADD/ADC/SBC/RSC/.../ORR/MOV/BIC/MVN order).  */
734 switch (bits (this_instr, 21, 24))
737 result = operand1 & operand2;
741 result = operand1 ^ operand2;
745 result = operand1 - operand2;
749 result = operand2 - operand1;
753 result = operand1 + operand2;
757 result = operand1 + operand2 + c;
761 result = operand1 - operand2 + c;
765 result = operand2 - operand1 + c;
771 case 0xb: /* tst, teq, cmp, cmn */
772 result = (unsigned long) nextpc;
776 result = operand1 | operand2;
780 /* Always step into a function. */
785 result = operand1 & ~operand2;
792 nextpc = self->ops->addr_bits_remove (self, result);
797 case 0x5: /* data transfer */
800 if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
802 /* Media instructions and architecturally undefined
/* Bit 20 set: a load; only loads into Rd == PC matter here.  */
807 if (bit (this_instr, 20))
810 if (bits (this_instr, 12, 15) == 15)
816 if (bit (this_instr, 22))
817 error (_("Invalid update to pc in instruction"));
819 /* byte write to PC */
820 rn = bits (this_instr, 16, 19);
821 base = ((rn == ARM_PC_REGNUM)
823 : regcache_raw_get_unsigned (regcache, rn));
/* Bit 24: pre-indexed addressing -- apply the offset before the
   load; bit 23 selects add vs subtract.  */
825 if (bit (this_instr, 24))
828 int c = (status & FLAG_C) ? 1 : 0;
829 unsigned long offset =
830 (bit (this_instr, 25)
831 ? shifted_reg_val (regcache, this_instr, c,
833 : bits (this_instr, 0, 11));
835 if (bit (this_instr, 23))
841 = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
848 case 0x9: /* block transfer */
/* LDM with PC in the register list: the new PC is the last word
   transferred.  */
849 if (bit (this_instr, 20))
852 if (bit (this_instr, 15))
856 CORE_ADDR rn_val_offset = 0;
858 = regcache_raw_get_unsigned (regcache,
859 bits (this_instr, 16, 19));
861 if (bit (this_instr, 23))
864 unsigned long reglist = bits (this_instr, 0, 14);
865 offset = bitcount (reglist) * 4;
866 if (bit (this_instr, 24)) /* pre */
869 else if (bit (this_instr, 24))
872 rn_val_offset = rn_val + offset;
873 nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
879 case 0xb: /* branch & link */
880 case 0xa: /* branch */
882 nextpc = BranchDest (pc, this_instr);
888 case 0xe: /* coproc ops */
/* SWI/SVC: delegate to the target hook for the post-syscall PC.  */
892 nextpc = self->ops->syscall_next_pc (self, pc);
897 error (_("Bad bit-field extraction"));
902 VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
906 /* See arm-get-next-pcs.h. */
909 arm_get_next_pcs (struct arm_get_next_pcs *self)
911 VEC (CORE_ADDR) *next_pcs = NULL;
913 if (self->ops->is_thumb (self))
915 next_pcs = thumb_deal_with_atomic_sequence_raw (self);
916 if (next_pcs == NULL)
917 next_pcs = thumb_get_next_pcs_raw (self);
921 next_pcs = arm_deal_with_atomic_sequence_raw (self);
922 if (next_pcs == NULL)
923 next_pcs = arm_get_next_pcs_raw (self);
926 if (self->ops->fixup != NULL)
931 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, nextpc); i++)
933 nextpc = self->ops->fixup (self, nextpc);
934 VEC_replace (CORE_ADDR, next_pcs, i, nextpc);