* tree.h (edge_def): Remove.
+ * bt-load.c, cfgexpand.c, dwarf2out.c, emit-rtl.c, expr.c,
+ function.c, global.c, lcm.c, loop-invariant.c, optabs.c,
+ reorg.c, resource.c, tree-ssa-loop-ivopts.c, value-prof.c: Use
+ JUMP_P, LABEL_P, REG_P, MEM_P, NONJUMP_INSN_P, and INSN_P
+ where appropriate.
+
2005-04-28 Joseph S. Myers <joseph@codesourcery.com>
* c-typeck.c (build_compound_expr): Correct logic in last change.
def->bb, user->bb,
(flag_btr_bb_exclusive
|| user->insn != BB_END (def->bb)
- || GET_CODE (user->insn) != JUMP_INSN));
+ || !JUMP_P (user->insn)));
}
else
{
def->bb, user->bb,
(flag_btr_bb_exclusive
|| user->insn != BB_END (def->bb)
- || GET_CODE (user->insn) != JUMP_INSN));
+ || !JUMP_P (user->insn)));
btr = choose_btr (combined_btrs_live);
if (btr != -1)
if (profile_status == PROFILE_ABSENT)
return;
for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
- if (GET_CODE (last) == JUMP_INSN)
+ if (JUMP_P (last))
{
/* It is common to emit condjump-around-jump sequence when we don't know
how to reverse the conditional. Special case this. */
if (!any_condjump_p (last)
- || GET_CODE (NEXT_INSN (last)) != JUMP_INSN
+ || !JUMP_P (NEXT_INSN (last))
|| !simplejump_p (NEXT_INSN (last))
- || GET_CODE (NEXT_INSN (NEXT_INSN (last))) != BARRIER
- || GET_CODE (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))) != CODE_LABEL
+ || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
+ || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
|| NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
goto failed;
gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
REG_NOTES (last));
return;
}
- if (!last || GET_CODE (last) != JUMP_INSN || !any_condjump_p (last))
+ if (!last || !JUMP_P (last) || !any_condjump_p (last))
goto failed;
gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
REG_NOTES (last)
src = SET_SRC (expr);
dest = SET_DEST (expr);
- if (GET_CODE (src) == REG)
+ if (REG_P (src))
{
rtx rsi = reg_saved_in (src);
if (rsi)
continue;
else
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
}
continue;
else
{
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0,
XVECLEN (PATTERN (insn), 0) - 1);
the original object if it spans an even number of hard regs.
This special case is important for SCmode on 64-bit platforms
where the natural size of floating-point regs is 32-bit. */
- || (GET_CODE (cplx) == REG
+ || (REG_P (cplx)
&& REGNO (cplx) < FIRST_PSEUDO_REGISTER
&& hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
/* For MEMs we always try to make a "subreg", that is to adjust
the original object if it spans an even number of hard regs.
This special case is important for SCmode on 64-bit platforms
where the natural size of floating-point regs is 32-bit. */
- || (GET_CODE (cplx) == REG
+ || (REG_P (cplx)
&& REGNO (cplx) < FIRST_PSEUDO_REGISTER
&& hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
/* For MEMs we always try to make a "subreg", that is to adjust
/* If the DECL isn't in memory, then the DECL wasn't properly
marked TREE_ADDRESSABLE, which will be either a front-end
or a tree optimizer bug. */
- gcc_assert (GET_CODE (result) == MEM);
+ gcc_assert (MEM_P (result));
result = XEXP (result, 0);
/* ??? Is this needed anymore? */
/* Scan through all the insns, instantiating every virtual register still
present. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN)
+ if (INSN_P (insn))
{
instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
if (INSN_DELETED_P (insn))
continue;
instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
NULL_RTX, 0);
basic_block bb = data;
struct bb_info *bb_info = BB_INFO (bb);
- if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
+ if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
{
regno = REGNO (*x);
if (bitmap_bit_p (bb_info->killed, regno)
insert the final mode switch before the return value copy
to its hard register. */
if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
- && GET_CODE ((last_insn = BB_END (src_bb))) == INSN
+ && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
&& GET_CODE (PATTERN (last_insn)) == USE
&& GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
{
return;
dest = SET_DEST (set);
- if (GET_CODE (dest) != REG
+ if (!REG_P (dest)
|| HARD_REGISTER_P (dest))
simple = false;
/* Ensure that if old_val == mem, that we're not comparing
against an old value. */
- if (GET_CODE (old_val) == MEM)
+ if (MEM_P (old_val))
old_val = force_reg (mode, old_val);
subtarget = expand_val_compare_and_swap_1 (mem, old_val, new_val,
/* If the basic block reorder pass moves the return insn to
some other place try to locate it again and put our
end_of_function_label there. */
- while (insn && ! (GET_CODE (insn) == JUMP_INSN
+ while (insn && ! (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == RETURN)))
insn = PREV_INSN (insn);
if (insn)
static bool
return_insn_p (rtx insn)
{
- if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
+ if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
return true;
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
return false;
end_sequence ();
cost = seq_cost (seq);
- if (GET_CODE (rslt) == MEM)
+ if (MEM_P (rslt))
cost += address_cost (XEXP (rslt, 0), TYPE_MODE (type));
return cost;
{
rtx *mem = ret;
- if (GET_CODE (*expr) == MEM)
+ if (MEM_P (*expr))
{
*mem = *expr;
return 1;
histogram_value hist;
/* It only makes sense to look for memory references in ordinary insns. */
- if (GET_CODE (insn) != INSN)
+ if (!NONJUMP_INSN_P (insn))
return false;
if (!find_mem_reference (insn, &mem, &write))