+2014-08-22  David Malcolm  <dmalcolm@redhat.com>
+
+ * ifcvt.c (count_bb_insns): Strengthen local "insn" from rtx to
+ rtx_insn *.
+ (cheap_bb_rtx_cost_p): Likewise.
+ (first_active_insn): Likewise for return type and local "insn".
+ (last_active_insn): Likewise for return type and locals "insn",
+ "head".
+ (struct noce_if_info): Likewise for fields "jump", "insn_a",
+ "insn_b".
+ (end_ifcvt_sequence): Likewise for return type and locals "insn",
+ "seq".
+ (noce_try_move): Likewise for local "seq".
+ (noce_try_store_flag): Likewise.
+ (noce_try_store_flag_constants): Likewise.
+ (noce_try_addcc): Likewise.
+ (noce_try_store_flag_mask): Likewise.
+ (noce_try_cmove): Likewise.
+ (noce_try_minmax): Likewise.
+ (noce_try_abs): Likewise.
+ (noce_try_sign_mask): Likewise.
+ (noce_try_bitop): Likewise.
+ (noce_can_store_speculate_p): Likewise for local "insn".
+ (noce_process_if_block): Likewise for locals "insn_a", "insn_b",
+ seq".
+ (check_cond_move_block): Likewise for local "insn".
+ (cond_move_convert_if_block): Likewise.
+ (cond_move_process_if_block): Likewise for locals "seq",
+ "loc_insn".
+ (noce_find_if_block): Likewise for local "jump".
+ (merge_if_block): Likewise for local "last".
+ (block_jumps_and_fallthru_p): Likewise for locals "insn", "end".
+ (find_cond_trap): Likewise for locals "trap", "jump", "newjump".
+ (block_has_only_trap): Likewise for return type and local "trap".
+ (find_if_case_1): Likewise for local "jump".
+ (dead_or_predicable): Likewise for locals "head", "end", "jump",
+ "insn".
+
2014-08-22  David Malcolm  <dmalcolm@redhat.com>

* hw-doloop.h (struct hwloop_info_d): Strengthen fields
"last_insn", "loop_end" from rtx to rtx_insn *.
/* Forward references. */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
-static rtx first_active_insn (basic_block);
-static rtx last_active_insn (basic_block, int);
+static rtx_insn *first_active_insn (basic_block);
+static rtx_insn *last_active_insn (basic_block, int);
static rtx find_active_insn_before (basic_block, rtx);
static rtx find_active_insn_after (basic_block, rtx);
static basic_block block_fallthru (basic_block);
static int dead_or_predicable (basic_block, basic_block, basic_block,
edge, int);
static void noce_emit_move_insn (rtx, rtx);
-static rtx block_has_only_trap (basic_block);
+static rtx_insn *block_has_only_trap (basic_block);
\f
/* Count the number of non-jump active insns in BB. */
count_bb_insns (const_basic_block bb)
{
int count = 0;
- rtx insn = BB_HEAD (bb);
+ rtx_insn *insn = BB_HEAD (bb);
while (1)
{
cheap_bb_rtx_cost_p (const_basic_block bb, int scale, int max_cost)
{
int count = 0;
- rtx insn = BB_HEAD (bb);
+ rtx_insn *insn = BB_HEAD (bb);
bool speed = optimize_bb_for_speed_p (bb);
/* Set scale to REG_BR_PROB_BASE to void the identical scaling
/* Return the first non-jump active insn in the basic block. */
-static rtx
+static rtx_insn *
first_active_insn (basic_block bb)
{
- rtx insn = BB_HEAD (bb);
+ rtx_insn *insn = BB_HEAD (bb);
if (LABEL_P (insn))
{
if (insn == BB_END (bb))
- return NULL_RTX;
+ return NULL;
insn = NEXT_INSN (insn);
}
while (NOTE_P (insn) || DEBUG_INSN_P (insn))
{
if (insn == BB_END (bb))
- return NULL_RTX;
+ return NULL;
insn = NEXT_INSN (insn);
}
if (JUMP_P (insn))
- return NULL_RTX;
+ return NULL;
return insn;
}
/* Return the last non-jump active insn in the basic block. */
-static rtx
+static rtx_insn *
last_active_insn (basic_block bb, int skip_use_p)
{
- rtx insn = BB_END (bb);
- rtx head = BB_HEAD (bb);
+ rtx_insn *insn = BB_END (bb);
+ rtx_insn *head = BB_HEAD (bb);
while (NOTE_P (insn)
       || JUMP_P (insn)
       || DEBUG_INSN_P (insn)
       || (skip_use_p
           && NONJUMP_INSN_P (insn)
           && GET_CODE (PATTERN (insn)) == USE))
{
if (insn == head)
- return NULL_RTX;
+ return NULL;
insn = PREV_INSN (insn);
}
if (LABEL_P (insn))
- return NULL_RTX;
+ return NULL;
return insn;
}
basic_block test_bb, then_bb, else_bb, join_bb;
/* The jump that ends TEST_BB. */
- rtx jump;
+ rtx_insn *jump;
/* The jump condition. */
rtx cond;
COND_EARLIEST, or NULL_RTX. In the former case, the insn
operands are still valid, as if INSN_B was moved down below
the jump. */
- rtx insn_a, insn_b;
+ rtx_insn *insn_a, *insn_b;
/* The SET_SRC of INSN_A and INSN_B. */
rtx a, b;
that are instructions are unshared, recognizable non-jump insns.
On failure, this function returns NULL. */
-static rtx
+static rtx_insn *
end_ifcvt_sequence (struct noce_if_info *if_info)
{
- rtx insn;
- rtx seq = get_insns ();
+ rtx_insn *insn;
+ rtx_insn *seq = get_insns ();
set_used_flags (if_info->x);
set_used_flags (if_info->cond);
for (insn = seq; insn; insn = NEXT_INSN (insn))
if (JUMP_P (insn)
|| recog_memoized (insn) == -1)
- return NULL_RTX;
+ return NULL;
return seq;
}
{
rtx cond = if_info->cond;
enum rtx_code code = GET_CODE (cond);
- rtx y, seq;
+ rtx y;
+ rtx_insn *seq;
if (code != NE && code != EQ)
return FALSE;
noce_try_store_flag (struct noce_if_info *if_info)
{
int reversep;
- rtx target, seq;
+ rtx target;
+ rtx_insn *seq;
if (CONST_INT_P (if_info->b)
&& INTVAL (if_info->b) == STORE_FLAG_VALUE
static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
- rtx target, seq;
+ rtx target;
+ rtx_insn *seq;
int reversep;
HOST_WIDE_INT itrue, ifalse, diff, tmp;
int normalize, can_reverse;
static int
noce_try_addcc (struct noce_if_info *if_info)
{
- rtx target, seq;
+ rtx target;
+ rtx_insn *seq;
int subtract, normalize;
if (GET_CODE (if_info->a) == PLUS
static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
- rtx target, seq;
+ rtx target;
+ rtx_insn *seq;
int reversep;
reversep = 0;
noce_try_cmove (struct noce_if_info *if_info)
{
enum rtx_code code;
- rtx target, seq;
+ rtx target;
+ rtx_insn *seq;
if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
&& (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
static int
noce_try_minmax (struct noce_if_info *if_info)
{
- rtx cond, earliest, target, seq;
+ rtx cond, earliest, target;
+ rtx_insn *seq;
enum rtx_code code, op;
int unsignedp;
static int
noce_try_abs (struct noce_if_info *if_info)
{
- rtx cond, earliest, target, seq, a, b, c;
+ rtx cond, earliest, target, a, b, c;
+ rtx_insn *seq;
int negate;
bool one_cmpl = false;
static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
- rtx cond, t, m, c, seq;
+ rtx cond, t, m, c;
+ rtx_insn *seq;
enum machine_mode mode;
enum rtx_code code;
bool t_unconditional;
static int
noce_try_bitop (struct noce_if_info *if_info)
{
- rtx cond, x, a, result, seq;
+ rtx cond, x, a, result;
+ rtx_insn *seq;
enum machine_mode mode;
enum rtx_code code;
int bitnum;
dominator != NULL;
dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
{
- rtx insn;
+ rtx_insn *insn;
FOR_BB_INSNS (dominator, insn)
{
basic_block join_bb = if_info->join_bb; /* JOIN */
rtx jump = if_info->jump;
rtx cond = if_info->cond;
- rtx insn_a, insn_b;
+ rtx_insn *insn_a, *insn_b;
rtx set_a, set_b;
rtx orig_x, x, a, b;
|| reg_overlap_mentioned_p (x, cond)
|| reg_overlap_mentioned_p (x, a)
|| modified_between_p (x, insn_b, jump))
- insn_b = set_b = NULL_RTX;
+ {
+ insn_b = NULL;
+ set_b = NULL_RTX;
+ }
}
/* If x has side effects then only the if-then-else form is safe to
if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
remove_note (insn_b, note);
- insn_b = NULL_RTX;
+ insn_b = NULL;
}
/* If we have "x = b; if (...) x = a;", and x has side-effects, then
x must be executed twice. */
if (!else_bb && set_b)
{
- insn_b = set_b = NULL_RTX;
+ insn_b = NULL;
+ set_b = NULL_RTX;
b = orig_x;
goto retry;
}
/* If we used a temporary, fix it up now. */
if (orig_x != x)
{
- rtx seq;
+ rtx_insn *seq;
start_sequence ();
noce_emit_move_insn (orig_x, x);
vec<rtx> *regs,
rtx cond)
{
- rtx insn;
+ rtx_insn *insn;
/* We can only handle simple jumps at the end of the basic block.
It is almost impossible to update the CFG otherwise. */
bool else_block_p)
{
enum rtx_code code;
- rtx insn, cond_arg0, cond_arg1;
+ rtx_insn *insn;
+ rtx cond_arg0, cond_arg1;
code = GET_CODE (cond);
cond_arg0 = XEXP (cond, 0);
basic_block join_bb = if_info->join_bb;
rtx jump = if_info->jump;
rtx cond = if_info->cond;
- rtx seq, loc_insn;
+ rtx_insn *seq, *loc_insn;
rtx reg;
int c;
vec<rtx> then_regs = vNULL;
{
basic_block then_bb, else_bb, join_bb;
bool then_else_reversed = false;
- rtx jump, cond;
+ rtx_insn *jump;
+ rtx cond;
rtx cond_earliest;
struct noce_if_info if_info;
if (! join_bb)
{
- rtx last = BB_END (combo_bb);
+ rtx_insn *last = BB_END (combo_bb);
/* The outgoing edge for the current COMBO block should already
be correct. Verify this. */
edge cur_edge;
int fallthru_p = FALSE;
int jump_p = FALSE;
- rtx insn;
- rtx end;
+ rtx_insn *insn;
+ rtx_insn *end;
int n_insns = 0;
edge_iterator ei;
basic_block then_bb = then_edge->dest;
basic_block else_bb = else_edge->dest;
basic_block other_bb, trap_bb;
- rtx trap, jump, cond, cond_earliest, seq;
+ rtx_insn *trap, *jump;
+ rtx cond, cond_earliest, seq;
enum rtx_code code;
/* Locate the block with the trap instruction. */
single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
else if (trap_bb == then_bb)
{
- rtx lab, newjump;
+ rtx lab;
+ rtx_insn *newjump;
lab = JUMP_LABEL (jump);
newjump = emit_jump_insn_after (gen_jump (lab), jump);
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
return it. */
-static rtx
+static rtx_insn *
block_has_only_trap (basic_block bb)
{
- rtx trap;
+ rtx_insn *trap;
/* We're not the exit block. */
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
- return NULL_RTX;
+ return NULL;
/* The block must have no successors. */
if (EDGE_COUNT (bb->succs) > 0)
- return NULL_RTX;
+ return NULL;
/* The only instruction in the THEN block must be the trap. */
trap = first_active_insn (bb);
if (! (trap == BB_END (bb)
&& GET_CODE (PATTERN (trap)) == TRAP_IF
&& TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
- return NULL_RTX;
+ return NULL;
return trap;
}
if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
{
- rtx jump = BB_END (else_edge->src);
+ rtx_insn *jump = BB_END (else_edge->src);
gcc_assert (JUMP_P (jump));
else_target = JUMP_LABEL (jump);
}
basic_block other_bb, edge dest_edge, int reversep)
{
basic_block new_dest = dest_edge->dest;
- rtx head, end, jump, earliest = NULL_RTX, old_dest;
+ rtx_insn *head, *end, *jump;
+ rtx earliest = NULL_RTX, old_dest;
bitmap merge_set = NULL;
/* Number of pending changes. */
int n_validated_changes = 0;
{
if (head == end)
{
- head = end = NULL_RTX;
+ head = end = NULL;
goto no_body;
}
head = NEXT_INSN (head);
return FALSE;
if (head == end)
{
- head = end = NULL_RTX;
+ head = end = NULL;
goto no_body;
}
end = PREV_INSN (end);
can lead to one of the paths of the branch having wrong unwind info. */
if (epilogue_completed)
{
- rtx insn = head;
+ rtx_insn *insn = head;
while (1)
{
if (INSN_P (insn) && RTX_FRAME_RELATED_P (insn))
/* Try the NCE path if the CE path did not result in any changes. */
if (n_validated_changes == 0)
{
- rtx cond, insn;
+ rtx cond;
+ rtx_insn *insn;
regset live;
bool success;
/* Move the insns out of MERGE_BB to before the branch. */
if (head != NULL)
{
- rtx insn;
+ rtx_insn *insn;
if (end == BB_END (merge_bb))
SET_BB_END (merge_bb) = PREV_INSN (head);