/* Optimize jump instructions, for GNU compiler.
- Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
- 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010,
- 2011 Free Software Foundation, Inc.
+ Copyright (C) 1987-2021 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
#include "rtl.h"
+#include "tree.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
+#include "memmodel.h"
#include "tm_p.h"
-#include "flags.h"
-#include "hard-reg-set.h"
-#include "regs.h"
#include "insn-config.h"
-#include "insn-attr.h"
+#include "regs.h"
+#include "emit-rtl.h"
#include "recog.h"
-#include "function.h"
-#include "basic-block.h"
-#include "expr.h"
-#include "except.h"
-#include "diagnostic-core.h"
-#include "reload.h"
-#include "predict.h"
-#include "timevar.h"
-#include "tree-pass.h"
-#include "target.h"
+#include "cfgrtl.h"
+#include "rtl-iter.h"
/* Optimize jump y; x: ... y: jumpif... x?
Don't know if it is worth bothering with. */
or even change what is live at any point.
So perhaps let combiner do it. */
-static void init_label_info (rtx);
-static void mark_all_labels (rtx);
-static void mark_jump_label_1 (rtx, rtx, bool, bool);
-static void mark_jump_label_asm (rtx, rtx);
-static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
-static int invert_exp_1 (rtx, rtx);
-static int returnjump_p_1 (rtx *, void *);
+static void init_label_info (rtx_insn *);
+static void mark_all_labels (rtx_insn *);
+static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
+static void mark_jump_label_asm (rtx, rtx_insn *);
+static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *);
+static int invert_exp_1 (rtx, rtx_insn *);
\f
/* Worker for rebuild_jump_labels and rebuild_jump_labels_chain. */
static void
-rebuild_jump_labels_1 (rtx f, bool count_forced)
+rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
{
- rtx insn;
-
timevar_push (TV_REBUILD_JUMP);
init_label_info (f);
mark_all_labels (f);
count doesn't drop to zero. */
if (count_forced)
- for (insn = forced_labels; insn; insn = XEXP (insn, 1))
- if (LABEL_P (XEXP (insn, 0)))
- LABEL_NUSES (XEXP (insn, 0))++;
+ {
+ rtx_insn *insn;
+ unsigned int i;
+ FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
+ if (LABEL_P (insn))
+ LABEL_NUSES (insn)++;
+ }
timevar_pop (TV_REBUILD_JUMP);
}
instructions and jumping insns that have labels as operands
(e.g. cbranchsi4). */
void
-rebuild_jump_labels (rtx f)
+rebuild_jump_labels (rtx_insn *f)
{
rebuild_jump_labels_1 (f, true);
}
forced_labels. It can be used on insn chains that aren't the
main function chain. */
void
-rebuild_jump_labels_chain (rtx chain)
+rebuild_jump_labels_chain (rtx_insn *chain)
{
rebuild_jump_labels_1 (chain, false);
}
This simple pass moves barriers and removes duplicates so that the
old code is happy.
*/
-unsigned int
+static unsigned int
cleanup_barriers (void)
{
- rtx insn, next, prev;
- for (insn = get_insns (); insn; insn = next)
+ rtx_insn *insn;
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- next = NEXT_INSN (insn);
if (BARRIER_P (insn))
{
- prev = prev_nonnote_insn (insn);
+ rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
if (!prev)
continue;
+
if (BARRIER_P (prev))
delete_insn (insn);
else if (prev != PREV_INSN (insn))
- reorder_insns (insn, insn, prev);
+ {
+ basic_block bb = BLOCK_FOR_INSN (prev);
+ rtx_insn *end = PREV_INSN (insn);
+ reorder_insns_nobb (insn, insn, prev);
+ if (bb)
+ {
+ /* If the backend called in machine reorg compute_bb_for_insn
+ and didn't free_bb_for_insn again, preserve basic block
+ boundaries. Move the end of basic block to PREV since
+ it is followed by a barrier now, and clear BLOCK_FOR_INSN
+ on the following notes.
+ ??? Maybe the proper solution for the targets that have
+ cfg around after machine reorg is not to run cleanup_barriers
+ pass at all. */
+ BB_END (bb) = prev;
+ do
+ {
+ prev = NEXT_INSN (prev);
+ if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
+ BLOCK_FOR_INSN (prev) = NULL;
+ }
+ while (prev != end);
+ }
+ }
}
}
return 0;
}
-struct rtl_opt_pass pass_cleanup_barriers =
+namespace {
+
+const pass_data pass_data_cleanup_barriers =
{
- {
- RTL_PASS,
- "barriers", /* name */
- NULL, /* gate */
- cleanup_barriers, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_NONE, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0 /* todo_flags_finish */
- }
+ RTL_PASS, /* type */
+ "barriers", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_NONE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
};
+class pass_cleanup_barriers : public rtl_opt_pass
+{
+public:
+ pass_cleanup_barriers (gcc::context *ctxt)
+ : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ virtual unsigned int execute (function *) { return cleanup_barriers (); }
+
+}; // class pass_cleanup_barriers
+
+} // anon namespace
+
+rtl_opt_pass *
+make_pass_cleanup_barriers (gcc::context *ctxt)
+{
+ return new pass_cleanup_barriers (ctxt);
+}
+
\f
/* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
for remaining targets for JUMP_P. Delete any REG_LABEL_OPERAND
notes whose labels don't occur in the insn any more. */
static void
-init_label_info (rtx f)
+init_label_info (rtx_insn *f)
{
- rtx insn;
+ rtx_insn *insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
load into a jump_insn that uses it. */
static void
-maybe_propagate_label_ref (rtx jump_insn, rtx prev_nonjump_insn)
+maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
{
rtx label_note, pc, pc_src;
CODE_LABEL in the LABEL_REF of the "set". We can
conveniently use it for the marker function, which
requires a LABEL_REF wrapping. */
- gcc_assert (XEXP (label_note, 0) == XEXP (SET_SRC (label_set), 0));
+ gcc_assert (XEXP (label_note, 0) == label_ref_label (SET_SRC (label_set)));
mark_jump_label_1 (label_set, jump_insn, false, true);
Combine consecutive labels, and count uses of labels. */
static void
-mark_all_labels (rtx f)
+mark_all_labels (rtx_insn *f)
{
- rtx insn;
+ rtx_insn *insn;
if (current_ir_type () == IR_RTL_CFGLAYOUT)
{
basic_block bb;
- FOR_EACH_BB (bb)
+ FOR_EACH_BB_FN (bb, cfun)
{
/* In cfglayout mode, we don't bother with trivial next-insn
propagation of LABEL_REFs into JUMP_LABEL. This will be
handled by other optimizers using better algorithms. */
FOR_BB_INSNS (bb, insn)
{
- gcc_assert (! INSN_DELETED_P (insn));
+ gcc_assert (! insn->deleted ());
if (NONDEBUG_INSN_P (insn))
mark_jump_label (PATTERN (insn), insn, 0);
}
/* In cfglayout mode, there may be non-insns between the
basic blocks. If those non-insns represent tablejump data,
they contain label references that we must record. */
- for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
- if (INSN_P (insn))
- {
- gcc_assert (JUMP_TABLE_DATA_P (insn));
- mark_jump_label (PATTERN (insn), insn, 0);
- }
- for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
- if (INSN_P (insn))
- {
- gcc_assert (JUMP_TABLE_DATA_P (insn));
- mark_jump_label (PATTERN (insn), insn, 0);
- }
+ for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
+ if (JUMP_TABLE_DATA_P (insn))
+ mark_jump_label (PATTERN (insn), insn, 0);
+ for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
+ if (JUMP_TABLE_DATA_P (insn))
+ mark_jump_label (PATTERN (insn), insn, 0);
}
}
else
{
- rtx prev_nonjump_insn = NULL;
+ rtx_insn *prev_nonjump_insn = NULL;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
- if (INSN_DELETED_P (insn))
+ if (insn->deleted ())
;
else if (LABEL_P (insn))
prev_nonjump_insn = NULL;
+ else if (JUMP_TABLE_DATA_P (insn))
+ mark_jump_label (PATTERN (insn), insn, 0);
else if (NONDEBUG_INSN_P (insn))
{
mark_jump_label (PATTERN (insn), insn, 0);
to help this function avoid overhead in these cases. */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
- const_rtx arg1, const_rtx insn)
+ const_rtx arg1, const rtx_insn *insn)
{
- enum machine_mode mode;
+ machine_mode mode;
/* If this is not actually a comparison, we can't reverse it. */
if (GET_RTX_CLASS (code) != RTX_COMPARE
machine description to do tricks. */
if (GET_MODE_CLASS (mode) == MODE_CC
&& REVERSIBLE_CC_MODE (mode))
- {
-#ifdef REVERSE_CONDITION
- return REVERSE_CONDITION (code, mode);
-#else
- return reverse_condition (code);
-#endif
- }
+ return REVERSE_CONDITION (code, mode);
/* Try a few special cases based on the comparison code. */
switch (code)
break;
}
- if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
+ if (GET_MODE_CLASS (mode) == MODE_CC)
{
- const_rtx prev;
/* Try to search for the comparison to determine the real mode.
This code is expensive, but with sane machine description it
will be never used, since REVERSIBLE_CC_MODE will return true
-	 /* These CONST_CAST's are okay because prev_nonnote_insn just
-	    returns its argument and we assign it to a const_rtx
-	    variable.  */
+	 /* The const_cast is okay: prev_nonnote_insn does not modify its
+	    argument, and we only walk backwards from INSN without writing
+	    through the resulting pointer.  */
- for (prev = prev_nonnote_insn (CONST_CAST_RTX(insn));
+ for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
prev != 0 && !LABEL_P (prev);
- prev = prev_nonnote_insn (CONST_CAST_RTX(prev)))
+ prev = prev_nonnote_insn (prev))
{
const_rtx set = set_of (arg0, prev);
if (set && GET_CODE (set) == SET
/* A wrapper around the previous function to take COMPARISON as rtx
expression. This simplifies many callers. */
enum rtx_code
-reversed_comparison_code (const_rtx comparison, const_rtx insn)
+reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
{
if (!COMPARISON_P (comparison))
return UNKNOWN;
/* Return comparison with reversed code of EXP.
Return NULL_RTX in case we fail to do the reversal. */
rtx
-reversed_comparison (const_rtx exp, enum machine_mode mode)
+reversed_comparison (const_rtx exp, machine_mode mode)
{
- enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
+ enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
if (reversed_code == UNKNOWN)
return NULL_RTX;
else
/* Return 1 if INSN is an unconditional jump and nothing else. */
int
-simplejump_p (const_rtx insn)
+simplejump_p (const rtx_insn *insn)
{
return (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_p (const_rtx insn)
+condjump_p (const rtx_insn *insn)
{
const_rtx x = PATTERN (insn);
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_in_parallel_p (const_rtx insn)
+condjump_in_parallel_p (const rtx_insn *insn)
{
const_rtx x = PATTERN (insn);
/* Return set of PC, otherwise NULL. */
rtx
-pc_set (const_rtx insn)
+pc_set (const rtx_insn *insn)
{
rtx pat;
if (!JUMP_P (insn))
pat = PATTERN (insn);
/* The set is allowed to appear either as the insn pattern or
- the first set in a PARALLEL. */
- if (GET_CODE (pat) == PARALLEL)
- pat = XVECEXP (pat, 0, 0);
+ the first set in a PARALLEL, UNSPEC or UNSPEC_VOLATILE. */
+ switch (GET_CODE (pat))
+ {
+ case PARALLEL:
+ case UNSPEC:
+ case UNSPEC_VOLATILE:
+ pat = XVECEXP (pat, 0, 0);
+ break;
+ default:
+ break;
+ }
if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
return pat;
}
/* Return true when insn is an unconditional direct jump,
- possibly bundled inside a PARALLEL. */
+ possibly bundled inside a PARALLEL, UNSPEC or UNSPEC_VOLATILE.
+ The instruction may have various other effects so before removing the jump
+ you must verify onlyjump_p. */
int
-any_uncondjump_p (const_rtx insn)
+any_uncondjump_p (const rtx_insn *insn)
{
const_rtx x = pc_set (insn);
if (!x)
}
/* Return true when insn is a conditional jump. This function works for
- instructions containing PC sets in PARALLELs. The instruction may have
- various other effects so before removing the jump you must verify
- onlyjump_p.
+ instructions containing PC sets in PARALLELs, UNSPECs or UNSPEC_VOLATILEs.
+ The instruction may have various other effects so before removing the jump
+ you must verify onlyjump_p.
Note that unlike condjump_p it returns false for unconditional jumps. */
int
-any_condjump_p (const_rtx insn)
+any_condjump_p (const rtx_insn *insn)
{
const_rtx x = pc_set (insn);
enum rtx_code a, b;
/* Return the label of a conditional jump. */
rtx
-condjump_label (const_rtx insn)
+condjump_label (const rtx_insn *insn)
{
rtx x = pc_set (insn);
return NULL_RTX;
}
-/* Return true if INSN is a (possibly conditional) return insn. */
-
-static int
-returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
-{
- rtx x = *loc;
-
- if (x == NULL)
- return false;
-
- switch (GET_CODE (x))
- {
- case RETURN:
- case SIMPLE_RETURN:
- case EH_RETURN:
- return true;
-
- case SET:
- return SET_IS_RETURN_P (x);
-
- default:
- return false;
- }
-}
-
/* Return TRUE if INSN is a return jump. */
int
-returnjump_p (rtx insn)
+returnjump_p (const rtx_insn *insn)
{
- if (!JUMP_P (insn))
- return 0;
- return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
+ if (JUMP_P (insn))
+ {
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ {
+ const_rtx x = *iter;
+ switch (GET_CODE (x))
+ {
+ case RETURN:
+ case SIMPLE_RETURN:
+ case EH_RETURN:
+ return true;
+
+ case SET:
+ if (SET_IS_RETURN_P (x))
+ return true;
+ break;
+
+ default:
+ break;
+ }
+ }
+ }
+ return false;
}
/* Return true if INSN is a (possibly conditional) return insn. */
-static int
-eh_returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
-{
- return *loc && GET_CODE (*loc) == EH_RETURN;
-}
-
int
-eh_returnjump_p (rtx insn)
+eh_returnjump_p (rtx_insn *insn)
{
- if (!JUMP_P (insn))
- return 0;
- return for_each_rtx (&PATTERN (insn), eh_returnjump_p_1, NULL);
+ if (JUMP_P (insn))
+ {
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ if (GET_CODE (*iter) == EH_RETURN)
+ return true;
+ }
+ return false;
}
/* Return true if INSN is a jump that only transfers control and
nothing more. */
int
-onlyjump_p (const_rtx insn)
+onlyjump_p (const rtx_insn *insn)
{
rtx set;
/* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
NULL or a return. */
bool
-jump_to_label_p (rtx insn)
+jump_to_label_p (const rtx_insn *insn)
{
return (JUMP_P (insn)
&& JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
}
-
-#ifdef HAVE_cc0
-
-/* Return nonzero if X is an RTX that only sets the condition codes
- and has no side effects. */
-
-int
-only_sets_cc0_p (const_rtx x)
-{
- if (! x)
- return 0;
-
- if (INSN_P (x))
- x = PATTERN (x);
-
- return sets_cc0_p (x) == 1 && ! side_effects_p (x);
-}
-
-/* Return 1 if X is an RTX that does nothing but set the condition codes
- and CLOBBER or USE registers.
- Return -1 if X does explicitly set the condition codes,
- but also does other things. */
-
-int
-sets_cc0_p (const_rtx x)
-{
- if (! x)
- return 0;
-
- if (INSN_P (x))
- x = PATTERN (x);
-
- if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
- return 1;
- if (GET_CODE (x) == PARALLEL)
- {
- int i;
- int sets_cc0 = 0;
- int other_things = 0;
- for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
- {
- if (GET_CODE (XVECEXP (x, 0, i)) == SET
- && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
- sets_cc0 = 1;
- else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
- other_things = 1;
- }
- return ! sets_cc0 ? 0 : other_things ? -1 : 1;
- }
- return 0;
-}
-#endif
\f
/* Find all CODE_LABELs referred to in X, and increment their use
counts. If INSN is a JUMP_INSN and there is at least one
that loop-optimization is done with. */
void
-mark_jump_label (rtx x, rtx insn, int in_mem)
+mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
{
rtx asmop = extract_asm_operands (x);
if (asmop)
note. */
static void
-mark_jump_label_1 (rtx x, rtx insn, bool in_mem, bool is_target)
+mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
{
RTX_CODE code = GET_CODE (x);
int i;
switch (code)
{
case PC:
- case CC0:
case REG:
- case CONST_INT:
- case CONST_DOUBLE:
case CLOBBER:
case CALL:
return;
break;
case SEQUENCE:
- for (i = 0; i < XVECLEN (x, 0); i++)
- mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
- XVECEXP (x, 0, i), 0);
+ {
+ rtx_sequence *seq = as_a <rtx_sequence *> (x);
+ for (i = 0; i < seq->len (); i++)
+ mark_jump_label (PATTERN (seq->insn (i)),
+ seq->insn (i), 0);
+ }
return;
case SYMBOL_REF:
case LABEL_REF:
{
- rtx label = XEXP (x, 0);
+ rtx_insn *label = label_ref_label (x);
/* Ignore remaining references to unreachable labels that
have been deleted. */
if (LABEL_REF_NONLOCAL_P (x))
break;
- XEXP (x, 0) = label;
- if (! insn || ! INSN_DELETED_P (insn))
+ set_label_ref_label (x, label);
+ if (! insn || ! insn->deleted ())
++LABEL_NUSES (label);
if (insn)
return;
}
- /* Do walk the labels in a vector, but not the first operand of an
- ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
+ /* Do walk the labels in a vector, but not the first operand of an
+ ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
case ADDR_VEC:
case ADDR_DIFF_VEC:
- if (! INSN_DELETED_P (insn))
+ if (! insn->deleted ())
{
int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
for (i = 0; i < XVECLEN (x, eltnum); i++)
- mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL_RTX, in_mem,
+ mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
is_target);
}
return;
need to be considered targets. */
static void
-mark_jump_label_asm (rtx asmop, rtx insn)
+mark_jump_label_asm (rtx asmop, rtx_insn *insn)
{
int i;
Usage of this instruction is deprecated. Use delete_insn instead and
subsequent cfg_cleanup pass to delete unreachable code if needed. */
-rtx
-delete_related_insns (rtx insn)
+rtx_insn *
+delete_related_insns (rtx uncast_insn)
{
+ rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
int was_code_label = (LABEL_P (insn));
rtx note;
- rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
+ rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);
- while (next && INSN_DELETED_P (next))
+ while (next && next->deleted ())
next = NEXT_INSN (next);
/* This insn is already deleted => return first following nondeleted. */
- if (INSN_DELETED_P (insn))
+ if (insn->deleted ())
return next;
delete_insn (insn);
if (jump_to_label_p (insn))
{
- rtx lab = JUMP_LABEL (insn), lab_next;
+ rtx lab = JUMP_LABEL (insn);
+ rtx_jump_table_data *lab_next;
if (LABEL_NUSES (lab) == 0)
/* This can delete NEXT or PREV,
/* Likewise if we're deleting a dispatch table. */
- if (JUMP_TABLE_DATA_P (insn))
+ if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
{
- rtx pat = PATTERN (insn);
- int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
- int len = XVECLEN (pat, diff_vec_p);
+ rtvec labels = table->get_labels ();
+ int i;
+ int len = GET_NUM_ELEM (labels);
for (i = 0; i < len; i++)
- if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
- delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
- while (next && INSN_DELETED_P (next))
+ if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
+ delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
+ while (next && next->deleted ())
next = NEXT_INSN (next);
return next;
}
if (LABEL_NUSES (XEXP (note, 0)) == 0)
delete_related_insns (XEXP (note, 0));
- while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
+ while (prev && (prev->deleted () || NOTE_P (prev)))
prev = PREV_INSN (prev);
/* If INSN was a label and a dispatch table follows it,
if (code == NOTE)
next = NEXT_INSN (next);
/* Keep going past other deleted labels to delete what follows. */
- else if (code == CODE_LABEL && INSN_DELETED_P (next))
+ else if (code == CODE_LABEL && next->deleted ())
+ next = NEXT_INSN (next);
+ /* Keep the (use (insn))s created by dbr_schedule, which needs
+ them in order to track liveness relative to a previous
+ barrier. */
+ else if (INSN_P (next)
+ && GET_CODE (PATTERN (next)) == USE
+ && INSN_P (XEXP (PATTERN (next), 0)))
next = NEXT_INSN (next);
else if (code == BARRIER || INSN_P (next))
/* Note: if this deletes a jump, it can cause more
but I see no clean and sure alternative way
to find the first insn after INSN that is not now deleted.
I hope this works. */
- while (next && INSN_DELETED_P (next))
+ while (next && next->deleted ())
next = NEXT_INSN (next);
return next;
}
peephole insn that will replace them. */
void
-delete_for_peephole (rtx from, rtx to)
+delete_for_peephole (rtx_insn *from, rtx_insn *to)
{
- rtx insn = from;
+ rtx_insn *insn = from;
while (1)
{
- rtx next = NEXT_INSN (insn);
- rtx prev = PREV_INSN (insn);
+ rtx_insn *next = NEXT_INSN (insn);
+ rtx_insn *prev = PREV_INSN (insn);
if (!NOTE_P (insn))
{
- INSN_DELETED_P (insn) = 1;
+ insn->set_deleted();
/* Patch this insn out of the chain. */
/* We don't do this all at once, because we
must preserve all NOTEs. */
if (prev)
- NEXT_INSN (prev) = next;
+ SET_NEXT_INSN (prev) = next;
if (next)
- PREV_INSN (next) = prev;
+ SET_PREV_INSN (next) = prev;
}
if (insn == to)
NLABEL as a return. Accrue modifications into the change group. */
static void
-redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
+redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx_insn *insn)
{
rtx x = *loc;
RTX_CODE code = GET_CODE (x);
int i;
const char *fmt;
- if ((code == LABEL_REF && XEXP (x, 0) == olabel)
+ if ((code == LABEL_REF && label_ref_label (x) == olabel)
|| x == olabel)
{
x = redirect_target (nlabel);
if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
- x = gen_rtx_SET (VOIDmode, pc_rtx, x);
+ x = gen_rtx_SET (pc_rtx, x);
validate_change (insn, loc, x, 1);
return;
}
if (code == SET && SET_DEST (x) == pc_rtx
&& ANY_RETURN_P (nlabel)
&& GET_CODE (SET_SRC (x)) == LABEL_REF
- && XEXP (SET_SRC (x), 0) == olabel)
+ && label_ref_label (SET_SRC (x)) == olabel)
{
validate_change (insn, loc, nlabel, 1);
return;
not see how to do that. */
int
-redirect_jump_1 (rtx jump, rtx nlabel)
+redirect_jump_1 (rtx_insn *jump, rtx nlabel)
{
int ochanges = num_validated_changes ();
rtx *loc, asmop;
(this can only occur when trying to produce return insns). */
int
-redirect_jump (rtx jump, rtx nlabel, int delete_unused)
+redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
- rtx olabel = JUMP_LABEL (jump);
+ rtx olabel = jump->jump_label ();
if (!nlabel)
{
If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
count has dropped to zero. */
void
-redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
+redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
int invert)
{
rtx note;
}
}
+ /* Handle the case where we had a conditional crossing jump to a return
+ label and are now changing it into a direct conditional return.
+ The jump is no longer crossing in that case. */
+ if (ANY_RETURN_P (nlabel))
+ CROSSING_JUMP_P (jump) = 0;
+
if (!ANY_RETURN_P (olabel)
&& --LABEL_NUSES (olabel) == 0 && delete_unused > 0
/* Undefined labels will remain outside the insn stream. */
/* Invert the jump condition X contained in jump insn INSN. Accrue the
modifications into the change group. Return nonzero for success. */
static int
-invert_exp_1 (rtx x, rtx insn)
+invert_exp_1 (rtx x, rtx_insn *insn)
{
RTX_CODE code = GET_CODE (x);
inversion and redirection. */
int
-invert_jump_1 (rtx jump, rtx nlabel)
+invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
{
rtx x = pc_set (jump);
int ochanges;
NLABEL instead of where it jumps now. Return true if successful. */
int
-invert_jump (rtx jump, rtx nlabel, int delete_unused)
+invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
rtx olabel = JUMP_LABEL (jump);
&& REG_P (SUBREG_REG (y)))))
{
int reg_x = -1, reg_y = -1;
- int byte_x = 0, byte_y = 0;
+ poly_int64 byte_x = 0, byte_y = 0;
struct subreg_info info;
if (GET_MODE (x) != GET_MODE (y))
reg_y = reg_renumber[reg_y];
}
- return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
+ return reg_x >= 0 && reg_x == reg_y && known_eq (byte_x, byte_y);
}
/* Now we have disposed of all the cases
switch (code)
{
case PC:
- case CC0:
case ADDR_VEC:
case ADDR_DIFF_VEC:
- case CONST_INT:
- case CONST_DOUBLE:
+ CASE_CONST_UNIQUE:
return 0;
+ case CONST_VECTOR:
+ if (!same_vector_encodings_p (x, y))
+ return false;
+ break;
+
case LABEL_REF:
/* We can't assume nonlocal labels have their following insns yet. */
if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
- return XEXP (x, 0) == XEXP (y, 0);
+ return label_ref_label (x) == label_ref_label (y);
/* Two label-refs are equivalent if they point at labels
in the same position in the instruction stream. */
- return (next_real_insn (XEXP (x, 0))
- == next_real_insn (XEXP (y, 0)));
+ else
+ {
+ rtx_insn *xi = next_nonnote_nondebug_insn (label_ref_label (x));
+ rtx_insn *yi = next_nonnote_nondebug_insn (label_ref_label (y));
+ while (xi && LABEL_P (xi))
+ xi = next_nonnote_nondebug_insn (xi);
+ while (yi && LABEL_P (yi))
+ yi = next_nonnote_nondebug_insn (yi);
+ return xi == yi;
+ }
case SYMBOL_REF:
return XSTR (x, 0) == XSTR (y, 0);
if (GET_MODE (x) != GET_MODE (y))
return 0;
- /* MEMs refering to different address space are not equivalent. */
+ /* MEMs referring to different address space are not equivalent. */
if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
return 0;
if (XINT (x, i) != XINT (y, i))
{
if (((code == ASM_OPERANDS && i == 6)
- || (code == ASM_INPUT && i == 1))
- && locator_eq (XINT (x, i), XINT (y, i)))
+ || (code == ASM_INPUT && i == 1)))
break;
return 0;
}
break;
+ case 'p':
+ if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
+ return 0;
+ break;
+
case 't':
if (XTREE (x, i) != XTREE (y, i))
return 0;
{
if (REG_P (x))
{
- if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
+ if (REGNO (x) >= FIRST_PSEUDO_REGISTER
+ && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
return reg_renumber[REGNO (x)];
return REGNO (x);
}
{
struct subreg_info info;
- subreg_get_info (REGNO (SUBREG_REG (x)),
+ subreg_get_info (lra_in_progress
+ ? (unsigned) base : REGNO (SUBREG_REG (x)),
GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x), GET_MODE (x), &info);