static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
-static rtx gen_const_vector_0 (enum machine_mode);
+static rtx gen_const_vector (enum machine_mode, int);
static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
static void copy_rtx_if_shared_1 (rtx *orig);
if (mode != VOIDmode)
{
int width;
- if (GET_MODE_CLASS (mode) != MODE_INT
- && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
- /* We can get a 0 for an error mark. */
- && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
- && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
- abort ();
+
+ gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
+ || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
+ /* We can get a 0 for an error mark. */
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
/* We clear out all bits that don't belong in MODE, unless they and
our sign bit are all one. So we get either a reasonable negative
else if (width == HOST_BITS_PER_WIDE_INT
&& ! (i1 == ~0 && i0 < 0))
i1 = 0;
- else if (width > 2 * HOST_BITS_PER_WIDE_INT)
- /* We cannot represent this value as a constant. */
- abort ();
+ else
+ /* We should be able to represent this value as a constant. */
+ gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
/* If this would be an entire word for the target, but is not for
the host, then sign-extend on the host so that the number will
return rt;
}
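The conversion rule used throughout this patch: `if (COND) abort ();'
becomes `gcc_assert (!COND);', with De Morgan applied to compound
conditions, and unreachable code becomes `gcc_unreachable ()'.  For
context, a sketch of the macros, paraphrased from the system.h of this
era (exact spelling may differ):

    #if ENABLE_ASSERT_CHECKING
    #define gcc_assert(EXPR)						\
      ((void)(!(EXPR) ? fancy_abort (__FILE__, __LINE__, __FUNCTION__), 0 : 0))
    #else
    #define gcc_assert(EXPR) ((void)(0 && (EXPR)))  /* checking disabled */
    #endif
    #define gcc_unreachable() (fancy_abort (__FILE__, __LINE__, __FUNCTION__))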
+/* Generate a MEM referring to non-trapping constant memory.  */
+
+rtx
+gen_const_mem (enum machine_mode mode, rtx addr)
+{
+ rtx mem = gen_rtx_MEM (mode, addr);
+ MEM_READONLY_P (mem) = 1;
+ MEM_NOTRAP_P (mem) = 1;
+ return mem;
+}
+
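Illustrative use of the new function (a hypothetical caller, not part
of this patch; ADDR is assumed to be a valid address rtx):

    rtx mem = gen_const_mem (SImode, addr);
    /* Both MEM_READONLY_P (mem) and MEM_NOTRAP_P (mem) are now set,
       so alias and trap analysis treat the reference accordingly.  */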
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
/* This is the most common failure type.
Catch it early so we can see who does it. */
- if ((offset % GET_MODE_SIZE (mode)) != 0)
- abort ();
+ gcc_assert (!(offset % GET_MODE_SIZE (mode)));
/* This check isn't usable right now because combine will
throw arbitrary crap like a CALL into a SUBREG in
gen_lowpart_for_combine so we must just eat it. */
#if 0
/* Check for this too. */
- if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
- abort ();
+ gcc_assert (offset < GET_MODE_SIZE (GET_MODE (reg)));
#endif
return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Don't let anything called after initial flow analysis create new
registers. */
- if (no_new_pseudos)
- abort ();
+ gcc_assert (!no_new_pseudos);
if (generating_concat_p
&& (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
REG_USERVAR_P (XEXP (reg, 0)) = 1;
REG_USERVAR_P (XEXP (reg, 1)) = 1;
}
- else if (REG_P (reg))
- REG_USERVAR_P (reg) = 1;
else
- abort ();
+ {
+ gcc_assert (REG_P (reg));
+ REG_USERVAR_P (reg) = 1;
+ }
}
/* Identify REG as a probable pointer register and show its alignment
/* This is where we attempt to catch illegal subregs
created by the compiler. */
- if (GET_CODE (x) != SUBREG
- || !REG_P (reg))
- abort ();
+ gcc_assert (GET_CODE (x) == SUBREG && REG_P (reg));
base_regno = REGNO (reg);
- if (base_regno >= FIRST_PSEUDO_REGISTER)
- abort ();
- if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
- abort ();
+ gcc_assert (base_regno < FIRST_PSEUDO_REGISTER);
+ gcc_assert (!check_mode || HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)));
#ifdef ENABLE_CHECKING
- if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
- SUBREG_BYTE (x), mode))
- abort ();
+ gcc_assert (subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
+ SUBREG_BYTE (x), mode));
#endif
/* Catch non-congruent offsets too. */
byte_offset = SUBREG_BYTE (x);
- if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
- abort ();
+ gcc_assert (!(byte_offset % GET_MODE_SIZE (mode)));
final_regno = subreg_regno (x);
xsize = GET_MODE_SIZE (innermode);
- if (innermode == VOIDmode || innermode == BLKmode)
- abort ();
+ gcc_assert (innermode != VOIDmode && innermode != BLKmode);
if (innermode == mode)
return x;
/* This case loses if X is a subreg. To catch bugs early,
complain if an invalid MODE is used even in other cases. */
- if (msize > UNITS_PER_WORD
- && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
- abort ();
+ gcc_assert (msize <= UNITS_PER_WORD
+ || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
result = simplify_gen_subreg (mode, x, GET_MODE (x),
subreg_highpart_offset (mode, GET_MODE (x)));
-
+ gcc_assert (result);
+
/* simplify_gen_subreg is not guaranteed to return a valid operand for
the target if we have a MEM. gen_highpart must return a valid operand,
emitting code if necessary to do so. */
- if (result != NULL_RTX && MEM_P (result))
- result = validize_mem (result);
-
- if (!result)
- abort ();
+ if (MEM_P (result))
+ {
+ result = validize_mem (result);
+ gcc_assert (result);
+ }
+
return result;
}
{
if (GET_MODE (exp) != VOIDmode)
{
- if (GET_MODE (exp) != innermode)
- abort ();
+ gcc_assert (GET_MODE (exp) == innermode);
return gen_highpart (outermode, exp);
}
return simplify_gen_subreg (outermode, exp, innermode,
unsigned int offset = 0;
int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
- if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
- abort ();
+ gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
if (difference > 0)
{
if (mode == VOIDmode)
mode = GET_MODE (op);
- if (mode == VOIDmode)
- abort ();
+ gcc_assert (mode != VOIDmode);
/* If OP is narrower than a word, fail. */
if (mode != BLKmode
}
result = operand_subword (op, offset, 1, mode);
- if (result == 0)
- abort ();
+ gcc_assert (result);
return result;
}
if (inner == TREE_OPERAND (ref, 0))
return ref;
else
- return build (COMPONENT_REF, TREE_TYPE (ref), inner, TREE_OPERAND (ref, 1),
- NULL_TREE);
+ return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
+ TREE_OPERAND (ref, 1), NULL_TREE);
}
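The varargs `build' is being replaced by the fixed-arity `buildN'
constructors; an illustrative pairing (TYPE, A, B, OBJ and FIELD are
assumed trees, not taken from this patch):

    tree diff = build2 (MINUS_EXPR, type, a, b);
    tree comp = build3 (COMPONENT_REF, type, obj, field, NULL_TREE);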
/* Returns 1 if both MEM_EXPR can be considered equal
if (TREE_CODE (expr1) == INDIRECT_REF)
return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
TREE_OPERAND (expr2, 0));
-
- /* Decls with different pointers can't be equal. */
- if (DECL_P (expr1))
- return 0;
- abort(); /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
+ /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
have been resolved here. */
+ gcc_assert (DECL_P (expr1));
+
+ /* Decls with different pointers can't be equal. */
+ return 0;
}
/* Given REF, a MEM, and T, either the type of X or the expression
wrong answer, as it assumes that DECL_RTL already has the right alias
info. Callers should not set DECL_RTL until after the call to
set_mem_attributes. */
- if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
- abort ();
+ gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
/* Get the alias set from the expression or type (perhaps using a
front-end routine) and use it. */
MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
- RTX_UNCHANGING_P (ref)
- |= ((lang_hooks.honor_readonly
- && (TYPE_READONLY (type) || (t != type && TREE_READONLY (t))))
- || (! TYPE_P (t) && TREE_CONSTANT (t)));
MEM_POINTER (ref) = POINTER_TYPE_P (type);
MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);
the expression. */
if (! TYPE_P (t))
{
- maybe_set_unchanging (ref, t);
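+      /* This replaces the old maybe_set_unchanging/RTX_UNCHANGING_P
+	 marking: flag the MEM read-only only when its base object is a
+	 readonly decl in static or external storage, i.e. memory that
+	 genuinely never changes.  */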
+ tree base = get_base_address (t);
+ if (base && DECL_P (base)
+ && TREE_READONLY (base)
+ && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
+ MEM_READONLY_P (ref) = 1;
+
if (TREE_THIS_VOLATILE (t))
MEM_VOLATILE_P (ref) = 1;
}
/* If this is a constant, we know the alignment. */
- else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
+ else if (CONSTANT_CLASS_P (t))
{
align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
index, then convert to sizetype and multiply by the size of
the array element. */
if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, TREE_TYPE (index),
- index, low_bound));
+ index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, low_bound));
off_tree = size_binop (PLUS_EXPR,
size_binop (MULT_EXPR, convert (sizetype,
{
#ifdef ENABLE_CHECKING
/* If the new and old alias sets don't conflict, something is wrong. */
- if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
- abort ();
+ gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif
MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
{
rtx new;
- if (!MEM_P (memref))
- abort ();
+ gcc_assert (MEM_P (memref));
if (mode == VOIDmode)
mode = GET_MODE (memref);
if (addr == 0)
if (validate)
{
if (reload_in_progress || reload_completed)
- {
- if (! memory_address_p (mode, addr))
- abort ();
- }
+ gcc_assert (memory_address_p (mode, addr));
else
addr = memory_address (mode, addr);
}
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
/* This rtx may not be shared. If it has already been seen,
replace it with a copy of itself. */
-
+#ifdef ENABLE_CHECKING
if (RTX_FLAG (x, used))
{
error ("Invalid rtl sharing found in the insn");
debug_rtx (insn);
error ("Shared rtx");
debug_rtx (x);
- abort ();
+ internal_error ("Internal consistency failure");
}
+#endif
+ gcc_assert (!RTX_FLAG (x, used));
+
RTX_FLAG (x, used) = 1;
/* Now scan the subexpressions recursively. */
for (j = 0; j < len; j++)
{
- /* We allow sharing of ASM_OPERANDS inside single instruction. */
+ /* We allow sharing of ASM_OPERANDS inside a single
+    instruction.  */
if (j && GET_CODE (XVECEXP (x, i, j)) == SET
- && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
+ && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
+ == ASM_OPERANDS))
verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
else
verify_rtx_sharing (XVECEXP (x, i, j), insn);
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
return copy;
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
void
set_first_insn (rtx insn)
{
- if (PREV_INSN (insn) != 0)
- abort ();
+ gcc_assert (!PREV_INSN (insn));
first_insn = insn;
}
void
set_last_insn (rtx insn)
{
- if (NEXT_INSN (insn) != 0)
- abort ();
+ gcc_assert (!NEXT_INSN (insn));
last_insn = insn;
}
while (insn)
{
insn = next_insn (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
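For reference, the predicate macros substituted throughout the rest of
this patch, paraphrased from rtl.h:

    #define NOTE_P(X)          (GET_CODE (X) == NOTE)
    #define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
    #define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
    #define BARRIER_P(X)       (GET_CODE (X) == BARRIER)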
while (insn)
{
insn = previous_insn (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
if (insn)
{
insn = NEXT_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
}
if (insn)
{
insn = PREV_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
}
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || GET_CODE (insn) != NOTE)
+ if (insn == 0 || !NOTE_P (insn))
break;
}
rtx insn;
for (insn = get_last_insn ();
- insn && GET_CODE (insn) != CALL_INSN;
+ insn && !CALL_P (insn);
insn = PREV_INSN (insn))
;
int
active_insn_p (rtx insn)
{
- return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
- || (GET_CODE (insn) == INSN
+ return (CALL_P (insn) || JUMP_P (insn)
+ || (NONJUMP_INSN_P (insn)
&& (! reload_completed
|| (GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER))));
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
+ if (insn == 0 || LABEL_P (insn))
break;
}
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
+ if (insn == 0 || LABEL_P (insn))
break;
}
{
rtx user = next_nonnote_insn (insn);
- if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
+ if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
user = XVECEXP (PATTERN (user), 0, 0);
REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
return XEXP (note, 0);
insn = next_nonnote_insn (insn);
- if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
return XEXP (note, 0);
insn = prev_nonnote_insn (insn);
- if (! sets_cc0_p (PATTERN (insn)))
- abort ();
+ gcc_assert (sets_cc0_p (PATTERN (insn)));
return insn;
}
/* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
We may need to handle this specially. */
- if (after && GET_CODE (after) == BARRIER)
+ if (after && BARRIER_P (after))
{
has_barrier = 1;
after = NEXT_INSN (after);
/* Mark labels. */
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
mark_jump_label (PATTERN (insn), insn, 0);
njumps++;
one jump is created, otherwise the machine description
is responsible for this step using
split_branch_probability variable. */
- if (njumps != 1)
- abort ();
+ gcc_assert (njumps == 1);
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB,
GEN_INT (probability),
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
- if (GET_CODE (trial) == CALL_INSN)
+ if (CALL_P (trial))
{
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
while (*p)
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == CALL_INSN
- || (flag_non_call_exceptions
+ if (CALL_P (insn)
+ || (flag_non_call_exceptions && INSN_P (insn)
&& may_trap_p (PATTERN (insn))))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_EH_REGION,
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
- if (GET_CODE (trial) == INSN)
+ if (NONJUMP_INSN_P (trial))
{
insn = insn_last;
while (insn != NULL_RTX)
{
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
mark_label_nuses (PATTERN (insn));
insn = PREV_INSN (insn);
rtx next = NEXT_INSN (after);
basic_block bb;
- if (optimize && INSN_DELETED_P (after))
- abort ();
+ gcc_assert (!optimize || !INSN_DELETED_P (after));
NEXT_INSN (insn) = next;
PREV_INSN (insn) = after;
if (next)
{
PREV_INSN (next) = insn;
- if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
+ if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
}
else if (last_insn == after)
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
- if (GET_CODE (after) != BARRIER
- && GET_CODE (insn) != BARRIER
+ if (!BARRIER_P (after)
+ && !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (after)))
{
set_block_for_insn (insn, bb);
either NOTE or LABEL. */
if (BB_END (bb) == after
/* Avoid clobbering of structure when creating new BB. */
- && GET_CODE (insn) != BARRIER
- && (GET_CODE (insn) != NOTE
+ && !BARRIER_P (insn)
+ && (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
BB_END (bb) = insn;
}
NEXT_INSN (after) = insn;
- if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
+ if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
{
rtx sequence = PATTERN (after);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
rtx prev = PREV_INSN (before);
basic_block bb;
- if (optimize && INSN_DELETED_P (before))
- abort ();
+ gcc_assert (!optimize || !INSN_DELETED_P (before));
PREV_INSN (insn) = prev;
NEXT_INSN (insn) = before;
if (prev)
{
NEXT_INSN (prev) = insn;
- if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
+ if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
- if (GET_CODE (before) != BARRIER
- && GET_CODE (insn) != BARRIER
+ if (!BARRIER_P (before)
+ && !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (before)))
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
bb->flags |= BB_DIRTY;
- /* Should not happen as first in the BB is always
- either NOTE or LABEl. */
- if (BB_HEAD (bb) == insn
- /* Avoid clobbering of structure when creating new BB. */
- && GET_CODE (insn) != BARRIER
- && (GET_CODE (insn) != NOTE
- || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
- abort ();
+ /* Should not happen, as the first insn in the BB is always either a
+    NOTE or a LABEL.  */
+ gcc_assert (BB_HEAD (bb) != insn
+ /* Avoid clobbering of structure when creating new BB. */
+ || BARRIER_P (insn)
+ || (NOTE_P (insn)
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
}
PREV_INSN (before) = insn;
- if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
+ if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
if (prev)
{
NEXT_INSN (prev) = next;
- if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
+ if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
if (next)
{
PREV_INSN (next) = prev;
- if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
+ if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
}
else if (last_insn == insn)
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
- if (GET_CODE (insn) != BARRIER
+ if (!BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (insn)))
{
if (INSN_P (insn))
{
/* Never ever delete the basic block note without deleting whole
basic block. */
- if (GET_CODE (insn) == NOTE)
- abort ();
+ gcc_assert (!NOTE_P (insn));
BB_HEAD (bb) = next;
}
if (BB_END (bb) == insn)
void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
- if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
- abort ();
+ gcc_assert (call_insn && CALL_P (call_insn));
/* Put the register usage information on the CALL. If there is already
some usage information, put ours at the end. */
reorder_insns_nobb (from, to, after);
- if (GET_CODE (after) != BARRIER
+ if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
{
rtx x;
bb->flags |= BB_DIRTY;
- if (GET_CODE (from) != BARRIER
+ if (!BARRIER_P (from)
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (BB_END (bb2) == to)
BB_END (bb) = to;
for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
- set_block_for_insn (x, bb);
+ if (!BARRIER_P (x))
+ set_block_for_insn (x, bb);
}
}
return 0;
for (; insn; insn = PREV_INSN (insn))
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) >= 0)
break;
next = NEXT_INSN (insn);
/* We're only interested in notes. */
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
continue;
switch (NOTE_LINE_NUMBER (insn))
{
case NOTE_INSN_DELETED:
- case NOTE_INSN_LOOP_END_TOP_COND:
remove_insn (insn);
break;
case NOTE_INSN_EH_REGION_END:
/* Too many end notes. */
- if (eh_stack == NULL_RTX)
- abort ();
+ gcc_assert (eh_stack);
/* Mismatched nesting. */
- if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
- abort ();
+ gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
+ == NOTE_EH_HANDLER (insn));
tmp = eh_stack;
eh_stack = XEXP (eh_stack, 1);
free_INSN_LIST_node (tmp);
case NOTE_INSN_BLOCK_BEG:
/* By now, all notes indicating lexical blocks should have
NOTE_BLOCK filled in. */
- if (NOTE_BLOCK (insn) == NULL_TREE)
- abort ();
+ gcc_assert (NOTE_BLOCK (insn));
block_stack = alloc_INSN_LIST (insn, block_stack);
break;
case NOTE_INSN_BLOCK_END:
/* Too many end notes. */
- if (block_stack == NULL_RTX)
- abort ();
+ gcc_assert (block_stack);
/* Mismatched nesting. */
- if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
- abort ();
+ gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
tmp = block_stack;
block_stack = XEXP (block_stack, 1);
free_INSN_LIST_node (tmp);
break;
/* We're only interested in NOTEs. */
- if (GET_CODE (tmp) != NOTE)
+ if (!NOTE_P (tmp))
continue;
if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
}
/* Too many begin notes. */
- if (block_stack || eh_stack)
- abort ();
+ gcc_assert (!block_stack && !eh_stack);
}
\f
rtx last = before;
rtx insn;
-#ifdef ENABLE_RTL_CHECKING
- if (before == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (before);
if (x == NULL_RTX)
return last;
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
{
rtx insn, last = NULL_RTX;
-#ifdef ENABLE_RTL_CHECKING
- if (before == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (before);
switch (GET_CODE (x))
{
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
{
rtx last = NULL_RTX, insn;
-#ifdef ENABLE_RTL_CHECKING
- if (before == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (before);
switch (GET_CODE (x))
{
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
rtx after_after;
basic_block bb;
- if (GET_CODE (after) != BARRIER
+ if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
{
bb->flags |= BB_DIRTY;
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
- if (GET_CODE (last) != BARRIER)
+ if (!BARRIER_P (last))
set_block_for_insn (last, bb);
- if (GET_CODE (last) != BARRIER)
+ if (!BARRIER_P (last))
set_block_for_insn (last, bb);
if (BB_END (bb) == after)
BB_END (bb) = last;
{
rtx last = after;
-#ifdef ENABLE_RTL_CHECKING
- if (after == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (after);
if (x == NULL_RTX)
return last;
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
{
rtx last;
-#ifdef ENABLE_RTL_CHECKING
- if (after == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (after);
switch (GET_CODE (x))
{
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
{
rtx last;
-#ifdef ENABLE_RTL_CHECKING
- if (after == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (after);
switch (GET_CODE (x))
{
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
{
rtx note;
- set_file_and_line_for_stmt (location);
-
#ifdef USE_MAPPED_LOCATION
if (location == last_location)
return NULL_RTX;
means the insn only has one *useful* set).  */
if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
{
- if (note)
- abort ();
+ gcc_assert (!note);
return NULL_RTX;
}
enum rtx_code
classify_insn (rtx x)
{
- if (GET_CODE (x) == CODE_LABEL)
+ if (LABEL_P (x))
return CODE_LABEL;
if (GET_CODE (x) == CALL)
return CALL_INSN;
{
enum rtx_code code = classify_insn (x);
- if (code == CODE_LABEL)
- return emit_label (x);
- else if (code == INSN)
- return emit_insn (x);
- else if (code == JUMP_INSN)
+ switch (code)
{
- rtx insn = emit_jump_insn (x);
- if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
- return emit_barrier ();
- return insn;
+ case CODE_LABEL:
+ return emit_label (x);
+ case INSN:
+ return emit_insn (x);
+ case JUMP_INSN:
+ {
+ rtx insn = emit_jump_insn (x);
+ if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
+ return emit_barrier ();
+ return insn;
+ }
+ case CALL_INSN:
+ return emit_call_insn (x);
+ default:
+ gcc_unreachable ();
}
- else if (code == CALL_INSN)
- return emit_call_insn (x);
- else
- abort ();
}
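An illustrative caller of emit (hypothetical operands; each argument
must already be a complete pattern or insn):

    emit (gen_label_rtx ());			/* dispatched to emit_label */
    emit (gen_rtx_SET (VOIDmode, dest, src));	/* dispatched to emit_insn */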
\f
/* Space for free sequence stack entries. */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
-/* Begin emitting insns to a sequence which can be packaged in an
- RTL_EXPR. If this sequence will contain something that might cause
- the compiler to pop arguments to function calls (because those
- pops have previously been deferred; see INHIBIT_DEFER_POP for more
- details), use do_pending_stack_adjust before calling this function.
- That will ensure that the deferred pops are not accidentally
- emitted in the middle of this sequence. */
+/* Begin emitting insns to a sequence. If this sequence will contain
+ something that might cause the compiler to pop arguments to function
+ calls (because those pops have previously been deferred; see
+ INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
+ before calling this function. That will ensure that the deferred
+ pops are not accidentally emitted in the middle of this sequence. */
void
start_sequence (void)
tem->next = seq_stack;
tem->first = first_insn;
tem->last = last_insn;
- tem->sequence_rtl_expr = seq_rtl_expr;
seq_stack = tem;
last_insn = 0;
}
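The canonical pairing, for context (a sketch; DEST, SRC and INSN are
assumed rtxen):

    rtx seq;
    start_sequence ();
    emit_move_insn (dest, src);
    seq = get_insns ();
    end_sequence ();
    emit_insn_before (seq, insn);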
-/* Similarly, but indicate that this sequence will be placed in T, an
- RTL_EXPR. See the documentation for start_sequence for more
- information about how to use this function. */
-
-void
-start_sequence_for_rtl_expr (tree t)
-{
- start_sequence ();
-
- seq_rtl_expr = t;
-}
-
/* Set up the insn chain starting with FIRST as the current sequence,
saving the previously current one. See the documentation for
start_sequence for more information about how to use this function. */
first_insn = first;
last_insn = last;
/* We really should have the end of the insn chain here. */
- if (last && NEXT_INSN (last))
- abort ();
+ gcc_assert (!last || !NEXT_INSN (last));
}
/* Set up the outer-level insn chain
first_insn = top->first;
last_insn = top->last;
- seq_rtl_expr = top->sequence_rtl_expr;
}
/* After emitting to the outer-level insn chain, update the outer-level
top->first = first_insn;
top->last = last_insn;
- /* ??? Why don't we save seq_rtl_expr here? */
end_sequence ();
}
first_insn = tem->first;
last_insn = tem->last;
- seq_rtl_expr = tem->sequence_rtl_expr;
seq_stack = tem->next;
memset (tem, 0, sizeof (*tem));
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
case CODE_LABEL:
case PC:
case CC0:
- case ADDRESSOF:
return orig;
case CLOBBER:
if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
if (code == SCRATCH)
{
i = copy_insn_n_scratches++;
- if (i >= MAX_RECOG_OPERANDS)
- abort ();
+ gcc_assert (i < MAX_RECOG_OPERANDS);
copy_insn_scratch_in[i] = orig;
copy_insn_scratch_out[i] = copy;
}
f->emit = ggc_alloc (sizeof (struct emit_status));
first_insn = NULL;
last_insn = NULL;
- seq_rtl_expr = NULL;
cur_insn_uid = 1;
reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
last_location = UNKNOWN_LOCATION;
#endif
}
-/* Generate the constant 0. */
+/* Generate a vector constant for mode MODE and constant value CONSTANT. */
static rtx
-gen_const_vector_0 (enum machine_mode mode)
+gen_const_vector (enum machine_mode mode, int constant)
{
rtx tem;
rtvec v;
v = rtvec_alloc (units);
- /* We need to call this function after we to set CONST0_RTX first. */
- if (!CONST0_RTX (inner))
- abort ();
+ /* We need to call this function after we set the scalar const_tiny_rtx
+ entries. */
+ gcc_assert (const_tiny_rtx[constant][(int) inner]);
for (i = 0; i < units; ++i)
- RTVEC_ELT (v, i) = CONST0_RTX (inner);
+ RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
tem = gen_rtx_raw_CONST_VECTOR (mode, v);
return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
- all elements are zero. */
+ all elements are zero, and the one vector when all elements are one. */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
- rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
+ enum machine_mode inner = GET_MODE_INNER (mode);
+ int nunits = GET_MODE_NUNITS (mode);
+ rtx x;
int i;
- for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
- if (RTVEC_ELT (v, i) != inner_zero)
- return gen_rtx_raw_CONST_VECTOR (mode, v);
- return CONST0_RTX (mode);
+ /* Check to see if all of the elements have the same value. */
+ x = RTVEC_ELT (v, nunits - 1);
+ for (i = nunits - 2; i >= 0; i--)
+ if (RTVEC_ELT (v, i) != x)
+ break;
+
+ /* If the values are all the same, check to see if we can use one of the
+ standard constant vectors. */
+ if (i == -1)
+ {
+ if (x == CONST0_RTX (inner))
+ return CONST0_RTX (mode);
+ else if (x == CONST1_RTX (inner))
+ return CONST1_RTX (mode);
+ }
+
+ return gen_rtx_raw_CONST_VECTOR (mode, v);
}
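With this change an all-same-element vector now folds to the shared
constant; an illustrative check (assumes a target providing V4SImode):

    rtvec v = rtvec_alloc (4);
    rtx x;
    int i;

    for (i = 0; i < 4; i++)
      RTVEC_ELT (v, i) = const1_rtx;
    /* Folds to the shared constant rather than a fresh CONST_VECTOR.  */
    x = gen_rtx_CONST_VECTOR (V4SImode, v);  /* == CONST1_RTX (V4SImode) */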
/* Create some permanent unique rtl objects shared between all functions.
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
- const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ }
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
- const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ }
for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
\f
-/* Query and clear/ restore no_line_numbers. This is used by the
- switch / case handling in stmt.c to give proper line numbers in
- warnings about unreachable code. */
-
-int
-force_line_numbers (void)
-{
- int old = no_line_numbers;
-
- no_line_numbers = 0;
- if (old)
- force_next_line_note ();
- return old;
-}
-
-void
-restore_line_number_status (int old_value)
-{
- no_line_numbers = old_value;
-}
-
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* Update LABEL_NUSES. */