#define SHIFT_COUNT_TRUNCATED 0
#endif
-/* It is not safe to use ordinary gen_lowpart in combine.
- Use gen_lowpart_for_combine instead. See comments there. */
-#define gen_lowpart dont_use_gen_lowpart_you_dummy
-
/* Number of attempts to combine instructions in this function. */
static int combine_attempts;
combine_max_regno = nregs;
+ /* It is not safe to use ordinary gen_lowpart in combine.
+ See comments in gen_lowpart_for_combine. */
+ gen_lowpart = gen_lowpart_for_combine;
+
reg_nonzero_bits = xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT));
reg_sign_bit_copies = xcalloc (nregs, sizeof (unsigned char));
total_successes += combine_successes;
nonzero_sign_valid = 0;
+ gen_lowpart = gen_lowpart_general;
/* Make recognizer allow volatile MEMs again. */
init_recog ();
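The hunks above replace the old compile-time poisoning of gen_lowpart (the deleted "#define gen_lowpart dont_use_gen_lowpart_you_dummy") with a run-time hook: combine installs gen_lowpart_for_combine on entry and restores gen_lowpart_general on exit. A minimal standalone sketch of that pattern, with stub types and hypothetical names (lowpart_hook, run_combine_pass, and friends are illustrative, not GCC identifiers):

    #include <stdio.h>

    typedef struct rtx_def *rtx;           /* opaque stand-in for GCC's rtx */

    static rtx
    lowpart_general (int mode, rtx x)      /* the generic default */
    {
      printf ("general lowpart, mode %d\n", mode);
      return x;
    }

    static rtx
    lowpart_for_combine (int mode, rtx x)  /* pass-specific variant */
    {
      printf ("combine lowpart, mode %d\n", mode);
      return x;
    }

    /* All call sites go through the pointer, so the running pass
       chooses the semantics.  */
    static rtx (*lowpart_hook) (int, rtx) = lowpart_general;

    static void
    run_combine_pass (void)
    {
      lowpart_hook = lowpart_for_combine;  /* install on entry */
      lowpart_hook (0, NULL);              /* pass body runs here */
      lowpart_hook = lowpart_general;      /* restore on exit */
    }

    int
    main (void)
    {
      run_combine_pass ();
      lowpart_hook (0, NULL);              /* back to the default */
      return 0;
    }

The macro only caught misuse inside combine.c, and only at compile time; the hook lets every call site spell the same name and selects the right variant at run time.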
ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
newpat = XVECEXP (newpat, 0, 1);
SUBST (SET_SRC (newpat),
- gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
+ gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
if (i2_code_number >= 0)
SUBST (SET_SRC (x),
gen_rtx_AND (mode,
gen_rtx_LSHIFTRT
- (mode, gen_lowpart_for_combine (mode, inner),
+ (mode, gen_lowpart (mode, inner),
GEN_INT (pos)),
GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
gen_rtx_fmt_ee
(unsignedp ? LSHIFTRT : ASHIFTRT, mode,
gen_rtx_ASHIFT (mode,
- gen_lowpart_for_combine (mode, inner),
+ gen_lowpart (mode, inner),
GEN_INT (GET_MODE_BITSIZE (mode)
- len - pos)),
GEN_INT (GET_MODE_BITSIZE (mode) - len)));
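The two RTL shapes built above are the standard bit-field extractions: unsigned extract as (AND (LSHIFTRT inner pos) (2**len - 1)), and signed extract as an ASHIFT up to the sign bit followed by an ASHIFTRT back down. A small standalone check of the arithmetic (helper names are hypothetical; the C version uses the width of int where the RTL uses GET_MODE_BITSIZE (mode)):

    #include <assert.h>
    #include <limits.h>

    /* Unsigned extract: shift the field down, mask off LEN bits.  */
    static unsigned int
    extract_unsigned (unsigned int x, int pos, int len)
    {
      return (x >> pos) & ((1u << len) - 1);
    }

    /* Signed extract: relies on arithmetic right shift of negative
       values, which is exactly what ASHIFTRT provides in RTL.  */
    static int
    extract_signed (unsigned int x, int pos, int len)
    {
      int bits = (int) sizeof (int) * CHAR_BIT;
      return ((int) (x << (bits - len - pos))) >> (bits - len);
    }

    int
    main (void)
    {
      assert (extract_unsigned (0xA5u, 4, 4) == 0xA);
      assert (extract_signed (0xA5u, 4, 4) == -6);  /* 1010 as 4-bit signed */
      return 0;
    }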
if (op0_mode == VOIDmode)
op0_mode = GET_MODE (SUBREG_REG (x));
- /* simplify_subreg can't use gen_lowpart_for_combine. */
+ /* See if this can be moved to simplify_subreg. */
if (CONSTANT_P (SUBREG_REG (x))
&& subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
- /* Don't call gen_lowpart_for_combine if the inner mode
+ /* Don't call gen_lowpart if the inner mode
is VOIDmode and we cannot simplify it, as SUBREG without
inner mode is invalid. */
&& (GET_MODE (SUBREG_REG (x)) != VOIDmode
|| gen_lowpart_common (mode, SUBREG_REG (x))))
- return gen_lowpart_for_combine (mode, SUBREG_REG (x));
+ return gen_lowpart (mode, SUBREG_REG (x));
if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
break;
simplify_gen_unary (NOT, inner_mode, const1_rtx,
inner_mode),
XEXP (SUBREG_REG (XEXP (x, 0)), 1));
- return gen_lowpart_for_combine (mode, x);
+ return gen_lowpart (mode, x);
}
/* Apply De Morgan's laws to reduce number of patterns for machines
>= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
&& ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
- return gen_lowpart_for_combine (mode, XEXP (x, 0));
+ return gen_lowpart (mode, XEXP (x, 0));
/* A truncate of a comparison can be replaced with a subreg if
STORE_FLAG_VALUE permits. This is like the previous test,
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
&& ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
- return gen_lowpart_for_combine (mode, XEXP (x, 0));
+ return gen_lowpart (mode, XEXP (x, 0));
/* Similarly, a truncate of a register whose value is a
comparison can be replaced with a subreg if STORE_FLAG_VALUE
&& ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
&& (temp = get_last_value (XEXP (x, 0)))
&& GET_RTX_CLASS (GET_CODE (temp)) == '<')
- return gen_lowpart_for_combine (mode, XEXP (x, 0));
+ return gen_lowpart (mode, XEXP (x, 0));
break;
&& op1 == const0_rtx
&& mode == GET_MODE (op0)
&& nonzero_bits (op0, mode) == 1)
- return gen_lowpart_for_combine (mode,
- expand_compound_operation (op0));
+ return gen_lowpart (mode,
+ expand_compound_operation (op0));
else if (STORE_FLAG_VALUE == 1
&& new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
{
op0 = expand_compound_operation (op0);
return simplify_gen_unary (NEG, mode,
- gen_lowpart_for_combine (mode, op0),
+ gen_lowpart (mode, op0),
mode);
}
{
op0 = expand_compound_operation (op0);
return gen_binary (XOR, mode,
- gen_lowpart_for_combine (mode, op0),
+ gen_lowpart (mode, op0),
const1_rtx);
}
== GET_MODE_BITSIZE (mode)))
{
op0 = expand_compound_operation (op0);
- return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
+ return plus_constant (gen_lowpart (mode, op0), 1);
}
/* If STORE_FLAG_VALUE is -1, we have cases similar to
&& op1 == const0_rtx
&& (num_sign_bit_copies (op0, mode)
== GET_MODE_BITSIZE (mode)))
- return gen_lowpart_for_combine (mode,
- expand_compound_operation (op0));
+ return gen_lowpart (mode,
+ expand_compound_operation (op0));
else if (STORE_FLAG_VALUE == -1
&& new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
{
op0 = expand_compound_operation (op0);
return simplify_gen_unary (NEG, mode,
- gen_lowpart_for_combine (mode, op0),
+ gen_lowpart (mode, op0),
mode);
}
{
op0 = expand_compound_operation (op0);
return simplify_gen_unary (NOT, mode,
- gen_lowpart_for_combine (mode, op0),
+ gen_lowpart (mode, op0),
mode);
}
&& nonzero_bits (op0, mode) == 1)
{
op0 = expand_compound_operation (op0);
- return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
+ return plus_constant (gen_lowpart (mode, op0), -1);
}
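The EQ/NE rewrites above all reduce a store-flag comparison against zero to arithmetic on op0, depending on whether op0 is known to be 0/1 (nonzero_bits == 1) or 0/-1 (every bit a sign-bit copy). A quick standalone check of the identities behind the NEG, XOR, NOT, and plus_constant cases:

    #include <assert.h>

    int
    main (void)
    {
      int x;

      /* x known to be 0 or 1.  */
      for (x = 0; x <= 1; x++)
        {
          assert ((x != 0) == x);        /* STORE_FLAG_VALUE 1:  ne -> x      */
          assert ((x == 0) == (x ^ 1));  /*                      eq -> x ^ 1  */
          assert (-(x != 0) == -x);      /* STORE_FLAG_VALUE -1: ne -> neg x  */
          assert (-(x == 0) == x - 1);   /*                      eq -> x - 1  */
        }

      /* x known to be 0 or -1.  */
      for (x = -1; x <= 0; x++)
        {
          assert ((x != 0) == -x);       /* STORE_FLAG_VALUE 1:  ne -> neg x  */
          assert ((x == 0) == x + 1);    /*                      eq -> x + 1  */
          assert (-(x != 0) == x);       /* STORE_FLAG_VALUE -1: ne -> x      */
          assert (-(x == 0) == ~x);      /*                      eq -> not x  */
        }
      return 0;
    }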
/* If STORE_FLAG_VALUE says to just test the sign bit and X has just
temp = gen_binary (MULT, m, temp,
gen_binary (MULT, m, c1, const_true_rtx));
temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
- temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
+ temp = gen_binary (op, m, gen_lowpart (m, z), temp);
if (extend_op != NIL)
temp = simplify_gen_unary (extend_op, mode, temp, m);
&& (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
return
simplify_shift_const (NULL_RTX, ASHIFT, mode,
- gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
+ gen_lowpart (mode, XEXP (cond, 0)), i);
/* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */
if (true_code == NE && XEXP (cond, 1) == const0_rtx
&& GET_CODE (SUBREG_REG (dest)) == REG)))
{
SUBST (SET_DEST (x),
- gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
+ gen_lowpart (GET_MODE (SUBREG_REG (src)),
dest));
SUBST (SET_SRC (x), SUBREG_REG (src));
+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
{
x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
- gen_lowpart_for_combine
+ gen_lowpart
(GET_MODE (SUBREG_REG (SET_DEST (x))),
SET_SRC (x)));
continue;
break;
compute_mode = imode;
- inner = gen_lowpart_for_combine (imode, inner);
+ inner = gen_lowpart (imode, inner);
}
/* Compute a mask of LEN bits, if we can do this on the host machine. */
inner),
gen_binary (ASHIFT, compute_mode,
gen_binary (AND, compute_mode,
- gen_lowpart_for_combine
+ gen_lowpart
(compute_mode, SET_SRC (x)),
mask),
pos)));
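The expression assembled here appears to be the usual read-modify-write field store: the masked, shifted source is IORed into a copy of INNER whose LEN bits at POS have been cleared (the clearing AND sits just above the quoted context). In plain C, with a hypothetical helper name:

    #include <assert.h>

    /* (DEST & ~(MASK << POS)) | ((SRC & MASK) << POS), mirroring the
       AND/ASHIFT combination the hunk builds in compute_mode.  */
    static unsigned int
    store_field (unsigned int dest, unsigned int src, int pos, int len)
    {
      unsigned int mask = (1u << len) - 1;   /* the "mask of LEN bits" */
      return (dest & ~(mask << pos)) | ((src & mask) << pos);
    }

    int
    main (void)
    {
      /* Replace bits 4..7 of 0xFFFF with 0x3.  */
      assert (store_field (0xFFFFu, 0x3u, 4, 4) == 0xFF3F);
      return 0;
    }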
{
if (tmode != inner_mode)
{
- /* We can't call gen_lowpart_for_combine in a DEST since we
+ /* We can't call gen_lowpart in a DEST since we
always want a SUBREG (see below) and it would sometimes
return a new hard register. */
if (pos || in_dest)
new = gen_rtx_SUBREG (tmode, inner, final_word);
}
else
- new = gen_lowpart_for_combine (tmode, inner);
+ new = gen_lowpart (tmode, inner);
}
else
new = inner;
}
else if (pos_rtx != 0
&& GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
- pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
+ pos_rtx = gen_lowpart (pos_mode, pos_rtx);
/* Make POS_RTX unless we already have it and it is correct. If we don't
have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
extraction_mode, inner, GEN_INT (len), pos_rtx);
if (! in_dest)
- new = gen_lowpart_for_combine (mode, new);
+ new = gen_lowpart (mode, new);
return new;
}
tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
}
else
- tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
+ tem = gen_lowpart (mode, XEXP (tem, 0));
return tem;
}
break;
if (new)
{
- x = gen_lowpart_for_combine (mode, new);
+ x = gen_lowpart (mode, new);
code = GET_CODE (x);
}
expression is VOIDmode.
Also do nothing if X is a CLOBBER; this can happen if X was
- the return value from a call to gen_lowpart_for_combine. */
+ the return value from a call to gen_lowpart. */
if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
return x;
get X in the proper mode. */
if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
&& (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
- return gen_lowpart_for_combine (mode, x);
+ return gen_lowpart (mode, x);
/* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
MASK are already known to be zero in X, we need not do anything. */
/* For most binary operations, just propagate into the operation and
change the mode if we have an operation of that mode. */
- op0 = gen_lowpart_for_combine (op_mode,
- force_to_mode (XEXP (x, 0), mode, mask,
- reg, next_select));
- op1 = gen_lowpart_for_combine (op_mode,
- force_to_mode (XEXP (x, 1), mode, mask,
- reg, next_select));
+ op0 = gen_lowpart (op_mode,
+ force_to_mode (XEXP (x, 0), mode, mask,
+ reg, next_select));
+ op1 = gen_lowpart (op_mode,
+ force_to_mode (XEXP (x, 1), mode, mask,
+ reg, next_select));
if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
x = gen_binary (code, op_mode, op0, op1);
else
mask = fuller_mask;
- op0 = gen_lowpart_for_combine (op_mode,
- force_to_mode (XEXP (x, 0), op_mode,
- mask, reg, next_select));
+ op0 = gen_lowpart (op_mode,
+ force_to_mode (XEXP (x, 0), op_mode,
+ mask, reg, next_select));
if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
x = gen_binary (code, op_mode, op0, XEXP (x, 1));
mask = fuller_mask;
unop:
- op0 = gen_lowpart_for_combine (op_mode,
- force_to_mode (XEXP (x, 0), mode, mask,
- reg, next_select));
+ op0 = gen_lowpart (op_mode,
+ force_to_mode (XEXP (x, 0), mode, mask,
+ reg, next_select));
if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
x = simplify_gen_unary (code, op_mode, op0, op_mode);
break;
written in a narrower mode. We play it safe and do not do so. */
SUBST (XEXP (x, 1),
- gen_lowpart_for_combine (GET_MODE (x),
+ gen_lowpart (GET_MODE (x),
force_to_mode (XEXP (x, 1), mode,
mask, reg, next_select)));
SUBST (XEXP (x, 2),
- gen_lowpart_for_combine (GET_MODE (x),
+ gen_lowpart (GET_MODE (x),
force_to_mode (XEXP (x, 2), mode,
mask, reg, next_select)));
break;
}
/* Ensure we return a value of the proper mode. */
- return gen_lowpart_for_combine (mode, x);
+ return gen_lowpart (mode, x);
}
\f
/* Return nonzero if X is an expression that has one of two values depending on
if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
&& GET_CODE (SUBREG_REG (y)) == MEM
&& rtx_equal_p (SUBREG_REG (y),
- gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
+ gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
return 1;
if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
&& GET_CODE (SUBREG_REG (x)) == MEM
&& rtx_equal_p (SUBREG_REG (x),
- gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
+ gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
return 1;
/* We used to see if get_last_value of X and Y were the same but that's
tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
SUBREG_REG (lhs), SUBREG_REG (rhs));
- return gen_lowpart_for_combine (GET_MODE (x), tem);
+ return gen_lowpart (GET_MODE (x), tem);
default:
return x;
if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
return
- gen_lowpart_for_combine
+ gen_lowpart
(mode,
apply_distributive_law
(gen_binary (GET_CODE (varop), GET_MODE (varop),
&& SUBREG_REG (XEXP (x, 0)) == varop)
varop = XEXP (x, 0);
else
- varop = gen_lowpart_for_combine (mode, varop);
+ varop = gen_lowpart (mode, varop);
/* If we can't make the SUBREG, try to return what we were given. */
if (GET_CODE (varop) == CLOBBER)
&& SUBREG_REG (XEXP (x, 0)) == varop)
varop = XEXP (x, 0);
else if (GET_MODE (varop) != shift_mode)
- varop = gen_lowpart_for_combine (shift_mode, varop);
+ varop = gen_lowpart (shift_mode, varop);
/* If we can't make the SUBREG, try to return what we were given. */
if (GET_CODE (varop) == CLOBBER)
GET_MODE_MASK (result_mode) >> orig_count);
/* Do the remainder of the processing in RESULT_MODE. */
- x = gen_lowpart_for_combine (result_mode, x);
+ x = gen_lowpart (result_mode, x);
/* If COMPLEMENT_P is set, we have to complement X before doing the outer
operation. */
return insn_code_number;
}
\f
-/* Like gen_lowpart but for use by combine. In combine it is not possible
- to create any new pseudoregs. However, it is safe to create
- invalid memory addresses, because combine will try to recognize
- them and all they will do is make the combine attempt fail.
+/* Like gen_lowpart_general but for use by combine. In combine it
+ is not possible to create any new pseudoregs. However, it is
+ safe to create invalid memory addresses, because combine will
+ try to recognize them and all they will do is make the combine
+ attempt fail.
If for some reason this cannot do its job, an rtx
(clobber (const_int 0)) is returned.
An insn containing that will not be recognized. */
-#undef gen_lowpart
-
static rtx
gen_lowpart_for_combine (enum machine_mode mode, rtx x)
{
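The failure convention described in the comment above it, returning (clobber (const_int 0)) rather than a null pointer, keeps a valid rtx flowing through the simplifiers; recognition simply fails later. A sketch of the caller-side idiom with stub types (sketch_lowpart and narrow_or_keep are hypothetical):

    enum rtx_code { REG, SUBREG, CLOBBER };

    struct rtx_def { enum rtx_code code; };
    typedef struct rtx_def *rtx;

    static struct rtx_def clobber = { CLOBBER };

    /* Stand-in for gen_lowpart_for_combine: signal failure with a
       CLOBBER marker instead of a null pointer.  */
    static rtx
    sketch_lowpart (rtx x)
    {
      return x ? x : &clobber;
    }

    /* The idiom seen throughout the diff: "If we can't make the
       SUBREG, try to return what we were given."  */
    static rtx
    narrow_or_keep (rtx varop)
    {
      rtx tem = sketch_lowpart (varop);
      return tem->code == CLOBBER ? varop : tem;
    }

    int
    main (void)
    {
      struct rtx_def reg = { REG };
      return narrow_or_keep (&reg) == &reg ? 0 : 1;
    }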
tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
{
- op0 = gen_lowpart_for_combine (tmode, inner_op0);
- op1 = gen_lowpart_for_combine (tmode, inner_op1);
+ op0 = gen_lowpart (tmode, inner_op0);
+ op1 = gen_lowpart (tmode, inner_op1);
code = unsigned_condition (code);
changed = 1;
break;
&& const_op >> i == 0
&& (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
{
- op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
+ op0 = gen_lowpart (tmode, XEXP (op0, 0));
continue;
}
op0 = gen_binary (AND, tmode,
SUBREG_REG (XEXP (op0, 0)),
gen_int_mode (c1, tmode));
- op0 = gen_lowpart_for_combine (mode, op0);
+ op0 = gen_lowpart (mode, op0);
continue;
}
}
+ (GET_MODE_MASK (tmode) >> 1) + 1)
<= GET_MODE_MASK (tmode)))
{
- op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
+ op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
continue;
}
XEXP (op0, 1));
op0 = gen_binary (PLUS, tmode,
- gen_lowpart_for_combine (tmode, inner),
+ gen_lowpart (tmode, inner),
new_const);
continue;
}
if (GET_CODE (SUBREG_REG (op0)) == REG)
{
op0 = SUBREG_REG (op0);
- op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
+ op1 = gen_lowpart (GET_MODE (op0), op1);
}
}
else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
GET_MODE (SUBREG_REG (op0)))
& ~GET_MODE_MASK (GET_MODE (op0))) == 0)
{
- tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
+ tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
& ~GET_MODE_MASK (GET_MODE (op0))) == 0)
if (GET_CODE (op0) == AND
&& !have_insn_for (AND, mode))
op0 = gen_binary (AND, tmode,
- gen_lowpart_for_combine (tmode,
- XEXP (op0, 0)),
- gen_lowpart_for_combine (tmode,
- XEXP (op0, 1)));
+ gen_lowpart (tmode,
+ XEXP (op0, 0)),
+ gen_lowpart (tmode,
+ XEXP (op0, 1)));
- op0 = gen_lowpart_for_combine (tmode, op0);
+ op0 = gen_lowpart (tmode, op0);
if (zero_extended && GET_CODE (op1) == CONST_INT)
op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
- op1 = gen_lowpart_for_combine (tmode, op1);
+ op1 = gen_lowpart (tmode, op1);
break;
}
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
{
op0 = gen_binary (AND, tmode,
- gen_lowpart_for_combine (tmode, op0),
+ gen_lowpart (tmode, op0),
GEN_INT ((HOST_WIDE_INT) 1
<< (GET_MODE_BITSIZE (mode) - 1)));
code = (code == LT) ? NE : EQ;
&& GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
&& subreg_lowpart_p (SET_DEST (setter)))
record_value_for_reg (dest, record_dead_insn,
- gen_lowpart_for_combine (GET_MODE (dest),
+ gen_lowpart (GET_MODE (dest),
SET_SRC (setter)));
else
record_value_for_reg (dest, record_dead_insn, NULL_RTX);
&& (GET_MODE_SIZE (GET_MODE (x))
<= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
&& (value = get_last_value (SUBREG_REG (x))) != 0)
- return gen_lowpart_for_combine (GET_MODE (x), value);
+ return gen_lowpart (GET_MODE (x), value);
if (GET_CODE (x) != REG)
return 0;
int exp_q = REG_QTY (REGNO (classp->exp));
struct qty_table_elem *exp_ent = &qty_table[exp_q];
- exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
+ exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
exp_ent->const_insn = this_insn;
}
struct qty_table_elem *x_ent = &qty_table[x_q];
x_ent->const_rtx
- = gen_lowpart_if_possible (GET_MODE (x), p->exp);
+ = gen_lowpart (GET_MODE (x), p->exp);
x_ent->const_insn = this_insn;
break;
}
if (((BYTES_BIG_ENDIAN
&& offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
|| (! BYTES_BIG_ENDIAN && offset == 0))
- && (new = gen_lowpart_if_possible (mode, constant)) != 0)
+ && (new = gen_lowpart (mode, constant)) != 0)
return new;
}
&& GET_CODE (arg_ent->const_rtx) != REG
&& GET_CODE (arg_ent->const_rtx) != PLUS)
const_arg
- = gen_lowpart_if_possible (GET_MODE (arg),
+ = gen_lowpart (GET_MODE (arg),
arg_ent->const_rtx);
}
break;
struct qty_table_elem *x_ent = &qty_table[x_q];
if (x_ent->const_rtx)
- x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
+ x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
}
if (x == 0 || CONSTANT_P (x))
If the requested operation cannot be done, 0 is returned.
- This is similar to gen_lowpart in emit-rtl.c. */
+ This is similar to gen_lowpart_general in emit-rtl.c. */
rtx
gen_lowpart_if_possible (enum machine_mode mode, rtx x)
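Note the contrast with combine's variant: gen_lowpart_if_possible signals failure by returning 0 rather than a CLOBBER, so its call sites guard with a null test and fall back to building an explicit SUBREG, as in the record_jump_cond hunks below (tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0)).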
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
- rtx tem = gen_lowpart_if_possible (inner_mode, op1);
+ rtx tem = gen_lowpart (inner_mode, op1);
record_jump_cond (code, mode, SUBREG_REG (op0),
tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
- rtx tem = gen_lowpart_if_possible (inner_mode, op0);
+ rtx tem = gen_lowpart (inner_mode, op0);
record_jump_cond (code, mode, SUBREG_REG (op1),
tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
- rtx tem = gen_lowpart_if_possible (inner_mode, op1);
+ rtx tem = gen_lowpart (inner_mode, op1);
record_jump_cond (code, mode, SUBREG_REG (op0),
tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
- rtx tem = gen_lowpart_if_possible (inner_mode, op0);
+ rtx tem = gen_lowpart (inner_mode, op0);
record_jump_cond (code, mode, SUBREG_REG (op1),
tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
const_elt; const_elt = const_elt->next_same_value)
if (GET_CODE (const_elt->exp) == REG)
{
- src_related = gen_lowpart_if_possible (mode,
+ src_related = gen_lowpart (mode,
const_elt->exp);
break;
}
GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
tmode = GET_MODE_WIDER_MODE (tmode))
{
- rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
+ rtx inner = gen_lowpart (tmode, XEXP (src, 0));
struct table_elt *larger_elt;
if (inner)
if (GET_CODE (larger_elt->exp) == REG)
{
src_related
- = gen_lowpart_if_possible (mode, larger_elt->exp);
+ = gen_lowpart (mode, larger_elt->exp);
break;
}
larger_elt; larger_elt = larger_elt->next_same_value)
if (GET_CODE (larger_elt->exp) == REG)
{
- src_related = gen_lowpart_if_possible (mode,
+ src_related = gen_lowpart (mode,
larger_elt->exp);
break;
}
we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
make that equivalence as well.
- However, BAR may have equivalences for which gen_lowpart_if_possible
- will produce a simpler value than gen_lowpart_if_possible applied to
+     However, an equivalence of BAR may yield a simpler value under
+     gen_lowpart than gen_lowpart applied to
BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
BAR's equivalences. If we don't get a simplified form, make
the SUBREG. It will not be used in an equivalence, but will
&& (CONSTANT_P (ent->const_rtx)
|| GET_CODE (ent->const_rtx) == REG))
{
- rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
+ rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
if (new)
return new;
}
constant_pool_entries_cost = 0;
constant_pool_entries_regcost = 0;
val.path_size = 0;
+ gen_lowpart = gen_lowpart_if_possible;
init_recog ();
init_alias_analysis ();
free (uid_cuid);
free (reg_eqv_table);
free (val.path);
+ gen_lowpart = gen_lowpart_general;
return cse_jumps_altered || recorded_label_ref;
}
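Taken together, the patch leaves one hook with three implementations: gen_lowpart_general as the default, gen_lowpart_for_combine installed for the duration of combine (no new pseudos, CLOBBER on failure), and gen_lowpart_if_possible installed for the duration of cse (0 on failure). One caveat of this design: the install/restore pairs in combine and cse_main bracket the whole pass, so an early return added between the two assignments would leave a pass-specific hook installed for the rest of the compilation.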