From b92e2e4393509fe1bed60ef7db58013d3e8c9239 Mon Sep 17 00:00:00 2001
From: bonzini
Date: Thu, 15 Dec 2005 08:51:16 +0000
Subject: [PATCH] 2005-12-15  Paolo Bonzini

* combine.c: Remove force_to_mode's fourth parameter.
(force_to_mode): Do not handle REG.
(canon_reg_for_combine): New.
(make_field_assignment): Use it where the removed argument was non-NULL.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@108564 138bc75d-0d04-0410-961f-82ee72b054a4
---
 gcc/ChangeLog |   7 +++
 gcc/combine.c | 189 +++++++++++++++++++++++++++++++++++++++++++---------------
 2 files changed, 147 insertions(+), 49 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 0546407..de3e458 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,10 @@
+2005-12-15  Paolo Bonzini
+
+	* combine.c: Remove force_to_mode's fourth parameter.
+	(force_to_mode): Do not handle REG.
+	(canon_reg_for_combine): New.
+	(make_field_assignment): Use it where the removed argument was non-NULL.
+
 2005-12-15  Alan Modra
 
 	PR target/25406
diff --git a/gcc/combine.c b/gcc/combine.c
index b13ea86..d09461b 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -385,8 +385,9 @@ static rtx extract_left_shift (rtx, int);
 static rtx make_compound_operation (rtx, enum rtx_code);
 static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                               unsigned HOST_WIDE_INT *);
+static rtx canon_reg_for_combine (rtx, rtx);
 static rtx force_to_mode (rtx, enum machine_mode,
-                          unsigned HOST_WIDE_INT, rtx, int);
+                          unsigned HOST_WIDE_INT, int);
 static rtx if_then_else_cond (rtx, rtx *, rtx *);
 static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
 static int rtx_equal_for_field_assignment_p (rtx, rtx);
@@ -4264,7 +4265,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
              GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
-                             GET_MODE_MASK (mode), NULL_RTX, 0));
+                             GET_MODE_MASK (mode), 0));
 
       /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
       if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
@@ -4864,7 +4865,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
                              ((HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
-                             NULL_RTX, 0));
+                             0));
       break;
 
     case VEC_SELECT:
@@ -5266,7 +5267,7 @@ simplify_set (rtx x)
   if (GET_MODE_CLASS (mode) == MODE_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
     {
-      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
+      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
       SUBST (SET_SRC (x), src);
     }
 
@@ -6395,7 +6396,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                                len >= HOST_BITS_PER_WIDE_INT
                                ? ~(unsigned HOST_WIDE_INT) 0
                                : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
-                               NULL_RTX, 0);
+                               0);
 
       /* If this extraction is going into the destination of a SET,
          make a STRICT_LOW_PART unless we made a MEM.  */
@@ -6585,7 +6586,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                          ? ~(unsigned HOST_WIDE_INT) 0
                          : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
                             << orig_pos),
-                         NULL_RTX, 0);
+                         0);
     }
 
   /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
@@ -6918,7 +6919,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
              && subreg_lowpart_p (x))
            {
              rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
-                                        NULL_RTX, 0);
+                                        0);
 
              /* If we have something other than a SUBREG, we might have
                 done an expansion, so rerun ourselves.  */
@@ -6991,6 +6992,105 @@ get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
   return pos;
 }
 
+/* If X refers to a register that equals REG in value, replace these
+   references with REG.  */
+static rtx
+canon_reg_for_combine (rtx x, rtx reg)
+{
+  rtx op0, op1, op2;
+  const char *fmt;
+  int i;
+  bool copied;
+
+  enum rtx_code code = GET_CODE (x);
+  switch (GET_RTX_CLASS (code))
+    {
+    case RTX_UNARY:
+      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
+      if (op0 != XEXP (x, 0))
+        return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
+                                   GET_MODE (reg));
+      break;
+
+    case RTX_BIN_ARITH:
+    case RTX_COMM_ARITH:
+      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
+      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
+      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
+        return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
+      break;
+
+    case RTX_COMPARE:
+    case RTX_COMM_COMPARE:
+      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
+      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
+      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
+        return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
+                                        GET_MODE (op0), op0, op1);
+      break;
+
+    case RTX_TERNARY:
+    case RTX_BITFIELD_OPS:
+      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
+      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
+      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
+      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
+        return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
+                                     GET_MODE (op0), op0, op1, op2);
+
+    case RTX_OBJ:
+      if (REG_P (x))
+        {
+          if (rtx_equal_p (get_last_value (reg), x)
+              || rtx_equal_p (reg, get_last_value (x)))
+            return reg;
+          else
+            break;
+        }
+
+      /* fall through */
+
+    default:
+      fmt = GET_RTX_FORMAT (code);
+      copied = false;
+      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+        if (fmt[i] == 'e')
+          {
+            rtx op = canon_reg_for_combine (XEXP (x, i), reg);
+            if (op != XEXP (x, i))
+              {
+                if (!copied)
+                  {
+                    copied = true;
+                    x = copy_rtx (x);
+                  }
+                XEXP (x, i) = op;
+              }
+          }
+        else if (fmt[i] == 'E')
+          {
+            int j;
+            for (j = 0; j < XVECLEN (x, i); j++)
+              {
+                rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
+                if (op != XVECEXP (x, i, j))
+                  {
+                    if (!copied)
+                      {
+                        copied = true;
+                        x = copy_rtx (x);
+                      }
+                    XVECEXP (x, i, j) = op;
+                  }
+              }
+          }
+
+      break;
+    }
+
+  return x;
+}
+
 /* See if X can be simplified knowing that we will only refer to it
    in MODE and will only refer to those bits that are nonzero in MASK.
    If other bits are being computed or if masking operations are done
@@ -7000,16 +7100,13 @@ get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
    Return a possibly simplified expression, but always convert X to
    MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
 
-   Also, if REG is nonzero and X is a register equal in value to REG,
-   replace X with REG.
-
    If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
    are all off in X.  This is used when X will be complemented, by either
    NOT, NEG, or XOR.  */
 
 static rtx
 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
-               rtx reg, int just_select)
+               int just_select)
 {
   enum rtx_code code = GET_CODE (x);
   int next_select = just_select || code == XOR || code == NOT || code == NEG;
@@ -7091,7 +7188,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
          within that boundary, we don't need the USE any more.  */
       if (! BITS_BIG_ENDIAN
           && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
-        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
+        return force_to_mode (XEXP (x, 0), mode, mask, next_select);
       break;
 
     case SIGN_EXTEND:
@@ -7100,13 +7197,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
     case SIGN_EXTRACT:
       x = expand_compound_operation (x);
       if (GET_CODE (x) != code)
-        return force_to_mode (x, mode, mask, reg, next_select);
-      break;
-
-    case REG:
-      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
-                       || rtx_equal_p (reg, get_last_value (x))))
-        x = reg;
+        return force_to_mode (x, mode, mask, next_select);
       break;
 
     case SUBREG:
@@ -7119,7 +7210,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               || (0 == (mask
                         & GET_MODE_MASK (GET_MODE (x))
                         & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
-        return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
+        return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
       break;
 
     case AND:
@@ -7195,7 +7286,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
             return force_to_mode (plus_constant (XEXP (x, 0),
                                                  (INTVAL (XEXP (x, 1)) & smask)),
-                                  mode, smask, reg, next_select);
+                                  mode, smask, next_select);
         }
 
       /* ... fall through ... */
@@ -7217,7 +7308,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
         {
           x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
                                   GET_MODE (x));
-          return force_to_mode (x, mode, mask, reg, next_select);
+          return force_to_mode (x, mode, mask, next_select);
         }
 
       /* Similarly, if C contains every bit in the fuller_mask, then we may
@@ -7228,7 +7319,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
         {
           x = simplify_gen_unary (NOT, GET_MODE (x), XEXP (x, 1),
                                   GET_MODE (x));
-          return force_to_mode (x, mode, mask, reg, next_select);
+          return force_to_mode (x, mode, mask, next_select);
         }
 
       mask = fuller_mask;
@@ -7258,7 +7349,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
                                       XEXP (XEXP (x, 0), 0), temp);
           x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
                                    XEXP (XEXP (x, 0), 1));
-          return force_to_mode (x, mode, mask, reg, next_select);
+          return force_to_mode (x, mode, mask, next_select);
         }
 
     binop:
@@ -7267,10 +7358,10 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 
       op0 = gen_lowpart (op_mode,
                          force_to_mode (XEXP (x, 0), mode, mask,
-                                        reg, next_select));
+                                        next_select));
       op1 = gen_lowpart (op_mode,
                          force_to_mode (XEXP (x, 1), mode, mask,
-                                        reg, next_select));
+                                        next_select));
       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
         x = simplify_gen_binary (code, op_mode, op0, op1);
 
@@ -7304,7 +7395,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 
       op0 = gen_lowpart (op_mode,
                          force_to_mode (XEXP (x, 0), op_mode,
-                                        mask, reg, next_select));
+                                        mask, next_select));
       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
         x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
 
@@ -7331,7 +7422,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
           if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
             op_mode = GET_MODE (x);
 
-          inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
+          inner = force_to_mode (inner, op_mode, inner_mask, next_select);
 
           if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
             x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
@@ -7368,7 +7459,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
       if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
           && (mask == ((unsigned HOST_WIDE_INT) 1
                        << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
-        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
+        return force_to_mode (XEXP (x, 0), mode, mask, next_select);
 
       /* If this is a shift by a constant, get a mask that contains those
          bits that are not copies of the sign bit.  We then have two cases:  If
@@ -7416,7 +7507,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
                                  : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
 
               if (GET_CODE (x) != ASHIFTRT)
-                return force_to_mode (x, mode, mask, reg, next_select);
+                return force_to_mode (x, mode, mask, next_select);
             }
         }
 
@@ -7440,7 +7531,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
           && GET_CODE (XEXP (x, 0)) == ASHIFT
           && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
         return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
-                              reg, next_select);
+                              next_select);
 
       break;
 
@@ -7459,7 +7550,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
           if (temp && GET_CODE (temp) == CONST_INT)
             SUBST (XEXP (x, 0),
                    force_to_mode (XEXP (x, 0), GET_MODE (x),
-                                  INTVAL (temp), reg, next_select));
+                                  INTVAL (temp), next_select));
         }
       break;
 
@@ -7467,7 +7558,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
       /* If we just want the low-order bit, the NEG isn't needed since it
          won't change the low-order bit.  */
       if (mask == 1)
-        return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
+        return force_to_mode (XEXP (x, 0), mode, mask, just_select);
 
       /* We need any bits less significant than the most significant bit in
          MASK since carries from those bits will affect the bits we are
@@ -7494,7 +7585,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 
           x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
                                    XEXP (XEXP (x, 0), 1));
-          return force_to_mode (x, mode, mask, reg, next_select);
+          return force_to_mode (x, mode, mask, next_select);
         }
 
       /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
@@ -7504,7 +7595,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
     unop:
       op0 = gen_lowpart (op_mode,
                          force_to_mode (XEXP (x, 0), mode, mask,
-                                        reg, next_select));
+                                        next_select));
       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
         x = simplify_gen_unary (code, op_mode, op0, op_mode);
       break;
@@ -7518,7 +7609,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
           && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
           && (nonzero_bits (XEXP (x, 0), mode)
               == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
-        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
+        return force_to_mode (XEXP (x, 0), mode, mask, next_select);
 
       break;
 
@@ -7527,13 +7618,11 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
          written in a narrower mode.  We play it safe and do not do so.  */
 
       SUBST (XEXP (x, 1),
-             gen_lowpart (GET_MODE (x),
-                          force_to_mode (XEXP (x, 1), mode,
-                                         mask, reg, next_select)));
+             gen_lowpart (GET_MODE (x), force_to_mode (XEXP (x, 1), mode,
+                                                       mask, next_select)));
       SUBST (XEXP (x, 2),
-             gen_lowpart (GET_MODE (x),
-                          force_to_mode (XEXP (x, 2), mode,
-                                         mask, reg, next_select)));
+             gen_lowpart (GET_MODE (x), force_to_mode (XEXP (x, 2), mode,
+                                                       mask, next_select)));
       break;
 
     default:
@@ -8085,13 +8174,15 @@ make_field_assignment (rtx x)
 
   /* Shift OTHER right POS places and make it the source, restricting it
      to the proper length and mode.  */
-  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
-                                             GET_MODE (src), other, pos),
-                       mode,
+  src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
+                                                     GET_MODE (src),
+                                                     other, pos),
+                               dest);
+  src = force_to_mode (src, mode,
                        GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
                        ? ~(unsigned HOST_WIDE_INT) 0
                        : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
-                       dest, 0);
+                       0);
 
   /* If SRC is masked by an AND that does not make a difference in
      the value being stored, strip it.  */
@@ -8320,7 +8411,7 @@ simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
      CONSTOP are not significant and will never be examined.  We must
      ensure that is the case by explicitly masking out those bits
      before returning.  */
-  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
+  varop = force_to_mode (varop, mode, constop, 0);
 
   /* If VAROP is a CLOBBER, we will fail so return it.  */
   if (GET_CODE (varop) == CLOBBER)
@@ -8374,7 +8465,7 @@ simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
                                                   XEXP (varop, 1),
                                                   constop))));
 
-  /* If VAROP is PLUS, and the constant is a mask of low bite, distribute
+  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
      the AND and see if one of the operands simplifies to zero.  If so, we
      may eliminate it.  */
 
@@ -10095,7 +10186,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
         op0 = force_to_mode (op0, mode,
                              ((HOST_WIDE_INT) 1
                               << (GET_MODE_BITSIZE (mode) - 1)),
-                             NULL_RTX, 0);
+                             0);
 
       /* Now try cases based on the opcode of OP0.  If none of the cases
          does a "continue", we exit this loop immediately after the
-- 
2.7.4
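
Note (illustration only, not part of the patch): the new canon_reg_for_combine above rewrites an RTL expression copy-on-write style. It recurses into the operands, and only when an operand actually changes does it build a fresh node (via simplify_gen_* or a one-time copy_rtx); otherwise the original structure is shared. The standalone C sketch below mirrors that idiom on a toy expression type so the pattern can be compiled and run outside GCC. Everything in it is invented for illustration (node, make_reg, make_binary, canon_node), and the "same register number" test merely stands in for the get_last_value equality check the real function uses.

/* Toy sketch of the copy-on-write substitution pattern used by
   canon_reg_for_combine.  Not GCC code; all names are hypothetical.  */
#include <stdio.h>
#include <stdlib.h>

enum node_kind { NODE_REG, NODE_BINARY };

struct node
{
  enum node_kind kind;
  int reg_no;               /* valid when kind == NODE_REG */
  char op;                  /* valid when kind == NODE_BINARY */
  struct node *lhs, *rhs;   /* valid when kind == NODE_BINARY */
};

static struct node *
make_reg (int reg_no)
{
  struct node *n = malloc (sizeof *n);
  n->kind = NODE_REG;
  n->reg_no = reg_no;
  n->lhs = n->rhs = NULL;
  return n;
}

static struct node *
make_binary (char op, struct node *lhs, struct node *rhs)
{
  struct node *n = malloc (sizeof *n);
  n->kind = NODE_BINARY;
  n->op = op;
  n->lhs = lhs;
  n->rhs = rhs;
  return n;
}

/* Replace every register node holding the same value as CANON (here:
   the same register number) with the CANON node itself.  A parent is
   rebuilt only if one of its operands actually changed; unchanged
   subtrees are shared, mirroring the copy-on-first-change logic of
   canon_reg_for_combine.  */
static struct node *
canon_node (struct node *x, struct node *canon)
{
  switch (x->kind)
    {
    case NODE_REG:
      return x->reg_no == canon->reg_no ? canon : x;

    case NODE_BINARY:
      {
        struct node *lhs = canon_node (x->lhs, canon);
        struct node *rhs = canon_node (x->rhs, canon);
        if (lhs != x->lhs || rhs != x->rhs)
          return make_binary (x->op, lhs, rhs);  /* rebuild on change */
        return x;                                /* share unchanged node */
      }
    }
  return x;
}

int
main (void)
{
  struct node *r1 = make_reg (1);       /* the canonical register */
  struct node *r1_dup = make_reg (1);   /* distinct node, same value */
  struct node *expr = make_binary ('+', r1_dup, make_reg (2));

  struct node *res = canon_node (expr, r1);
  printf ("rebuilt: %s, lhs now canonical: %s\n",
          res != expr ? "yes" : "no",
          res->lhs == r1 ? "yes" : "no");
  return 0;
}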