X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=gcc%2Frtlanal.c;h=3ffd063263893f36df1ae82ae12ca4f373d96f10;hb=87b3f81fc94c4a40ddc8ef8107757696fd2accad;hp=4cdcb63037008439889ca9d4b91fb7aaeb3ee512;hpb=4f1605d2ca37c757a35b3157a55f7b9c5615ae31;p=platform%2Fupstream%2Fgcc.git diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c index 4cdcb63..3ffd063 100644 --- a/gcc/rtlanal.c +++ b/gcc/rtlanal.c @@ -33,7 +33,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "output.h" #include "tm_p.h" #include "flags.h" -#include "basic-block.h" #include "real.h" #include "regs.h" #include "function.h" @@ -41,12 +40,11 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA /* Forward declarations */ static int global_reg_mentioned_p_1 (rtx *, void *); static void set_of_1 (rtx, rtx, void *); -static void insn_dependent_p_1 (rtx, rtx, void *); +static bool covers_regno_p (rtx, unsigned int); +static bool covers_regno_no_parallel_p (rtx, unsigned int); static int rtx_referenced_p_1 (rtx *, void *); static int computed_jump_p_1 (rtx); static void parms_set (rtx, rtx, void *); -static bool hoist_test_store (rtx, rtx, regset); -static void hoist_update_store (rtx, rtx *, rtx, rtx); static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode, rtx, enum machine_mode, @@ -81,7 +79,7 @@ rtx_unstable_p (rtx x) switch (code) { case MEM: - return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0)); + return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0)); case CONST: case CONST_INT: @@ -95,8 +93,7 @@ rtx_unstable_p (rtx x) /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx /* The arg pointer varies if it is not a fixed register. */ - || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) - || RTX_UNCHANGING_P (x)) + || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) return 0; #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED /* ??? When call-clobbered, the value is stable modulo the restore @@ -156,7 +153,7 @@ rtx_varies_p (rtx x, int for_alias) switch (code) { case MEM: - return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); + return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); case CONST: case CONST_INT: @@ -436,138 +433,6 @@ get_related_value (rtx x) return 0; } -/* Given a tablejump insn INSN, return the RTL expression for the offset - into the jump table. If the offset cannot be determined, then return - NULL_RTX. - - If EARLIEST is nonzero, it is a pointer to a place where the earliest - insn used in locating the offset was found. */ - -rtx -get_jump_table_offset (rtx insn, rtx *earliest) -{ - rtx label = NULL; - rtx table = NULL; - rtx set; - rtx old_insn; - rtx x; - rtx old_x; - rtx y; - rtx old_y; - int i; - - if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn))) - return NULL_RTX; - - x = SET_SRC (set); - - /* Some targets (eg, ARM) emit a tablejump that also - contains the out-of-range target. */ - if (GET_CODE (x) == IF_THEN_ELSE - && GET_CODE (XEXP (x, 2)) == LABEL_REF) - x = XEXP (x, 1); - - /* Search backwards and locate the expression stored in X. */ - for (old_x = NULL_RTX; REG_P (x) && x != old_x; - old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) - ; - - /* If X is an expression using a relative address then strip - off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM, - or the jump table label. 
*/ - if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC - && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)) - { - for (i = 0; i < 2; i++) - { - old_insn = insn; - y = XEXP (x, i); - - if (y == pc_rtx || y == pic_offset_table_rtx) - break; - - for (old_y = NULL_RTX; REG_P (y) && y != old_y; - old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0)) - ; - - if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label)) - break; - } - - if (i >= 2) - return NULL_RTX; - - x = XEXP (x, 1 - i); - - for (old_x = NULL_RTX; REG_P (x) && x != old_x; - old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) - ; - } - - /* Strip off any sign or zero extension. */ - if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND) - { - x = XEXP (x, 0); - - for (old_x = NULL_RTX; REG_P (x) && x != old_x; - old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) - ; - } - - /* If X isn't a MEM then this isn't a tablejump we understand. */ - if (!MEM_P (x)) - return NULL_RTX; - - /* Strip off the MEM. */ - x = XEXP (x, 0); - - for (old_x = NULL_RTX; REG_P (x) && x != old_x; - old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0)) - ; - - /* If X isn't a PLUS than this isn't a tablejump we understand. */ - if (GET_CODE (x) != PLUS) - return NULL_RTX; - - /* At this point we should have an expression representing the jump table - plus an offset. Examine each operand in order to determine which one - represents the jump table. Knowing that tells us that the other operand - must represent the offset. */ - for (i = 0; i < 2; i++) - { - old_insn = insn; - y = XEXP (x, i); - - for (old_y = NULL_RTX; REG_P (y) && y != old_y; - old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0)) - ; - - if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF) - && reg_mentioned_p (label, y)) - break; - } - - if (i >= 2) - return NULL_RTX; - - x = XEXP (x, 1 - i); - - /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */ - if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS) - for (i = 0; i < 2; i++) - if (XEXP (x, i) == pic_offset_table_rtx) - { - x = XEXP (x, 1 - i); - break; - } - - if (earliest) - *earliest = insn; - - /* Return the RTL expression representing the offset. */ - return x; -} - /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions a global register. */ @@ -783,19 +648,6 @@ no_labels_between_p (rtx beg, rtx end) return 1; } -/* Return 1 if in between BEG and END, exclusive of BEG and END, there is - no JUMP_INSN insn. */ - -int -no_jumps_between_p (rtx beg, rtx end) -{ - rtx p; - for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) - if (JUMP_P (p)) - return 0; - return 1; -} - /* Nonzero if register REG is used in an insn between FROM_INSN and TO_INSN (exclusive of those two). */ @@ -893,27 +745,6 @@ reg_referenced_p (rtx x, rtx body) return 0; } } - -/* Nonzero if register REG is referenced in an insn between - FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do - not count. */ - -int -reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn) -{ - rtx insn; - - if (from_insn == to_insn) - return 0; - - for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) - if (INSN_P (insn) - && (reg_referenced_p (reg, PATTERN (insn)) - || (CALL_P (insn) - && find_reg_fusage (insn, USE, reg)))) - return 1; - return 0; -} /* Nonzero if register REG is set or clobbered in an insn between FROM_INSN and TO_INSN (exclusive of those two). 
*/ @@ -1027,7 +858,7 @@ modified_between_p (rtx x, rtx start, rtx end) return 1; case MEM: - if (RTX_UNCHANGING_P (x)) + if (MEM_READONLY_P (x)) return 0; if (modified_between_p (XEXP (x, 0), start, end)) return 1; @@ -1085,7 +916,7 @@ modified_in_p (rtx x, rtx insn) return 1; case MEM: - if (RTX_UNCHANGING_P (x)) + if (MEM_READONLY_P (x)) return 0; if (modified_in_p (XEXP (x, 0), insn)) return 1; @@ -1115,41 +946,6 @@ modified_in_p (rtx x, rtx insn) return 0; } - -/* Return true if anything in insn X is (anti,output,true) dependent on - anything in insn Y. */ - -int -insn_dependent_p (rtx x, rtx y) -{ - rtx tmp; - - if (! INSN_P (x) || ! INSN_P (y)) - abort (); - - tmp = PATTERN (y); - note_stores (PATTERN (x), insn_dependent_p_1, &tmp); - if (tmp == NULL_RTX) - return 1; - - tmp = PATTERN (x); - note_stores (PATTERN (y), insn_dependent_p_1, &tmp); - if (tmp == NULL_RTX) - return 1; - - return 0; -} - -/* A helper routine for insn_dependent_p called through note_stores. */ - -static void -insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data) -{ - rtx * pinsn = (rtx *) data; - - if (*pinsn && reg_mentioned_p (x, *pinsn)) - *pinsn = NULL_RTX; -} /* Helper function for set_of. */ struct set_of_data @@ -1579,11 +1375,7 @@ reg_overlap_mentioned_p (rtx x, rtx in) } default: -#ifdef ENABLE_CHECKING - if (!CONSTANT_P (x)) - abort (); -#endif - + gcc_assert (CONSTANT_P (x)); return 0; } } @@ -1745,8 +1537,7 @@ dead_or_set_p (rtx insn, rtx x) if (GET_CODE (x) == CC0) return 1; - if (!REG_P (x)) - abort (); + gcc_assert (REG_P (x)); regno = REGNO (x); last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno @@ -1759,13 +1550,64 @@ dead_or_set_p (rtx insn, rtx x) return 1; } +/* Return TRUE iff DEST is a register or subreg of a register and + doesn't change the number of words of the inner register, and any + part of the register is TEST_REGNO. */ + +static bool +covers_regno_no_parallel_p (rtx dest, unsigned int test_regno) +{ + unsigned int regno, endregno; + + if (GET_CODE (dest) == SUBREG + && (((GET_MODE_SIZE (GET_MODE (dest)) + + UNITS_PER_WORD - 1) / UNITS_PER_WORD) + == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) + + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) + dest = SUBREG_REG (dest); + + if (!REG_P (dest)) + return false; + + regno = REGNO (dest); + endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1 + : regno + hard_regno_nregs[regno][GET_MODE (dest)]); + return (test_regno >= regno && test_regno < endregno); +} + +/* Like covers_regno_no_parallel_p, but also handles PARALLELs where + any member matches the covers_regno_no_parallel_p criteria. */ + +static bool +covers_regno_p (rtx dest, unsigned int test_regno) +{ + if (GET_CODE (dest) == PARALLEL) + { + /* Some targets place small structures in registers for return + values of functions, and those registers are wrapped in + PARALLELs that we may see as the destination of a SET. */ + int i; + + for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) + { + rtx inner = XEXP (XVECEXP (dest, 0, i), 0); + if (inner != NULL_RTX + && covers_regno_no_parallel_p (inner, test_regno)) + return true; + } + + return false; + } + else + return covers_regno_no_parallel_p (dest, test_regno); +} + /* Utility function for dead_or_set_p to check an individual register. Also called from flow.c. */ int dead_or_set_regno_p (rtx insn, unsigned int test_regno) { - unsigned int regno, endregno; rtx pattern; /* See if there is a death note for something that includes TEST_REGNO. 
*/ @@ -1782,28 +1624,7 @@ dead_or_set_regno_p (rtx insn, unsigned int test_regno) pattern = COND_EXEC_CODE (pattern); if (GET_CODE (pattern) == SET) - { - rtx dest = SET_DEST (pattern); - - /* A value is totally replaced if it is the destination or the - destination is a SUBREG of REGNO that does not change the number of - words in it. */ - if (GET_CODE (dest) == SUBREG - && (((GET_MODE_SIZE (GET_MODE (dest)) - + UNITS_PER_WORD - 1) / UNITS_PER_WORD) - == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) - dest = SUBREG_REG (dest); - - if (!REG_P (dest)) - return 0; - - regno = REGNO (dest); - endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1 - : regno + hard_regno_nregs[regno][GET_MODE (dest)]); - - return (test_regno >= regno && test_regno < endregno); - } + return covers_regno_p (SET_DEST (pattern), test_regno); else if (GET_CODE (pattern) == PARALLEL) { int i; @@ -1815,27 +1636,9 @@ dead_or_set_regno_p (rtx insn, unsigned int test_regno) if (GET_CODE (body) == COND_EXEC) body = COND_EXEC_CODE (body); - if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) - { - rtx dest = SET_DEST (body); - - if (GET_CODE (dest) == SUBREG - && (((GET_MODE_SIZE (GET_MODE (dest)) - + UNITS_PER_WORD - 1) / UNITS_PER_WORD) - == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) - dest = SUBREG_REG (dest); - - if (!REG_P (dest)) - continue; - - regno = REGNO (dest); - endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1 - : regno + hard_regno_nregs[regno][GET_MODE (dest)]); - - if (test_regno >= regno && test_regno < endregno) - return 1; - } + if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) + && covers_regno_p (SET_DEST (body), test_regno)) + return 1; } } @@ -1928,8 +1731,7 @@ find_reg_fusage (rtx insn, enum rtx_code code, rtx datum) if (!CALL_P (insn)) return 0; - if (! datum) - abort (); + gcc_assert (datum); if (!REG_P (datum)) { @@ -2041,7 +1843,7 @@ remove_note (rtx insn, rtx note) return; } - abort (); + gcc_unreachable (); } /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and @@ -2521,8 +2323,7 @@ replace_rtx (rtx x, rtx from, rtx to) x = simplify_subreg (GET_MODE (x), new, GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); - if (! x) - abort (); + gcc_assert (x); } else SUBREG_REG (x) = new; @@ -2537,8 +2338,7 @@ replace_rtx (rtx x, rtx from, rtx to) { x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), new, GET_MODE (XEXP (x, 0))); - if (! x) - abort (); + gcc_assert (x); } else XEXP (x, 0) = new; @@ -2989,6 +2789,7 @@ commutative_operand_precedence (rtx op) if (code == CONST_DOUBLE) return -6; op = avoid_constant_pool_reference (op); + code = GET_CODE (op); switch (GET_RTX_CLASS (code)) { @@ -3155,7 +2956,7 @@ loc_mentioned_in_p (rtx *loc, rtx in) for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { - if (loc == &in->u.fld[i].rtx) + if (loc == &in->u.fld[i].rt_rtx) return 1; if (fmt[i] == 'e') { @@ -3190,11 +2991,10 @@ subreg_lsb_1 (enum machine_mode outer_mode, if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) /* If the subreg crosses a word boundary ensure that it also begins and ends on a word boundary. 
*/ - if ((subreg_byte % UNITS_PER_WORD - + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD - && (subreg_byte % UNITS_PER_WORD - || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)) - abort (); + gcc_assert (!((subreg_byte % UNITS_PER_WORD + + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD + && (subreg_byte % UNITS_PER_WORD + || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))); if (WORDS_BIG_ENDIAN) word = (GET_MODE_SIZE (inner_mode) @@ -3237,8 +3037,7 @@ subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, int mode_multiple, nregs_multiple; int y_offset; - if (xregno >= FIRST_PSEUDO_REGISTER) - abort (); + gcc_assert (xregno < FIRST_PSEUDO_REGISTER); nregs_xmode = hard_regno_nregs[xregno][xmode]; nregs_ymode = hard_regno_nregs[xregno][ymode]; @@ -3257,8 +3056,7 @@ subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, /* size of ymode must not be greater than the size of xmode. */ mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); - if (mode_multiple == 0) - abort (); + gcc_assert (mode_multiple != 0); y_offset = offset / GET_MODE_SIZE (ymode); nregs_multiple = nregs_xmode / nregs_ymode; @@ -3280,8 +3078,7 @@ subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, int mode_multiple, nregs_multiple; int y_offset; - if (xregno >= FIRST_PSEUDO_REGISTER) - abort (); + gcc_assert (xregno < FIRST_PSEUDO_REGISTER); nregs_xmode = hard_regno_nregs[xregno][xmode]; nregs_ymode = hard_regno_nregs[xregno][ymode]; @@ -3297,15 +3094,12 @@ subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, if (offset == subreg_lowpart_offset (ymode, xmode)) return true; -#ifdef ENABLE_CHECKING /* This should always pass, otherwise we don't know how to verify the constraint. These conditions may be relaxed but subreg_offset would need to be redesigned. */ - if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode) - || GET_MODE_SIZE (ymode) % nregs_ymode - || nregs_xmode % nregs_ymode) - abort (); -#endif + gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0); + gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0); + gcc_assert ((nregs_xmode % nregs_ymode) == 0); /* The XMODE value can be seen as a vector of NREGS_XMODE values. The subreg must represent a lowpart of given field. @@ -3317,16 +3111,14 @@ subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, /* size of ymode must not be greater than the size of xmode. */ mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); - if (mode_multiple == 0) - abort (); + gcc_assert (mode_multiple != 0); y_offset = offset / GET_MODE_SIZE (ymode); nregs_multiple = nregs_xmode / nregs_ymode; -#ifdef ENABLE_CHECKING - if (offset % GET_MODE_SIZE (ymode) - || mode_multiple % nregs_multiple) - abort (); -#endif + + gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0); + gcc_assert ((mode_multiple % nregs_multiple) == 0); + return (!(y_offset % (mode_multiple / nregs_multiple))); } @@ -3381,8 +3173,7 @@ find_first_parameter_load (rtx call_insn, rtx boundary) if (GET_CODE (XEXP (p, 0)) == USE && REG_P (XEXP (XEXP (p, 0), 0))) { - if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER) - abort (); + gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER); /* We only care about registers which can hold function arguments. */ @@ -3410,8 +3201,7 @@ find_first_parameter_load (rtx call_insn, rtx boundary) CODE_LABEL. 
*/ if (LABEL_P (before)) { - if (before != boundary) - abort (); + gcc_assert (before == boundary); break; } @@ -3454,261 +3244,6 @@ keep_with_call_p (rtx insn) return false; } -/* Return true when store to register X can be hoisted to the place - with LIVE registers (can be NULL). Value VAL contains destination - whose value will be used. */ - -static bool -hoist_test_store (rtx x, rtx val, regset live) -{ - if (GET_CODE (x) == SCRATCH) - return true; - - if (rtx_equal_p (x, val)) - return true; - - /* Allow subreg of X in case it is not writing just part of multireg pseudo. - Then we would need to update all users to care hoisting the store too. - Caller may represent that by specifying whole subreg as val. */ - - if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val)) - { - if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD - && GET_MODE_BITSIZE (GET_MODE (x)) < - GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) - return false; - return true; - } - if (GET_CODE (x) == SUBREG) - x = SUBREG_REG (x); - - /* Anything except register store is not hoistable. This includes the - partial stores to registers. */ - - if (!REG_P (x)) - return false; - - /* Pseudo registers can be always replaced by another pseudo to avoid - the side effect, for hard register we must ensure that they are dead. - Eventually we may want to add code to try turn pseudos to hards, but it - is unlikely useful. */ - - if (REGNO (x) < FIRST_PSEUDO_REGISTER) - { - int regno = REGNO (x); - int n = hard_regno_nregs[regno][GET_MODE (x)]; - - if (!live) - return false; - if (REGNO_REG_SET_P (live, regno)) - return false; - while (--n > 0) - if (REGNO_REG_SET_P (live, regno + n)) - return false; - } - return true; -} - - -/* Return true if INSN can be hoisted to place with LIVE hard registers - (LIVE can be NULL when unknown). VAL is expected to be stored by the insn - and used by the hoisting pass. */ - -bool -can_hoist_insn_p (rtx insn, rtx val, regset live) -{ - rtx pat = PATTERN (insn); - int i; - - /* It probably does not worth the complexity to handle multiple - set stores. */ - if (!single_set (insn)) - return false; - /* We can move CALL_INSN, but we need to check that all caller clobbered - regs are dead. */ - if (CALL_P (insn)) - return false; - /* In future we will handle hoisting of libcall sequences, but - give up for now. */ - if (find_reg_note (insn, REG_RETVAL, NULL_RTX)) - return false; - switch (GET_CODE (pat)) - { - case SET: - if (!hoist_test_store (SET_DEST (pat), val, live)) - return false; - break; - case USE: - /* USES do have sick semantics, so do not move them. */ - return false; - break; - case CLOBBER: - if (!hoist_test_store (XEXP (pat, 0), val, live)) - return false; - break; - case PARALLEL: - for (i = 0; i < XVECLEN (pat, 0); i++) - { - rtx x = XVECEXP (pat, 0, i); - switch (GET_CODE (x)) - { - case SET: - if (!hoist_test_store (SET_DEST (x), val, live)) - return false; - break; - case USE: - /* We need to fix callers to really ensure availability - of all values insn uses, but for now it is safe to prohibit - hoisting of any insn having such a hidden uses. */ - return false; - break; - case CLOBBER: - if (!hoist_test_store (SET_DEST (x), val, live)) - return false; - break; - default: - break; - } - } - break; - default: - abort (); - } - return true; -} - -/* Update store after hoisting - replace all stores to pseudo registers - by new ones to avoid clobbering of values except for store to VAL that will - be updated to NEW. 
*/ - -static void -hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new) -{ - rtx x = *xp; - - if (GET_CODE (x) == SCRATCH) - return; - - if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val) - validate_change (insn, xp, - simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new), - SUBREG_BYTE (x)), 1); - if (rtx_equal_p (x, val)) - { - validate_change (insn, xp, new, 1); - return; - } - if (GET_CODE (x) == SUBREG) - { - xp = &SUBREG_REG (x); - x = *xp; - } - - if (!REG_P (x)) - abort (); - - /* We've verified that hard registers are dead, so we may keep the side - effect. Otherwise replace it by new pseudo. */ - if (REGNO (x) >= FIRST_PSEUDO_REGISTER) - validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1); - REG_NOTES (insn) - = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn)); -} - -/* Create a copy of INSN after AFTER replacing store of VAL to NEW - and each other side effect to pseudo register by new pseudo register. */ - -rtx -hoist_insn_after (rtx insn, rtx after, rtx val, rtx new) -{ - rtx pat; - int i; - rtx note; - - insn = emit_copy_of_insn_after (insn, after); - pat = PATTERN (insn); - - /* Remove REG_UNUSED notes as we will re-emit them. */ - while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX))) - remove_note (insn, note); - - /* To get this working callers must ensure to move everything referenced - by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably - easier. */ - while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))) - remove_note (insn, note); - while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX))) - remove_note (insn, note); - - /* Remove REG_DEAD notes as they might not be valid anymore in case - we create redundancy. */ - while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX))) - remove_note (insn, note); - switch (GET_CODE (pat)) - { - case SET: - hoist_update_store (insn, &SET_DEST (pat), val, new); - break; - case USE: - break; - case CLOBBER: - hoist_update_store (insn, &XEXP (pat, 0), val, new); - break; - case PARALLEL: - for (i = 0; i < XVECLEN (pat, 0); i++) - { - rtx x = XVECEXP (pat, 0, i); - switch (GET_CODE (x)) - { - case SET: - hoist_update_store (insn, &SET_DEST (x), val, new); - break; - case USE: - break; - case CLOBBER: - hoist_update_store (insn, &SET_DEST (x), val, new); - break; - default: - break; - } - } - break; - default: - abort (); - } - if (!apply_change_group ()) - abort (); - - return insn; -} - -rtx -hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new) -{ - rtx new_insn; - - /* We cannot insert instructions on an abnormal critical edge. - It will be easier to find the culprit if we die now. */ - if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)) - abort (); - - /* Do not use emit_insn_on_edge as we want to preserve notes and similar - stuff. We also emit CALL_INSNS and firends. */ - if (e->insns.r == NULL_RTX) - { - start_sequence (); - emit_note (NOTE_INSN_DELETED); - } - else - push_to_sequence (e->insns.r); - - new_insn = hoist_insn_after (insn, get_last_insn (), val, new); - - e->insns.r = get_insns (); - end_sequence (); - return new_insn; -} - /* Return true if LABEL is a target of JUMP_INSN. This applies only to non-complex jumps. That is, direct unconditional, conditional, and tablejumps, but not computed jumps or returns. 
It also does @@ -4179,7 +3714,7 @@ nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x, result_low = MIN (low0, low1); break; default: - abort (); + gcc_unreachable (); } if (result_width < mode_width) @@ -4811,3 +4346,315 @@ insn_rtx_cost (rtx pat) cost = rtx_cost (SET_SRC (set), SET); return cost > 0 ? cost : COSTS_N_INSNS (1); } + +/* Given an insn INSN and condition COND, return the condition in a + canonical form to simplify testing by callers. Specifically: + + (1) The code will always be a comparison operation (EQ, NE, GT, etc.). + (2) Both operands will be machine operands; (cc0) will have been replaced. + (3) If an operand is a constant, it will be the second operand. + (4) (LE x const) will be replaced with (LT x ) and similarly + for GE, GEU, and LEU. + + If the condition cannot be understood, or is an inequality floating-point + comparison which needs to be reversed, 0 will be returned. + + If REVERSE is nonzero, then reverse the condition prior to canonizing it. + + If EARLIEST is nonzero, it is a pointer to a place where the earliest + insn used in locating the condition was found. If a replacement test + of the condition is desired, it should be placed in front of that + insn and we will be sure that the inputs are still valid. + + If WANT_REG is nonzero, we wish the condition to be relative to that + register, if possible. Therefore, do not canonicalize the condition + further. If ALLOW_CC_MODE is nonzero, allow the condition returned + to be a compare to a CC mode register. + + If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST + and at INSN. */ + +rtx +canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, + rtx want_reg, int allow_cc_mode, int valid_at_insn_p) +{ + enum rtx_code code; + rtx prev = insn; + rtx set; + rtx tem; + rtx op0, op1; + int reverse_code = 0; + enum machine_mode mode; + + code = GET_CODE (cond); + mode = GET_MODE (cond); + op0 = XEXP (cond, 0); + op1 = XEXP (cond, 1); + + if (reverse) + code = reversed_comparison_code (cond, insn); + if (code == UNKNOWN) + return 0; + + if (earliest) + *earliest = insn; + + /* If we are comparing a register with zero, see if the register is set + in the previous insn to a COMPARE or a comparison operation. Perform + the same tests as a function of STORE_FLAG_VALUE as find_comparison_args + in cse.c */ + + while ((GET_RTX_CLASS (code) == RTX_COMPARE + || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) + && op1 == CONST0_RTX (GET_MODE (op0)) + && op0 != want_reg) + { + /* Set nonzero when we find something of interest. */ + rtx x = 0; + +#ifdef HAVE_cc0 + /* If comparison with cc0, import actual comparison from compare + insn. */ + if (op0 == cc0_rtx) + { + if ((prev = prev_nonnote_insn (prev)) == 0 + || !NONJUMP_INSN_P (prev) + || (set = single_set (prev)) == 0 + || SET_DEST (set) != cc0_rtx) + return 0; + + op0 = SET_SRC (set); + op1 = CONST0_RTX (GET_MODE (op0)); + if (earliest) + *earliest = prev; + } +#endif + + /* If this is a COMPARE, pick up the two things being compared. */ + if (GET_CODE (op0) == COMPARE) + { + op1 = XEXP (op0, 1); + op0 = XEXP (op0, 0); + continue; + } + else if (!REG_P (op0)) + break; + + /* Go back to the previous insn. Stop if it is not an INSN. We also + stop if it isn't a single set or if it has a REG_INC note because + we don't want to bother dealing with it. 
*/ + + if ((prev = prev_nonnote_insn (prev)) == 0 + || !NONJUMP_INSN_P (prev) + || FIND_REG_INC_NOTE (prev, NULL_RTX)) + break; + + set = set_of (op0, prev); + + if (set + && (GET_CODE (set) != SET + || !rtx_equal_p (SET_DEST (set), op0))) + break; + + /* If this is setting OP0, get what it sets it to if it looks + relevant. */ + if (set) + { + enum machine_mode inner_mode = GET_MODE (SET_DEST (set)); +#ifdef FLOAT_STORE_FLAG_VALUE + REAL_VALUE_TYPE fsfv; +#endif + + /* ??? We may not combine comparisons done in a CCmode with + comparisons not done in a CCmode. This is to aid targets + like Alpha that have an IEEE compliant EQ instruction, and + a non-IEEE compliant BEQ instruction. The use of CCmode is + actually artificial, simply to prevent the combination, but + should not affect other platforms. + + However, we must allow VOIDmode comparisons to match either + CCmode or non-CCmode comparison, because some ports have + modeless comparisons inside branch patterns. + + ??? This mode check should perhaps look more like the mode check + in simplify_comparison in combine. */ + + if ((GET_CODE (SET_SRC (set)) == COMPARE + || (((code == NE + || (code == LT + && GET_MODE_CLASS (inner_mode) == MODE_INT + && (GET_MODE_BITSIZE (inner_mode) + <= HOST_BITS_PER_WIDE_INT) + && (STORE_FLAG_VALUE + & ((HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (inner_mode) - 1)))) +#ifdef FLOAT_STORE_FLAG_VALUE + || (code == LT + && GET_MODE_CLASS (inner_mode) == MODE_FLOAT + && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), + REAL_VALUE_NEGATIVE (fsfv))) +#endif + )) + && COMPARISON_P (SET_SRC (set)))) + && (((GET_MODE_CLASS (mode) == MODE_CC) + == (GET_MODE_CLASS (inner_mode) == MODE_CC)) + || mode == VOIDmode || inner_mode == VOIDmode)) + x = SET_SRC (set); + else if (((code == EQ + || (code == GE + && (GET_MODE_BITSIZE (inner_mode) + <= HOST_BITS_PER_WIDE_INT) + && GET_MODE_CLASS (inner_mode) == MODE_INT + && (STORE_FLAG_VALUE + & ((HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (inner_mode) - 1)))) +#ifdef FLOAT_STORE_FLAG_VALUE + || (code == GE + && GET_MODE_CLASS (inner_mode) == MODE_FLOAT + && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), + REAL_VALUE_NEGATIVE (fsfv))) +#endif + )) + && COMPARISON_P (SET_SRC (set)) + && (((GET_MODE_CLASS (mode) == MODE_CC) + == (GET_MODE_CLASS (inner_mode) == MODE_CC)) + || mode == VOIDmode || inner_mode == VOIDmode)) + + { + reverse_code = 1; + x = SET_SRC (set); + } + else + break; + } + + else if (reg_set_p (op0, prev)) + /* If this sets OP0, but not directly, we have to give up. */ + break; + + if (x) + { + /* If the caller is expecting the condition to be valid at INSN, + make sure X doesn't change before INSN. */ + if (valid_at_insn_p) + if (modified_in_p (x, prev) || modified_between_p (x, prev, insn)) + break; + if (COMPARISON_P (x)) + code = GET_CODE (x); + if (reverse_code) + { + code = reversed_comparison_code (x, prev); + if (code == UNKNOWN) + return 0; + reverse_code = 0; + } + + op0 = XEXP (x, 0), op1 = XEXP (x, 1); + if (earliest) + *earliest = prev; + } + } + + /* If constant is first, put it last. */ + if (CONSTANT_P (op0)) + code = swap_condition (code), tem = op0, op0 = op1, op1 = tem; + + /* If OP0 is the result of a comparison, we weren't able to find what + was really being compared, so fail. */ + if (!allow_cc_mode + && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) + return 0; + + /* Canonicalize any ordered comparison with integers involving equality + if we can do computations in the relevant mode and we do not + overflow. 
*/ + + if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC + && GET_CODE (op1) == CONST_INT + && GET_MODE (op0) != VOIDmode + && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) + { + HOST_WIDE_INT const_val = INTVAL (op1); + unsigned HOST_WIDE_INT uconst_val = const_val; + unsigned HOST_WIDE_INT max_val + = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0)); + + switch (code) + { + case LE: + if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1) + code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0)); + break; + + /* When cross-compiling, const_val might be sign-extended from + BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */ + case GE: + if ((HOST_WIDE_INT) (const_val & max_val) + != (((HOST_WIDE_INT) 1 + << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) + code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); + break; + + case LEU: + if (uconst_val < max_val) + code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0)); + break; + + case GEU: + if (uconst_val != 0) + code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0)); + break; + + default: + break; + } + } + + /* Never return CC0; return zero instead. */ + if (CC0_P (op0)) + return 0; + + return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); +} + +/* Given a jump insn JUMP, return the condition that will cause it to branch + to its JUMP_LABEL. If the condition cannot be understood, or is an + inequality floating-point comparison which needs to be reversed, 0 will + be returned. + + If EARLIEST is nonzero, it is a pointer to a place where the earliest + insn used in locating the condition was found. If a replacement test + of the condition is desired, it should be placed in front of that + insn and we will be sure that the inputs are still valid. If EARLIEST + is null, the returned condition will be valid at INSN. + + If ALLOW_CC_MODE is nonzero, allow the condition returned to be a + compare CC mode register. + + VALID_AT_INSN_P is the same as for canonicalize_condition. */ + +rtx +get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p) +{ + rtx cond; + int reverse; + rtx set; + + /* If this is not a standard conditional jump, we can't parse it. */ + if (!JUMP_P (jump) + || ! any_condjump_p (jump)) + return 0; + set = pc_set (jump); + + cond = XEXP (SET_SRC (set), 0); + + /* If this branches to JUMP_LABEL when the condition is false, reverse + the condition. */ + reverse + = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF + && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump); + + return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX, + allow_cc_mode, valid_at_insn_p); +} +
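
The LE/GE/LEU/GEU handling near the end of canonicalize_condition above rewrites ordered comparisons against integer constants into their strict forms, so callers only ever see LT/GT/LTU/GTU with an adjusted constant (rule (4) in the function's comment). The standalone sketch below illustrates that adjustment; it is plain C rather than GCC internals, the 8-bit mode mask (0xff), the cmp_code enum and the code_name table are illustrative assumptions, and the boundary tests are simplified renderings of the INTVAL/GET_MODE_MASK checks in the patch.

/* Standalone illustration (not GCC code): mimic the constant
   adjustment canonicalize_condition applies to ordered integer
   comparisons, assuming a hypothetical 8-bit mode whose value
   mask is 0xff.  */
#include <stdio.h>

enum cmp_code { LE, LT, GE, GT, LEU, LTU, GEU, GTU };

static const char *const code_name[] =
  { "LE", "LT", "GE", "GT", "LEU", "LTU", "GEU", "GTU" };

int
main (void)
{
  unsigned long mode_mask = 0xff;   /* stand-in for GET_MODE_MASK */
  enum cmp_code code = LE;          /* original comparison: (LE x 4) */
  long val = 4;

  switch (code)
    {
    case LE:
      /* (LE x C) -> (LT x C+1), unless C is already the signed maximum.  */
      if ((unsigned long) val != mode_mask >> 1)
        code = LT, val = val + 1;
      break;

    case GE:
      /* (GE x C) -> (GT x C-1), unless C is the signed minimum.  */
      if ((val & mode_mask) != (mode_mask >> 1) + 1)
        code = GT, val = val - 1;
      break;

    case LEU:
      /* (LEU x C) -> (LTU x C+1), unless C is the unsigned maximum.  */
      if ((unsigned long) val < mode_mask)
        code = LTU, val = val + 1;
      break;

    case GEU:
      /* (GEU x C) -> (GTU x C-1), unless C is zero.  */
      if (val != 0)
        code = GTU, val = val - 1;
      break;

    default:
      break;
    }

  printf ("(%s x %ld)\n", code_name[code], val);   /* prints "(LT x 5)" */
  return 0;
}

Compiled and run as written, the sketch turns the input (LE x 4) into (LT x 5), which is the same strict-form result canonicalize_condition would hand back to callers such as get_condition.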