/* Forward propagation of expressions for single use variables.
- Copyright (C) 2004-2014 Free Software Foundation, Inc.
+ Copyright (C) 2004-2015 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "rtl.h"
#include "tree.h"
-#include "stor-layout.h"
+#include "gimple.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
#include "tm_p.h"
-#include "predict.h"
-#include "vec.h"
-#include "hashtab.h"
-#include "hash-set.h"
-#include "machmode.h"
-#include "hard-reg-set.h"
-#include "input.h"
-#include "function.h"
-#include "dominance.h"
-#include "cfg.h"
-#include "basic-block.h"
+#include "ssa.h"
+#include "expmed.h"
+#include "optabs-query.h"
+#include "insn-config.h"
+#include "emit-rtl.h"
#include "gimple-pretty-print.h"
-#include "tree-ssa-alias.h"
+#include "diagnostic.h"
+#include "alias.h"
+#include "fold-const.h"
+#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
-#include "gimple-ssa.h"
#include "tree-cfg.h"
-#include "tree-phinodes.h"
-#include "ssa-iterators.h"
-#include "stringpool.h"
-#include "tree-ssanames.h"
+#include "flags.h"
+#include "dojump.h"
+#include "explow.h"
+#include "calls.h"
+#include "varasm.h"
+#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
-#include "tree-pass.h"
#include "langhooks.h"
-#include "flags.h"
-#include "diagnostic.h"
-#include "expr.h"
#include "cfgloop.h"
-#include "insn-codes.h"
-#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
/* Set to true if we delete dead edges during the optimization. */
static bool cfg_changed;
-static tree rhs_to_tree (tree type, gimple stmt);
+static tree rhs_to_tree (tree type, gimple *stmt);
static bitmap to_purge;
}
-/* Get the next statement we can propagate NAME's value into skipping
- trivial copies. Returns the statement that is suitable as a
- propagation destination or NULL_TREE if there is no such one.
- This only returns destinations in a single-use chain. FINAL_NAME_P
- if non-NULL is written to the ssa name that represents the use. */
-
-static gimple
-get_prop_dest_stmt (tree name, tree *final_name_p)
-{
- use_operand_p use;
- gimple use_stmt;
-
- do {
- /* If name has multiple uses, bail out. */
- if (!single_imm_use (name, &use, &use_stmt))
- return NULL;
-
- /* If this is not a trivial copy, we found it. */
- if (!gimple_assign_ssa_name_copy_p (use_stmt)
- || gimple_assign_rhs1 (use_stmt) != name)
- break;
-
- /* Continue searching uses of the copy destination. */
- name = gimple_assign_lhs (use_stmt);
- } while (1);
-
- if (final_name_p)
- *final_name_p = name;
-
- return use_stmt;
-}
-
/* Get the statement we can propagate from into NAME skipping
trivial copies. Returns the statement which defines the
propagation source or NULL_TREE if there is no such one.
it is set to whether the chain to NAME is a single use chain
or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
-static gimple
+static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
bool single_use = true;
do {
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (!has_single_use (name))
{
propagation source. Returns true if so, otherwise false. */
static bool
-can_propagate_from (gimple def_stmt)
+can_propagate_from (gimple *def_stmt)
{
gcc_assert (is_gimple_assign (def_stmt));
remove_prop_source_from_use (tree name)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
bool cfg_changed = false;
do {
return cfg_changed;
}
-/* Return the rhs of a gimple_assign STMT in a form of a single tree,
+/* Return the rhs of a gassign *STMT in a form of a single tree,
converted to type TYPE.
This should disappear, but is needed so we can combine expressions and use
routines that deal with gimple exclusively . */
static tree
-rhs_to_tree (tree type, gimple stmt)
+rhs_to_tree (tree type, gimple *stmt)
{
location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
considered simplified. */
static tree
-combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
+combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
were no simplifying combines. */
static tree
-forward_propagate_into_comparison_1 (gimple stmt,
+forward_propagate_into_comparison_1 (gimple *stmt,
enum tree_code code, tree type,
tree op0, tree op1)
{
simplify comparisons against constants. */
if (TREE_CODE (op0) == SSA_NAME)
{
- gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
+ gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
if (def_stmt && can_propagate_from (def_stmt))
{
+ enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
+ bool invariant_only_p = !single_use0_p;
+
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
+
+ /* Always combine comparisons or conversions from booleans. */
+ if (TREE_CODE (op1) == INTEGER_CST
+ && ((CONVERT_EXPR_CODE_P (def_code)
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
+ == BOOLEAN_TYPE)
+ || TREE_CODE_CLASS (def_code) == tcc_comparison))
+ invariant_only_p = false;
+
tmp = combine_cond_expr_cond (stmt, code, type,
- rhs0, op1, !single_use0_p);
+ rhs0, op1, invariant_only_p);
if (tmp)
return tmp;
}
/* If that wasn't successful, try the second operand. */
if (TREE_CODE (op1) == SSA_NAME)
{
- gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
+ gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
if (def_stmt && can_propagate_from (def_stmt))
{
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree tmp;
bool cfg_changed = false;
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
This must be kept in sync with forward_propagate_into_cond. */
static int
-forward_propagate_into_gimple_cond (gimple stmt)
+forward_propagate_into_gimple_cond (gcond *stmt)
{
tree tmp;
enum tree_code code = gimple_cond_code (stmt);
static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
tree tmp = NULL_TREE;
tree cond = gimple_assign_rhs1 (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
- bool swap = false;
/* We can do tree combining on SSA_NAME and comparison expressions. */
if (COMPARISON_CLASS_P (cond))
{
enum tree_code def_code;
tree name = cond;
- gimple def_stmt = get_prop_source_stmt (name, true, NULL);
+ gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
if (!def_stmt || !can_propagate_from (def_stmt))
return 0;
TREE_TYPE (cond),
gimple_assign_rhs1 (def_stmt),
gimple_assign_rhs2 (def_stmt));
- else if (code == COND_EXPR
- && ((def_code == BIT_NOT_EXPR
- && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
- || (def_code == BIT_XOR_EXPR
- && integer_onep (gimple_assign_rhs2 (def_stmt)))))
- {
- tmp = gimple_assign_rhs1 (def_stmt);
- swap = true;
- }
}
if (tmp
else if (integer_zerop (tmp))
gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
else
- {
- gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
- if (swap)
- {
- tree t = gimple_assign_rhs2 (stmt);
- gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
- gimple_assign_set_rhs3 (stmt, t);
- }
- }
+ gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
stmt = gsi_stmt (*gsi_p);
update_stmt (stmt);
return 0;
}
-/* Propagate from the ssa name definition statements of COND_EXPR
- values in the rhs of statement STMT into the conditional arms
- if that simplifies it.
- Returns true if the stmt was changed. */
-
-static bool
-combine_cond_exprs (gimple_stmt_iterator *gsi_p)
-{
- gimple stmt = gsi_stmt (*gsi_p);
- tree cond, val1, val2;
- bool changed = false;
-
- cond = gimple_assign_rhs1 (stmt);
- val1 = gimple_assign_rhs2 (stmt);
- if (TREE_CODE (val1) == SSA_NAME)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (val1);
- if (is_gimple_assign (def_stmt)
- && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
- && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
- {
- val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
- gimple_assign_set_rhs2 (stmt, val1);
- changed = true;
- }
- }
- val2 = gimple_assign_rhs3 (stmt);
- if (TREE_CODE (val2) == SSA_NAME)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (val2);
- if (is_gimple_assign (def_stmt)
- && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
- && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
- {
- val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
- gimple_assign_set_rhs3 (stmt, val2);
- changed = true;
- }
- }
- if (operand_equal_p (val1, val2, 0))
- {
- gimple_assign_set_rhs_from_tree (gsi_p, val1);
- stmt = gsi_stmt (*gsi_p);
- changed = true;
- }
-
- if (changed)
- update_stmt (stmt);
-
- return changed;
-}
-
/* We've just substituted an ADDR_EXPR into stmt. Update all the
relevant data structures to match. */
static void
-tidy_after_forward_propagate_addr (gimple stmt)
+tidy_after_forward_propagate_addr (gimple *stmt)
{
/* We may have turned a trapping insn into a non-trapping insn. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
bool single_use_p)
{
tree lhs, rhs, rhs2, array_ref;
- gimple use_stmt = gsi_stmt (*use_stmt_gsi);
+ gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
enum tree_code rhs_code;
bool res = true;
if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
- new_def_rhs, NULL_TREE);
+ new_def_rhs);
else if (is_gimple_min_invariant (new_def_rhs))
- gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
- new_def_rhs, NULL_TREE);
+ gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
else
return false;
gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
bool all = true;
bool single_use_p = parent_single_use_p && has_single_use (name);
}
-/* Forward propagate the comparison defined in *DEFGSI like
- cond_1 = x CMP y to uses of the form
- a_1 = (T')cond_1
- a_1 = !cond_1
- a_1 = cond_1 != 0
- Returns true if stmt is now unused. Advance DEFGSI to the next
- statement. */
-
-static bool
-forward_propagate_comparison (gimple_stmt_iterator *defgsi)
-{
- gimple stmt = gsi_stmt (*defgsi);
- tree name = gimple_assign_lhs (stmt);
- gimple use_stmt;
- tree tmp = NULL_TREE;
- gimple_stmt_iterator gsi;
- enum tree_code code;
- tree lhs;
-
- /* Don't propagate ssa names that occur in abnormal phis. */
- if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
- || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
- goto bailout;
-
- /* Do not un-cse comparisons. But propagate through copies. */
- use_stmt = get_prop_dest_stmt (name, &name);
- if (!use_stmt
- || !is_gimple_assign (use_stmt))
- goto bailout;
-
- code = gimple_assign_rhs_code (use_stmt);
- lhs = gimple_assign_lhs (use_stmt);
- if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
- goto bailout;
-
- /* We can propagate the condition into a statement that
- computes the logical negation of the comparison result. */
- if ((code == BIT_NOT_EXPR
- && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
- || (code == BIT_XOR_EXPR
- && integer_onep (gimple_assign_rhs2 (use_stmt))))
- {
- tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
- bool nans = HONOR_NANS (TYPE_MODE (type));
- enum tree_code inv_code;
- inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
- if (inv_code == ERROR_MARK)
- goto bailout;
-
- tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt));
- }
- else
- goto bailout;
-
- gsi = gsi_for_stmt (use_stmt);
- gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
- use_stmt = gsi_stmt (gsi);
- update_stmt (use_stmt);
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, " Replaced '");
- print_gimple_expr (dump_file, stmt, 0, dump_flags);
- fprintf (dump_file, "' with '");
- print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
- fprintf (dump_file, "'\n");
- }
-
- /* When we remove stmt now the iterator defgsi goes off it's current
- sequence, hence advance it now. */
- gsi_next (defgsi);
-
- /* Remove defining statements. */
- return remove_prop_source_from_use (name);
-
-bailout:
- gsi_next (defgsi);
- return false;
-}
-
-
-/* GSI_P points to a statement which performs a narrowing integral
- conversion.
-
- Look for cases like:
-
- t = x & c;
- y = (T) t;
-
- Turn them into:
-
- t = x & c;
- y = (T) x;
-
- If T is narrower than X's type and C merely masks off bits outside
- of (T) and nothing else.
-
- Normally we'd let DCE remove the dead statement. But no DCE runs
- after the last forwprop/combine pass, so we remove the obviously
- dead code ourselves.
-
- Return TRUE if a change was made, FALSE otherwise. */
-
-static bool
-simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
-{
- gimple stmt = gsi_stmt (*gsi_p);
- gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
-
- /* See if the input for the conversion was set via a BIT_AND_EXPR and
- the only use of the BIT_AND_EXPR result is the conversion. */
- if (is_gimple_assign (rhs_def_stmt)
- && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
- && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
- {
- tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
- tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
- tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
-
- /* Now verify suitability of the BIT_AND_EXPR's operands.
- The first must be an SSA_NAME that we can propagate and the
- second must be an integer constant that masks out all the
- bits outside the final result's type, but nothing else. */
- if (TREE_CODE (rhs_def_operand1) == SSA_NAME
- && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
- && TREE_CODE (rhs_def_operand2) == INTEGER_CST
- && operand_equal_p (rhs_def_operand2,
- build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
- TYPE_PRECISION (lhs_type)),
- 0))
- {
- /* This is an optimizable case. Replace the source operand
- in the conversion with the first source operand of the
- BIT_AND_EXPR. */
- gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
- stmt = gsi_stmt (*gsi_p);
- update_stmt (stmt);
-
- /* There is no DCE after the last forwprop pass. It's
- easy to clean up the first order effects here. */
- gimple_stmt_iterator si;
- si = gsi_for_stmt (rhs_def_stmt);
- gsi_remove (&si, true);
- fwprop_invalidate_lattice (gimple_get_lhs (rhs_def_stmt));
- release_defs (rhs_def_stmt);
- return true;
- }
- }
-
- return false;
-}
-
-
/* Helper function for simplify_gimple_switch. Remove case labels that
have values outside the range of the new type. */
static void
-simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
+simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
unsigned int branch_num = gimple_switch_num_labels (stmt);
auto_vec<tree> labels (branch_num);
the condition which we may be able to optimize better. */
static bool
-simplify_gimple_switch (gimple stmt)
+simplify_gimple_switch (gswitch *stmt)
{
/* The optimization that we really care about is removing unnecessary
casts. That will let us do much better in propagating the inferred
tree cond = gimple_switch_index (stmt);
if (TREE_CODE (cond) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (cond);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
if (gimple_assign_cast_p (def_stmt))
{
tree def = gimple_assign_rhs1 (def_stmt);
{
tree p = i ? p1 : p2;
tree off = size_zero_node;
- gimple stmt;
+ gimple *stmt;
enum tree_code code;
/* For each of p1 and p2 we need to iterate at least
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
- gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
+ gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
tree vuse = gimple_vuse (stmt2);
if (vuse == NULL)
return false;
tree val2 = gimple_call_arg (stmt2, 1);
tree len2 = gimple_call_arg (stmt2, 2);
tree diff, vdef, new_str_cst;
- gimple use_stmt;
+ gimple *use_stmt;
unsigned int ptr1_align;
unsigned HOST_WIDE_INT src_len;
char *src_buf;
use_operand_p use_p;
if (!tree_fits_shwi_p (val2)
- || !tree_fits_uhwi_p (len2))
+ || !tree_fits_uhwi_p (len2)
+ || compare_tree_int (len2, 1024) == 1)
break;
if (is_gimple_call (stmt1))
{
is not constant, or is bigger than memcpy length, bail out. */
if (diff == NULL
|| !tree_fits_uhwi_p (diff)
- || tree_int_cst_lt (len1, diff))
+ || tree_int_cst_lt (len1, diff)
+ || compare_tree_int (diff, 1024) == 1)
break;
/* Use maximum of difference plus memset length and memcpy length
static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
- gimple def;
+ gimple *def;
enum tree_code code1;
tree arg11;
tree arg21;
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree arg[2], rtype, rotcnt = NULL_TREE;
tree def_arg1[2], def_arg2[2];
enum tree_code def_code[2];
tree lhs;
int i;
bool swapped_p = false;
- gimple g;
+ gimple *g;
arg[0] = gimple_assign_rhs1 (stmt);
arg[1] = gimple_assign_rhs2 (stmt);
if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
TREE_TYPE (rotcnt)))
{
- g = gimple_build_assign_with_ops (NOP_EXPR,
- make_ssa_name (TREE_TYPE (def_arg2[0]),
- NULL),
- rotcnt, NULL_TREE);
+ g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
+ NOP_EXPR, rotcnt);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
rotcnt = gimple_assign_lhs (g);
}
lhs = gimple_assign_lhs (stmt);
if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
- lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
- g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
- ? LROTATE_EXPR : RROTATE_EXPR,
- lhs, def_arg1[0], rotcnt);
+ lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
+ g = gimple_build_assign (lhs,
+ ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
+ ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
{
gsi_insert_before (gsi, g, GSI_SAME_STMT);
- g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
- lhs, NULL_TREE);
+ g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
}
gsi_replace (gsi, g, false);
return true;
}
-/* Perform re-associations of the plus or minus statement STMT that are
- always permitted. Returns true if the CFG was changed. */
-
-static bool
-associate_plusminus (gimple_stmt_iterator *gsi)
-{
- gimple stmt = gsi_stmt (*gsi);
- tree rhs1 = gimple_assign_rhs1 (stmt);
- tree rhs2 = gimple_assign_rhs2 (stmt);
- enum tree_code code = gimple_assign_rhs_code (stmt);
- bool changed;
-
- /* We can't reassociate at all for saturating types. */
- if (TYPE_SATURATING (TREE_TYPE (rhs1)))
- return false;
-
- /* First contract negates. */
- do
- {
- changed = false;
-
- /* A +- (-B) -> A -+ B. */
- if (TREE_CODE (rhs2) == SSA_NAME)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
- if (is_gimple_assign (def_stmt)
- && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
- && can_propagate_from (def_stmt))
- {
- code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
- gimple_assign_set_rhs_code (stmt, code);
- rhs2 = gimple_assign_rhs1 (def_stmt);
- gimple_assign_set_rhs2 (stmt, rhs2);
- gimple_set_modified (stmt, true);
- changed = true;
- }
- }
-
- /* (-A) + B -> B - A. */
- if (TREE_CODE (rhs1) == SSA_NAME
- && code == PLUS_EXPR)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
- if (is_gimple_assign (def_stmt)
- && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
- && can_propagate_from (def_stmt))
- {
- code = MINUS_EXPR;
- gimple_assign_set_rhs_code (stmt, code);
- rhs1 = rhs2;
- gimple_assign_set_rhs1 (stmt, rhs1);
- rhs2 = gimple_assign_rhs1 (def_stmt);
- gimple_assign_set_rhs2 (stmt, rhs2);
- gimple_set_modified (stmt, true);
- changed = true;
- }
- }
- }
- while (changed);
-
- /* We can't reassociate floating-point or fixed-point plus or minus
- because of saturation to +-Inf. */
- if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
- || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
- goto out;
-
- /* Second match patterns that allow contracting a plus-minus pair
- irrespective of overflow issues.
-
- (A +- B) - A -> +- B
- (A +- B) -+ B -> A
- (CST +- A) +- CST -> CST +- A
- (A +- CST) +- CST -> A +- CST
- ~A + A -> -1
- ~A + 1 -> -A
- A - (A +- B) -> -+ B
- A +- (B +- A) -> +- B
- CST +- (CST +- A) -> CST +- A
- CST +- (A +- CST) -> CST +- A
- A + ~A -> -1
- (T)(P + A) - (T)P -> (T)A
-
- via commutating the addition and contracting operations to zero
- by reassociation. */
-
- if (TREE_CODE (rhs1) == SSA_NAME)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
- if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
- {
- enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
- if (def_code == PLUS_EXPR
- || def_code == MINUS_EXPR)
- {
- tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
- tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
- if (operand_equal_p (def_rhs1, rhs2, 0)
- && code == MINUS_EXPR)
- {
- /* (A +- B) - A -> +- B. */
- code = ((def_code == PLUS_EXPR)
- ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
- rhs1 = def_rhs2;
- rhs2 = NULL_TREE;
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- else if (operand_equal_p (def_rhs2, rhs2, 0)
- && code != def_code)
- {
- /* (A +- B) -+ B -> A. */
- code = TREE_CODE (def_rhs1);
- rhs1 = def_rhs1;
- rhs2 = NULL_TREE;
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- else if (CONSTANT_CLASS_P (rhs2)
- && CONSTANT_CLASS_P (def_rhs1))
- {
- /* (CST +- A) +- CST -> CST +- A. */
- tree cst = fold_binary (code, TREE_TYPE (rhs1),
- def_rhs1, rhs2);
- if (cst && !TREE_OVERFLOW (cst))
- {
- code = def_code;
- gimple_assign_set_rhs_code (stmt, code);
- rhs1 = cst;
- gimple_assign_set_rhs1 (stmt, rhs1);
- rhs2 = def_rhs2;
- gimple_assign_set_rhs2 (stmt, rhs2);
- gimple_set_modified (stmt, true);
- }
- }
- else if (CONSTANT_CLASS_P (rhs2)
- && CONSTANT_CLASS_P (def_rhs2))
- {
- /* (A +- CST) +- CST -> A +- CST. */
- enum tree_code mix = (code == def_code)
- ? PLUS_EXPR : MINUS_EXPR;
- tree cst = fold_binary (mix, TREE_TYPE (rhs1),
- def_rhs2, rhs2);
- if (cst && !TREE_OVERFLOW (cst))
- {
- code = def_code;
- gimple_assign_set_rhs_code (stmt, code);
- rhs1 = def_rhs1;
- gimple_assign_set_rhs1 (stmt, rhs1);
- rhs2 = cst;
- gimple_assign_set_rhs2 (stmt, rhs2);
- gimple_set_modified (stmt, true);
- }
- }
- }
- else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
- {
- tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
- if (operand_equal_p (def_rhs1, rhs2, 0))
- {
- /* ~A + A -> -1. */
- rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
- rhs2 = NULL_TREE;
- code = TREE_CODE (rhs1);
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
- && integer_onep (rhs2))
- || (TREE_CODE (rhs2) == COMPLEX_CST
- && integer_onep (TREE_REALPART (rhs2))
- && integer_onep (TREE_IMAGPART (rhs2))))
- {
- /* ~A + 1 -> -A. */
- code = NEGATE_EXPR;
- rhs1 = def_rhs1;
- rhs2 = NULL_TREE;
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- }
- else if (code == MINUS_EXPR
- && CONVERT_EXPR_CODE_P (def_code)
- && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
- && TREE_CODE (rhs2) == SSA_NAME)
- {
- /* (T)(P + A) - (T)P -> (T)A. */
- gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs2);
- if (is_gimple_assign (def_stmt2)
- && can_propagate_from (def_stmt2)
- && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt2))
- && TREE_CODE (gimple_assign_rhs1 (def_stmt2)) == SSA_NAME)
- {
- /* Now we have (T)X - (T)P. */
- tree p = gimple_assign_rhs1 (def_stmt2);
- def_stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
- if (is_gimple_assign (def_stmt2)
- && can_propagate_from (def_stmt2)
- && (gimple_assign_rhs_code (def_stmt2) == POINTER_PLUS_EXPR
- || gimple_assign_rhs_code (def_stmt2) == PLUS_EXPR)
- && gimple_assign_rhs1 (def_stmt2) == p)
- {
- /* And finally (T)(P + A) - (T)P. */
- tree a = gimple_assign_rhs2 (def_stmt2);
- if (TYPE_PRECISION (TREE_TYPE (rhs1))
- <= TYPE_PRECISION (TREE_TYPE (a))
- /* For integer types, if A has a smaller type
- than T the result depends on the possible
- overflow in P + A.
- E.g. T=size_t, A=(unsigned)429497295, P>0.
- However, if an overflow in P + A would cause
- undefined behavior, we can assume that there
- is no overflow. */
- || (INTEGRAL_TYPE_P (TREE_TYPE (p))
- && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (p)))
- /* For pointer types, if the conversion of A to the
- final type requires a sign- or zero-extension,
- then we have to punt - it is not defined which
- one is correct. */
- || (POINTER_TYPE_P (TREE_TYPE (p))
- && TREE_CODE (a) == INTEGER_CST
- && tree_int_cst_sign_bit (a) == 0))
- {
- if (issue_strict_overflow_warning
- (WARN_STRICT_OVERFLOW_MISC)
- && TYPE_PRECISION (TREE_TYPE (rhs1))
- > TYPE_PRECISION (TREE_TYPE (a))
- && INTEGRAL_TYPE_P (TREE_TYPE (p)))
- warning_at (gimple_location (stmt),
- OPT_Wstrict_overflow,
- "assuming signed overflow does not "
- "occur when assuming that "
- "(T)(P + A) - (T)P is always (T)A");
- if (useless_type_conversion_p (TREE_TYPE (rhs1),
- TREE_TYPE (a)))
- code = TREE_CODE (a);
- else
- code = NOP_EXPR;
- rhs1 = a;
- rhs2 = NULL_TREE;
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1,
- rhs2);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- }
- }
- }
- }
- }
-
- if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
- if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
- {
- enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
- if (def_code == PLUS_EXPR
- || def_code == MINUS_EXPR)
- {
- tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
- tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
- if (operand_equal_p (def_rhs1, rhs1, 0)
- && code == MINUS_EXPR)
- {
- /* A - (A +- B) -> -+ B. */
- code = ((def_code == PLUS_EXPR)
- ? NEGATE_EXPR : TREE_CODE (def_rhs2));
- rhs1 = def_rhs2;
- rhs2 = NULL_TREE;
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- else if (operand_equal_p (def_rhs2, rhs1, 0)
- && code != def_code)
- {
- /* A +- (B +- A) -> +- B. */
- code = ((code == PLUS_EXPR)
- ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
- rhs1 = def_rhs1;
- rhs2 = NULL_TREE;
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- else if (CONSTANT_CLASS_P (rhs1)
- && CONSTANT_CLASS_P (def_rhs1))
- {
- /* CST +- (CST +- A) -> CST +- A. */
- tree cst = fold_binary (code, TREE_TYPE (rhs2),
- rhs1, def_rhs1);
- if (cst && !TREE_OVERFLOW (cst))
- {
- code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
- gimple_assign_set_rhs_code (stmt, code);
- rhs1 = cst;
- gimple_assign_set_rhs1 (stmt, rhs1);
- rhs2 = def_rhs2;
- gimple_assign_set_rhs2 (stmt, rhs2);
- gimple_set_modified (stmt, true);
- }
- }
- else if (CONSTANT_CLASS_P (rhs1)
- && CONSTANT_CLASS_P (def_rhs2))
- {
- /* CST +- (A +- CST) -> CST +- A. */
- tree cst = fold_binary (def_code == code
- ? PLUS_EXPR : MINUS_EXPR,
- TREE_TYPE (rhs2),
- rhs1, def_rhs2);
- if (cst && !TREE_OVERFLOW (cst))
- {
- rhs1 = cst;
- gimple_assign_set_rhs1 (stmt, rhs1);
- rhs2 = def_rhs1;
- gimple_assign_set_rhs2 (stmt, rhs2);
- gimple_set_modified (stmt, true);
- }
- }
- }
- else if (def_code == BIT_NOT_EXPR)
- {
- tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
- if (code == PLUS_EXPR
- && operand_equal_p (def_rhs1, rhs1, 0))
- {
- /* A + ~A -> -1. */
- rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
- rhs2 = NULL_TREE;
- code = TREE_CODE (rhs1);
- gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- gimple_set_modified (stmt, true);
- }
- }
- }
- }
-
-out:
- if (gimple_modified_p (stmt))
- {
- fold_stmt_inplace (gsi);
- update_stmt (stmt);
- return true;
- }
-
- return false;
-}
-
-/* Combine two conversions in a row for the second conversion at *GSI.
- Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
- run. Else it returns 0. */
-
-static int
-combine_conversions (gimple_stmt_iterator *gsi)
-{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
- tree op0, lhs;
- enum tree_code code = gimple_assign_rhs_code (stmt);
- enum tree_code code2;
-
- gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
- || code == FLOAT_EXPR
- || code == FIX_TRUNC_EXPR);
-
- lhs = gimple_assign_lhs (stmt);
- op0 = gimple_assign_rhs1 (stmt);
- if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
- {
- gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
- return 1;
- }
-
- if (TREE_CODE (op0) != SSA_NAME)
- return 0;
-
- def_stmt = SSA_NAME_DEF_STMT (op0);
- if (!is_gimple_assign (def_stmt))
- return 0;
-
- code2 = gimple_assign_rhs_code (def_stmt);
-
- if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
- {
- tree defop0 = gimple_assign_rhs1 (def_stmt);
- tree type = TREE_TYPE (lhs);
- tree inside_type = TREE_TYPE (defop0);
- tree inter_type = TREE_TYPE (op0);
- int inside_int = INTEGRAL_TYPE_P (inside_type);
- unsigned int inside_prec = TYPE_PRECISION (inside_type);
- int inside_unsignedp = TYPE_UNSIGNED (inside_type);
- int inter_int = INTEGRAL_TYPE_P (inter_type);
- int inter_float = FLOAT_TYPE_P (inter_type);
- unsigned int inter_prec = TYPE_PRECISION (inter_type);
- int inter_unsignedp = TYPE_UNSIGNED (inter_type);
- int final_int = INTEGRAL_TYPE_P (type);
- unsigned int final_prec = TYPE_PRECISION (type);
-
- /* Don't propagate ssa names that occur in abnormal phis. */
- if (TREE_CODE (defop0) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
- return 0;
-
- /* A truncation to an unsigned type should be canonicalized as
- bitwise and of a mask. */
- if (final_int && inter_int && inside_int
- && final_prec == inside_prec
- && final_prec > inter_prec
- && inter_unsignedp)
- {
- tree tem;
- tem = fold_build2 (BIT_AND_EXPR, inside_type,
- defop0,
- wide_int_to_tree
- (inside_type,
- wi::mask (inter_prec, false,
- TYPE_PRECISION (inside_type))));
- if (!useless_type_conversion_p (type, inside_type))
- {
- tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
- GSI_SAME_STMT);
- gimple_assign_set_rhs1 (stmt, tem);
- }
- else
- gimple_assign_set_rhs_from_tree (gsi, tem);
- update_stmt (gsi_stmt (*gsi));
- return 1;
- }
-
- /* If we are converting an integer to a floating-point that can
- represent it exactly and back to an integer, we can skip the
- floating-point conversion. */
- if (inside_int && inter_float && final_int &&
- (unsigned) significand_size (TYPE_MODE (inter_type))
- >= inside_prec - !inside_unsignedp)
- {
- if (useless_type_conversion_p (type, inside_type))
- {
- gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
- gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
- update_stmt (stmt);
- return remove_prop_source_from_use (op0) ? 2 : 1;
- }
- else
- {
- gimple_assign_set_rhs1 (stmt, defop0);
- gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
- update_stmt (stmt);
- return remove_prop_source_from_use (op0) ? 2 : 1;
- }
- }
- }
-
- return 0;
-}
-
/* Combine an element access with a shuffle. Returns true if there were
any changes made, else it returns false. */
static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op, op0, op1, op2;
tree elem_type;
unsigned idx, n, size;
static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op0, op1, op2, op3, arg0, arg1;
enum tree_code code;
bool single_use_op0 = false;
{
enum tree_code code2;
- gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
+ gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
if (!def_stmt2 || !can_propagate_from (def_stmt2))
return 0;
static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op, op2, orig, type, elem_type;
unsigned elem_size, nelts, i;
enum tree_code code;
for (i = 0; i < nelts; i++)
mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
op2 = build_vector (mask_type, mask_elts);
- gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
+ gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
}
update_stmt (gsi_stmt (*gsi));
return true;
}
-/* Simplify multiplications.
- Return true if a transformation applied, otherwise return false. */
-
-static bool
-simplify_mult (gimple_stmt_iterator *gsi)
-{
- gimple stmt = gsi_stmt (*gsi);
- tree arg1 = gimple_assign_rhs1 (stmt);
- tree arg2 = gimple_assign_rhs2 (stmt);
-
- if (TREE_CODE (arg1) != SSA_NAME)
- return false;
-
- gimple def_stmt = SSA_NAME_DEF_STMT (arg1);
- if (!is_gimple_assign (def_stmt))
- return false;
-
- /* Look through a sign-changing conversion. */
- if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
- {
- if (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (def_stmt)))
- != TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
- || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
- return false;
- def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
- if (!is_gimple_assign (def_stmt))
- return false;
- }
-
- if (gimple_assign_rhs_code (def_stmt) == EXACT_DIV_EXPR)
- {
- if (operand_equal_p (gimple_assign_rhs2 (def_stmt), arg2, 0))
- {
- tree res = gimple_assign_rhs1 (def_stmt);
- if (useless_type_conversion_p (TREE_TYPE (arg1), TREE_TYPE (res)))
- gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (res), res,
- NULL_TREE);
- else
- gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, res, NULL_TREE);
- gcc_assert (gsi_stmt (*gsi) == stmt);
- update_stmt (stmt);
- return true;
- }
- }
-
- return false;
-}
-
/* Primitive "lattice" function for gimple_simplify. */
lattice.quick_grow_cleared (num_ssa_names);
int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
int postorder_num = inverted_post_order_compute (postorder);
+ auto_vec<gimple *, 4> to_fixup;
to_purge = BITMAP_ALLOC (NULL);
for (int i = 0; i < postorder_num; ++i)
{
Note we update GSI within the loop as necessary. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs, rhs;
enum tree_code code;
else
gsi_next (&gsi);
}
- else if (TREE_CODE_CLASS (code) == tcc_comparison)
+ else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
+ && gimple_assign_load_p (stmt)
+ && !gimple_has_volatile_ops (stmt)
+ && (TREE_CODE (gimple_assign_rhs1 (stmt))
+ != TARGET_MEM_REF)
+ && !stmt_can_throw_internal (stmt))
+ {
+ /* Rewrite loads used only in real/imagpart extractions to
+ component-wise loads. */
+ use_operand_p use_p;
+ imm_use_iterator iter;
+ bool rewrite = true;
+ FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
+ {
+ gimple *use_stmt = USE_STMT (use_p);
+ if (is_gimple_debug (use_stmt))
+ continue;
+ if (!is_gimple_assign (use_stmt)
+ || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
+ && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
+ {
+ rewrite = false;
+ break;
+ }
+ }
+ if (rewrite)
+ {
+ gimple *use_stmt;
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
+ {
+ if (is_gimple_debug (use_stmt))
+ {
+ if (gimple_debug_bind_p (use_stmt))
+ {
+ gimple_debug_bind_reset_value (use_stmt);
+ update_stmt (use_stmt);
+ }
+ continue;
+ }
+
+ tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
+ TREE_TYPE (TREE_TYPE (rhs)),
+ unshare_expr (rhs));
+ gimple *new_stmt
+ = gimple_build_assign (gimple_assign_lhs (use_stmt),
+ new_rhs);
+
+ location_t loc = gimple_location (use_stmt);
+ gimple_set_location (new_stmt, loc);
+ gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
+ unlink_stmt_vdef (use_stmt);
+ gsi_remove (&gsi2, true);
+
+ gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
+ }
+
+ release_defs (stmt);
+ gsi_remove (&gsi, true);
+ }
+ else
+ gsi_next (&gsi);
+ }
+ else if (code == COMPLEX_EXPR)
{
- if (forward_propagate_comparison (&gsi))
- cfg_changed = true;
+ /* Rewrite stores of a single-use complex build expression
+ to component-wise stores. */
+ use_operand_p use_p;
+ gimple *use_stmt;
+ if (single_imm_use (lhs, &use_p, &use_stmt)
+ && gimple_store_p (use_stmt)
+ && !gimple_has_volatile_ops (use_stmt)
+ && is_gimple_assign (use_stmt)
+ && (TREE_CODE (gimple_assign_lhs (use_stmt))
+ != TARGET_MEM_REF))
+ {
+ tree use_lhs = gimple_assign_lhs (use_stmt);
+ tree new_lhs = build1 (REALPART_EXPR,
+ TREE_TYPE (TREE_TYPE (use_lhs)),
+ unshare_expr (use_lhs));
+ gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
+ location_t loc = gimple_location (use_stmt);
+ gimple_set_location (new_stmt, loc);
+ gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
+ gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
+ SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
+ gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
+ gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
+ gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
+
+ new_lhs = build1 (IMAGPART_EXPR,
+ TREE_TYPE (TREE_TYPE (use_lhs)),
+ unshare_expr (use_lhs));
+ gimple_assign_set_lhs (use_stmt, new_lhs);
+ gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
+ update_stmt (use_stmt);
+
+ release_defs (stmt);
+ gsi_remove (&gsi, true);
+ }
+ else
+ gsi_next (&gsi);
}
else
gsi_next (&gsi);
Note we update GSI within the loop as necessary. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
- gimple orig_stmt = stmt;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *orig_stmt = stmt;
bool changed = false;
+ bool was_noreturn = (is_gimple_call (stmt)
+ && gimple_call_noreturn_p (stmt));
/* Mark stmt as potentially needing revisiting. */
gimple_set_plf (stmt, GF_PLF_1, false);
stmt = gsi_stmt (gsi);
if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
bitmap_set_bit (to_purge, bb->index);
+ if (!was_noreturn
+ && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
+ to_fixup.safe_push (stmt);
/* Cleanup the CFG if we simplified a condition to
true or false. */
- if (gimple_code (stmt) == GIMPLE_COND
- && (gimple_cond_true_p (stmt)
- || gimple_cond_false_p (stmt)))
- cfg_changed = true;
+ if (gcond *cond = dyn_cast <gcond *> (stmt))
+ if (gimple_cond_true_p (cond)
+ || gimple_cond_false_p (cond))
+ cfg_changed = true;
update_stmt (stmt);
}
|| code == VEC_COND_EXPR)
{
/* In this case the entire COND_EXPR is in rhs1. */
- if (forward_propagate_into_cond (&gsi)
- || combine_cond_exprs (&gsi))
+ if (forward_propagate_into_cond (&gsi))
{
changed = true;
stmt = gsi_stmt (gsi);
|| code == BIT_XOR_EXPR)
&& simplify_rotate (&gsi))
changed = true;
- else if (code == MULT_EXPR)
- {
- changed = simplify_mult (&gsi);
- if (changed
- && maybe_clean_or_replace_eh_stmt (stmt, stmt))
- bitmap_set_bit (to_purge, bb->index);
- }
- else if (code == PLUS_EXPR
- || code == MINUS_EXPR)
- {
- changed = associate_plusminus (&gsi);
- if (changed
- && maybe_clean_or_replace_eh_stmt (stmt, stmt))
- bitmap_set_bit (to_purge, bb->index);
- }
- else if (CONVERT_EXPR_CODE_P (code)
- || code == FLOAT_EXPR
- || code == FIX_TRUNC_EXPR)
- {
- int did_something = combine_conversions (&gsi);
- if (did_something == 2)
- cfg_changed = true;
-
- /* If we have a narrowing conversion to an integral
- type that is fed by a BIT_AND_EXPR, we might be
- able to remove the BIT_AND_EXPR if it merely
- masks off bits outside the final type (and nothing
- else. */
- if (! did_something)
- {
- tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
- tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
- if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
- && INTEGRAL_TYPE_P (outer_type)
- && INTEGRAL_TYPE_P (inner_type)
- && (TYPE_PRECISION (outer_type)
- <= TYPE_PRECISION (inner_type)))
- did_something = simplify_conversion_from_bitmask (&gsi);
- }
-
- changed = did_something != 0;
- }
else if (code == VEC_PERM_EXPR)
{
int did_something = simplify_permutation (&gsi);
}
case GIMPLE_SWITCH:
- changed = simplify_gimple_switch (stmt);
+ changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
break;
case GIMPLE_COND:
{
- int did_something;
- did_something = forward_propagate_into_gimple_cond (stmt);
+ int did_something
+ = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
if (did_something == 2)
cfg_changed = true;
changed = did_something != 0;
free (postorder);
lattice.release ();
+ /* Fixup stmts that became noreturn calls. This may require splitting
+ blocks and thus isn't possible during the walk. Do this
+   in reverse order so we don't inadvertently remove a stmt we want to
+ fixup by visiting a dominating now noreturn call first. */
+ while (!to_fixup.is_empty ())
+ {
+ gimple *stmt = to_fixup.pop ();
+ if (dump_file && dump_flags & TDF_DETAILS)
+ {
+ fprintf (dump_file, "Fixing up noreturn call ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ fprintf (dump_file, "\n");
+ }
+ cfg_changed |= fixup_noreturn_call (stmt);
+ }
+
cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
BITMAP_FREE (to_purge);