--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O1 -fdump-tree-sccp-details" } */
+/* { dg-final { scan-tree-dump-times {final value replacement} 8 "sccp" } } */
+
+/* PR tree-optimization/105735.  Loops that repeatedly apply a
+   loop-invariant bitwise AND/IOR/XOR should be eliminated by final
+   value replacement in the sccp pass.  Eight of the nine loops below
+   are replaceable; f2 (XOR with a run-time trip count) is not, since
+   the result of repeated XOR depends on the parity of the trip
+   count — hence the expected count of 8 above.  */
+
+/* AND, constant trip count (64 iterations).  */
+unsigned int
+__attribute__((noipa))
+foo (unsigned int tmp, unsigned int bit2)
+{
+  for (int bit = 0; bit < 64; bit++)
+    tmp &= bit2;
+  return tmp;
+}
+
+/* AND, decreasing induction variable with step -3.  */
+unsigned int
+__attribute__((noipa))
+foo1 (unsigned int tmp, unsigned int bit2)
+{
+  for (int bit = 63; bit >= 0; bit -=3)
+    tmp &= bit2;
+  return tmp;
+}
+
+/* IOR, constant trip count.  */
+unsigned int
+__attribute__((noipa))
+foo2 (unsigned int tmp, unsigned int bit2)
+{
+  for (int bit = 0; bit < 64; bit++)
+    tmp |= bit2;
+  return tmp;
+}
+
+/* IOR, decreasing induction variable with step -3.  */
+unsigned int
+__attribute__((noipa))
+foo3 (unsigned int tmp, unsigned int bit2)
+{
+  for (int bit = 63; bit >= 0; bit -=3)
+    tmp |= bit2;
+  return tmp;
+}
+
+/* XOR applied an even number of times (64): result is tmp unchanged.  */
+unsigned int
+__attribute__((noipa))
+foo4 (unsigned int tmp, unsigned int bit2)
+{
+  for (int bit = 0; bit < 64; bit++)
+    tmp ^= bit2;
+  return tmp;
+}
+
+/* XOR applied an odd number of times (63): result is tmp ^ bit2.  */
+unsigned int
+__attribute__((noipa))
+foo5 (unsigned int tmp, unsigned int bit2)
+{
+  for (int bit = 0; bit < 63; bit++)
+    tmp ^= bit2;
+  return tmp;
+}
+
+/* AND with a run-time trip count: AND is idempotent, so the count
+   does not matter and replacement is still possible.  */
+unsigned int
+__attribute__((noipa))
+f (unsigned int tmp, int bit, unsigned int bit2)
+{
+  unsigned int res = tmp;
+  for (int i = 0; i < bit; i++)
+    res &= bit2;
+  return res;
+}
+
+/* IOR with a run-time trip count: likewise idempotent.  */
+unsigned int
+__attribute__((noipa))
+f1 (unsigned int tmp, int bit, unsigned int bit2)
+{
+  unsigned int res = tmp;
+  for (int i = 0; i < bit; i++)
+    res |= bit2;
+  return res;
+}
+
+/* XOR with a run-time trip count: parity of the count is unknown at
+   compile time, so no final value replacement is expected here.  */
+unsigned int
+__attribute__((noipa))
+f2 (unsigned int tmp, int bit, unsigned int bit2)
+{
+  unsigned int res = tmp;
+  for (int i = 0; i < bit; i++)
+    res ^= bit2;
+  return res;
+}
+
--- /dev/null
+/* { dg-do run } */
+/* { dg-options "-O1" } */
+
+#include "pr105735-1.c"
+
+/* Runtime verification for pr105735-1.c.  The set bits of
+   tmp = 0x1101 are a subset of those of bit2 = 0x111101, so:
+     tmp & bit2 == 0x1101    (AND loops leave tmp unchanged)
+     tmp | bit2 == 0x111101  (IOR loops yield bit2)
+     tmp ^ bit2 == 0x110000  (one effective XOR; an even number of
+			      XORs leaves tmp unchanged)  */
+int main()
+{
+  unsigned int tmp = 0x1101;
+  unsigned int bit2 = 0x111101;
+  if (foo (tmp, bit2) != 0x1101)
+    __builtin_abort ();
+  if (foo1 (tmp, bit2) != 0x1101)
+    __builtin_abort ();
+  if (foo2 (tmp, bit2) != 0x111101)
+    __builtin_abort ();
+  if (foo3 (tmp, bit2) != 0x111101)
+    __builtin_abort ();
+  if (foo4 (tmp, bit2) != 0x1101)
+    __builtin_abort ();
+  if (foo5 (tmp, bit2) != 0x110000)
+    __builtin_abort ();
+  if (f (tmp, 64, bit2) != 0x1101)
+    __builtin_abort ();
+  if (f1 (tmp, 64, bit2) != 0x111101)
+    __builtin_abort ();
+  if (f2 (tmp, 64, bit2) != 0x1101)
+    __builtin_abort ();
+}
return fold_build2 (code1, type, inv, wide_int_to_tree (type, bits));
}
+/* Generated match.pd predicate: recognizes a bitwise operation and
+   captures its two operands in the tree array, e.g.
+     tmp_7 = _0 & _1;  */
+extern bool gimple_bitop_with_inv_p (tree, tree *, tree (*)(tree));
+
+/* If PHIDEF is defined by a bitwise AND/IOR/XOR with a loop-invariant
+   operand iterated by LOOP (NITER being the loop's iteration-count
+   tree), return the equivalent single-operation expression for the
+   final value; otherwise return NULL_TREE.  */
+static tree
+analyze_and_compute_bitop_with_inv_effect (class loop* loop, tree phidef,
+					   tree niter)
+{
+  tree match_op[2],inv;
+  tree type = TREE_TYPE (phidef);
+  gphi* header_phi = NULL;
+  enum tree_code code;
+  /* Look for the cycle (op0 == match_op[0], op1 == match_op[1]):
+
+       op1 = PHI <inv (preheader), phidef (latch)>
+       phidef = op0 & op1
+
+     When op0 is loop-invariant, the final value of phidef reduces to
+     inv & op0 (likewise for |, and for ^ up to a parity twist).  */
+  gimple *def;
+  def = SSA_NAME_DEF_STMT (phidef);
+  if (!(is_gimple_assign (def)
+	&& ((code = gimple_assign_rhs_code (def)), true)
+	&& (code == BIT_AND_EXPR || code == BIT_IOR_EXPR
+	    || code == BIT_XOR_EXPR)))
+    return NULL_TREE;
+
+  match_op[0] = gimple_assign_rhs1 (def);
+  match_op[1] = gimple_assign_rhs2 (def);
+
+  /* NOTE(review): only the rhs1-invariant / rhs2-PHI operand order is
+     recognized here; presumably gimple operand canonicalization rules
+     out the commuted form -- confirm.  */
+  if (TREE_CODE (match_op[1]) != SSA_NAME
+      || !expr_invariant_in_loop_p (loop, match_op[0])
+      || !(header_phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (match_op[1])))
+      || gimple_phi_num_args (header_phi) != 2)
+    return NULL_TREE;
+
+  /* The PHI must be the loop-carried cycle closed by PHIDEF itself.  */
+  if (PHI_ARG_DEF_FROM_EDGE (header_phi, loop_latch_edge (loop)) != phidef)
+    return NULL_TREE;
+
+  enum tree_code code1
+    = gimple_assign_rhs_code (def);
+
+  /* AND and IOR are idempotent, so the iteration count is irrelevant.
+     XOR is not: an even number of applications of the invariant
+     cancels out, so a compile-time-constant NITER is required.  NITER
+     here is apparently the latch-execution count (PHIDEF is computed
+     NITER + 1 times) -- odd NITER means an even number of XORs, so
+     the invariant operand is replaced by zero.  */
+  if (code1 == BIT_XOR_EXPR)
+    {
+      if (!tree_fits_uhwi_p (niter))
+	return NULL_TREE;
+      unsigned HOST_WIDE_INT niter_num;
+      niter_num = tree_to_uhwi (niter);
+      if (niter_num % 2 != 0)
+	match_op[0] = build_zero_cst (type);
+    }
+
+  inv = PHI_ARG_DEF_FROM_EDGE (header_phi, loop_preheader_edge (loop));
+  return fold_build2 (code1, type, inv, match_op[0]);
+}
+
/* Do final value replacement for LOOP, return true if we did anything. */
bool
bool folded_casts;
def = analyze_scalar_evolution_in_loop (ex_loop, loop, def,
&folded_casts);
- def = compute_overall_effect_of_inner_loop (ex_loop, def);
+
+ tree bitinv_def, bit_def;
+ unsigned HOST_WIDE_INT niter_num;
+
+ if (def != chrec_dont_know)
+ def = compute_overall_effect_of_inner_loop (ex_loop, def);
+
+  /* Handle a bitop with a loop-invariant operand, i.e.
+
+       for (int i = 0; i < 32; i++)
+	 tmp &= bit2;
+
+     where bit2 is invariant in the loop, so the final value of tmp
+     simplifies to a single tmp & bit2.  */
+ else if ((bitinv_def
+ = analyze_and_compute_bitop_with_inv_effect (loop,
+ phidef, niter)))
+ def = bitinv_def;
/* Handle bitwise induction expression.
expressible, but in fact final value of RES can be replaced by
RES & CONSTANT where CONSTANT all ones with bit {0,3,6,9,... ,63}
being cleared, similar for BIT_IOR_EXPR/BIT_XOR_EXPR. */
- unsigned HOST_WIDE_INT niter_num;
- tree bit_def;
- if (tree_fits_uhwi_p (niter)
- && (niter_num = tree_to_uhwi (niter)) != 0
- && niter_num < TYPE_PRECISION (TREE_TYPE (phidef))
- && (bit_def
- = analyze_and_compute_bitwise_induction_effect (loop,
- phidef,
- niter_num)))
+ else if (tree_fits_uhwi_p (niter)
+ && (niter_num = tree_to_uhwi (niter)) != 0
+ && niter_num < TYPE_PRECISION (TREE_TYPE (phidef))
+ && (bit_def
+ = analyze_and_compute_bitwise_induction_effect (loop,
+ phidef,
+ niter_num)))
def = bit_def;
if (!tree_does_not_contain_chrecs (def)