/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "insn-codes.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "tree-cfgcleanup.h"
#include "tree-into-ssa.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0;

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, it is written with the ssa name that
   represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
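/* For illustration (the SSA names below are made up, not from any real
   dump), a single-use chain this function walks:

     tmp_1 = a_2;          <- trivial copy, skipped
     tmp_3 = tmp_1;        <- trivial copy, skipped
     res_4 = tmp_3 > 0;    <- returned as the propagation destination

   With FINAL_NAME_P non-NULL, *FINAL_NAME_P is set to tmp_3, the name
   actually used in the destination statement.  */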
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
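/* For illustration (made-up SSA names): once a_1's definition has been
   propagated into its only use, the now-dead chain

     b_2 = c_3 + 1;
     a_1 = b_2;

   is removed bottom-up, first a_1's definition, then b_2's once it
   reaches zero uses.  */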
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
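/* For illustration (made-up SSA names): given

     a_1 = b_2 + 1;
     cmp_3 = a_1 != 0;

   the defining statement is substituted to form the tree
   'b_2 + 1 != 0', which fold can simplify (for types where overflow is
   undefined) to 'b_2 != -1'; that simplified comparison is what gets
   returned.  */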
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
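/* For illustration, the edge-swapping canonicalization above rewrites
   (made-up names, b_1 a _Bool)

     if (b_1 == 0) goto L1; else goto L2;

   into

     if (b_1 != 0) goto L2; else goto L1;

   by toggling EDGE_TRUE_VALUE/EDGE_FALSE_VALUE on the two successor
   edges instead of rewriting the branch targets.  */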
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return 0;
}
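/* For illustration (made-up SSA names, b_2 of 1-bit precision): given

     t_1 = ~b_2;
     x_3 = t_1 ? y_4 : z_5;

   the inverted condition is propagated and the arms are swapped,
   yielding

     x_3 = b_2 ? z_5 : y_4;  */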
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
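/* For illustration (made-up SSA names): given

     v_1 = c_2 ? a_3 : b_4;
     x_5 = c_2 ? v_1 : d_6;

   the true arm of x_5 is replaced by a_3, because v_1 evaluates to a_3
   whenever c_2 is true.  If both arms end up equal, the COND_EXPR
   degenerates into a plain copy.  */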
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
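/* For illustration (made-up names, int x[10], so the element size
   is 4):

     ptr_1 = &x[1];
     ptr2_2 = ptr_1 p+ 8;

   The final case above folds the constant adjustment into the address,
   yielding the equivalent of

     ptr2_2 = &x[3];  */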
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
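/* For illustration (made-up SSA names, cond_1 of 1-bit precision):

     cond_1 = x_2 < y_3;
     a_4 = ~cond_1;

   becomes

     a_4 = x_2 >= y_3;

   For floating-point operands this only happens when the inverted
   comparison is valid, i.e. when NaNs need not be honored;
   invert_tree_comparison returns ERROR_MARK otherwise and we bail.  */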
/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}
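/* For illustration, with T = unsigned char (made-up SSA names):

     t_1 = x_2 & 255;
     y_3 = (unsigned char) t_1;

   becomes

     y_3 = (unsigned char) x_2;

   since the mask 255 only clears bits that the narrowing conversion
   discards anyway; the dead BIT_AND_EXPR statement is removed on the
   spot.  */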
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
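/* For illustration (made-up names): for

     i_2 = (int) c_1;        <- c_1 of type unsigned char
     switch (i_2) { case 1: ...  case 10: ... }

   all case values fit the type of c_1, so the switch index is replaced
   by c_1 and the now-unused cast can be removed later.  */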
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
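/* For illustration: with char buf[16], p1 = &buf[4] and p2 reached
   through the chain (made-up SSA names)

     q_1 = &buf[1];
     p2_2 = q_1 p+ 9;

   both pointers resolve to the base buf with byte offsets 4 and 10
   respectively, so the function returns 6.  */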
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
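/* For illustration (made-up SSA names): if a_1 is defined by

     a_1 = b_2 << 3;

   then defcodefor_name (a_1, &code, &arg1, &arg2) yields
   code = LSHIFT_EXPR, arg1 = b_2 and arg2 = 3.  For a name without a
   usable defining assignment it degenerates to code = SSA_NAME and
   arg1 = name.  */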
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)                          iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))    iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
         + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && tree_fits_shwi_p (cdef_arg1[i])
            && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                defcodefor_name (tem, &code, &tem, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                    && (tem == def_arg2[1 - i]
                        || tem == def_arg2_alt[1 - i]))
                  {
                    rotcnt = tem;
                    break;
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign_with_ops (NOP_EXPR,
                                        make_ssa_name (TREE_TYPE (def_arg2[0]),
                                                       NULL),
                                        rotcnt, NULL_TREE);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
  g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                                    ? LROTATE_EXPR : RROTATE_EXPR,
                                    lhs, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
                                        lhs, NULL_TREE);
    }
  gsi_replace (gsi, g, false);
  return true;
}
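/* For illustration, with unsigned int x (so B = 32, made-up SSA
   names):

     t1_1 = x_2 << 3;
     t2_3 = x_2 >> 29;
     r_4 = t1_1 | t2_3;

   matches the CNT1 + CNT2 == B pattern above and is replaced by

     r_4 = x_2 r<< 3;  */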
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

        (A +- B) - A       ->  +- B
        (A +- B) -+ B      ->  A
        (CST +- A) +- CST  ->  CST +- A
        (A +- CST) +- CST  ->  A +- CST
        ~A + A             ->  -1
        ~A + 1             ->  -A
        A - (A +- B)       ->  -+ B
        A +- (B +- A)      ->  +- B
        CST +- (CST +- A)  ->  CST +- A
        CST +- (A +- CST)  ->  CST +- A
        A + ~A             ->  -1
        (T)(P + A) - (T)P  ->  (T)A

     via commutating the addition and contracting operations to zero
     by reassociation.  */
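  /* For example (hypothetical GIMPLE, simplified), the (A +- B) - A case
     matches

       t_1 = a_2 + b_3;
       r_4 = t_1 - a_2;

     and rewrites the second statement to

       r_4 = b_3;

     which is valid even for wrapping types, since the plus-minus pair
     cancels exactly.  */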
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (CONSTANT_CLASS_P (rhs2)
                       && CONSTANT_CLASS_P (def_rhs1))
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (CONSTANT_CLASS_P (rhs2)
                       && CONSTANT_CLASS_P (def_rhs2))
                {
                  /* (A +- CST) +- CST -> A +- CST.  */
                  enum tree_code mix = (code == def_code)
                                       ? PLUS_EXPR : MINUS_EXPR;
                  tree cst = fold_binary (mix, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
                  rhs2 = NULL_TREE;
                  code = TREE_CODE (rhs1);
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
                        && integer_onep (rhs2))
                       || (TREE_CODE (rhs2) == COMPLEX_CST
                           && integer_onep (TREE_REALPART (rhs2))
                           && integer_onep (TREE_IMAGPART (rhs2))))
                {
                  /* ~A + 1 -> -A.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
          else if (code == MINUS_EXPR
                   && CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
                   && TREE_CODE (rhs2) == SSA_NAME)
            {
              /* (T)(P + A) - (T)P -> (T)A.  */
              gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs2);
              if (is_gimple_assign (def_stmt2)
                  && can_propagate_from (def_stmt2)
                  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt2))
                  && TREE_CODE (gimple_assign_rhs1 (def_stmt2)) == SSA_NAME)
                {
                  /* Now we have (T)X - (T)P.  */
                  tree p = gimple_assign_rhs1 (def_stmt2);
                  def_stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
                  if (is_gimple_assign (def_stmt2)
                      && can_propagate_from (def_stmt2)
                      && (gimple_assign_rhs_code (def_stmt2) == POINTER_PLUS_EXPR
                          || gimple_assign_rhs_code (def_stmt2) == PLUS_EXPR)
                      && gimple_assign_rhs1 (def_stmt2) == p)
                    {
                      /* And finally (T)(P + A) - (T)P.  */
                      tree a = gimple_assign_rhs2 (def_stmt2);
                      if (TYPE_PRECISION (TREE_TYPE (rhs1))
                          <= TYPE_PRECISION (TREE_TYPE (a))
                          /* For integer types, if A has a smaller type
                             than T the result depends on the possible
                             overflow in P + A.
                             E.g. T=size_t, A=(unsigned)429497295, P>0.
                             However, if an overflow in P + A would cause
                             undefined behavior, we can assume that there
                             is no overflow.  */
                          || (INTEGRAL_TYPE_P (TREE_TYPE (p))
                              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (p)))
                          /* For pointer types, if the conversion of A to the
                             final type requires a sign- or zero-extension,
                             then we have to punt - it is not defined which
                             one is correct.  */
                          || (POINTER_TYPE_P (TREE_TYPE (p))
                              && TREE_CODE (a) == INTEGER_CST
                              && tree_int_cst_sign_bit (a) == 0))
                        {
                          if (issue_strict_overflow_warning
                              (WARN_STRICT_OVERFLOW_MISC)
                              && TYPE_PRECISION (TREE_TYPE (rhs1))
                                 > TYPE_PRECISION (TREE_TYPE (a))
                              && INTEGRAL_TYPE_P (TREE_TYPE (p)))
                            warning_at (gimple_location (stmt),
                                        OPT_Wstrict_overflow,
                                        "assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(T)(P + A) - (T)P is always (T)A");
                          if (useless_type_conversion_p (TREE_TYPE (rhs1),
                                                         TREE_TYPE (a)))
                            code = TREE_CODE (a);
                          else
                            code = NOP_EXPR;
                          rhs1 = a;
                          rhs2 = NULL_TREE;
                          gimple_assign_set_rhs_with_ops (gsi, code, rhs1,
                                                          rhs2);
                          gcc_assert (gsi_stmt (*gsi) == stmt);
                          gimple_set_modified (stmt, true);
                        }
                    }
                }
            }
        }
    }
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (CONSTANT_CLASS_P (rhs1)
                       && CONSTANT_CLASS_P (def_rhs1))
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (CONSTANT_CLASS_P (rhs1)
                       && CONSTANT_CLASS_P (def_rhs2))
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2),
                                          rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
                  rhs2 = NULL_TREE;
                  code = TREE_CODE (rhs1);
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }

out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      return true;
    }

  return false;
}
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus_align (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler and easier to analyze with respect to alignment
       ... = ptr & ~algn;  */
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  algn = wide_int_to_tree (TREE_TYPE (ptr), wi::bit_not (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
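
/* For illustration (hypothetical C source): the negate-of-masked-offset
   form matched above typically results from manual pointer alignment such
   as

     res = ptr - ((uintptr_t) ptr & 15);

   which this transformation rewrites to the equivalent of
   ptr & ~(uintptr_t) 15, exposing the alignment of RES directly.  */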
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus_diff (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr1, rhs;

  /* Pattern match
       tem1 = (long) ptr1;
       tem2 = (long) ptr2;
       tem3 = tem2 - tem1;
       tem4 = (unsigned long) tem3;
       ptr2 = ptr1 + tem4;
     and produce
       ptr2 = ptr2;  */
  ptr1 = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  gimple minus = SSA_NAME_DEF_STMT (rhs);
  /* Conditionally look through a sign-changing conversion.  */
  if (is_gimple_assign (minus)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (minus))
      && (TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (minus)))
          == TYPE_PRECISION (TREE_TYPE (rhs)))
      && TREE_CODE (gimple_assign_rhs1 (minus)) == SSA_NAME)
    minus = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (minus));
  if (!is_gimple_assign (minus))
    return false;
  if (gimple_assign_rhs_code (minus) != MINUS_EXPR)
    return false;
  rhs = gimple_assign_rhs2 (minus);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
      || gimple_assign_rhs1 (def_stmt) != ptr1)
    return false;
  rhs = gimple_assign_rhs1 (minus);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (! useless_type_conversion_p (TREE_TYPE (ptr1), TREE_TYPE (rhs)))
    return false;

  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (rhs), rhs, NULL_TREE);
  update_stmt (stmt);

  return true;
}
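
/* For illustration (hypothetical GIMPLE, simplified): the pattern above
   recognizes that

     off_3 = (sizetype) ptr2_2 - (sizetype) ptr1_1;
     res_4 = ptr1_1 p+ off_3;

   computes ptr1 + (ptr2 - ptr1), so res_4 is replaced by ptr2_2
   directly.  */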
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, off1, off2;

  if (associate_pointerplus_align (gsi)
      || associate_pointerplus_diff (gsi))
    return true;

  /* Associate (p +p off1) +p off2 as (p +p (off1 + off2)).  */
  ptr = gimple_assign_rhs1 (stmt);
  off1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (ptr) != SSA_NAME
      || !has_single_use (ptr))
    return false;
  def_stmt = SSA_NAME_DEF_STMT (ptr);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
      || !can_propagate_from (def_stmt))
    return false;
  ptr = gimple_assign_rhs1 (def_stmt);
  off2 = gimple_assign_rhs2 (def_stmt);
  if (!types_compatible_p (TREE_TYPE (off1), TREE_TYPE (off2)))
    return false;

  tree off = make_ssa_name (TREE_TYPE (off1), NULL);
  gimple ostmt = gimple_build_assign_with_ops (PLUS_EXPR, off, off1, off2);
  gsi_insert_before (gsi, ostmt, GSI_SAME_STMT);

  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR, ptr, off);
  update_stmt (stmt);

  return true;
}
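
/* For illustration (hypothetical GIMPLE): two chained offset additions

     p_2 = p_1 p+ off1_3;
     q_4 = p_2 p+ off2_5;

   become, assuming p_2 has no other uses,

     off_6 = off1_3 + off2_5;
     q_4 = p_1 p+ off_6;

   so later passes see a single offset from the base pointer.  */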
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */
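
/* For example (hypothetical GIMPLE): with x_1 of type unsigned char,

     s_2 = (short) x_1;
     i_3 = (int) s_2;

   the intermediate statement zero-extends and the final one sign-extends a
   value known to be non-negative, so the pair collapses to the single
   zero-extension i_3 = (int) x_1.  */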
static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
                       || code == FLOAT_EXPR
                       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
        return 0;

      /* In addition to the cases of two conversions in a row
         handled below, if we are converting something to its own
         type via an object of identical or wider precision, neither
         conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
          && (((inter_int || inter_ptr) && final_int)
              || (inter_float && final_float))
          && inter_prec >= final_prec)
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
          gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* Likewise, if the intermediate and initial types are either both
         float or both integer, we don't need the middle conversion if the
         former is wider than the latter and doesn't change the signedness
         (for integers).  Avoid this if the final type is a pointer since
         then we sometimes need the middle conversion.  Likewise if the
         final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
           || (inter_float && inside_float)
           || (inter_vec && inside_vec))
          && inter_prec >= inside_prec
          && (inter_float || inter_vec
              || inter_unsignedp == inside_unsignedp)
          && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type))
          && ! final_ptr
          && (! final_vec || inter_prec == inside_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* If we have a sign-extension of a zero-extended value, we can
         replace that by a single zero-extension.  Likewise if the
         final conversion does not change precision we can drop the
         intermediate conversion.  */
      if (inside_int && inter_int && final_int
          && ((inside_prec < inter_prec && inter_prec < final_prec
               && inside_unsignedp && !inter_unsignedp)
              || final_prec == inter_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* Two conversions in a row are not needed unless:
         - some conversion is floating-point (overstrict for now), or
         - some conversion is a vector (overstrict for now), or
         - the intermediate type is narrower than both initial and
           final, or
         - the intermediate type and innermost type differ in signedness,
           and the outermost type is wider than the intermediate, or
         - the initial type is a pointer type and the precisions of the
           intermediate and final types differ, or
         - the final type is a pointer type and the precisions of the
           initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
          && ! inside_vec && ! inter_vec && ! final_vec
          && (inter_prec >= inside_prec || inter_prec >= final_prec)
          && ! (inside_int && inter_int
                && inter_unsignedp != inside_unsignedp
                && inter_prec < final_prec)
          && ((inter_unsignedp && inter_prec > inside_prec)
              == (final_unsignedp && final_prec > inter_prec))
          && ! (inside_ptr && inter_prec != final_prec)
          && ! (final_ptr && inside_prec != inter_prec)
          && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type)))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* A truncation to an unsigned type should be canonicalized as
         bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
          && final_prec == inside_prec
          && final_prec > inter_prec
          && inter_unsignedp)
        {
          tree tem;
          tem = fold_build2 (BIT_AND_EXPR, inside_type,
                             defop0,
                             wide_int_to_tree
                               (inside_type,
                                wi::mask (inter_prec, false,
                                          TYPE_PRECISION (inside_type))));
          if (!useless_type_conversion_p (type, inside_type))
            {
              tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
                                              GSI_SAME_STMT);
              gimple_assign_set_rhs1 (stmt, tem);
            }
          else
            gimple_assign_set_rhs_from_tree (gsi, tem);
          update_stmt (gsi_stmt (*gsi));
          return 2;
        }
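
      /* For example (hypothetical GIMPLE): with x_1 of type unsigned int,

           t_2 = (unsigned short) x_1;
           y_3 = (unsigned int) t_2;

         is canonicalized to the equivalent

           y_3 = x_1 & 65535;

         which exposes the masking to later bitwise simplifications.  */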
      /* If we are converting an integer to a floating-point that can
         represent it exactly and back to an integer, we can skip the
         floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
          (unsigned) significand_size (TYPE_MODE (inter_type))
          >= inside_prec - !inside_unsignedp)
        {
          if (useless_type_conversion_p (type, inside_type))
            {
              gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
              gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
              update_stmt (stmt);
              return remove_prop_source_from_use (op0) ? 2 : 1;
            }
          else
            {
              gimple_assign_set_rhs1 (stmt, defop0);
              gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
              update_stmt (stmt);
              return remove_prop_source_from_use (op0) ? 2 : 1;
            }
        }
    }

  return 0;
}
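
/* For illustration of the last case above (hypothetical GIMPLE): with i_1
   of type int,

     d_2 = (double) i_1;
     j_3 = (int) d_2;

   the round trip is exact because double's 53-bit significand can
   represent every 32-bit int, so this becomes j_3 = i_1.  */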
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */
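
/* For example (hypothetical GIMPLE): extracting an element of a shuffle of
   four-element vectors of 32-bit elements,

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 2, 3, 0, 1 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 0>;

   selects element 2 of a_1, so the access is rewritten as

     x_3 = BIT_FIELD_REF <a_1, 32, 64>;  */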
static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
        return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        {
          p = gimple_assign_rhs1 (def_stmt);
        }
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */
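
/* For example (hypothetical): with two-element vectors, mask1 = { 1, 0 }
   followed by mask2 = { 1, 0 } composes to { 0, 1 }, the identity on the
   first input, so the function returns 1 and

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 1, 0 }>;
     w_3 = VEC_PERM_EXPR <v_2, v_2, { 1, 0 }>;

   lets w_3 be replaced by a_1.  */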
static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */
static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
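
/* For illustration (hypothetical GIMPLE): a shuffle of a constructor folds
   at compile time, e.g.

     v_1 = {a_2, b_3, c_4, d_5};
     w_6 = VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>;

   becomes

     w_6 = {d_5, c_4, b_3, a_2};  */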
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
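
/* For example (hypothetical GIMPLE): a constructor built entirely from
   element extracts of one source vector,

     x_2 = BIT_FIELD_REF <v_1, 32, 96>;
     y_3 = BIT_FIELD_REF <v_1, 32, 64>;
     z_4 = BIT_FIELD_REF <v_1, 32, 32>;
     t_5 = BIT_FIELD_REF <v_1, 32, 0>;
     w_6 = {x_2, y_3, z_4, t_5};

   is recognized as the single shuffle

     w_6 = VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>;  */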
static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  if (! orig)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Simplify multiplications.
   Return true if a transformation applied, otherwise return false.  */
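
/* For example (hypothetical GIMPLE): an exact division multiplied back by
   the same operand cancels, so

     t_2 = a_1 /[ex] 4;
     r_3 = t_2 * 4;

   simplifies to r_3 = a_1.  */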
static bool
simplify_mult (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);

  if (TREE_CODE (arg1) != SSA_NAME)
    return false;

  gimple def_stmt = SSA_NAME_DEF_STMT (arg1);
  if (!is_gimple_assign (def_stmt))
    return false;

  /* Look through a sign-changing conversion.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      if (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (def_stmt)))
          != TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
          || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
        return false;
      def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
      if (!is_gimple_assign (def_stmt))
        return false;
    }

  if (gimple_assign_rhs_code (def_stmt) == EXACT_DIV_EXPR)
    {
      if (operand_equal_p (gimple_assign_rhs2 (def_stmt), arg2, 0))
        {
          tree res = gimple_assign_rhs1 (def_stmt);
          if (useless_type_conversion_p (TREE_TYPE (arg1), TREE_TYPE (res)))
            gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (res), res,
                                            NULL_TREE);
          else
            gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, res, NULL_TREE);
          gcc_assert (gsi_stmt (*gsi) == stmt);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
/* Const-and-copy lattice for fold_all_stmts.  */
static vec<tree> lattice;

/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
        name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
/* Fold all stmts using fold_stmt following only single-use chains
   and using a simple const-and-copy lattice.  */
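
/* For example (hypothetical GIMPLE): visiting defs before uses, the
   lattice records b_2 -> a_1 when processing

     b_2 = a_1;
     c_3 = b_2 + 1;

   so folding the second statement already sees c_3 = a_1 + 1.  */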
static bool
fold_all_stmts (struct function *fun)
{
  bool cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = inverted_post_order_compute (postorder);
  for (int i = 0; i < postorder_num; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          gimple orig_stmt = stmt;

          if (fold_stmt (&gsi, fwprop_ssa_val))
            {
              stmt = gsi_stmt (gsi);
              if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt)
                  && gimple_purge_dead_eh_edges (bb))
                cfg_changed = true;
              /* Cleanup the CFG if we simplified a condition to
                 true or false.  */
              if (gimple_code (stmt) == GIMPLE_COND
                  && (gimple_cond_true_p (stmt)
                      || gimple_cond_false_p (stmt)))
                cfg_changed = true;
              update_stmt (stmt);
            }

          /* Fill up the lattice.  */
          if (gimple_assign_single_p (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  if (TREE_CODE (rhs) == SSA_NAME)
                    lattice[SSA_NAME_VERSION (lhs)] = fwprop_ssa_val (rhs);
                  else if (is_gimple_min_invariant (rhs))
                    lattice[SSA_NAME_VERSION (lhs)] = rhs;
                  else
                    lattice[SSA_NAME_VERSION (lhs)] = lhs;
                }
            }
        }
    }
  free (postorder);
  lattice.release ();

  return cfg_changed;
}
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
unsigned int
pass_forwprop::execute (function *fun)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs, true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off))), true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              if (forward_propagate_comparison (&gsi))
                cfg_changed = true;
            }
          else
            gsi_next (&gsi);
        }
      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if (code == COND_EXPR
                    || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1. */
                    if (forward_propagate_into_cond (&gsi)
                        || combine_cond_exprs (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == MULT_EXPR)
                  {
                    changed = simplify_mult (&gsi);
                    if (changed
                        && maybe_clean_or_replace_eh_stmt (stmt, stmt)
                        && gimple_purge_dead_eh_edges (bb))
                      cfg_changed = true;
                  }
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  {
                    changed = associate_plusminus (&gsi);
                    if (changed
                        && maybe_clean_or_replace_eh_stmt (stmt, stmt)
                        && gimple_purge_dead_eh_edges (bb))
                      cfg_changed = true;
                  }
                else if (code == POINTER_PLUS_EXPR)
                  changed = associate_pointerplus (&gsi);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;

                    /* If we have a narrowing conversion to an integral
                       type that is fed by a BIT_AND_EXPR, we might be
                       able to remove the BIT_AND_EXPR if it merely
                       masks off bits outside the final type (and nothing
                       else).  */
                    if (! did_something)
                      {
                        tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
                        tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
                        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                            && INTEGRAL_TYPE_P (outer_type)
                            && INTEGRAL_TYPE_P (inner_type)
                            && (TYPE_PRECISION (outer_type)
                                <= TYPE_PRECISION (inner_type)))
                          did_something = simplify_conversion_from_bitmask (&gsi);
                      }
                    changed = did_something != 0;
                  }
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);
              gsi_next (&gsi);
            }
        }
    }
  /* At the end fold all statements.  */
  cfg_changed |= fold_all_stmts (fun);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}