1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
48 if (x) goto ... else goto ...
50 Will be transformed into:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
74 if (x) goto ... else goto ...
76 Will be transformed into:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
89 if (x) goto ... else goto ...
91 Will be transformed into:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
123 ptr = (type1*)&type2var;
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
133 ptr2 = ptr + <constant>;
137 ptr2 = &x[constant/elementsize];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
154 Provided that decl has known alignment >= 2, will get turned into
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
162 This will (of course) be extended as other needs arise. */
/* NOTE(review): this chunk is a numbered listing with elided lines (the
   embedded numeric prefixes are the original file's line numbers); the
   declarations below allow use of these symbols before their definitions. */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
/* NOTE(review): gaps in the embedded numbering (175->178->184, ...) show
   the return type, local declarations, loop scaffolding and return
   statements were elided from this listing -- confirm against the full
   source before relying on it.  */
178 get_prop_dest_stmt (tree name, tree *final_name_p)
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
/* Report the end of the single-use copy chain to the caller.  */
198 *final_name_p = name;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
/* NOTE(review): interior lines (return type, loop structure, returns)
   are elided from this listing -- the visible statements are fragments
   of a copy-chain walk over SSA_NAME_DEF_STMT.  */
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
/* NOTE(review): each guard below is followed by an elided early-return
   line in the original; this listing shows only the conditions.  */
261 can_propagate_from (gimple def_stmt)
263 gcc_assert (is_gimple_assign (def_stmt));
265 /* If the rhs has side-effects we cannot propagate from it. */
266 if (gimple_has_volatile_ops (def_stmt))
269 /* If the rhs is a load we cannot propagate from it. */
270 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
271 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
274 /* Constants can be always propagated. */
275 if (gimple_assign_single_p (def_stmt)
276 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
279 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
280 if (stmt_references_abnormal_ssa_name (def_stmt))
283 /* If the definition is a conversion of a pointer to a function type,
284 then we can not apply optimizations as some targets require
285 function pointers to be canonicalized and in this case this
286 optimization could eliminate a necessary canonicalization. */
287 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
289 tree rhs = gimple_assign_rhs1 (def_stmt);
290 if (POINTER_TYPE_P (TREE_TYPE (rhs))
291 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
298 /* Remove a chain of dead statements starting at the definition of
299 NAME. The chain is linked via the first operand of the defining statements.
300 If NAME was replaced in its only use then this function can be used
301 to clean up dead stmts. The function handles already released SSA
303 Returns true if cleanup-cfg has to run. */
/* NOTE(review): the do { ... } loop header and early returns are elided
   from this listing; only the trailing "} while" on the last line remains
   to show the loop shape.  */
306 remove_prop_source_from_use (tree name)
308 gimple_stmt_iterator gsi;
310 bool cfg_changed = false;
315 if (SSA_NAME_IN_FREE_LIST (name)
316 || SSA_NAME_IS_DEFAULT_DEF (name)
317 || !has_zero_uses (name))
320 stmt = SSA_NAME_DEF_STMT (name);
321 if (gimple_code (stmt) == GIMPLE_PHI
322 || gimple_has_side_effects (stmt))
325 bb = gimple_bb (stmt);
326 gsi = gsi_for_stmt (stmt);
/* Detach the statement's virtual def before physically removing it.  */
327 unlink_stmt_vdef (stmt);
328 gsi_remove (&gsi, true);
330 cfg_changed |= gimple_purge_dead_eh_edges (bb);
/* Follow the chain through the first assign operand, if any.  */
332 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
333 } while (name && TREE_CODE (name) == SSA_NAME);
338 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
339 converted to type TYPE.
341 This should disappear, but is needed so we can combine expressions and use
342 the fold() interfaces. Long term, we need to develop folding and combine
343 routines that deal with gimple exclusively . */
/* NOTE(review): the return type line and the final fallthrough case are
   elided from this listing.  Dispatches on the GIMPLE rhs class to build
   an equivalent GENERIC tree of type TYPE.  */
346 rhs_to_tree (tree type, gimple stmt)
348 location_t loc = gimple_location (stmt);
349 enum tree_code code = gimple_assign_rhs_code (stmt);
350 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
351 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
352 gimple_assign_rhs2 (stmt),
353 gimple_assign_rhs3 (stmt));
354 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
355 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
356 gimple_assign_rhs2 (stmt));
357 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
358 return build1 (code, type, gimple_assign_rhs1 (stmt));
359 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
360 return gimple_assign_rhs1 (stmt);
365 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
366 the folded result in a form suitable for COND_EXPR_COND or
367 NULL_TREE, if there is no suitable simplified form. If
368 INVARIANT_ONLY is true only gimple_min_invariant results are
369 considered simplified. */
/* NOTE(review): the declaration of T, the NULL check after fold, and the
   return statements are elided from this listing.  */
372 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
373 tree op0, tree op1, bool invariant_only)
377 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
379 t = fold_binary_loc (loc, code, type, op0, op1);
383 /* Require that we got a boolean type out if we put one in. */
384 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
386 /* Canonicalize the combined condition for use in a COND_EXPR. */
387 t = canonicalize_cond_expr_cond (t);
389 /* Bail out if we required an invariant but didn't get one. */
390 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
396 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
397 of its operand. Return a new comparison tree or NULL_TREE if there
398 were no simplifying combines. */
/* NOTE(review): the OP0/OP1 parameter line, success-return shortcuts after
   each attempt, and the final return are elided from this listing.  The
   strategy tries op0's def, then op1's def, then both at once.  */
401 forward_propagate_into_comparison_1 (location_t loc,
402 enum tree_code code, tree type,
405 tree tmp = NULL_TREE;
406 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
407 bool single_use0_p = false, single_use1_p = false;
409 /* For comparisons use the first operand, that is likely to
410 simplify comparisons against constants. */
411 if (TREE_CODE (op0) == SSA_NAME)
413 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
414 if (def_stmt && can_propagate_from (def_stmt))
416 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
/* Require an invariant result unless the source chain is single-use.  */
417 tmp = combine_cond_expr_cond (loc, code, type,
418 rhs0, op1, !single_use0_p);
424 /* If that wasn't successful, try the second operand. */
425 if (TREE_CODE (op1) == SSA_NAME)
427 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
428 if (def_stmt && can_propagate_from (def_stmt))
430 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
431 tmp = combine_cond_expr_cond (loc, code, type,
432 op0, rhs1, !single_use1_p);
438 /* If that wasn't successful either, try both operands. */
439 if (rhs0 != NULL_TREE
440 && rhs1 != NULL_TREE)
441 tmp = combine_cond_expr_cond (loc, code, type,
443 !(single_use0_p && single_use1_p));
448 /* Propagate from the ssa name definition statements of the assignment
449 from a comparison at *GSI into the conditional if that simplifies it.
450 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
451 otherwise returns 0. */
/* NOTE(review): the tmp declaration, the operand arguments of the _1
   call, the "return 0" failure path, and update_stmt are elided from
   this listing.  */
454 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
456 gimple stmt = gsi_stmt (*gsi);
458 bool cfg_changed = false;
459 tree rhs1 = gimple_assign_rhs1 (stmt);
460 tree rhs2 = gimple_assign_rhs2 (stmt);
462 /* Combine the comparison with defining statements. */
463 tmp = forward_propagate_into_comparison_1 (gimple_location (stmt),
464 gimple_assign_rhs_code (stmt),
466 (gimple_assign_lhs (stmt)),
470 gimple_assign_set_rhs_from_tree (gsi, tmp);
471 fold_stmt_inplace (stmt);
/* Clean up now-dead source statements; they may expose dead EH edges.  */
474 if (TREE_CODE (rhs1) == SSA_NAME)
475 cfg_changed |= remove_prop_source_from_use (rhs1);
476 if (TREE_CODE (rhs2) == SSA_NAME)
477 cfg_changed |= remove_prop_source_from_use (rhs2);
478 return cfg_changed ? 2 : 1;
484 /* Propagate from the ssa name definition statements of COND_EXPR
485 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
486 Returns zero if no statement was changed, one if there were
487 changes and two if cfg_cleanup needs to run.
489 This must be kept in sync with forward_propagate_into_cond. */
/* NOTE(review): the tmp declaration, the "return 0" paths, the operand
   arguments to the _1 call, and update_stmt are elided from this
   listing.  */
492 forward_propagate_into_gimple_cond (gimple stmt)
494 location_t loc = gimple_location (stmt);
496 enum tree_code code = gimple_cond_code (stmt);
497 bool cfg_changed = false;
498 tree rhs1 = gimple_cond_lhs (stmt);
499 tree rhs2 = gimple_cond_rhs (stmt);
501 /* We can do tree combining on SSA_NAME and comparison expressions. */
502 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
505 tmp = forward_propagate_into_comparison_1 (loc, code,
510 if (dump_file && tmp)
512 fprintf (dump_file, " Replaced '");
513 print_gimple_expr (dump_file, stmt, 0, 0);
514 fprintf (dump_file, "' with '");
515 print_generic_expr (dump_file, tmp, 0);
516 fprintf (dump_file, "'\n");
519 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
522 if (TREE_CODE (rhs1) == SSA_NAME)
523 cfg_changed |= remove_prop_source_from_use (rhs1);
524 if (TREE_CODE (rhs2) == SSA_NAME)
525 cfg_changed |= remove_prop_source_from_use (rhs2);
/* A constant condition makes an edge dead, so request cfg cleanup.  */
526 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
533 /* Propagate from the ssa name definition statements of COND_EXPR
534 in the rhs of statement STMT into the conditional if that simplifies it.
535 Returns zero if no statement was changed, one if there were
536 changes and two if cfg_cleanup needs to run.
538 This must be kept in sync with forward_propagate_into_gimple_cond. */
/* NOTE(review): failure returns, some call arguments and update_stmt are
   elided from this listing.  Handles both a comparison-class rhs and a
   bare SSA_NAME rhs (rewritten as NAME != 0).  */
541 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
543 gimple stmt = gsi_stmt (*gsi_p);
544 location_t loc = gimple_location (stmt);
545 tree tmp = NULL_TREE;
546 tree cond = gimple_assign_rhs1 (stmt);
548 /* We can do tree combining on SSA_NAME and comparison expressions. */
549 if (COMPARISON_CLASS_P (cond))
550 tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
552 TREE_OPERAND (cond, 0),
553 TREE_OPERAND (cond, 1))
554 else if (TREE_CODE (cond) == SSA_NAME)
556 tree name = cond, rhs0;
557 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
558 if (!def_stmt || !can_propagate_from (def_stmt))
561 rhs0 = gimple_assign_rhs1 (def_stmt);
/* Fold the bare SSA name condition as "rhs0 != 0".  */
562 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
563 build_int_cst (TREE_TYPE (rhs0), 0),
569 if (dump_file && tmp)
571 fprintf (dump_file, " Replaced '");
572 print_generic_expr (dump_file, cond, 0);
573 fprintf (dump_file, "' with '");
574 print_generic_expr (dump_file, tmp, 0);
575 fprintf (dump_file, "'\n");
578 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
579 stmt = gsi_stmt (*gsi_p);
582 return is_gimple_min_invariant (tmp) ? 2 : 1;
588 /* We've just substituted an ADDR_EXPR into stmt. Update all the
589 relevant data structures to match. */
/* NOTE(review): the body that reacts to purged EH edges (setting
   cfg_changed) is elided from this listing.  */
592 tidy_after_forward_propagate_addr (gimple stmt)
594 /* We may have turned a trapping insn into a non-trapping insn. */
595 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
596 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
/* Re-derive TREE_INVARIANT/TREE_CONSTANT on the substituted address.  */
599 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
600 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
603 /* DEF_RHS contains the address of the 0th element in an array.
604 USE_STMT uses type of DEF_RHS to compute the address of an
605 arbitrary element within the array. The (variable) byte offset
606 of the element is contained in OFFSET.
608 We walk back through the use-def chains of OFFSET to verify that
609 it is indeed computing the offset of an element within the array
610 and extract the index corresponding to the given byte offset.
612 We then try to fold the entire address expression into a form
615 If we are successful, we replace the right hand side of USE_STMT
616 with the new address computation. */
/* NOTE(review): parameter DEF_RHS, locals (index, tunit, new_rhs, tmp),
   failure returns and several braces are elided from this listing --
   verify against the full source.  */
619 forward_propagate_addr_into_variable_array_index (tree offset,
621 gimple_stmt_iterator *use_stmt_gsi)
624 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
/* Determine the element size (TUNIT) from the shape of DEF_RHS.  */
627 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
628 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
629 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
630 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
633 if (!host_integerp (tunit, 1))
636 /* Get the offset's defining statement. */
637 offset_def = SSA_NAME_DEF_STMT (offset);
639 /* Try to find an expression for a proper index. This is either a
640 multiplication expression by the element size or just the ssa name we came
641 along in case the element size is one. In that case, however, we do not
642 allow multiplications because they can be computing index to a higher
643 level dimension (PR 37861). */
644 if (integer_onep (tunit))
646 if (is_gimple_assign (offset_def)
647 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
654 /* The statement which defines OFFSET before type conversion
655 must be a simple GIMPLE_ASSIGN. */
656 if (!is_gimple_assign (offset_def))
659 /* The RHS of the statement which defines OFFSET must be a
660 multiplication of an object by the size of the array elements.
661 This implicitly verifies that the size of the array elements
663 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
664 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
665 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
667 /* The first operand to the MULT_EXPR is the desired index. */
668 index = gimple_assign_rhs1 (offset_def);
670 /* If we have idx * tunit + CST * tunit re-associate that. */
671 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
672 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
673 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
674 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
675 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
676 gimple_assign_rhs2 (offset_def),
677 tunit)) != NULL_TREE)
679 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
680 if (is_gimple_assign (offset_def2)
681 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
682 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
683 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
685 index = fold_build2 (gimple_assign_rhs_code (offset_def),
687 gimple_assign_rhs1 (offset_def2), tmp);
696 /* Replace the pointer addition with array indexing. */
697 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
698 true, GSI_SAME_STMT);
699 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
701 new_rhs = unshare_expr (def_rhs);
702 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
706 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
707 unshare_expr (TREE_OPERAND (def_rhs, 0)),
708 index, integer_zero_node, NULL_TREE);
709 new_rhs = build_fold_addr_expr (new_rhs);
710 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
711 TREE_TYPE (new_rhs)))
713 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
714 NULL_TREE, true, GSI_SAME_STMT);
715 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
719 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
720 use_stmt = gsi_stmt (*use_stmt_gsi);
722 /* That should have created gimple, so there is no need to
723 record information to undo the propagation. */
724 fold_stmt_inplace (use_stmt);
725 tidy_after_forward_propagate_addr (use_stmt);
729 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
730 ADDR_EXPR <whatever>.
732 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
733 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
734 node or for recovery of array indexing from pointer arithmetic.
736 Return true if the propagation was successful (the propagation can
737 be not totally successful, yet things may have been changed). */
/* NOTE(review): this listing elides a parameter (presumably the
   single_use_p flag -- confirm), many braces, else branches and return
   statements.  Structure: (1) trivial copy/conversion uses, (2) constant
   POINTER_PLUS adjustments, (3) MEM_REF on the LHS, (4) MEM_REF on the
   RHS, (5) turning pointer arithmetic back into array indexing.  */
740 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
741 gimple_stmt_iterator *use_stmt_gsi,
744 tree lhs, rhs, rhs2, array_ref;
745 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
746 enum tree_code rhs_code;
749 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
751 lhs = gimple_assign_lhs (use_stmt);
752 rhs_code = gimple_assign_rhs_code (use_stmt);
753 rhs = gimple_assign_rhs1 (use_stmt);
755 /* Trivial cases. The use statement could be a trivial copy or a
756 useless conversion. Recurse to the uses of the lhs as copyprop does
757 not copy through different variant pointers and FRE does not catch
758 all useless conversions. Treat the case of a single-use name and
759 a conversion to def_rhs type separate, though. */
760 if (TREE_CODE (lhs) == SSA_NAME
761 && ((rhs_code == SSA_NAME && rhs == name)
762 || CONVERT_EXPR_CODE_P (rhs_code)))
764 /* Only recurse if we don't deal with a single use or we cannot
765 do the propagation to the current statement. In particular
766 we can end up with a conversion needed for a non-invariant
767 address which we cannot do in a single statement. */
769 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
770 && (!is_gimple_min_invariant (def_rhs)
771 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
772 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
773 && (TYPE_PRECISION (TREE_TYPE (lhs))
774 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
775 return forward_propagate_addr_expr (lhs, def_rhs);
777 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
778 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
779 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
781 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
785 /* Propagate through constant pointer adjustments. */
786 if (TREE_CODE (lhs) == SSA_NAME
787 && rhs_code == POINTER_PLUS_EXPR
789 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
792 /* As we come here with non-invariant addresses in def_rhs we need
793 to make sure we can build a valid constant offsetted address
794 for further propagation. Simply rely on fold building that
795 and check after the fact. */
796 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
798 fold_convert (ptr_type_node,
799 gimple_assign_rhs2 (use_stmt)));
800 if (TREE_CODE (new_def_rhs) == MEM_REF
801 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
803 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
806 /* Recurse. If we could propagate into all uses of lhs do not
807 bother to replace into the current use but just pretend we did. */
808 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
809 && forward_propagate_addr_expr (lhs, new_def_rhs))
812 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
813 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
814 new_def_rhs, NULL_TREE);
815 else if (is_gimple_min_invariant (new_def_rhs))
816 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
817 new_def_rhs, NULL_TREE);
820 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
821 update_stmt (use_stmt);
825 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
826 ADDR_EXPR will not appear on the LHS. */
827 lhs = gimple_assign_lhs (use_stmt);
828 while (handled_component_p (lhs))
829 lhs = TREE_OPERAND (lhs, 0);
831 /* Now see if the LHS node is a MEM_REF using NAME. If so,
832 propagate the ADDR_EXPR into the use of NAME and fold the result. */
833 if (TREE_CODE (lhs) == MEM_REF
834 && TREE_OPERAND (lhs, 0) == name)
837 HOST_WIDE_INT def_rhs_offset;
838 /* If the address is invariant we can always fold it. */
839 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
842 double_int off = mem_ref_offset (lhs);
844 off = double_int_add (off,
845 shwi_to_double_int (def_rhs_offset));
846 if (TREE_CODE (def_rhs_base) == MEM_REF)
848 off = double_int_add (off, mem_ref_offset (def_rhs_base));
849 new_ptr = TREE_OPERAND (def_rhs_base, 0);
852 new_ptr = build_fold_addr_expr (def_rhs_base);
853 TREE_OPERAND (lhs, 0) = new_ptr;
854 TREE_OPERAND (lhs, 1)
855 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
856 tidy_after_forward_propagate_addr (use_stmt);
857 /* Continue propagating into the RHS if this was not the only use. */
861 /* If the LHS is a plain dereference and the value type is the same as
862 that of the pointed-to type of the address we can put the
863 dereferenced address on the LHS preserving the original alias-type. */
864 else if (gimple_assign_lhs (use_stmt) == lhs
865 && useless_type_conversion_p
866 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
867 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
869 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
870 tree new_offset, new_base, saved;
871 while (handled_component_p (*def_rhs_basep))
872 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Temporarily rewrite the base in-place, unshare, then restore it.  */
873 saved = *def_rhs_basep;
874 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
876 new_base = TREE_OPERAND (*def_rhs_basep, 0);
878 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
879 TREE_OPERAND (*def_rhs_basep, 1));
883 new_base = build_fold_addr_expr (*def_rhs_basep);
884 new_offset = TREE_OPERAND (lhs, 1);
886 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
887 new_base, new_offset);
888 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
889 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
890 gimple_assign_set_lhs (use_stmt,
891 unshare_expr (TREE_OPERAND (def_rhs, 0)));
892 *def_rhs_basep = saved;
893 tidy_after_forward_propagate_addr (use_stmt);
894 /* Continue propagating into the RHS if this was not the
900 /* We can have a struct assignment dereferencing our name twice.
901 Note that we didn't propagate into the lhs to not falsely
902 claim we did when propagating into the rhs. */
906 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
907 nodes from the RHS. */
908 rhs = gimple_assign_rhs1 (use_stmt);
909 if (TREE_CODE (rhs) == ADDR_EXPR)
910 rhs = TREE_OPERAND (rhs, 0);
911 while (handled_component_p (rhs))
912 rhs = TREE_OPERAND (rhs, 0);
914 /* Now see if the RHS node is a MEM_REF using NAME. If so,
915 propagate the ADDR_EXPR into the use of NAME and fold the result. */
916 if (TREE_CODE (rhs) == MEM_REF
917 && TREE_OPERAND (rhs, 0) == name)
920 HOST_WIDE_INT def_rhs_offset;
921 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
924 double_int off = mem_ref_offset (rhs);
926 off = double_int_add (off,
927 shwi_to_double_int (def_rhs_offset));
928 if (TREE_CODE (def_rhs_base) == MEM_REF)
930 off = double_int_add (off, mem_ref_offset (def_rhs_base));
931 new_ptr = TREE_OPERAND (def_rhs_base, 0);
934 new_ptr = build_fold_addr_expr (def_rhs_base);
935 TREE_OPERAND (rhs, 0) = new_ptr;
936 TREE_OPERAND (rhs, 1)
937 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
938 fold_stmt_inplace (use_stmt);
939 tidy_after_forward_propagate_addr (use_stmt);
942 /* If the RHS is a plain dereference and the value type is the same as
943 that of the pointed-to type of the address we can put the
944 dereferenced address on the RHS preserving the original alias-type. */
945 else if (gimple_assign_rhs1 (use_stmt) == rhs
946 && useless_type_conversion_p
947 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
948 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
950 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
951 tree new_offset, new_base, saved;
952 while (handled_component_p (*def_rhs_basep))
953 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Same in-place rewrite/unshare/restore dance as the LHS case above.  */
954 saved = *def_rhs_basep;
955 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
957 new_base = TREE_OPERAND (*def_rhs_basep, 0);
959 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
960 TREE_OPERAND (*def_rhs_basep, 1));
964 new_base = build_fold_addr_expr (*def_rhs_basep);
965 new_offset = TREE_OPERAND (rhs, 1);
967 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
968 new_base, new_offset);
969 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
970 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
971 gimple_assign_set_rhs1 (use_stmt,
972 unshare_expr (TREE_OPERAND (def_rhs, 0)));
973 *def_rhs_basep = saved;
974 fold_stmt_inplace (use_stmt);
975 tidy_after_forward_propagate_addr (use_stmt);
980 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
982 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
983 || gimple_assign_rhs1 (use_stmt) != name)
986 /* The remaining cases are all for turning pointer arithmetic into
987 array indexing. They only apply when we have the address of
988 element zero in an array. If that is not the case then there
990 array_ref = TREE_OPERAND (def_rhs, 0);
991 if ((TREE_CODE (array_ref) != ARRAY_REF
992 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
993 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
994 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
997 rhs2 = gimple_assign_rhs2 (use_stmt);
998 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
999 of the elements in X into &x[C1 + C2/element size]. */
1000 if (TREE_CODE (rhs2) == INTEGER_CST)
1002 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1003 TREE_TYPE (def_rhs),
1007 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1008 new_rhs = unshare_expr (new_rhs);
1009 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1011 if (!is_gimple_min_invariant (new_rhs))
1012 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1014 true, GSI_SAME_STMT);
1015 new_rhs = fold_convert (type, new_rhs);
1017 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1018 use_stmt = gsi_stmt (*use_stmt_gsi);
1019 update_stmt (use_stmt);
1020 tidy_after_forward_propagate_addr (use_stmt);
1025 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1026 converting a multiplication of an index by the size of the
1027 array elements, then the result is converted into the proper
1028 type for the arithmetic. */
1029 if (TREE_CODE (rhs2) == SSA_NAME
1030 && (TREE_CODE (array_ref) != ARRAY_REF
1031 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1032 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1033 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1034 different type than their operands. */
1035 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1036 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1041 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1043 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1044 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1045 node or for recovery of array indexing from pointer arithmetic.
1046 Returns true, if all uses have been propagated into. */
/* NOTE(review): locals (use_stmt, all, result, use_rhs), the per-use
   "all &= result" bookkeeping, continue paths and braces are elided from
   this listing.  */
1049 forward_propagate_addr_expr (tree name, tree rhs)
1051 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1052 imm_use_iterator iter;
1055 bool single_use_p = has_single_use (name);
1057 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1062 /* If the use is not in a simple assignment statement, then
1063 there is nothing we can do. */
1064 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
1066 if (!is_gimple_debug (use_stmt))
1071 /* If the use is in a deeper loop nest, then we do not want
1072 to propagate non-invariant ADDR_EXPRs into the loop as that
1073 is likely adding expression evaluations into the loop. */
1074 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1075 && !is_gimple_min_invariant (rhs))
1082 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1083 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1085 /* If the use has moved to a different statement adjust
1086 the update machinery for the old statement too. */
1087 if (use_stmt != gsi_stmt (gsi))
1089 update_stmt (use_stmt);
1090 use_stmt = gsi_stmt (gsi);
1093 update_stmt (use_stmt);
1097 /* Remove intermediate now unused copy and conversion chains. */
1098 use_rhs = gimple_assign_rhs1 (use_stmt);
1100 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1101 && TREE_CODE (use_rhs) == SSA_NAME
1102 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1104 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1105 release_defs (use_stmt);
1106 gsi_remove (&gsi, true);
/* True only if every use was propagated into and NAME is now dead.  */
1110 return all && has_zero_uses (name);
1114 /* Forward propagate the comparison defined in STMT like
1115 cond_1 = x CMP y to uses of the form
1119 Returns true if stmt is now unused. */
/* NOTE(review): elided listing -- lines such as 1116-1118, 1120-1121,
   1123, 1125 are missing; the body below is incomplete as shown.  */
1122 forward_propagate_comparison (gimple stmt)
1124 tree name = gimple_assign_lhs (stmt);
1126 tree tmp = NULL_TREE;
1127 gimple_stmt_iterator gsi;
1128 enum tree_code code;
1131 /* Don't propagate ssa names that occur in abnormal phis. */
1132 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1133 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1134 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1135 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1138 /* Do not un-cse comparisons. But propagate through copies. */
1139 use_stmt = get_prop_dest_stmt (name, &name);
1141 || !is_gimple_assign (use_stmt))
/* Only propagate into uses whose result has integral type.  */
1144 code = gimple_assign_rhs_code (use_stmt);
1145 lhs = gimple_assign_lhs (use_stmt);
1146 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1149 /* We can propagate the condition into a statement that
1150 computes the logical negation of the comparison result. */
1151 if ((code == BIT_NOT_EXPR
1152 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1153 || (code == BIT_XOR_EXPR
1154 && integer_onep (gimple_assign_rhs2 (use_stmt))))
/* HONOR_NANS decides whether the comparison can be inverted safely
   for floating-point operands; ERROR_MARK means it cannot.  */
1156 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1157 bool nans = HONOR_NANS (TYPE_MODE (type));
1158 enum tree_code inv_code;
1159 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1160 if (inv_code == ERROR_MARK)
1163 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1164 gimple_assign_rhs2 (stmt));
1169 gsi = gsi_for_stmt (use_stmt);
1170 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1171 use_stmt = gsi_stmt (gsi);
1172 update_stmt (use_stmt);
/* Dump the replacement when detailed dumping is requested.  */
1174 if (dump_file && (dump_flags & TDF_DETAILS))
1176 fprintf (dump_file, " Replaced '");
1177 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1178 fprintf (dump_file, "' with '");
1179 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1180 fprintf (dump_file, "'\n");
1183 /* Remove defining statements. */
1184 return remove_prop_source_from_use (name);
1188 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1189 If so, we can change STMT into lhs = y which can later be copy
1190 propagated. Similarly for negation.
1192 This could trivially be formulated as a forward propagation
1193 to immediate uses. However, we already had an implementation
1194 from DOM which used backward propagation via the use-def links.
1196 It turns out that backward propagation is actually faster as
1197 there's less work to do for each NOT/NEG expression we find.
1198 Backwards propagation needs to look at the statement in a single
1199 backlink. Forward propagation needs to look at potentially more
1200 than one forward link.
1202 Returns true when the statement was changed. */
/* NOTE(review): elided listing -- the trailing lines of this function
   (original 1223-1228, including its return statements) are missing
   from this excerpt.  */
1205 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1207 gimple stmt = gsi_stmt (*gsi_p);
1208 tree rhs = gimple_assign_rhs1 (stmt);
1209 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1211 /* See if the RHS_DEF_STMT has the same form as our statement. */
1212 if (is_gimple_assign (rhs_def_stmt)
1213 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1215 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1217 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1218 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1219 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y (or - -y) collapses to y: rewrite STMT's RHS in place.  */
1221 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1222 stmt = gsi_stmt (*gsi_p);
1231 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1232 the condition which we may be able to optimize better. */
/* NOTE(review): elided listing -- lines such as 1238-1240, 1245, 1248,
   1250-1253 and the function's tail are missing from this excerpt.  */
1235 simplify_gimple_switch (gimple stmt)
1237 tree cond = gimple_switch_index (stmt);
1241 /* The optimization that we really care about is removing unnecessary
1242 casts. That will let us do much better in propagating the inferred
1243 constant at the switch target. */
1244 if (TREE_CODE (cond) == SSA_NAME)
1246 def_stmt = SSA_NAME_DEF_STMT (cond);
1247 if (is_gimple_assign (def_stmt))
1249 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1254 def = gimple_assign_rhs1 (def_stmt);
1256 /* ??? Why was Jeff testing this? We are gimple... */
1257 gcc_checking_assert (is_gimple_val (def));
1259 to = TREE_TYPE (cond);
1260 ti = TREE_TYPE (def);
1262 /* If we have an extension that preserves value, then we
1263 can copy the source value into the switch. */
/* A signed source widened into an unsigned target (or vice versa)
   needs one extra bit of precision to be value-preserving.  */
1265 need_precision = TYPE_PRECISION (ti);
1267 if (! INTEGRAL_TYPE_P (ti))
1269 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1271 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1272 need_precision += 1;
1273 if (TYPE_PRECISION (to) < need_precision)
1278 gimple_switch_set_index (stmt, def);
1289 /* For pointers p2 and p1 return p2 - p1 if the
1290 difference is known and constant, otherwise return NULL. */
/* NOTE(review): elided listing -- interior lines (e.g. 1294-1295,
   1299-1300, 1305, 1307, 1312-1314) are missing below, including the
   code that records entries into exps[]/offs[] and bumps cnt[].  */
1293 constant_pointer_difference (tree p1, tree p2)
1296 #define CPD_ITERATIONS 5
1297 tree exps[2][CPD_ITERATIONS];
1298 tree offs[2][CPD_ITERATIONS];
/* Peel each pointer back through ADDR_EXPRs, POINTER_PLUS_EXPRs and
   conversions, accumulating the constant byte offset in OFF.  */
1301 for (i = 0; i < 2; i++)
1303 tree p = i ? p1 : p2;
1304 tree off = size_zero_node;
1306 enum tree_code code;
1308 /* For each of p1 and p2 we need to iterate at least
1309 twice, to handle ADDR_EXPR directly in p1/p2,
1310 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1311 on definition's stmt RHS. Iterate a few extra times. */
1315 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1317 if (TREE_CODE (p) == ADDR_EXPR)
1319 tree q = TREE_OPERAND (p, 0);
1320 HOST_WIDE_INT offset;
1321 tree base = get_addr_base_and_unit_offset (q, &offset);
1326 off = size_binop (PLUS_EXPR, off, size_int (offset));
1328 if (TREE_CODE (q) == MEM_REF
1329 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1331 p = TREE_OPERAND (q, 0);
1332 off = size_binop (PLUS_EXPR, off,
1333 double_int_to_tree (sizetype,
1334 mem_ref_offset (q)));
1343 if (TREE_CODE (p) != SSA_NAME)
1347 if (j == CPD_ITERATIONS)
1349 stmt = SSA_NAME_DEF_STMT (p);
1350 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1352 code = gimple_assign_rhs_code (stmt);
1353 if (code == POINTER_PLUS_EXPR)
1355 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1357 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1358 p = gimple_assign_rhs1 (stmt);
1360 else if (code == ADDR_EXPR || code == NOP_EXPR)
1361 p = gimple_assign_rhs1 (stmt);
/* If both chains reached a common base expression, the difference
   is the difference of the accumulated offsets.  */
1369 for (i = 0; i < cnt[0]; i++)
1370 for (j = 0; j < cnt[1]; j++)
1371 if (exps[0][i] == exps[1][j])
1372 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1377 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1379 memcpy (p, "abcd", 4);
1380 memset (p + 4, ' ', 3);
1382 memcpy (p, "abcd ", 7);
1383 call if the latter can be stored by pieces during expansion. */
/* NOTE(review): elided listing -- many interior lines (early-bail
   returns, closing braces, etc.) are missing below; the body is
   incomplete as shown.  */
1386 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1388 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1389 tree vuse = gimple_vuse (stmt2);
/* STMT1 is the previous store to the same virtual operand chain.  */
1392 stmt1 = SSA_NAME_DEF_STMT (vuse);
1394 switch (DECL_FUNCTION_CODE (callee2))
1396 case BUILT_IN_MEMSET:
1397 if (gimple_call_num_args (stmt2) != 3
1398 || gimple_call_lhs (stmt2)
1400 || BITS_PER_UNIT != 8)
1405 tree ptr1, src1, str1, off1, len1, lhs1;
1406 tree ptr2 = gimple_call_arg (stmt2, 0);
1407 tree val2 = gimple_call_arg (stmt2, 1);
1408 tree len2 = gimple_call_arg (stmt2, 2);
1409 tree diff, vdef, new_str_cst;
1411 unsigned int ptr1_align;
1412 unsigned HOST_WIDE_INT src_len;
1414 use_operand_p use_p;
/* memset's value and length must be compile-time constants.  */
1416 if (!host_integerp (val2, 0)
1417 || !host_integerp (len2, 1))
1419 if (is_gimple_call (stmt1))
1421 /* If first stmt is a call, it needs to be memcpy
1422 or mempcpy, with string literal as second argument and
1424 callee1 = gimple_call_fndecl (stmt1);
1425 if (callee1 == NULL_TREE
1426 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1427 || gimple_call_num_args (stmt1) != 3)
1429 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1430 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1432 ptr1 = gimple_call_arg (stmt1, 0);
1433 src1 = gimple_call_arg (stmt1, 1);
1434 len1 = gimple_call_arg (stmt1, 2);
1435 lhs1 = gimple_call_lhs (stmt1);
1436 if (!host_integerp (len1, 1))
1438 str1 = string_constant (src1, &off1);
1439 if (str1 == NULL_TREE)
/* The copied range must lie entirely within the string literal and
   the literal's element type must be plain char-sized.  */
1441 if (!host_integerp (off1, 1)
1442 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1443 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1444 - tree_low_cst (off1, 1)) > 0
1445 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1446 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1447 != TYPE_MODE (char_type_node))
1450 else if (gimple_assign_single_p (stmt1))
1452 /* Otherwise look for length 1 memcpy optimized into
1454 ptr1 = gimple_assign_lhs (stmt1);
1455 src1 = gimple_assign_rhs1 (stmt1);
1456 if (TREE_CODE (ptr1) != MEM_REF
1457 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1458 || !host_integerp (src1, 0))
1460 ptr1 = build_fold_addr_expr (ptr1);
1461 callee1 = NULL_TREE;
1462 len1 = size_one_node;
1464 off1 = size_zero_node;
/* DIFF is the constant byte distance between the two destinations.  */
1470 diff = constant_pointer_difference (ptr1, ptr2);
1471 if (diff == NULL && lhs1 != NULL)
1473 diff = constant_pointer_difference (lhs1, ptr2);
1474 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1476 diff = size_binop (PLUS_EXPR, diff,
1477 fold_convert (sizetype, len1));
1479 /* If the difference between the second and first destination pointer
1480 is not constant, or is bigger than memcpy length, bail out. */
1482 || !host_integerp (diff, 1)
1483 || tree_int_cst_lt (len1, diff))
1486 /* Use maximum of difference plus memset length and memcpy length
1487 as the new memcpy length, if it is too big, bail out. */
1488 src_len = tree_low_cst (diff, 1);
1489 src_len += tree_low_cst (len2, 1);
1490 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1491 src_len = tree_low_cst (len1, 1);
1495 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1496 with bigger length will return different result. */
1497 if (lhs1 != NULL_TREE
1498 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1499 && (TREE_CODE (lhs1) != SSA_NAME
1500 || !single_imm_use (lhs1, &use_p, &use_stmt)
1501 || use_stmt != stmt2))
1504 /* If anything reads memory in between memcpy and memset
1505 call, the modified memcpy call might change it. */
1506 vdef = gimple_vdef (stmt1);
1508 && (!single_imm_use (vdef, &use_p, &use_stmt)
1509 || use_stmt != stmt2))
1512 ptr1_align = get_pointer_alignment (ptr1);
1513 /* Construct the new source string literal. */
1514 src_buf = XALLOCAVEC (char, src_len + 1);
1517 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1518 tree_low_cst (len1, 1));
1520 src_buf[0] = tree_low_cst (src1, 0);
1521 memset (src_buf + tree_low_cst (diff, 1),
1522 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1523 src_buf[src_len] = '\0';
1524 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1525 handle embedded '\0's. */
1526 if (strlen (src_buf) != src_len)
1528 rtl_profile_for_bb (gimple_bb (stmt2));
1529 /* If the new memcpy wouldn't be emitted by storing the literal
1530 by pieces, this optimization might enlarge .rodata too much,
1531 as commonly used string literals couldn't be shared any
1533 if (!can_store_by_pieces (src_len,
1534 builtin_strncpy_read_str,
1535 src_buf, ptr1_align, false))
1538 new_str_cst = build_string_literal (src_len, src_buf);
1541 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1543 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1544 gimple_call_set_lhs (stmt1, NULL_TREE);
1545 gimple_call_set_arg (stmt1, 1, new_str_cst);
1546 gimple_call_set_arg (stmt1, 2,
1547 build_int_cst (TREE_TYPE (len1), src_len));
1548 update_stmt (stmt1);
1549 unlink_stmt_vdef (stmt2);
1550 gsi_remove (gsi_p, true);
1551 release_defs (stmt2);
1552 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1553 release_ssa_name (lhs1);
1558 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1559 assignment, remove STMT1 and change memset call into
1561 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1563 if (!is_gimple_val (ptr1))
1564 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1565 true, GSI_SAME_STMT);
1566 gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
1567 gimple_call_set_arg (stmt2, 0, ptr1);
1568 gimple_call_set_arg (stmt2, 1, new_str_cst);
1569 gimple_call_set_arg (stmt2, 2,
1570 build_int_cst (TREE_TYPE (len2), src_len));
1571 unlink_stmt_vdef (stmt1);
1572 gsi_remove (&gsi, true);
1573 release_defs (stmt1);
1574 update_stmt (stmt2);
1585 /* Checks if expression has type of one-bit precision, or is a known
1586 truth-valued expression. */
/* NOTE(review): elided listing -- lines 1587, 1589-1590, 1592, 1594,
   1598 and the final return are missing from this excerpt.  */
1588 truth_valued_ssa_name (tree name)
1591 tree type = TREE_TYPE (name);
1593 if (!INTEGRAL_TYPE_P (type))
1595 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1596 necessarily one and so ~X is not equal to !X. */
1597 if (TYPE_PRECISION (type) == 1)
/* Otherwise NAME is truth-valued if its defining statement computes
   a comparison or other truth-valued expression.  */
1599 def = SSA_NAME_DEF_STMT (name);
1600 if (is_gimple_assign (def))
1601 return truth_value_p (gimple_assign_rhs_code (def));
1605 /* Helper routine for simplify_bitwise_binary_1 function.
1606 Return for the SSA name NAME the expression X if it mets condition
1607 NAME = !X. Otherwise return NULL_TREE.
1608 Detected patterns for NAME = !X are:
1609 !X and X == 0 for X with integral type.
1610 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
/* NOTE(review): elided listing -- several lines (e.g. 1611, 1613-1614,
   1625-1626, 1636-1639, the switch/case scaffolding and returns) are
   missing from this excerpt.  */
1612 lookup_logical_inverted_value (tree name)
1615 enum tree_code code;
1618 /* If name has none-intergal type, or isn't a SSA_NAME, then
1620 if (TREE_CODE (name) != SSA_NAME
1621 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1623 def = SSA_NAME_DEF_STMT (name);
1624 if (!is_gimple_assign (def))
1627 code = gimple_assign_rhs_code (def);
1628 op1 = gimple_assign_rhs1 (def);
1631 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1632 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1633 if (code == EQ_EXPR || code == NE_EXPR
1634 || code == BIT_XOR_EXPR)
1635 op2 = gimple_assign_rhs2 (def);
/* ~X is an inversion only when NAME itself is truth-valued.  */
1640 if (truth_valued_ssa_name (name))
1644 /* Check if we have X == 0 and X has an integral type. */
1645 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1647 if (integer_zerop (op2))
1651 /* Check if we have X != 1 and X is a truth-valued. */
1652 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1654 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1658 /* Check if we have X ^ 1 and X is truth valued. */
1659 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1669 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1670 operations CODE, if one operand has the logically inverted
1671 value of the other. */
/* NOTE(review): elided listing -- lines such as 1672, 1675-1677,
   1681-1682, 1685-1686, 1689, 1691-1694, 1700 and the function's tail
   are missing from this excerpt.  */
1673 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1674 tree arg1, tree arg2)
1678 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1679 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1680 && code != BIT_XOR_EXPR)
1683 /* First check if operands ARG1 and ARG2 are equal. If so
1684 return NULL_TREE as this optimization is handled fold_stmt. */
1687 /* See if we have in arguments logical-not patterns. */
1688 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1690 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
/* X & !X is always zero regardless of X's value.  */
1695 if (code == BIT_AND_EXPR)
1696 return fold_convert (type, integer_zero_node);
1697 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1698 if (truth_valued_ssa_name (anot))
1699 return fold_convert (type, integer_one_node);
1701 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1705 /* Simplify bitwise binary operations.
1706 Return true if a transformation applied, otherwise return false. */
/* NOTE(review): elided listing -- a number of interior lines (returns,
   closing braces, declarations such as 'res') are missing below.  */
1709 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1711 gimple stmt = gsi_stmt (*gsi);
1712 tree arg1 = gimple_assign_rhs1 (stmt);
1713 tree arg2 = gimple_assign_rhs2 (stmt);
1714 enum tree_code code = gimple_assign_rhs_code (stmt);
1716 gimple def1 = NULL, def2 = NULL;
1717 tree def1_arg1, def2_arg1;
1718 enum tree_code def1_code, def2_code;
/* Record the defining statement and code of each SSA-name operand so
   the patterns below can look one level up the use-def chain.  */
1720 def1_code = TREE_CODE (arg1);
1722 if (TREE_CODE (arg1) == SSA_NAME)
1724 def1 = SSA_NAME_DEF_STMT (arg1);
1725 if (is_gimple_assign (def1))
1727 def1_code = gimple_assign_rhs_code (def1);
1728 def1_arg1 = gimple_assign_rhs1 (def1);
1732 def2_code = TREE_CODE (arg2);
1734 if (TREE_CODE (arg2) == SSA_NAME)
1736 def2 = SSA_NAME_DEF_STMT (arg2);
1737 if (is_gimple_assign (def2))
1739 def2_code = gimple_assign_rhs_code (def2);
1740 def2_arg1 = gimple_assign_rhs1 (def2);
1744 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1745 if (TREE_CODE (arg2) == INTEGER_CST
1746 && CONVERT_EXPR_CODE_P (def1_code)
1747 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1748 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1751 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1753 gimple_build_assign_with_ops (code, tem, def1_arg1,
1754 fold_convert_loc (gimple_location (stmt),
1755 TREE_TYPE (def1_arg1),
1757 tem = make_ssa_name (tem, newop);
1758 gimple_assign_set_lhs (newop, tem);
1759 gimple_set_location (newop, gimple_location (stmt));
1760 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1761 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1762 tem, NULL_TREE, NULL_TREE);
1763 update_stmt (gsi_stmt (*gsi));
1767 /* For bitwise binary operations apply operand conversions to the
1768 binary operation result instead of to the operands. This allows
1769 to combine successive conversions and bitwise binary operations. */
1770 if (CONVERT_EXPR_CODE_P (def1_code)
1771 && CONVERT_EXPR_CODE_P (def2_code)
1772 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1773 /* Make sure that the conversion widens the operands, or has same
1774 precision, or that it changes the operation to a bitfield
1776 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1777 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1778 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1780 || (TYPE_PRECISION (TREE_TYPE (arg1))
1781 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1784 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1786 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1787 tem = make_ssa_name (tem, newop);
1788 gimple_assign_set_lhs (newop, tem);
1789 gimple_set_location (newop, gimple_location (stmt));
1790 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1791 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1792 tem, NULL_TREE, NULL_TREE);
1793 update_stmt (gsi_stmt (*gsi));
1797 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1798 if (code == BIT_AND_EXPR
1799 && def1_code == BIT_IOR_EXPR
1800 && TREE_CODE (arg2) == INTEGER_CST
1801 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1803 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1804 arg2, gimple_assign_rhs2 (def1));
/* When CST1 & CST2 is zero the IOR contributes nothing; just mask A.  */
1807 if (integer_zerop (cst))
1809 gimple_assign_set_rhs1 (stmt, def1_arg1);
1813 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1814 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1815 tem, def1_arg1, arg2);
1816 tem = make_ssa_name (tem, newop);
1817 gimple_assign_set_lhs (newop, tem);
1818 gimple_set_location (newop, gimple_location (stmt));
1819 /* Make sure to re-process the new stmt as it's walking upwards. */
1820 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1821 gimple_assign_set_rhs1 (stmt, tem);
1822 gimple_assign_set_rhs2 (stmt, cst);
1823 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1828 /* Combine successive equal operations with constants. */
1829 if ((code == BIT_AND_EXPR
1830 || code == BIT_IOR_EXPR
1831 || code == BIT_XOR_EXPR)
1832 && def1_code == code
1833 && TREE_CODE (arg2) == INTEGER_CST
1834 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1836 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1837 arg2, gimple_assign_rhs2 (def1));
1838 gimple_assign_set_rhs1 (stmt, def1_arg1);
1839 gimple_assign_set_rhs2 (stmt, cst);
1844 /* Canonicalize X ^ ~0 to ~X. */
1845 if (code == BIT_XOR_EXPR
1846 && TREE_CODE (arg2) == INTEGER_CST
1847 && integer_all_onesp (arg2))
1849 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1850 gcc_assert (gsi_stmt (*gsi) == stmt);
1855 /* Try simple folding for X op !X, and X op X. */
1856 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1857 if (res != NULL_TREE)
1859 gimple_assign_set_rhs_from_tree (gsi, res);
1860 update_stmt (gsi_stmt (*gsi));
1868 /* Perform re-associations of the plus or minus statement STMT that are
1869 always permitted. Returns true if the CFG was changed. */
/* NOTE(review): elided listing -- many interior lines (e.g. 1885-1888,
   the 'done' label region, several fold_binary argument lines and
   closing braces) are missing from this excerpt.  */
1872 associate_plusminus (gimple stmt)
1874 tree rhs1 = gimple_assign_rhs1 (stmt);
1875 tree rhs2 = gimple_assign_rhs2 (stmt);
1876 enum tree_code code = gimple_assign_rhs_code (stmt);
1877 gimple_stmt_iterator gsi;
1880 /* We can't reassociate at all for saturating types. */
1881 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1884 /* First contract negates. */
1889 /* A +- (-B) -> A -+ B. */
1890 if (TREE_CODE (rhs2) == SSA_NAME)
1892 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1893 if (is_gimple_assign (def_stmt)
1894 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1895 && can_propagate_from (def_stmt))
1897 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1898 gimple_assign_set_rhs_code (stmt, code);
1899 rhs2 = gimple_assign_rhs1 (def_stmt);
1900 gimple_assign_set_rhs2 (stmt, rhs2);
1901 gimple_set_modified (stmt, true);
1906 /* (-A) + B -> B - A. */
1907 if (TREE_CODE (rhs1) == SSA_NAME
1908 && code == PLUS_EXPR)
1910 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1911 if (is_gimple_assign (def_stmt)
1912 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1913 && can_propagate_from (def_stmt))
1916 gimple_assign_set_rhs_code (stmt, code);
1918 gimple_assign_set_rhs1 (stmt, rhs1);
1919 rhs2 = gimple_assign_rhs1 (def_stmt);
1920 gimple_assign_set_rhs2 (stmt, rhs2);
1921 gimple_set_modified (stmt, true);
1928 /* We can't reassociate floating-point or fixed-point plus or minus
1929 because of saturation to +-Inf. */
1930 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1931 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1934 /* Second match patterns that allow contracting a plus-minus pair
1935 irrespective of overflow issues.
1937 (A +- B) - A -> +- B
1939 (CST +- A) +- CST -> CST +- A
1940 (A + CST) +- CST -> A + CST
1943 A - (A +- B) -> -+ B
1944 A +- (B +- A) -> +- B
1945 CST +- (CST +- A) -> CST +- A
1946 CST +- (A +- CST) -> CST +- A
1949 via commutating the addition and contracting operations to zero
1950 by reassociation. */
/* First look through the definition of RHS1.  */
1952 gsi = gsi_for_stmt (stmt);
1953 if (TREE_CODE (rhs1) == SSA_NAME)
1955 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1956 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
1958 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1959 if (def_code == PLUS_EXPR
1960 || def_code == MINUS_EXPR)
1962 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1963 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1964 if (operand_equal_p (def_rhs1, rhs2, 0)
1965 && code == MINUS_EXPR)
1967 /* (A +- B) - A -> +- B. */
1968 code = ((def_code == PLUS_EXPR)
1969 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1972 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1973 gcc_assert (gsi_stmt (gsi) == stmt);
1974 gimple_set_modified (stmt, true);
1976 else if (operand_equal_p (def_rhs2, rhs2, 0)
1977 && code != def_code)
1979 /* (A +- B) -+ B -> A. */
1980 code = TREE_CODE (def_rhs1);
1983 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1984 gcc_assert (gsi_stmt (gsi) == stmt);
1985 gimple_set_modified (stmt, true);
1987 else if (TREE_CODE (rhs2) == INTEGER_CST
1988 && TREE_CODE (def_rhs1) == INTEGER_CST)
1990 /* (CST +- A) +- CST -> CST +- A. */
1991 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1993 if (cst && !TREE_OVERFLOW (cst))
1996 gimple_assign_set_rhs_code (stmt, code);
1998 gimple_assign_set_rhs1 (stmt, rhs1);
2000 gimple_assign_set_rhs2 (stmt, rhs2);
2001 gimple_set_modified (stmt, true);
2004 else if (TREE_CODE (rhs2) == INTEGER_CST
2005 && TREE_CODE (def_rhs2) == INTEGER_CST
2006 && def_code == PLUS_EXPR)
2008 /* (A + CST) +- CST -> A + CST. */
2009 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2011 if (cst && !TREE_OVERFLOW (cst))
2014 gimple_assign_set_rhs_code (stmt, code);
2016 gimple_assign_set_rhs1 (stmt, rhs1);
2018 gimple_assign_set_rhs2 (stmt, rhs2);
2019 gimple_set_modified (stmt, true);
2023 else if (def_code == BIT_NOT_EXPR
2024 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2026 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2027 if (code == PLUS_EXPR
2028 && operand_equal_p (def_rhs1, rhs2, 0))
/* ~A + A -> -1 (all-ones for the type).  */
2032 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2034 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2035 gcc_assert (gsi_stmt (gsi) == stmt);
2036 gimple_set_modified (stmt, true);
2038 else if (code == PLUS_EXPR
2039 && integer_onep (rhs1))
2045 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2046 gcc_assert (gsi_stmt (gsi) == stmt);
2047 gimple_set_modified (stmt, true);
/* Then the mirrored patterns, looking through RHS2's definition.  */
2053 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2055 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2056 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2058 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2059 if (def_code == PLUS_EXPR
2060 || def_code == MINUS_EXPR)
2062 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2063 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2064 if (operand_equal_p (def_rhs1, rhs1, 0)
2065 && code == MINUS_EXPR)
2067 /* A - (A +- B) -> -+ B. */
2068 code = ((def_code == PLUS_EXPR)
2069 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2072 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2073 gcc_assert (gsi_stmt (gsi) == stmt);
2074 gimple_set_modified (stmt, true);
2076 else if (operand_equal_p (def_rhs2, rhs1, 0)
2077 && code != def_code)
2079 /* A +- (B +- A) -> +- B. */
2080 code = ((code == PLUS_EXPR)
2081 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2084 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2085 gcc_assert (gsi_stmt (gsi) == stmt);
2086 gimple_set_modified (stmt, true);
2088 else if (TREE_CODE (rhs1) == INTEGER_CST
2089 && TREE_CODE (def_rhs1) == INTEGER_CST)
2091 /* CST +- (CST +- A) -> CST +- A. */
2092 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2094 if (cst && !TREE_OVERFLOW (cst))
2096 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2097 gimple_assign_set_rhs_code (stmt, code);
2099 gimple_assign_set_rhs1 (stmt, rhs1);
2101 gimple_assign_set_rhs2 (stmt, rhs2);
2102 gimple_set_modified (stmt, true);
2105 else if (TREE_CODE (rhs1) == INTEGER_CST
2106 && TREE_CODE (def_rhs2) == INTEGER_CST)
2108 /* CST +- (A +- CST) -> CST +- A. */
2109 tree cst = fold_binary (def_code == code
2110 ? PLUS_EXPR : MINUS_EXPR,
2113 if (cst && !TREE_OVERFLOW (cst))
2116 gimple_assign_set_rhs1 (stmt, rhs1);
2118 gimple_assign_set_rhs2 (stmt, rhs2);
2119 gimple_set_modified (stmt, true);
2123 else if (def_code == BIT_NOT_EXPR
2124 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2126 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2127 if (code == PLUS_EXPR
2128 && operand_equal_p (def_rhs1, rhs1, 0))
/* A + ~A -> -1 (all-ones for the type).  */
2132 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2134 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2135 gcc_assert (gsi_stmt (gsi) == stmt);
2136 gimple_set_modified (stmt, true);
/* Refold the rewritten statement; EH cleanup may change the CFG,
   which is what the 'true' (CFG changed) result reports.  */
2143 if (gimple_modified_p (stmt))
2145 fold_stmt_inplace (stmt);
2147 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2148 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2155 /* Combine two conversions in a row for the second conversion at *GSI.
2156 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2157 run. Else it returns 0. */
/* NOTE(review): elided listing -- interior lines (e.g. 2163-2164,
   2174-2178, 2180-2181, 2184-2185 and several update_stmt/return
   lines) are missing from this excerpt.  */
2160 combine_conversions (gimple_stmt_iterator *gsi)
2162 gimple stmt = gsi_stmt (*gsi);
2165 enum tree_code code = gimple_assign_rhs_code (stmt);
2167 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2168 || code == FLOAT_EXPR
2169 || code == FIX_TRUNC_EXPR);
/* A conversion to the same effective type is a plain copy.  */
2171 lhs = gimple_assign_lhs (stmt);
2172 op0 = gimple_assign_rhs1 (stmt);
2173 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2175 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2179 if (TREE_CODE (op0) != SSA_NAME)
2182 def_stmt = SSA_NAME_DEF_STMT (op0);
2183 if (!is_gimple_assign (def_stmt))
2186 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
/* Classify the three types involved: innermost (before the first
   conversion), intermediate, and final.  */
2188 tree defop0 = gimple_assign_rhs1 (def_stmt);
2189 tree type = TREE_TYPE (lhs);
2190 tree inside_type = TREE_TYPE (defop0);
2191 tree inter_type = TREE_TYPE (op0);
2192 int inside_int = INTEGRAL_TYPE_P (inside_type);
2193 int inside_ptr = POINTER_TYPE_P (inside_type);
2194 int inside_float = FLOAT_TYPE_P (inside_type);
2195 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2196 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2197 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2198 int inter_int = INTEGRAL_TYPE_P (inter_type);
2199 int inter_ptr = POINTER_TYPE_P (inter_type);
2200 int inter_float = FLOAT_TYPE_P (inter_type);
2201 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2202 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2203 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2204 int final_int = INTEGRAL_TYPE_P (type);
2205 int final_ptr = POINTER_TYPE_P (type);
2206 int final_float = FLOAT_TYPE_P (type);
2207 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2208 unsigned int final_prec = TYPE_PRECISION (type);
2209 int final_unsignedp = TYPE_UNSIGNED (type);
2211 /* In addition to the cases of two conversions in a row
2212 handled below, if we are converting something to its own
2213 type via an object of identical or wider precision, neither
2214 conversion is needed. */
2215 if (useless_type_conversion_p (type, inside_type)
2216 && (((inter_int || inter_ptr) && final_int)
2217 || (inter_float && final_float))
2218 && inter_prec >= final_prec)
2220 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2221 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2223 return remove_prop_source_from_use (op0) ? 2 : 1;
2226 /* Likewise, if the intermediate and initial types are either both
2227 float or both integer, we don't need the middle conversion if the
2228 former is wider than the latter and doesn't change the signedness
2229 (for integers). Avoid this if the final type is a pointer since
2230 then we sometimes need the middle conversion. Likewise if the
2231 final type has a precision not equal to the size of its mode. */
2232 if (((inter_int && inside_int)
2233 || (inter_float && inside_float)
2234 || (inter_vec && inside_vec))
2235 && inter_prec >= inside_prec
2236 && (inter_float || inter_vec
2237 || inter_unsignedp == inside_unsignedp)
2238 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2239 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2241 && (! final_vec || inter_prec == inside_prec))
2243 gimple_assign_set_rhs1 (stmt, defop0);
2245 return remove_prop_source_from_use (op0) ? 2 : 1;
2248 /* If we have a sign-extension of a zero-extended value, we can
2249 replace that by a single zero-extension. */
2250 if (inside_int && inter_int && final_int
2251 && inside_prec < inter_prec && inter_prec < final_prec
2252 && inside_unsignedp && !inter_unsignedp)
2254 gimple_assign_set_rhs1 (stmt, defop0);
2256 return remove_prop_source_from_use (op0) ? 2 : 1;
2259 /* Two conversions in a row are not needed unless:
2260 - some conversion is floating-point (overstrict for now), or
2261 - some conversion is a vector (overstrict for now), or
2262 - the intermediate type is narrower than both initial and
2264 - the intermediate type and innermost type differ in signedness,
2265 and the outermost type is wider than the intermediate, or
2266 - the initial type is a pointer type and the precisions of the
2267 intermediate and final types differ, or
2268 - the final type is a pointer type and the precisions of the
2269 initial and intermediate types differ. */
2270 if (! inside_float && ! inter_float && ! final_float
2271 && ! inside_vec && ! inter_vec && ! final_vec
2272 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2273 && ! (inside_int && inter_int
2274 && inter_unsignedp != inside_unsignedp
2275 && inter_prec < final_prec)
2276 && ((inter_unsignedp && inter_prec > inside_prec)
2277 == (final_unsignedp && final_prec > inter_prec))
2278 && ! (inside_ptr && inter_prec != final_prec)
2279 && ! (final_ptr && inside_prec != inter_prec)
2280 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2281 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2283 gimple_assign_set_rhs1 (stmt, defop0);
2285 return remove_prop_source_from_use (op0) ? 2 : 1;
2288 /* A truncation to an unsigned type should be canonicalized as
2289 bitwise and of a mask. */
2290 if (final_int && inter_int && inside_int
2291 && final_prec == inside_prec
2292 && final_prec > inter_prec
2296 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2299 (inside_type, double_int_mask (inter_prec)));
2300 if (!useless_type_conversion_p (type, inside_type))
2302 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2304 gimple_assign_set_rhs1 (stmt, tem);
2307 gimple_assign_set_rhs_from_tree (gsi, tem);
2308 update_stmt (gsi_stmt (*gsi));
2316 /* Main entry point for the forward propagation and statement combine
2320 ssa_forward_propagate_and_combine (void)
/* NOTE(review): this listing is elided -- the enclosing basic-block loop
   and most braces are not visible here; the comments below describe only
   the statements that are shown.  */
/* TODO_* flags accumulated for the pass manager.  */
2323 unsigned int todoflags = 0;
2325 cfg_changed = false;
/* PREV remembers the last combined statement so iteration can resume
   there after a transformation; PREV_INITIALIZED guards its first use.  */
2329 gimple_stmt_iterator gsi, prev;
2330 bool prev_initialized;
2332 /* Apply forward propagation to all stmts in the basic-block.
2333 Note we update GSI within the loop as necessary. */
2334 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2336 gimple stmt = gsi_stmt (gsi);
2338 enum tree_code code;
/* Only SSA-name-defining assignments with at least one use are
   interesting as propagation sources; everything else is skipped.  */
2340 if (!is_gimple_assign (stmt))
2346 lhs = gimple_assign_lhs (stmt);
2347 rhs = gimple_assign_rhs1 (stmt);
2348 code = gimple_assign_rhs_code (stmt);
2349 if (TREE_CODE (lhs) != SSA_NAME
2350 || has_zero_uses (lhs))
2356 /* If this statement sets an SSA_NAME to an address,
2357 try to propagate the address into the uses of the SSA_NAME. */
2358 if (code == ADDR_EXPR
2359 /* Handle pointer conversions on invariant addresses
2360 as well, as this is valid gimple. */
2361 || (CONVERT_EXPR_CODE_P (code)
2362 && TREE_CODE (rhs) == ADDR_EXPR
2363 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2365 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2368 || decl_address_invariant_p (base))
2369 && !stmt_references_abnormal_ssa_name (stmt)
2370 && forward_propagate_addr_expr (lhs, rhs))
/* All uses were replaced, so the defining statement is dead:
   release its defs and remove it.  */
2372 release_defs (stmt);
2373 todoflags |= TODO_remove_unused_locals;
2374 gsi_remove (&gsi, true);
/* Pointer arithmetic: rewrite PTR p+ CST as the address of a MEM_REF
   so it can be propagated through forward_propagate_addr_expr too.  */
2379 else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt))
2381 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
2382 /* ??? Better adjust the interface to that function
2383 instead of building new trees here. */
2384 && forward_propagate_addr_expr
2388 fold_build2 (MEM_REF,
2389 TREE_TYPE (TREE_TYPE (rhs)),
2393 gimple_assign_rhs2 (stmt))))))
2395 release_defs (stmt);
2396 todoflags |= TODO_remove_unused_locals;
2397 gsi_remove (&gsi, true);
2399 else if (is_gimple_min_invariant (rhs))
2401 /* Make sure to fold &a[0] + off_1 here. */
2402 fold_stmt_inplace (stmt);
2404 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2410 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2412 if (forward_propagate_comparison (stmt))
2420 /* Combine stmts with the stmts defining their operands.
2421 Note we update GSI within the loop as necessary. */
2422 prev_initialized = false;
2423 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2425 gimple stmt = gsi_stmt (gsi);
2426 bool changed = false;
/* Dispatch on statement kind: assigns, conditions, switches and calls
   each have their own simplifiers below.  */
2428 switch (gimple_code (stmt))
2432 tree rhs1 = gimple_assign_rhs1 (stmt);
2433 enum tree_code code = gimple_assign_rhs_code (stmt);
2435 if ((code == BIT_NOT_EXPR
2436 || code == NEGATE_EXPR)
2437 && TREE_CODE (rhs1) == SSA_NAME)
2438 changed = simplify_not_neg_expr (&gsi);
2439 else if (code == COND_EXPR)
2441 /* In this case the entire COND_EXPR is in rhs1. */
2443 fold_defer_overflow_warnings ();
2444 did_something = forward_propagate_into_cond (&gsi);
2445 stmt = gsi_stmt (gsi);
/* A return of 2 from the propagation helpers is used below to mean an
   edge was removed, i.e. a CFG cleanup will be required.  */
2446 if (did_something == 2)
2448 fold_undefer_overflow_warnings
2449 (!TREE_NO_WARNING (rhs1) && did_something, stmt,
2450 WARN_STRICT_OVERFLOW_CONDITIONAL);
2451 changed = did_something != 0;
2453 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2455 bool no_warning = gimple_no_warning_p (stmt);
2457 fold_defer_overflow_warnings ();
2458 did_something = forward_propagate_into_comparison (&gsi);
2459 if (did_something == 2)
2461 fold_undefer_overflow_warnings
/* NOTE(review): CHANGED is still false here (it is initialized to false
   above and only assigned two statements below), so the first argument
   is always false and the deferred strict-overflow warning is never
   issued for this case.  The COND_EXPR branch above passes
   DID_SOMETHING instead -- verify whether this should read
   "!no_warning && did_something".  */
2462 (!no_warning && changed,
2463 stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2464 changed = did_something != 0;
2466 else if (code == BIT_AND_EXPR
2467 || code == BIT_IOR_EXPR
2468 || code == BIT_XOR_EXPR)
2469 changed = simplify_bitwise_binary (&gsi);
2470 else if (code == PLUS_EXPR
2471 || code == MINUS_EXPR)
2472 changed = associate_plusminus (stmt);
2473 else if (CONVERT_EXPR_CODE_P (code)
2474 || code == FLOAT_EXPR
2475 || code == FIX_TRUNC_EXPR)
2477 int did_something = combine_conversions (&gsi);
2478 if (did_something == 2)
2480 changed = did_something != 0;
2486 changed = simplify_gimple_switch (stmt);
2492 fold_defer_overflow_warnings ();
2493 did_something = forward_propagate_into_gimple_cond (stmt);
2494 if (did_something == 2)
2496 fold_undefer_overflow_warnings
2497 (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2498 changed = did_something != 0;
/* Calls: only normal (BUILT_IN_NORMAL) built-ins have folders here.  */
2504 tree callee = gimple_call_fndecl (stmt);
2505 if (callee != NULL_TREE
2506 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2507 changed = simplify_builtin_call (&gsi, callee);
2516 /* If the stmt changed then re-visit it and the statements
2517 inserted before it. */
2518 if (!prev_initialized)
2519 gsi = gsi_start_bb (bb);
2529 prev_initialized = true;
2536 todoflags |= TODO_cleanup_cfg;
/* Pass gate: the forward-propagation pass runs only when
   flag_tree_forwprop is set (presumably controlled by -ftree-forwprop;
   confirm against common.opt).  */
2543 gate_forwprop (void)
2545 return flag_tree_forwprop;
2548 struct gimple_opt_pass pass_forwprop =
2552 "forwprop", /* name */
2553 gate_forwprop, /* gate */
2554 ssa_forward_propagate_and_combine, /* execute */
2557 0, /* static_pass_number */
2558 TV_TREE_FORWPROP, /* tv_id */
2559 PROP_cfg | PROP_ssa, /* properties_required */
2560 0, /* properties_provided */
2561 0, /* properties_destroyed */
2562 0, /* todo_flags_start */
2565 | TODO_verify_ssa /* todo_flags_finish */