1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
/* NOTE(review): this extraction dropped short source lines (return type,
   braces, return statements); the code below is as-extracted.  */
57 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
/* Initializers we will emit ourselves are always safe to refer to;
   only worry when DECL comes from an external variable's initializer
   (or one placed in another LTO partition).  */
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
76 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
78 if (DECL_EXTERNAL (decl)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
81 /* We are folding reference from external vtable. The vtable may refer
82 to a symbol keyed to other compilation unit. The other compilation
83 unit may be in separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl)
85 && DECL_EXTERNAL (decl)
86 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
88 /* When function is public, we always can introduce new reference.
89 Exception are the COMDAT functions where introducing a direct
90 reference imply need to include function body in the current unit. */
91 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
93 /* We are not at ltrans stage; so don't worry about WHOPR.
94 Also when still gimplifying all referred comdat functions will be
97 As observed in PR20991 for already optimized out comdat virtual functions
98 it may be tempting to not necessarily give up because the copy will be
99 output elsewhere when corresponding vtable is output.
100 This is however not possible - ABI specify that COMDATs are output in
101 units where they are used and when the other unit was compiled with LTO
102 it is possible that vtable was kept public while the function itself
104 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
107 /* OK we are seeing either COMDAT or static variable. In this case we must
108 check that the definition is still around so we can refer it. */
109 if (TREE_CODE (decl) == FUNCTION_DECL)
111 node = cgraph_get_node (decl);
112 /* Check that we still have function body and that we didn't take
113 the decision to eliminate offline copy of the function yet.
114 The second is important when devirtualization happens during final
115 compilation stage when making a new reference no longer makes callee
117 if (!node || !node->analyzed || node->global.inlined_to)
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
/* Variables: the varpool node must still be analyzed for the
   definition to be referencable.  */
123 else if (TREE_CODE (decl) == VAR_DECL)
125 vnode = varpool_get_node (decl);
126 if (!vnode || !vnode->analyzed)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
135 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
136 acceptable form for is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
/* NOTE(review): short source lines (return type, braces, returns) were
   dropped by the extraction; code below is as-extracted.  */
140 canonicalize_constructor_val (tree cval, tree from_decl)
142 STRIP_USELESS_TYPE_CONVERSION (cval);
/* Canonicalize PTR p+ CST into &MEM[ptr + CST] so the result can be a
   gimple invariant address.  */
143 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
144 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
146 tree ptr = TREE_OPERAND (cval, 0);
147 if (is_gimple_min_invariant (ptr))
148 cval = build1_loc (EXPR_LOCATION (cval),
149 ADDR_EXPR, TREE_TYPE (ptr),
150 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
152 fold_convert (ptr_type_node,
153 TREE_OPERAND (cval, 1))));
155 if (TREE_CODE (cval) == ADDR_EXPR)
157 tree base = NULL_TREE;
/* Replace a compound literal by the decl that holds its value.  */
158 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
160 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
162 TREE_OPERAND (cval, 0) = base;
165 base = get_base_address (TREE_OPERAND (cval, 0));
/* Refuse addresses of decls the current unit cannot legally
   reference; otherwise make sure the symbol stays materialized.  */
169 if ((TREE_CODE (base) == VAR_DECL
170 || TREE_CODE (base) == FUNCTION_DECL)
171 && !can_refer_decl_in_current_unit_p (base, from_decl))
173 if (TREE_CODE (base) == VAR_DECL)
174 TREE_ADDRESSABLE (base) = 1;
175 else if (TREE_CODE (base) == FUNCTION_DECL)
177 /* Make sure we create a cgraph node for functions we'll reference.
178 They can be non-existent if the reference comes from an entry
179 of an external vtable for example. */
180 cgraph_get_create_node (base);
182 /* Fixup types in global initializers. */
183 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
184 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
189 /* If SYM is a constant variable with known value, return the value.
190 NULL_TREE is returned otherwise. */
/* NOTE(review): short source lines (return type, braces, returns) were
   dropped by the extraction; code below is as-extracted.  */
193 get_symbol_constant_value (tree sym)
195 if (const_value_known_p (sym))
197 tree val = DECL_INITIAL (sym);
/* The initializer must canonicalize into a gimple invariant to be
   usable as a replacement value.  */
200 val = canonicalize_constructor_val (val, sym);
201 if (val && is_gimple_min_invariant (val))
206 /* Variables declared 'const' without an initializer
207 have zero as the initializer if they may not be
208 overridden at link or run time. */
/* Only scalar (integral / float) consts get the implicit zero.  */
210 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
211 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
212 return build_zero_cst (TREE_TYPE (sym));
220 /* Subroutine of fold_stmt. We perform several simplifications of the
221 memory reference tree EXPR and make sure to re-gimplify them properly
222 after propagation of constant addresses. IS_LHS is true if the
223 reference is supposed to be an lvalue. */
/* NOTE(review): short source lines (return type, braces, returns,
   declarations of T/RESULT) were dropped by the extraction; code
   below is as-extracted.  */
226 maybe_fold_reference (tree expr, bool is_lhs)
/* Fold wrappers around constants directly.  */
231 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
232 || TREE_CODE (expr) == REALPART_EXPR
233 || TREE_CODE (expr) == IMAGPART_EXPR)
234 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
235 return fold_unary_loc (EXPR_LOCATION (expr),
238 TREE_OPERAND (expr, 0));
239 else if (TREE_CODE (expr) == BIT_FIELD_REF
240 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
241 return fold_ternary_loc (EXPR_LOCATION (expr),
244 TREE_OPERAND (expr, 0),
245 TREE_OPERAND (expr, 1),
246 TREE_OPERAND (expr, 2));
/* Walk down to the base of the handled-component chain.  */
248 while (handled_component_p (*t))
249 t = &TREE_OPERAND (*t, 0);
251 /* Canonicalize MEM_REFs invariant address operand. Do this first
252 to avoid feeding non-canonical MEM_REFs elsewhere. */
253 if (TREE_CODE (*t) == MEM_REF
254 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
/* Preserve volatileness across the re-fold of the MEM_REF.  */
256 bool volatile_p = TREE_THIS_VOLATILE (*t);
257 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
258 TREE_OPERAND (*t, 0),
259 TREE_OPERAND (*t, 1));
262 TREE_THIS_VOLATILE (tem) = volatile_p;
/* Retry folding on the simplified reference.  */
264 tem = maybe_fold_reference (expr, is_lhs);
/* Constant-fold loads from constant aggregates (rhs only).  */
272 && (result = fold_const_aggregate_ref (expr))
273 && is_gimple_min_invariant (result))
276 /* Fold back MEM_REFs to reference trees. */
277 if (TREE_CODE (*t) == MEM_REF
278 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
279 && integer_zerop (TREE_OPERAND (*t, 1))
280 && (TREE_THIS_VOLATILE (*t)
281 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
282 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
283 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
284 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
285 /* We have to look out here to not drop a required conversion
286 from the rhs to the lhs if is_lhs, but we don't have the
287 rhs here to verify that. Thus require strict type
289 && types_compatible_p (TREE_TYPE (*t),
290 TREE_TYPE (TREE_OPERAND
291 (TREE_OPERAND (*t, 0), 0))))
294 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
295 tem = maybe_fold_reference (expr, is_lhs);
/* TARGET_MEM_REFs get their own folder.  */
300 else if (TREE_CODE (*t) == TARGET_MEM_REF)
302 tree tem = maybe_fold_tmr (*t);
306 tem = maybe_fold_reference (expr, is_lhs);
317 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
318 replacement rhs for the statement or NULL_TREE if no simplification
319 could be made. It is assumed that the operands have been previously
/* NOTE(review): short source lines (return type, braces, returns) were
   dropped by the extraction; code below is as-extracted.  */
323 fold_gimple_assign (gimple_stmt_iterator *si)
325 gimple stmt = gsi_stmt (*si);
326 enum tree_code subcode = gimple_assign_rhs_code (stmt);
327 location_t loc = gimple_location (stmt);
329 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS.  */
331 switch (get_gimple_rhs_class (subcode))
333 case GIMPLE_SINGLE_RHS:
335 tree rhs = gimple_assign_rhs1 (stmt);
/* Plain memory reference: try the reference folder.  */
337 if (REFERENCE_CLASS_P (rhs))
338 return maybe_fold_reference (rhs, false);
/* &ref: fold the referenced lvalue and rebuild the address.  */
340 else if (TREE_CODE (rhs) == ADDR_EXPR)
342 tree ref = TREE_OPERAND (rhs, 0);
343 tree tem = maybe_fold_reference (ref, true);
345 && TREE_CODE (tem) == MEM_REF
346 && integer_zerop (TREE_OPERAND (tem, 1)))
347 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
349 result = fold_convert (TREE_TYPE (rhs),
350 build_fold_addr_expr_loc (loc, tem));
351 else if (TREE_CODE (ref) == MEM_REF
352 && integer_zerop (TREE_OPERAND (ref, 1)))
353 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
/* A CONSTRUCTOR with one value per vector element may fold
   to a VECTOR_CST when all elements are simple constants.  */
356 else if (TREE_CODE (rhs) == CONSTRUCTOR
357 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
358 && (CONSTRUCTOR_NELTS (rhs)
359 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
361 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
365 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
366 if (TREE_CODE (val) != INTEGER_CST
367 && TREE_CODE (val) != REAL_CST
368 && TREE_CODE (val) != FIXED_CST)
371 return build_vector_from_ctor (TREE_TYPE (rhs),
372 CONSTRUCTOR_ELTS (rhs));
/* A bare decl may have a known constant value.  */
375 else if (DECL_P (rhs))
376 return unshare_expr (get_symbol_constant_value (rhs));
378 /* If we couldn't fold the RHS, hand over to the generic
380 if (result == NULL_TREE)
383 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
384 that may have been added by fold, and "useless" type
385 conversions that might now be apparent due to propagation. */
386 STRIP_USELESS_TYPE_CONVERSION (result);
388 if (result != rhs && valid_gimple_rhs_p (result))
395 case GIMPLE_UNARY_RHS:
397 tree rhs = gimple_assign_rhs1 (stmt);
399 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
402 /* If the operation was a conversion do _not_ mark a
403 resulting constant with TREE_OVERFLOW if the original
404 constant was not. These conversions have implementation
405 defined behavior and retaining the TREE_OVERFLOW flag
406 here would confuse later passes such as VRP. */
407 if (CONVERT_EXPR_CODE_P (subcode)
408 && TREE_CODE (result) == INTEGER_CST
409 && TREE_CODE (rhs) == INTEGER_CST)
410 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
412 STRIP_USELESS_TYPE_CONVERSION (result);
413 if (valid_gimple_rhs_p (result))
419 case GIMPLE_BINARY_RHS:
420 /* Try to canonicalize for boolean-typed X the comparisons
421 X == 0, X == 1, X != 0, and X != 1. */
422 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
423 || gimple_assign_rhs_code (stmt) == NE_EXPR)
425 tree lhs = gimple_assign_lhs (stmt);
426 tree op1 = gimple_assign_rhs1 (stmt);
427 tree op2 = gimple_assign_rhs2 (stmt);
428 tree type = TREE_TYPE (op1);
430 /* Check whether the comparison operands are of the same boolean
431 type as the result type is.
432 Check that second operand is an integer-constant with value
434 if (TREE_CODE (op2) == INTEGER_CST
435 && (integer_zerop (op2) || integer_onep (op2))
436 && useless_type_conversion_p (TREE_TYPE (lhs), type))
438 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
439 bool is_logical_not = false;
441 /* X == 0 and X != 1 is a logical-not.of X
442 X == 1 and X != 0 is X */
443 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
444 || (cmp_code == NE_EXPR && integer_onep (op2)))
445 is_logical_not = true;
447 if (is_logical_not == false)
449 /* Only for one-bit precision typed X the transformation
450 !X -> ~X is valid. */
451 else if (TYPE_PRECISION (type) == 1)
452 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
454 /* Otherwise we use !X -> X ^ 1. */
456 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
457 type, op1, build_int_cst (type, 1));
/* Generic binary folding.  */
463 result = fold_binary_loc (loc, subcode,
464 TREE_TYPE (gimple_assign_lhs (stmt)),
465 gimple_assign_rhs1 (stmt),
466 gimple_assign_rhs2 (stmt));
470 STRIP_USELESS_TYPE_CONVERSION (result);
471 if (valid_gimple_rhs_p (result))
476 case GIMPLE_TERNARY_RHS:
477 /* Try to fold a conditional expression. */
478 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
480 tree op0 = gimple_assign_rhs1 (stmt);
483 location_t cond_loc = gimple_location (stmt);
485 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings: only warn if the folded form
   actually gets used as the replacement.  */
487 fold_defer_overflow_warnings ();
488 tem = fold_binary_loc (cond_loc,
489 TREE_CODE (op0), TREE_TYPE (op0),
490 TREE_OPERAND (op0, 0),
491 TREE_OPERAND (op0, 1));
492 /* This is actually a conditional expression, not a GIMPLE
493 conditional statement, however, the valid_gimple_rhs_p
494 test still applies. */
495 set = (tem && is_gimple_condexpr (tem)
496 && valid_gimple_rhs_p (tem));
497 fold_undefer_overflow_warnings (set, stmt, 0);
499 else if (is_gimple_min_invariant (op0))
508 result = fold_build3_loc (cond_loc, COND_EXPR,
509 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
510 gimple_assign_rhs2 (stmt),
511 gimple_assign_rhs3 (stmt));
/* Generic ternary folding.  */
515 result = fold_ternary_loc (loc, subcode,
516 TREE_TYPE (gimple_assign_lhs (stmt)),
517 gimple_assign_rhs1 (stmt),
518 gimple_assign_rhs2 (stmt),
519 gimple_assign_rhs3 (stmt));
523 STRIP_USELESS_TYPE_CONVERSION (result);
524 if (valid_gimple_rhs_p (result))
529 case GIMPLE_INVALID_RHS:
536 /* Attempt to fold a conditional statement. Return true if any changes were
537 made. We only attempt to fold the condition expression, and do not perform
538 any transformation that would require alteration of the cfg. It is
539 assumed that the operands have been previously folded. */
/* NOTE(review): short source lines (return type, braces, returns) were
   dropped by the extraction; code below is as-extracted.  */
542 fold_gimple_cond (gimple stmt)
544 tree result = fold_binary_loc (gimple_location (stmt),
545 gimple_cond_code (stmt),
547 gimple_cond_lhs (stmt),
548 gimple_cond_rhs (stmt));
/* Only install the folded form when it is still a valid GIMPLE
   condition expression.  */
552 STRIP_USELESS_TYPE_CONVERSION (result);
553 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
555 gimple_cond_set_condition_from_tree (stmt, result);
563 /* Convert EXPR into a GIMPLE value suitable for substitution on the
564 RHS of an assignment. Insert the necessary statements before
565 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
566 is replaced. If the call is expected to produces a result, then it
567 is replaced by an assignment of the new RHS to the result variable.
568 If the result is to be ignored, then the call is replaced by a
569 GIMPLE_NOP. A proper VDEF chain is retained by making the first
570 VUSE and the last VDEF of the whole sequence be the same as the replaced
571 statement and using new SSA names for stores in between. */
/* NOTE(review): short source lines (return type, braces, some local
   declarations) were dropped by the extraction; code below is
   as-extracted.  */
574 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
577 gimple stmt, new_stmt;
578 gimple_stmt_iterator i;
579 gimple_seq stmts = NULL;
580 struct gimplify_ctx gctx;
584 stmt = gsi_stmt (*si_p);
586 gcc_assert (is_gimple_call (stmt));
588 push_gimplify_context (&gctx);
589 gctx.into_ssa = gimple_in_ssa_p (cfun);
591 lhs = gimple_call_lhs (stmt);
592 if (lhs == NULL_TREE)
594 gimplify_and_add (expr, &stmts);
595 /* We can end up with folding a memcpy of an empty class assignment
596 which gets optimized away by C++ gimplification. */
597 if (gimple_seq_empty_p (stmts))
599 pop_gimplify_context (NULL);
600 if (gimple_in_ssa_p (cfun))
602 unlink_stmt_vdef (stmt);
605 gsi_remove (si_p, true);
/* Call has a result: gimplify EXPR into a temporary and append an
   assignment of that temporary to the original LHS.  */
611 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
612 new_stmt = gimple_build_assign (lhs, tmp);
613 i = gsi_last (stmts);
614 gsi_insert_after_without_update (&i, new_stmt,
615 GSI_CONTINUE_LINKING);
618 pop_gimplify_context (NULL);
620 if (gimple_has_location (stmt))
621 annotate_all_with_location (stmts, gimple_location (stmt));
623 /* First iterate over the replacement statements backward, assigning
624 virtual operands to their defining statements. */
626 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
628 new_stmt = gsi_stmt (i);
/* A statement is a store if it is a non-register single assignment
   or a call that may clobber memory.  */
629 if ((gimple_assign_single_p (new_stmt)
630 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
631 || (is_gimple_call (new_stmt)
632 && (gimple_call_flags (new_stmt)
633 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
/* The last store reuses the replaced statement's VDEF; earlier
   stores get fresh SSA names.  */
637 vdef = gimple_vdef (stmt);
639 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
640 gimple_set_vdef (new_stmt, vdef);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
642 SSA_NAME_DEF_STMT (vdef) = new_stmt;
643 laststore = new_stmt;
647 /* Second iterate over the statements forward, assigning virtual
648 operands to their uses. */
649 reaching_vuse = gimple_vuse (stmt);
650 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
652 new_stmt = gsi_stmt (i);
653 /* If the new statement possibly has a VUSE, update it with exact SSA
654 name we know will reach this one. */
655 if (gimple_has_mem_ops (new_stmt))
656 gimple_set_vuse (new_stmt, reaching_vuse);
657 gimple_set_modified (new_stmt, true);
658 if (gimple_vdef (new_stmt))
659 reaching_vuse = gimple_vdef (new_stmt);
662 /* If the new sequence does not do a store release the virtual
663 definition of the original statement. */
665 && reaching_vuse == gimple_vuse (stmt))
667 tree vdef = gimple_vdef (stmt);
669 && TREE_CODE (vdef) == SSA_NAME)
671 unlink_stmt_vdef (stmt);
672 release_ssa_name (vdef);
676 /* Finally replace the original statement with the sequence. */
677 gsi_replace_with_seq (si_p, stmts, false);
680 /* Return the string length, maximum string length or maximum value of
682 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
683 is not NULL and, for TYPE == 0, its value is not equal to the length
684 we determine or if we are unable to determine the length or value,
685 return false. VISITED is a bitmap of visited variables.
686 TYPE is 0 if string length should be returned, 1 for maximum string
687 length and 2 for maximum value ARG can have. */
/* NOTE(review): short source lines (return type, braces, returns,
   some case labels) were dropped by the extraction; code below is
   as-extracted.  */
690 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
695 if (TREE_CODE (arg) != SSA_NAME)
697 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
698 if (TREE_CODE (arg) == ADDR_EXPR
699 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
700 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
702 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
703 if (TREE_CODE (aop0) == INDIRECT_REF
704 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
705 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
706 length, visited, type);
/* Reject non-constant or negative values.  */
712 if (TREE_CODE (val) != INTEGER_CST
713 || tree_int_cst_sgn (val) < 0)
717 val = c_strlen (arg, 1);
/* Merge with a previously recorded length: keep the maximum for
   TYPE != 0, require equality for TYPE == 0.  */
725 if (TREE_CODE (*length) != INTEGER_CST
726 || TREE_CODE (val) != INTEGER_CST)
729 if (tree_int_cst_lt (*length, val))
733 else if (simple_cst_equal (val, *length) != 1)
741 /* If ARG is registered for SSA update we cannot look at its defining
743 if (name_registered_for_update_p (arg))
746 /* If we were already here, break the infinite cycle. */
747 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
751 def_stmt = SSA_NAME_DEF_STMT (var);
753 switch (gimple_code (def_stmt))
756 /* The RHS of the statement defining VAR must either have a
757 constant length or come from another SSA_NAME with a constant
759 if (gimple_assign_single_p (def_stmt)
760 || gimple_assign_unary_nop_p (def_stmt))
762 tree rhs = gimple_assign_rhs1 (def_stmt);
763 return get_maxval_strlen (rhs, length, visited, type);
765 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
767 tree op2 = gimple_assign_rhs2 (def_stmt);
768 tree op3 = gimple_assign_rhs3 (def_stmt);
/* Both arms of the COND_EXPR must yield a usable length.  */
769 return get_maxval_strlen (op2, length, visited, type)
770 && get_maxval_strlen (op3, length, visited, type);
776 /* All the arguments of the PHI node must have the same constant
780 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
782 tree arg = gimple_phi_arg (def_stmt, i)->def;
784 /* If this PHI has itself as an argument, we cannot
785 determine the string length of this argument. However,
786 if we can find a constant string length for the other
787 PHI args then we can still be sure that this is a
788 constant string length. So be optimistic and just
789 continue with the next argument. */
790 if (arg == gimple_phi_result (def_stmt))
793 if (!get_maxval_strlen (arg, length, visited, type))
805 /* Fold builtin call in statement STMT. Returns a simplified tree.
806 We may return a non-constant expression, including another call
807 to a different function and with different arguments, e.g.,
808 substituting memcpy for strcpy when the string length is known.
809 Note that some builtins expand into inline code that may not
810 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): short source lines (return type, braces, returns,
   local declarations, some case labels and arg_idx/type setup) were
   dropped by the extraction; code below is as-extracted.  */
813 gimple_fold_builtin (gimple stmt)
821 location_t loc = gimple_location (stmt);
823 gcc_assert (is_gimple_call (stmt));
/* IGNORE is true when the call's value is unused (no LHS).  */
825 ignore = (gimple_call_lhs (stmt) == NULL);
827 /* First try the generic builtin folder. If that succeeds, return the
829 result = fold_call_stmt (stmt, ignore);
837 /* Ignore MD builtins. */
838 callee = gimple_call_fndecl (stmt);
839 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
842 /* Give up for always_inline inline builtins until they are
844 if (avoid_folding_inline_builtin (callee))
847 /* If the builtin could not be folded, and it has no argument list,
849 nargs = gimple_call_num_args (stmt);
853 /* Limit the work only for builtins we know how to simplify. */
/* This first switch only selects which argument index carries the
   string/length and what TYPE to pass to get_maxval_strlen.  */
854 switch (DECL_FUNCTION_CODE (callee))
856 case BUILT_IN_STRLEN:
858 case BUILT_IN_FPUTS_UNLOCKED:
862 case BUILT_IN_STRCPY:
863 case BUILT_IN_STRNCPY:
867 case BUILT_IN_MEMCPY_CHK:
868 case BUILT_IN_MEMPCPY_CHK:
869 case BUILT_IN_MEMMOVE_CHK:
870 case BUILT_IN_MEMSET_CHK:
871 case BUILT_IN_STRNCPY_CHK:
872 case BUILT_IN_STPNCPY_CHK:
876 case BUILT_IN_STRCPY_CHK:
877 case BUILT_IN_STPCPY_CHK:
881 case BUILT_IN_SNPRINTF_CHK:
882 case BUILT_IN_VSNPRINTF_CHK:
890 if (arg_idx >= nargs)
893 /* Try to use the dataflow information gathered by the CCP process. */
894 visited = BITMAP_ALLOC (NULL);
895 bitmap_clear (visited);
897 memset (val, 0, sizeof (val));
898 a = gimple_call_arg (stmt, arg_idx);
899 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
900 val[arg_idx] = NULL_TREE;
902 BITMAP_FREE (visited);
/* Second switch: perform the per-builtin simplification using the
   length/value information gathered above.  */
905 switch (DECL_FUNCTION_CODE (callee))
907 case BUILT_IN_STRLEN:
908 if (val[0] && nargs == 1)
911 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
913 /* If the result is not a valid gimple value, or not a cast
914 of a valid gimple value, then we cannot use the result. */
915 if (is_gimple_val (new_val)
916 || (CONVERT_EXPR_P (new_val)
917 && is_gimple_val (TREE_OPERAND (new_val, 0))))
922 case BUILT_IN_STRCPY:
923 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
924 result = fold_builtin_strcpy (loc, callee,
925 gimple_call_arg (stmt, 0),
926 gimple_call_arg (stmt, 1),
930 case BUILT_IN_STRNCPY:
931 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
932 result = fold_builtin_strncpy (loc, callee,
933 gimple_call_arg (stmt, 0),
934 gimple_call_arg (stmt, 1),
935 gimple_call_arg (stmt, 2),
/* fputs / fputs_unlocked differ only in the "unlocked" flag.  */
941 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
942 gimple_call_arg (stmt, 1),
943 ignore, false, val[0]);
946 case BUILT_IN_FPUTS_UNLOCKED:
948 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
949 gimple_call_arg (stmt, 1),
950 ignore, true, val[0]);
953 case BUILT_IN_MEMCPY_CHK:
954 case BUILT_IN_MEMPCPY_CHK:
955 case BUILT_IN_MEMMOVE_CHK:
956 case BUILT_IN_MEMSET_CHK:
957 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
958 result = fold_builtin_memory_chk (loc, callee,
959 gimple_call_arg (stmt, 0),
960 gimple_call_arg (stmt, 1),
961 gimple_call_arg (stmt, 2),
962 gimple_call_arg (stmt, 3),
964 DECL_FUNCTION_CODE (callee));
967 case BUILT_IN_STRCPY_CHK:
968 case BUILT_IN_STPCPY_CHK:
969 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
970 result = fold_builtin_stxcpy_chk (loc, callee,
971 gimple_call_arg (stmt, 0),
972 gimple_call_arg (stmt, 1),
973 gimple_call_arg (stmt, 2),
975 DECL_FUNCTION_CODE (callee));
978 case BUILT_IN_STRNCPY_CHK:
979 case BUILT_IN_STPNCPY_CHK:
980 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
981 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
982 gimple_call_arg (stmt, 1),
983 gimple_call_arg (stmt, 2),
984 gimple_call_arg (stmt, 3),
986 DECL_FUNCTION_CODE (callee));
989 case BUILT_IN_SNPRINTF_CHK:
990 case BUILT_IN_VSNPRINTF_CHK:
991 if (val[1] && is_gimple_val (val[1]))
992 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
993 DECL_FUNCTION_CODE (callee));
/* Discard side-effect-free parts of the result when the value is
   unused.  */
1000 if (result && ignore)
1001 result = fold_ignored_result (result);
1006 /* Return a binfo to be used for devirtualization of calls based on an object
1007 represented by a declaration (i.e. a global or automatically allocated one)
1008 or NULL if it cannot be found or is not safe. CST is expected to be an
1009 ADDR_EXPR of such object or the function will return NULL. Currently it is
1010 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
/* NOTE(review): short source lines (return type, braces, returns, the
   loop header of the sub-object walk) were dropped by the extraction;
   code below is as-extracted.  */
1013 gimple_extract_devirt_binfo_from_cst (tree cst)
1015 HOST_WIDE_INT offset, size, max_size;
1016 tree base, type, expected_type, binfo;
1017 bool last_artificial = false;
1019 if (!flag_devirtualize
1020 || TREE_CODE (cst) != ADDR_EXPR
1021 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1024 cst = TREE_OPERAND (cst, 0);
1025 expected_type = TREE_TYPE (cst);
1026 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1027 type = TREE_TYPE (base);
1031 || TREE_CODE (type) != RECORD_TYPE)
1034 /* Find the sub-object the constant actually refers to and mark whether it is
1035 an artificial one (as opposed to a user-defined one). */
1038 HOST_WIDE_INT pos, size;
/* Stop descending once TYPE matches the expected static type.  */
1041 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
/* Locate the field covering OFFSET at this level.  */
1046 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1048 if (TREE_CODE (fld) != FIELD_DECL)
1051 pos = int_bit_position (fld);
1052 size = tree_low_cst (DECL_SIZE (fld), 1);
1053 if (pos <= offset && (pos + size) > offset)
1056 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1059 last_artificial = DECL_ARTIFICIAL (fld);
1060 type = TREE_TYPE (fld);
1063 /* Artificial sub-objects are ancestors, we do not want to use them for
1064 devirtualization, at least not here. */
1065 if (last_artificial)
1067 binfo = TYPE_BINFO (type);
/* Safety restriction from the header comment: only binfos without
   base binfos (no ancestors) are usable.  */
1068 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1074 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1075 The statement may be replaced by another statement, e.g., if the call
1076 simplifies to a constant value. Return true if any changes were made.
1077 It is assumed that the operands have been previously folded. */
/* NOTE(review): short source lines (return type, braces, returns,
   some declarations) were dropped by the extraction; code below is
   as-extracted.  */
1080 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1082 gimple stmt = gsi_stmt (*gsi);
1084 bool changed = false;
1087 /* Fold *& in call arguments. */
1088 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1089 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1091 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1094 gimple_call_set_arg (stmt, i, tmp);
1099 /* Check for virtual calls that became direct calls. */
1100 callee = gimple_call_fn (stmt);
1101 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
/* If the OBJ_TYPE_REF expression already resolves to a function
   address, strip the OBJ_TYPE_REF wrapper.  */
1103 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1105 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
/* Otherwise try to devirtualize through the object's known binfo.  */
1110 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1111 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1115 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1116 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1119 gimple_call_set_fndecl (stmt, fndecl);
1129 /* Check for builtins that CCP can handle using information not
1130 available in the generic fold routines. */
1131 callee = gimple_call_fndecl (stmt);
1132 if (callee && DECL_BUILT_IN (callee))
1134 tree result = gimple_fold_builtin (stmt);
/* Fall back to gimplifying the folded tree when it is not a
   directly substitutable RHS.  */
1137 if (!update_call_from_tree (gsi, result))
1138 gimplify_and_update_call_from_tree (gsi, result);
1146 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1147 distinguishes both cases. */
/* NOTE(review): short source lines (return type, braces, returns,
   case labels and some declarations) were dropped by the extraction;
   code below is as-extracted.  */
1150 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1152 bool changed = false;
1153 gimple stmt = gsi_stmt (*gsi);
1155 gimple_stmt_iterator gsinext = *gsi;
/* Remember the next statement so we can detect when folding removed
   STMT entirely (see the stmt != next_stmt check below).  */
1158 gsi_next (&gsinext);
1159 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1161 /* Fold the main computation performed by the statement. */
1162 switch (gimple_code (stmt))
1166 unsigned old_num_ops = gimple_num_ops (stmt);
1167 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1168 tree lhs = gimple_assign_lhs (stmt);
1170 /* First canonicalize operand order. This avoids building new
1171 trees if this is the only thing fold would later do. */
1172 if ((commutative_tree_code (subcode)
1173 || commutative_ternary_tree_code (subcode))
1174 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1175 gimple_assign_rhs2 (stmt), false))
1177 tree tem = gimple_assign_rhs1 (stmt);
1178 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1179 gimple_assign_set_rhs2 (stmt, tem);
1182 new_rhs = fold_gimple_assign (gsi);
1184 && !useless_type_conversion_p (TREE_TYPE (lhs),
1185 TREE_TYPE (new_rhs)))
1186 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding must not grow the number of operands.  */
1189 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1191 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1198 changed |= fold_gimple_cond (stmt);
1202 changed |= gimple_fold_call (gsi, inplace);
1206 /* Fold *& in asm operands. */
1209 const char **oconstraints;
1210 const char *constraint;
1211 bool allows_mem, allows_reg;
1213 noutputs = gimple_asm_noutputs (stmt);
1214 oconstraints = XALLOCAVEC (const char *, noutputs);
1216 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1218 tree link = gimple_asm_output_op (stmt, i);
1219 tree op = TREE_VALUE (link);
1221 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1222 if (REFERENCE_CLASS_P (op)
1223 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1225 TREE_VALUE (link) = op;
1229 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1231 tree link = gimple_asm_input_op (stmt, i);
1232 tree op = TREE_VALUE (link);
1234 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Input operands may only be folded to memory when the
   constraint allows memory but not a register.  */
1235 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1236 oconstraints, &allows_mem, &allows_reg);
1237 if (REFERENCE_CLASS_P (op)
1238 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1241 TREE_VALUE (link) = op;
/* Debug bind statements: fold the bound value only.  */
1249 if (gimple_debug_bind_p (stmt))
1251 tree val = gimple_debug_bind_get_value (stmt);
1253 && REFERENCE_CLASS_P (val))
1255 tree tem = maybe_fold_reference (val, false);
1258 gimple_debug_bind_set_value (stmt, tem);
1263 && TREE_CODE (val) == ADDR_EXPR)
1265 tree ref = TREE_OPERAND (val, 0);
1266 tree tem = maybe_fold_reference (ref, false);
1269 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1270 gimple_debug_bind_set_value (stmt, tem);
1280 /* If stmt folds into nothing and it was the last stmt in a bb,
1281 don't call gsi_stmt. */
1282 if (gsi_end_p (*gsi))
1284 gcc_assert (next_stmt == NULL);
1288 stmt = gsi_stmt (*gsi);
1290 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1291 as we'd changing the next stmt. */
1292 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1294 tree lhs = gimple_get_lhs (stmt);
1295 if (lhs && REFERENCE_CLASS_P (lhs))
1297 tree new_lhs = maybe_fold_reference (lhs, true);
1300 gimple_set_lhs (stmt, new_lhs);
1309 /* Fold the statement pointed to by GSI. In some cases, this function may
1310 replace the whole statement with a new one. Returns true iff folding
1312 The statement pointed to by GSI should be in valid gimple form but may
1313 be in unfolded state as resulting from for example constant propagation
1314 which can produce *&x = 0. */
1317 fold_stmt (gimple_stmt_iterator *gsi)
1319 return fold_stmt_1 (gsi, false);
1322 /* Perform the minimal folding on statement *GSI. Only operations like
1323 *&x created by constant propagation are handled. The statement cannot
1324 be replaced with a new one. Return true if the statement was
1325 changed, false otherwise.
1326 The statement *GSI should be in valid gimple form but may
1327 be in unfolded state as resulting from for example constant propagation
1328 which can produce *&x = 0. */
1331 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1333 gimple stmt = gsi_stmt (*gsi);
1334 bool changed = fold_stmt_1 (gsi, true);
1335 gcc_assert (gsi_stmt (*gsi) == stmt);
1339 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1340 if EXPR is null or we don't know how.
1341 If non-null, the result always has boolean type. */
1344 canonicalize_bool (tree expr, bool invert)
1350 if (integer_nonzerop (expr))
1351 return boolean_false_node;
1352 else if (integer_zerop (expr))
1353 return boolean_true_node;
1354 else if (TREE_CODE (expr) == SSA_NAME)
1355 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1356 build_int_cst (TREE_TYPE (expr), 0));
1357 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1358 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1360 TREE_OPERAND (expr, 0),
1361 TREE_OPERAND (expr, 1));
1367 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1369 if (integer_nonzerop (expr))
1370 return boolean_true_node;
1371 else if (integer_zerop (expr))
1372 return boolean_false_node;
1373 else if (TREE_CODE (expr) == SSA_NAME)
1374 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1375 build_int_cst (TREE_TYPE (expr), 0));
1376 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1377 return fold_build2 (TREE_CODE (expr),
1379 TREE_OPERAND (expr, 0),
1380 TREE_OPERAND (expr, 1));
1386 /* Check to see if a boolean expression EXPR is logically equivalent to the
1387 comparison (OP1 CODE OP2). Check for various identities involving
1391 same_bool_comparison_p (const_tree expr, enum tree_code code,
1392 const_tree op1, const_tree op2)
1396 /* The obvious case. */
1397 if (TREE_CODE (expr) == code
1398 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1399 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1402 /* Check for comparing (name, name != 0) and the case where expr
1403 is an SSA_NAME with a definition matching the comparison. */
1404 if (TREE_CODE (expr) == SSA_NAME
1405 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1407 if (operand_equal_p (expr, op1, 0))
1408 return ((code == NE_EXPR && integer_zerop (op2))
1409 || (code == EQ_EXPR && integer_nonzerop (op2)));
1410 s = SSA_NAME_DEF_STMT (expr);
1411 if (is_gimple_assign (s)
1412 && gimple_assign_rhs_code (s) == code
1413 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1414 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1418 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1419 of name is a comparison, recurse. */
1420 if (TREE_CODE (op1) == SSA_NAME
1421 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1423 s = SSA_NAME_DEF_STMT (op1);
1424 if (is_gimple_assign (s)
1425 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1427 enum tree_code c = gimple_assign_rhs_code (s);
1428 if ((c == NE_EXPR && integer_zerop (op2))
1429 || (c == EQ_EXPR && integer_nonzerop (op2)))
1430 return same_bool_comparison_p (expr, c,
1431 gimple_assign_rhs1 (s),
1432 gimple_assign_rhs2 (s));
1433 if ((c == EQ_EXPR && integer_zerop (op2))
1434 || (c == NE_EXPR && integer_nonzerop (op2)))
1435 return same_bool_comparison_p (expr,
1436 invert_tree_comparison (c, false),
1437 gimple_assign_rhs1 (s),
1438 gimple_assign_rhs2 (s));
1444 /* Check to see if two boolean expressions OP1 and OP2 are logically
1448 same_bool_result_p (const_tree op1, const_tree op2)
1450 /* Simple cases first. */
1451 if (operand_equal_p (op1, op2, 0))
1454 /* Check the cases where at least one of the operands is a comparison.
1455 These are a bit smarter than operand_equal_p in that they apply some
1456 identifies on SSA_NAMEs. */
1457 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1458 && same_bool_comparison_p (op1, TREE_CODE (op2),
1459 TREE_OPERAND (op2, 0),
1460 TREE_OPERAND (op2, 1)))
1462 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1463 && same_bool_comparison_p (op2, TREE_CODE (op1),
1464 TREE_OPERAND (op1, 0),
1465 TREE_OPERAND (op1, 1)))
1472 /* Forward declarations for some mutually recursive functions. */
1475 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1476 enum tree_code code2, tree op2a, tree op2b);
1478 and_var_with_comparison (tree var, bool invert,
1479 enum tree_code code2, tree op2a, tree op2b);
1481 and_var_with_comparison_1 (gimple stmt,
1482 enum tree_code code2, tree op2a, tree op2b);
1484 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1485 enum tree_code code2, tree op2a, tree op2b);
1487 or_var_with_comparison (tree var, bool invert,
1488 enum tree_code code2, tree op2a, tree op2b);
1490 or_var_with_comparison_1 (gimple stmt,
1491 enum tree_code code2, tree op2a, tree op2b);
1493 /* Helper function for and_comparisons_1: try to simplify the AND of the
1494 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1495 If INVERT is true, invert the value of the VAR before doing the AND.
1496 Return NULL_EXPR if we can't simplify this to a single expression. */
1499 and_var_with_comparison (tree var, bool invert,
1500 enum tree_code code2, tree op2a, tree op2b)
1503 gimple stmt = SSA_NAME_DEF_STMT (var);
1505 /* We can only deal with variables whose definitions are assignments. */
1506 if (!is_gimple_assign (stmt))
1509 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1510 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1511 Then we only have to consider the simpler non-inverted cases. */
1513 t = or_var_with_comparison_1 (stmt,
1514 invert_tree_comparison (code2, false),
1517 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1518 return canonicalize_bool (t, invert);
1521 /* Try to simplify the AND of the ssa variable defined by the assignment
1522 STMT with the comparison specified by (OP2A CODE2 OP2B).
1523 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): this listing embeds stale source line numbers and is
   missing lines (the embedded numbers are non-contiguous), so braces,
   some returns and some argument lines are not visible here.  The
   comments below annotate only the logic that is visible.  */
1526 and_var_with_comparison_1 (gimple stmt,
1527 enum tree_code code2, tree op2a, tree op2b)
/* VAR is the boolean value defined by STMT; we try to fold
   (var AND (op2a code2 op2b)) into a single expression.  */
1529 tree var = gimple_assign_lhs (stmt);
1530 tree true_test_var = NULL_TREE;
1531 tree false_test_var = NULL_TREE;
1532 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1534 /* Check for identities like (var AND (var == 0)) => false. */
1535 if (TREE_CODE (op2a) == SSA_NAME
1536 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) test op2a for truth...  */
1538 if ((code2 == NE_EXPR && integer_zerop (op2b))
1539 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1541 true_test_var = op2a;
1542 if (var == true_test_var)
/* ...while (op2a == 0) / (op2a != 1) test op2a for falsehood.  */
1545 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1546 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1548 false_test_var = op2a;
1549 if (var == false_test_var)
1550 return boolean_false_node;
1554 /* If the definition is a comparison, recurse on it. */
1555 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1557 tree t = and_comparisons_1 (innercode,
1558 gimple_assign_rhs1 (stmt),
1559 gimple_assign_rhs2 (stmt),
1567 /* If the definition is an AND or OR expression, we may be able to
1568 simplify by reassociating. */
1569 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1570 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1572 tree inner1 = gimple_assign_rhs1 (stmt);
1573 tree inner2 = gimple_assign_rhs2 (stmt);
1576 tree partial = NULL_TREE;
1577 bool is_and = (innercode == BIT_AND_EXPR);
1579 /* Check for boolean identities that don't require recursive examination
1581 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1582 inner1 AND (inner1 OR inner2) => inner1
1583 !inner1 AND (inner1 AND inner2) => false
1584 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1585 Likewise for similar cases involving inner2. */
1586 if (inner1 == true_test_var)
1587 return (is_and ? var : inner1);
1588 else if (inner2 == true_test_var)
1589 return (is_and ? var : inner2);
1590 else if (inner1 == false_test_var)
1592 ? boolean_false_node
1593 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1594 else if (inner2 == false_test_var)
1596 ? boolean_false_node
1597 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1599 /* Next, redistribute/reassociate the AND across the inner tests.
1600 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1601 if (TREE_CODE (inner1) == SSA_NAME
1602 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1603 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1604 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1605 gimple_assign_rhs1 (s),
1606 gimple_assign_rhs2 (s),
1607 code2, op2a, op2b)))
1609 /* Handle the AND case, where we are reassociating:
1610 (inner1 AND inner2) AND (op2a code2 op2b)
1612 If the partial result t is a constant, we win. Otherwise
1613 continue on to try reassociating with the other inner test. */
1616 if (integer_onep (t))
1618 else if (integer_zerop (t))
1619 return boolean_false_node;
1622 /* Handle the OR case, where we are redistributing:
1623 (inner1 OR inner2) AND (op2a code2 op2b)
1624 => (t OR (inner2 AND (op2a code2 op2b))) */
1625 else if (integer_onep (t))
1626 return boolean_true_node;
1628 /* Save partial result for later. */
1632 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1633 if (TREE_CODE (inner2) == SSA_NAME
1634 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1635 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1636 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1637 gimple_assign_rhs1 (s),
1638 gimple_assign_rhs2 (s),
1639 code2, op2a, op2b)))
1641 /* Handle the AND case, where we are reassociating:
1642 (inner1 AND inner2) AND (op2a code2 op2b)
1643 => (inner1 AND t) */
1646 if (integer_onep (t))
1648 else if (integer_zerop (t))
1649 return boolean_false_node;
1650 /* If both are the same, we can apply the identity
1652 else if (partial && same_bool_result_p (t, partial))
1656 /* Handle the OR case, where we are redistributing:
1657 (inner1 OR inner2) AND (op2a code2 op2b)
1658 => (t OR (inner1 AND (op2a code2 op2b)))
1659 => (t OR partial) */
1662 if (integer_onep (t))
1663 return boolean_true_node;
1666 /* We already got a simplification for the other
1667 operand to the redistributed OR expression. The
1668 interesting case is when at least one is false.
1669 Or, if both are the same, we can apply the identity
1671 if (integer_zerop (partial))
1673 else if (integer_zerop (t))
1675 else if (same_bool_result_p (t, partial))
1684 /* Try to simplify the AND of two comparisons defined by
1685 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1686 If this can be done without constructing an intermediate value,
1687 return the resulting tree; otherwise NULL_TREE is returned.
1688 This function is deliberately asymmetric as it recurses on SSA_DEFs
1689 in the first comparison but not the second. */
/* NOTE(review): lossy listing — stale line numbers are embedded and
   several lines (braces, returns, switch headers) were dropped, as the
   non-contiguous numbering shows.  Comments annotate visible logic only.  */
1692 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1693 enum tree_code code2, tree op2a, tree op2b)
1695 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1696 if (operand_equal_p (op1a, op2a, 0)
1697 && operand_equal_p (op1b, op2b, 0))
1699 /* Result will be either NULL_TREE, or a combined comparison. */
1700 tree t = combine_comparisons (UNKNOWN_LOCATION,
1701 TRUTH_ANDIF_EXPR, code1, code2,
1702 boolean_type_node, op1a, op1b);
1707 /* Likewise the swapped case of the above. */
1708 if (operand_equal_p (op1a, op2b, 0)
1709 && operand_equal_p (op1b, op2a, 0))
1711 /* Result will be either NULL_TREE, or a combined comparison. */
1712 tree t = combine_comparisons (UNKNOWN_LOCATION,
1713 TRUTH_ANDIF_EXPR, code1,
1714 swap_tree_comparison (code2),
1715 boolean_type_node, op1a, op1b);
1720 /* If both comparisons are of the same value against constants, we might
1721 be able to merge them. */
1722 if (operand_equal_p (op1a, op2a, 0)
1723 && TREE_CODE (op1b) == INTEGER_CST
1724 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the three-way ordering of the two constants op1b vs op2b.  */
1726 int cmp = tree_int_cst_compare (op1b, op2b);
1728 /* If we have (op1a == op1b), we should either be able to
1729 return that or FALSE, depending on whether the constant op1b
1730 also satisfies the other comparison against op2b. */
1731 if (code1 == EQ_EXPR)
1737 case EQ_EXPR: val = (cmp == 0); break;
1738 case NE_EXPR: val = (cmp != 0); break;
1739 case LT_EXPR: val = (cmp < 0); break;
1740 case GT_EXPR: val = (cmp > 0); break;
1741 case LE_EXPR: val = (cmp <= 0); break;
1742 case GE_EXPR: val = (cmp >= 0); break;
1743 default: done = false;
1748 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1750 return boolean_false_node;
1753 /* Likewise if the second comparison is an == comparison. */
1754 else if (code2 == EQ_EXPR)
1760 case EQ_EXPR: val = (cmp == 0); break;
1761 case NE_EXPR: val = (cmp != 0); break;
1762 case LT_EXPR: val = (cmp > 0); break;
1763 case GT_EXPR: val = (cmp < 0); break;
1764 case LE_EXPR: val = (cmp >= 0); break;
1765 case GE_EXPR: val = (cmp <= 0); break;
1766 default: done = false;
1771 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1773 return boolean_false_node;
1777 /* Same business with inequality tests. */
1778 else if (code1 == NE_EXPR)
1783 case EQ_EXPR: val = (cmp != 0); break;
1784 case NE_EXPR: val = (cmp == 0); break;
1785 case LT_EXPR: val = (cmp >= 0); break;
1786 case GT_EXPR: val = (cmp <= 0); break;
1787 case LE_EXPR: val = (cmp > 0); break;
1788 case GE_EXPR: val = (cmp < 0); break;
1793 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1795 else if (code2 == NE_EXPR)
1800 case EQ_EXPR: val = (cmp == 0); break;
1801 case NE_EXPR: val = (cmp != 0); break;
1802 case LT_EXPR: val = (cmp <= 0); break;
1803 case GT_EXPR: val = (cmp >= 0); break;
1804 case LE_EXPR: val = (cmp < 0); break;
1805 case GE_EXPR: val = (cmp > 0); break;
1810 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1813 /* Choose the more restrictive of two < or <= comparisons. */
1814 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1815 && (code2 == LT_EXPR || code2 == LE_EXPR))
1817 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1818 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1820 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1823 /* Likewise choose the more restrictive of two > or >= comparisons. */
1824 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1825 && (code2 == GT_EXPR || code2 == GE_EXPR))
1827 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1828 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1830 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1833 /* Check for singleton ranges. */
/* (x <= c AND x >= c) collapses to x == c.  */
1835 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1836 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1837 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1839 /* Check for disjoint ranges. */
1841 && (code1 == LT_EXPR || code1 == LE_EXPR)
1842 && (code2 == GT_EXPR || code2 == GE_EXPR))
1843 return boolean_false_node;
1845 && (code1 == GT_EXPR || code1 == GE_EXPR)
1846 && (code2 == LT_EXPR || code2 == LE_EXPR))
1847 return boolean_false_node;
1850 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1851 NAME's definition is a truth value. See if there are any simplifications
1852 that can be done against the NAME's definition. */
1853 if (TREE_CODE (op1a) == SSA_NAME
1854 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1855 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert is true when the first comparison tests NAME for falsehood.  */
1857 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1858 || (code1 == NE_EXPR && integer_onep (op1b)));
1859 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1860 switch (gimple_code (stmt))
1863 /* Try to simplify by copy-propagating the definition. */
1864 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1867 /* If every argument to the PHI produces the same result when
1868 ANDed with the second comparison, we win.
1869 Do not do this unless the type is bool since we need a bool
1870 result here anyway. */
1871 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1873 tree result = NULL_TREE;
1875 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1877 tree arg = gimple_phi_arg_def (stmt, i);
1879 /* If this PHI has itself as an argument, ignore it.
1880 If all the other args produce the same result,
1882 if (arg == gimple_phi_result (stmt))
1884 else if (TREE_CODE (arg) == INTEGER_CST)
1886 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1889 result = boolean_false_node;
1890 else if (!integer_zerop (result))
1894 result = fold_build2 (code2, boolean_type_node,
1896 else if (!same_bool_comparison_p (result,
1900 else if (TREE_CODE (arg) == SSA_NAME
1901 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1904 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1905 /* In simple cases we can look through PHI nodes,
1906 but we have to be careful with loops.
1908 if (! dom_info_available_p (CDI_DOMINATORS)
1909 || gimple_bb (def_stmt) == gimple_bb (stmt)
1910 || dominated_by_p (CDI_DOMINATORS,
1911 gimple_bb (def_stmt),
1914 temp = and_var_with_comparison (arg, invert, code2,
1920 else if (!same_bool_result_p (result, temp))
1936 /* Try to simplify the AND of two comparisons, specified by
1937 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1938 If this can be simplified to a single expression (without requiring
1939 introducing more SSA variables to hold intermediate values),
1940 return the resulting tree. Otherwise return NULL_TREE.
1941 If the result expression is non-null, it has boolean type. */
1944 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1945 enum tree_code code2, tree op2a, tree op2b)
1947 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1951 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1954 /* Helper function for or_comparisons_1: try to simplify the OR of the
1955 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1956 If INVERT is true, invert the value of VAR before doing the OR.
1957 Return NULL_EXPR if we can't simplify this to a single expression. */
1960 or_var_with_comparison (tree var, bool invert,
1961 enum tree_code code2, tree op2a, tree op2b)
1964 gimple stmt = SSA_NAME_DEF_STMT (var);
1966 /* We can only deal with variables whose definitions are assignments. */
1967 if (!is_gimple_assign (stmt))
1970 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1971 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1972 Then we only have to consider the simpler non-inverted cases. */
1974 t = and_var_with_comparison_1 (stmt,
1975 invert_tree_comparison (code2, false),
1978 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1979 return canonicalize_bool (t, invert);
1982 /* Try to simplify the OR of the ssa variable defined by the assignment
1983 STMT with the comparison specified by (OP2A CODE2 OP2B).
1984 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): lossy listing — stale line numbers are embedded and
   lines were dropped (non-contiguous numbering).  This is the OR dual of
   and_var_with_comparison_1 above; comments annotate visible logic only.  */
1987 or_var_with_comparison_1 (gimple stmt,
1988 enum tree_code code2, tree op2a, tree op2b)
/* VAR is the boolean value defined by STMT; we try to fold
   (var OR (op2a code2 op2b)) into a single expression.  */
1990 tree var = gimple_assign_lhs (stmt);
1991 tree true_test_var = NULL_TREE;
1992 tree false_test_var = NULL_TREE;
1993 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1995 /* Check for identities like (var OR (var != 0)) => true . */
1996 if (TREE_CODE (op2a) == SSA_NAME
1997 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) test op2a for truth...  */
1999 if ((code2 == NE_EXPR && integer_zerop (op2b))
2000 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2002 true_test_var = op2a;
2003 if (var == true_test_var)
/* ...while (op2a == 0) / (op2a != 1) test op2a for falsehood.  */
2006 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2007 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2009 false_test_var = op2a;
2010 if (var == false_test_var)
2011 return boolean_true_node;
2015 /* If the definition is a comparison, recurse on it. */
2016 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2018 tree t = or_comparisons_1 (innercode,
2019 gimple_assign_rhs1 (stmt),
2020 gimple_assign_rhs2 (stmt),
2028 /* If the definition is an AND or OR expression, we may be able to
2029 simplify by reassociating. */
2030 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2031 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2033 tree inner1 = gimple_assign_rhs1 (stmt);
2034 tree inner2 = gimple_assign_rhs2 (stmt);
2037 tree partial = NULL_TREE;
2038 bool is_or = (innercode == BIT_IOR_EXPR);
2040 /* Check for boolean identities that don't require recursive examination
2042 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2043 inner1 OR (inner1 AND inner2) => inner1
2044 !inner1 OR (inner1 OR inner2) => true
2045 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2047 if (inner1 == true_test_var)
2048 return (is_or ? var : inner1);
2049 else if (inner2 == true_test_var)
2050 return (is_or ? var : inner2);
2051 else if (inner1 == false_test_var)
2054 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2055 else if (inner2 == false_test_var)
2058 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2060 /* Next, redistribute/reassociate the OR across the inner tests.
2061 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2062 if (TREE_CODE (inner1) == SSA_NAME
2063 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2064 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2065 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2066 gimple_assign_rhs1 (s),
2067 gimple_assign_rhs2 (s),
2068 code2, op2a, op2b)))
2070 /* Handle the OR case, where we are reassociating:
2071 (inner1 OR inner2) OR (op2a code2 op2b)
2073 If the partial result t is a constant, we win. Otherwise
2074 continue on to try reassociating with the other inner test. */
2077 if (integer_onep (t))
2078 return boolean_true_node;
2079 else if (integer_zerop (t))
2083 /* Handle the AND case, where we are redistributing:
2084 (inner1 AND inner2) OR (op2a code2 op2b)
2085 => (t AND (inner2 OR (op2a code op2b))) */
2086 else if (integer_zerop (t))
2087 return boolean_false_node;
2089 /* Save partial result for later. */
2093 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2094 if (TREE_CODE (inner2) == SSA_NAME
2095 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2096 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2097 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2098 gimple_assign_rhs1 (s),
2099 gimple_assign_rhs2 (s),
2100 code2, op2a, op2b)))
2102 /* Handle the OR case, where we are reassociating:
2103 (inner1 OR inner2) OR (op2a code2 op2b)
2105 => (t OR partial) */
2108 if (integer_zerop (t))
2110 else if (integer_onep (t))
2111 return boolean_true_node;
2112 /* If both are the same, we can apply the identity
2114 else if (partial && same_bool_result_p (t, partial))
2118 /* Handle the AND case, where we are redistributing:
2119 (inner1 AND inner2) OR (op2a code2 op2b)
2120 => (t AND (inner1 OR (op2a code2 op2b)))
2121 => (t AND partial) */
2124 if (integer_zerop (t))
2125 return boolean_false_node;
2128 /* We already got a simplification for the other
2129 operand to the redistributed AND expression. The
2130 interesting case is when at least one is true.
2131 Or, if both are the same, we can apply the identity
2133 if (integer_onep (partial))
2135 else if (integer_onep (t))
2137 else if (same_bool_result_p (t, partial))
2146 /* Try to simplify the OR of two comparisons defined by
2147 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2148 If this can be done without constructing an intermediate value,
2149 return the resulting tree; otherwise NULL_TREE is returned.
2150 This function is deliberately asymmetric as it recurses on SSA_DEFs
2151 in the first comparison but not the second. */
/* NOTE(review): lossy listing — stale line numbers are embedded and
   lines were dropped (non-contiguous numbering).  This is the OR dual of
   and_comparisons_1 above; comments annotate visible logic only.  */
2154 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2155 enum tree_code code2, tree op2a, tree op2b)
2157 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2158 if (operand_equal_p (op1a, op2a, 0)
2159 && operand_equal_p (op1b, op2b, 0))
2161 /* Result will be either NULL_TREE, or a combined comparison. */
2162 tree t = combine_comparisons (UNKNOWN_LOCATION,
2163 TRUTH_ORIF_EXPR, code1, code2,
2164 boolean_type_node, op1a, op1b);
2169 /* Likewise the swapped case of the above. */
2170 if (operand_equal_p (op1a, op2b, 0)
2171 && operand_equal_p (op1b, op2a, 0))
2173 /* Result will be either NULL_TREE, or a combined comparison. */
2174 tree t = combine_comparisons (UNKNOWN_LOCATION,
2175 TRUTH_ORIF_EXPR, code1,
2176 swap_tree_comparison (code2),
2177 boolean_type_node, op1a, op1b);
2182 /* If both comparisons are of the same value against constants, we might
2183 be able to merge them. */
2184 if (operand_equal_p (op1a, op2a, 0)
2185 && TREE_CODE (op1b) == INTEGER_CST
2186 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the three-way ordering of the two constants op1b vs op2b.  */
2188 int cmp = tree_int_cst_compare (op1b, op2b);
2190 /* If we have (op1a != op1b), we should either be able to
2191 return that or TRUE, depending on whether the constant op1b
2192 also satisfies the other comparison against op2b. */
2193 if (code1 == NE_EXPR)
2199 case EQ_EXPR: val = (cmp == 0); break;
2200 case NE_EXPR: val = (cmp != 0); break;
2201 case LT_EXPR: val = (cmp < 0); break;
2202 case GT_EXPR: val = (cmp > 0); break;
2203 case LE_EXPR: val = (cmp <= 0); break;
2204 case GE_EXPR: val = (cmp >= 0); break;
2205 default: done = false;
2210 return boolean_true_node;
2212 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2215 /* Likewise if the second comparison is a != comparison. */
2216 else if (code2 == NE_EXPR)
2222 case EQ_EXPR: val = (cmp == 0); break;
2223 case NE_EXPR: val = (cmp != 0); break;
2224 case LT_EXPR: val = (cmp > 0); break;
2225 case GT_EXPR: val = (cmp < 0); break;
2226 case LE_EXPR: val = (cmp >= 0); break;
2227 case GE_EXPR: val = (cmp <= 0); break;
2228 default: done = false;
2233 return boolean_true_node;
2235 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2239 /* See if an equality test is redundant with the other comparison. */
2240 else if (code1 == EQ_EXPR)
2245 case EQ_EXPR: val = (cmp == 0); break;
2246 case NE_EXPR: val = (cmp != 0); break;
2247 case LT_EXPR: val = (cmp < 0); break;
2248 case GT_EXPR: val = (cmp > 0); break;
2249 case LE_EXPR: val = (cmp <= 0); break;
2250 case GE_EXPR: val = (cmp >= 0); break;
2255 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2257 else if (code2 == EQ_EXPR)
2262 case EQ_EXPR: val = (cmp == 0); break;
2263 case NE_EXPR: val = (cmp != 0); break;
2264 case LT_EXPR: val = (cmp > 0); break;
2265 case GT_EXPR: val = (cmp < 0); break;
2266 case LE_EXPR: val = (cmp >= 0); break;
2267 case GE_EXPR: val = (cmp <= 0); break;
2272 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2275 /* Choose the less restrictive of two < or <= comparisons. */
2276 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2277 && (code2 == LT_EXPR || code2 == LE_EXPR))
2279 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2280 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2282 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2285 /* Likewise choose the less restrictive of two > or >= comparisons. */
2286 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2287 && (code2 == GT_EXPR || code2 == GE_EXPR))
2289 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2290 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2292 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2295 /* Check for singleton ranges. */
/* (x < c OR x > c) is everything except x == c.  */
2297 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2298 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2299 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2301 /* Check for less/greater pairs that don't restrict the range at all. */
2303 && (code1 == LT_EXPR || code1 == LE_EXPR)
2304 && (code2 == GT_EXPR || code2 == GE_EXPR))
2305 return boolean_true_node;
2307 && (code1 == GT_EXPR || code1 == GE_EXPR)
2308 && (code2 == LT_EXPR || code2 == LE_EXPR))
2309 return boolean_true_node;
2312 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2313 NAME's definition is a truth value. See if there are any simplifications
2314 that can be done against the NAME's definition. */
2315 if (TREE_CODE (op1a) == SSA_NAME
2316 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2317 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert is true when the first comparison tests NAME for falsehood.  */
2319 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2320 || (code1 == NE_EXPR && integer_onep (op1b)));
2321 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2322 switch (gimple_code (stmt))
2325 /* Try to simplify by copy-propagating the definition. */
2326 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2329 /* If every argument to the PHI produces the same result when
2330 ORed with the second comparison, we win.
2331 Do not do this unless the type is bool since we need a bool
2332 result here anyway. */
2333 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2335 tree result = NULL_TREE;
2337 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2339 tree arg = gimple_phi_arg_def (stmt, i);
2341 /* If this PHI has itself as an argument, ignore it.
2342 If all the other args produce the same result,
2344 if (arg == gimple_phi_result (stmt))
2346 else if (TREE_CODE (arg) == INTEGER_CST)
2348 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2351 result = boolean_true_node;
2352 else if (!integer_onep (result))
2356 result = fold_build2 (code2, boolean_type_node,
2358 else if (!same_bool_comparison_p (result,
2362 else if (TREE_CODE (arg) == SSA_NAME
2363 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2366 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2367 /* In simple cases we can look through PHI nodes,
2368 but we have to be careful with loops.
2370 if (! dom_info_available_p (CDI_DOMINATORS)
2371 || gimple_bb (def_stmt) == gimple_bb (stmt)
2372 || dominated_by_p (CDI_DOMINATORS,
2373 gimple_bb (def_stmt),
2376 temp = or_var_with_comparison (arg, invert, code2,
2382 else if (!same_bool_result_p (result, temp))
2398 /* Try to simplify the OR of two comparisons, specified by
2399 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2400 If this can be simplified to a single expression (without requiring
2401 introducing more SSA variables to hold intermediate values),
2402 return the resulting tree. Otherwise return NULL_TREE.
2403 If the result expression is non-null, it has boolean type. */
2406 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2407 enum tree_code code2, tree op2a, tree op2b)
2409 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2413 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2417 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2419    Either NULL_TREE, a simplified but non-constant, or a constant
     is returned.
2422    ??? This should go into a gimple-fold-inline.h file to be eventually
2423    privatized with the single valueize function used in the various TUs
2424    to avoid the indirect function call overhead.  */
/* NOTE(review): this excerpt elides many lines of the original function
   (braces, some case labels, helper declarations and the final return
   paths); comments below describe only the visible logic.  */
2427 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2429 location_t loc = gimple_location (stmt);
/* Dispatch on the kind of statement, then (for assignments) on the
   RHS class.  */
2430 switch (gimple_code (stmt))
2434 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2436 switch (get_gimple_rhs_class (subcode))
2438 case GIMPLE_SINGLE_RHS:
2440 tree rhs = gimple_assign_rhs1 (stmt);
2441 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2443 if (TREE_CODE (rhs) == SSA_NAME)
2445 /* If the RHS is an SSA_NAME, return its known constant value,
     if any.  */
2447 return (*valueize) (rhs);
2449 /* Handle propagating invariant addresses into address
     expressions.  */
2451 else if (TREE_CODE (rhs) == ADDR_EXPR
2452 && !is_gimple_min_invariant (rhs))
2454 HOST_WIDE_INT offset = 0;
2456 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2460 && (CONSTANT_CLASS_P (base)
2461 || decl_address_invariant_p (base)))
2462 return build_invariant_address (TREE_TYPE (rhs),
/* A vector CONSTRUCTOR with one valueized constant per element can be
   turned into a VECTOR_CST.  */
2465 else if (TREE_CODE (rhs) == CONSTRUCTOR
2466 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2467 && (CONSTRUCTOR_NELTS (rhs)
2468 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2473 vec = XALLOCAVEC (tree,
2474 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2475 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2477 val = (*valueize) (val);
2478 if (TREE_CODE (val) == INTEGER_CST
2479 || TREE_CODE (val) == REAL_CST
2480 || TREE_CODE (val) == FIXED_CST)
2486 return build_vector (TREE_TYPE (rhs), vec);
2489 if (kind == tcc_reference)
/* Fold reads through SSA names: view-converts and complex parts fold
   with fold_unary, bit-field reads with fold_ternary.  */
2491 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2492 || TREE_CODE (rhs) == REALPART_EXPR
2493 || TREE_CODE (rhs) == IMAGPART_EXPR)
2494 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2496 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2497 return fold_unary_loc (EXPR_LOCATION (rhs),
2499 TREE_TYPE (rhs), val);
2501 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2502 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2504 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2505 return fold_ternary_loc (EXPR_LOCATION (rhs),
2507 TREE_TYPE (rhs), val,
2508 TREE_OPERAND (rhs, 1),
2509 TREE_OPERAND (rhs, 2));
2511 else if (TREE_CODE (rhs) == MEM_REF
2512 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2514 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2515 if (TREE_CODE (val) == ADDR_EXPR
2516 && is_gimple_min_invariant (val))
2518 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2520 TREE_OPERAND (rhs, 1));
/* Fall back to folding the reference against a constant initializer.  */
2525 return fold_const_aggregate_ref_1 (rhs, valueize);
2527 else if (kind == tcc_declaration)
2528 return get_symbol_constant_value (rhs);
2532 case GIMPLE_UNARY_RHS:
2534 /* Handle unary operators that can appear in GIMPLE form.
2535 Note that we know the single operand must be a constant,
2536 so this should almost always return a simplified RHS.  */
2537 tree lhs = gimple_assign_lhs (stmt);
2538 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2540 /* Conversions are useless for CCP purposes if they are
2541 value-preserving.  Thus the restrictions that
2542 useless_type_conversion_p places for restrict qualification
2543 of pointer types should not apply here.
2544 Substitution later will only substitute to allowed places.  */
2545 if (CONVERT_EXPR_CODE_P (subcode)
2546 && POINTER_TYPE_P (TREE_TYPE (lhs))
2547 && POINTER_TYPE_P (TREE_TYPE (op0))
2548 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2549 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2550 && TYPE_MODE (TREE_TYPE (lhs))
2551 == TYPE_MODE (TREE_TYPE (op0)))
2555 fold_unary_ignore_overflow_loc (loc, subcode,
2556 gimple_expr_type (stmt), op0);
2559 case GIMPLE_BINARY_RHS:
2561 /* Handle binary operators that can appear in GIMPLE form.  */
2562 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2563 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2565 /* Translate &x + CST into an invariant form suitable for
2566 further propagation.  */
2567 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2568 && TREE_CODE (op0) == ADDR_EXPR
2569 && TREE_CODE (op1) == INTEGER_CST)
2571 tree off = fold_convert (ptr_type_node, op1);
2572 return build_fold_addr_expr_loc
2574 fold_build2 (MEM_REF,
2575 TREE_TYPE (TREE_TYPE (op0)),
2576 unshare_expr (op0), off));
2579 return fold_binary_loc (loc, subcode,
2580 gimple_expr_type (stmt), op0, op1);
2583 case GIMPLE_TERNARY_RHS:
2585 /* Handle ternary operators that can appear in GIMPLE form.  */
2586 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2587 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2588 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2590 /* Fold embedded expressions in ternary codes.  */
2591 if ((subcode == COND_EXPR
2592 || subcode == VEC_COND_EXPR)
2593 && COMPARISON_CLASS_P (op0))
2595 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2596 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2597 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2598 TREE_TYPE (op0), op00, op01);
2603 return fold_ternary_loc (loc, subcode,
2604 gimple_expr_type (stmt), op0, op1, op2);
/* Call-statement folding: valueize the callee and, for a built-in
   with a known FUNCTION_DECL, rebuild a CALL_EXPR over valueized
   arguments and let fold_call_expr do the work.  (The GIMPLE_CALL
   case label itself is elided in this excerpt.)  */
2616 if (gimple_call_internal_p (stmt))
2617 /* No folding yet for these functions.  */
2620 fn = (*valueize) (gimple_call_fn (stmt));
2621 if (TREE_CODE (fn) == ADDR_EXPR
2622 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2623 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2625 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2628 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2629 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2630 call = build_call_array_loc (loc,
2631 gimple_call_return_type (stmt),
2632 fn, gimple_call_num_args (stmt), args);
2633 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2635 /* fold_call_expr wraps the result inside a NOP_EXPR.  */
2636 STRIP_NOPS (retval);
2647 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2648 Returns NULL_TREE if folding to a constant is not possible, otherwise
2649 returns a constant according to is_gimple_min_invariant.  */
2652 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
/* Delegate to the worker, then keep the result only if it is a
   GIMPLE minimal invariant (i.e. a real constant for callers).  */
2654 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2655 if (res && is_gimple_min_invariant (res))
/* NOTE(review): the two return statements (original lines 2656-2658)
   are elided in this excerpt.  */
2661 /* The following set of functions are supposed to fold references using
2662 their constant initializers. */
2664 static tree fold_ctor_reference (tree type, tree ctor,
2665 unsigned HOST_WIDE_INT offset,
2666 unsigned HOST_WIDE_INT size, tree);
2668 /* See if we can find constructor defining value of BASE.
2669 When we know the constructor with constant offset (such as
2670 base is array[40] and we do know constructor of array), then
2671 BIT_OFFSET is adjusted accordingly.
2673 As a special case, return error_mark_node when constructor
2674 is not explicitly available, but it is known to be zero
2675 such as 'static const int a;'.  */
2677 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2678 tree (*valueize)(tree))
2680 HOST_WIDE_INT bit_offset2, size, max_size;
/* Strip a MEM_REF wrapper: fold its constant offset into *BIT_OFFSET
   and valueize the address SSA name to (hopefully) an ADDR_EXPR.  */
2681 if (TREE_CODE (base) == MEM_REF)
2683 if (!integer_zerop (TREE_OPERAND (base, 1)))
2685 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2687 *bit_offset += (mem_ref_offset (base).low
2692 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2693 base = valueize (TREE_OPERAND (base, 0));
2694 if (!base || TREE_CODE (base) != ADDR_EXPR)
2696 base = TREE_OPERAND (base, 0);
2699 /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
2700 DECL_INITIAL.  If BASE is a nested reference into another
2701 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2702 the inner reference.  */
2703 switch (TREE_CODE (base))
2706 if (!const_value_known_p (base))
/* Declared static/extern with no initializer: known to be zero,
   signalled to callers by error_mark_node (see header comment).  */
2711 if (!DECL_INITIAL (base)
2712 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2713 return error_mark_node;
2714 /* Do not return an error_mark_node DECL_INITIAL.  LTO uses this
2715 as special marker (_not_ zero ...) for its own purposes.  */
2716 if (DECL_INITIAL (base) == error_mark_node)
2718 return DECL_INITIAL (base);
/* Nested reference: peel one level, accumulate its constant bit
   offset, and recurse on the inner base.  */
2722 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2723 if (max_size == -1 || size != max_size)
2725 *bit_offset += bit_offset2;
2726 return get_base_constructor (base, bit_offset, valueize);
2737 /* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
2738 to the memory at bit OFFSET.
2740 We only do the simple job of folding single-byte accesses.  */
2743 fold_string_cst_ctor_reference (tree type, tree ctor,
2744 unsigned HOST_WIDE_INT offset,
2745 unsigned HOST_WIDE_INT size)
/* Only handle a byte-aligned, one-byte integral read whose mode
   matches the string's element mode.  */
2747 if (INTEGRAL_TYPE_P (type)
2748 && (TYPE_MODE (type)
2749 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2750 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2752 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2753 && size == BITS_PER_UNIT
2754 && !(offset % BITS_PER_UNIT))
2756 offset /= BITS_PER_UNIT;
2757 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2758 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
/* Folding, e.g.
     const char a[20]="hello";
2764 might lead to offset greater than string length.  In this case we
2765 know value is either initialized to 0 or out of bounds.  Return 0.  */
2767 return build_zero_cst (type);
2772 /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
2773 SIZE to the memory at bit OFFSET.  */
2776 fold_array_ctor_reference (tree type, tree ctor,
2777 unsigned HOST_WIDE_INT offset,
2778 unsigned HOST_WIDE_INT size,
2781 unsigned HOST_WIDE_INT cnt;
2783 double_int low_bound, elt_size;
2784 double_int index, max_index;
2785 double_int access_index;
2786 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2787 HOST_WIDE_INT inner_offset;
2789 /* Compute low bound and elt size.  */
2790 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2791 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2792 if (domain_type && TYPE_MIN_VALUE (domain_type))
2794 /* Static constructors for variably sized objects make no sense.  */
2795 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2796 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2797 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
/* No domain (e.g. vectors): indices start at zero.  */
2800 low_bound = double_int_zero;
2801 /* Static constructors for variably sized objects make no sense.  */
2802 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2805 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2808 /* We can handle only constantly sized accesses that are known to not
2809 be larger than size of array element.  */
2810 if (!TYPE_SIZE_UNIT (type)
2811 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2812 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2815 /* Compute the array index we look for.  */
2816 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2817 .udiv (elt_size, TRUNC_DIV_EXPR);
2818 access_index += low_bound;
2820 access_index = access_index.ext (TYPE_PRECISION (index_type),
2821 TYPE_UNSIGNED (index_type));
2823 /* And offset within the access.  */
2824 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2826 /* See if the array field is large enough to span whole access.  We do not
2827 care to fold accesses spanning multiple array indexes.  */
2828 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
/* Start one below LOW_BOUND so the first CONSTRUCTOR element with a
   NULL index lands exactly on LOW_BOUND after the increment below.  */
2831 index = low_bound - double_int_one;
2833 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2835 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2837 /* Array constructor might explicitly set index, or specify range,
2838 or leave index NULL meaning that it is next index after previous
     one.  */
2842 if (TREE_CODE (cfield) == INTEGER_CST)
2843 max_index = index = tree_to_double_int (cfield);
2846 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2847 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2848 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2853 index += double_int_one;
2855 index = index.ext (TYPE_PRECISION (index_type),
2856 TYPE_UNSIGNED (index_type));
2860 /* Do we have match?  */
2861 if (access_index.cmp (index, 1) >= 0
2862 && access_index.cmp (max_index, 1) <= 0)
2863 return fold_ctor_reference (type, cval, inner_offset, size,
2866 /* When memory is not explicitly mentioned in constructor,
2867 it is 0 (or out of range).  */
2868 return build_zero_cst (type);
2871 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2872 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */
2875 fold_nonarray_ctor_reference (tree type, tree ctor,
2876 unsigned HOST_WIDE_INT offset,
2877 unsigned HOST_WIDE_INT size,
2880 unsigned HOST_WIDE_INT cnt;
/* Walk the fields of the CONSTRUCTOR looking for the one whose bit
   range covers [OFFSET, OFFSET+SIZE).  */
2883 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2886 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2887 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2888 tree field_size = DECL_SIZE (cfield);
2889 double_int bitoffset;
2890 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2891 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2892 double_int bitoffset_end, access_end;
2894 /* Variable sized objects in static constructors make no sense,
2895 but field_size can be NULL for flexible array members.  */
2896 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2897 && TREE_CODE (byte_offset) == INTEGER_CST
2898 && (field_size != NULL_TREE
2899 ? TREE_CODE (field_size) == INTEGER_CST
2900 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2902 /* Compute bit offset of the field.  */
2903 bitoffset = tree_to_double_int (field_offset)
2904 + byte_offset_cst * bits_per_unit_cst;
2905 /* Compute bit offset where the field ends.  */
2906 if (field_size != NULL_TREE)
2907 bitoffset_end = bitoffset + tree_to_double_int (field_size)
/* Flexible array member: no known end.  */
2909 bitoffset_end = double_int_zero;
2911 access_end = double_int::from_uhwi (offset)
2912 + double_int::from_uhwi (size);
2914 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2915 [BITOFFSET, BITOFFSET_END)?  */
2916 if (access_end.cmp (bitoffset, 0) > 0
2917 && (field_size == NULL_TREE
2918 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2920 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2921 /* We do have overlap.  Now see if field is large enough to
2922 cover the access.  Give up for accesses spanning multiple
     fields.  */
2924 if (access_end.cmp (bitoffset_end, 0) > 0)
2926 if (double_int::from_uhwi (offset).slt (bitoffset))
/* Recurse into the matching field's value with the offset rebased
   to the start of the field.  */
2928 return fold_ctor_reference (type, cval,
2929 inner_offset.to_uhwi (), size,
2933 /* When memory is not explicitly mentioned in constructor, it is 0.  */
2934 return build_zero_cst (type);
2937 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2938 to the memory at bit OFFSET.  */
2941 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2942 unsigned HOST_WIDE_INT size, tree from_decl)
2946 /* We found the field with exact match.  */
2947 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2949 return canonicalize_constructor_val (ctor, from_decl);
2951 /* We are at the end of walk, see if we can view convert the
     result.  */
2953 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2954 /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
2955 && operand_equal_p (TYPE_SIZE (type),
2956 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2958 ret = canonicalize_constructor_val (ctor, from_decl);
2959 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
/* Otherwise dispatch on the kind of initializer: string constants,
   array/vector constructors, or record/union constructors.  */
2964 if (TREE_CODE (ctor) == STRING_CST)
2965 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2966 if (TREE_CODE (ctor) == CONSTRUCTOR)
2969 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2970 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2971 return fold_array_ctor_reference (type, ctor, offset, size,
2974 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2981 /* Return the tree representing the element referenced by T if T is an
2982 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2983 names using VALUEIZE.  Return NULL_TREE otherwise.  */
2986 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2988 tree ctor, idx, base;
2989 HOST_WIDE_INT offset, size, max_size;
/* Never fold a volatile access away.  */
2992 if (TREE_THIS_VOLATILE (t))
2995 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2996 return get_symbol_constant_value (t);
2998 tem = fold_read_from_constant_string (t);
3002 switch (TREE_CODE (t))
3005 case ARRAY_RANGE_REF:
3006 /* Constant indexes are handled well by get_base_constructor.
3007 Only special case variable offsets.
3008 FIXME: This code can't handle nested references with variable indexes
3009 (they will be handled only by iteration of ccp).  Perhaps we can bring
3010 get_ref_base_and_extent here and make it use a valueize callback.  */
3011 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3013 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3014 && TREE_CODE (idx) == INTEGER_CST)
3016 tree low_bound, unit_size;
3019 /* If the resulting bit-offset is constant, track it.  */
3020 if ((low_bound = array_ref_low_bound (t),
3021 TREE_CODE (low_bound) == INTEGER_CST)
3022 && (unit_size = array_ref_element_size (t),
3023 host_integerp (unit_size, 1))
3024 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3025 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3026 doffset.fits_shwi ()))
/* Convert the element index to a bit offset from the array base.  */
3028 offset = doffset.to_shwi ();
3029 offset *= TREE_INT_CST_LOW (unit_size);
3030 offset *= BITS_PER_UNIT;
3032 base = TREE_OPERAND (t, 0);
3033 ctor = get_base_constructor (base, &offset, valueize);
3034 /* Empty constructor.  Always fold to 0.  */
3035 if (ctor == error_mark_node)
3036 return build_zero_cst (TREE_TYPE (t));
3037 /* Out of bound array access.  Value is undefined,
     but don't fold.  */
3041 /* We can not determine ctor.  */
3044 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3045 TREE_INT_CST_LOW (unit_size)
/* Handled reference kinds with a constant base/extent (MEM_REF,
   COMPONENT_REF, etc.; other case labels elided in this excerpt).  */
3054 case TARGET_MEM_REF:
3056 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3057 ctor = get_base_constructor (base, &offset, valueize);
3059 /* Empty constructor.  Always fold to 0.  */
3060 if (ctor == error_mark_node)
3061 return build_zero_cst (TREE_TYPE (t));
3062 /* We do not know precise address.  */
3063 if (max_size == -1 || max_size != size)
3065 /* We can not determine ctor.  */
3069 /* Out of bound array access.  Value is undefined, but don't fold.  */
3073 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
/* REALPART_EXPR / IMAGPART_EXPR: fold through a COMPLEX_CST operand.  */
3079 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3080 if (c && TREE_CODE (c) == COMPLEX_CST)
3081 return fold_build1_loc (EXPR_LOCATION (t),
3082 TREE_CODE (t), TREE_TYPE (t), c);
/* Public wrapper: fold a constant-aggregate reference T without any
   SSA-name valueization (NULL callback).  */
3094 fold_const_aggregate_ref (tree t)
3096 return fold_const_aggregate_ref_1 (t, NULL);
3099 /* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
3100 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3101 KNOWN_BINFO carries the binfo describing the true type of
3102 OBJ_TYPE_REF_OBJECT(REF).  */
3105 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3107 unsigned HOST_WIDE_INT offset, size;
3110 vtable = v = BINFO_VTABLE (known_binfo);
3111 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
/* BINFO_VTABLE may be &vtable or &vtable + byte-offset; peel the
   POINTER_PLUS_EXPR and accumulate the offset in bits.  */
3115 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3117 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3118 v = TREE_OPERAND (v, 0);
3123 if (TREE_CODE (v) != ADDR_EXPR)
3125 v = TREE_OPERAND (v, 0);
/* Only a virtual-table VAR_DECL with a usable initializer can be
   looked up.  */
3127 if (TREE_CODE (v) != VAR_DECL
3128 || !DECL_VIRTUAL_P (v)
3129 || !DECL_INITIAL (v)
3130 || DECL_INITIAL (v) == error_mark_node)
3132 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3133 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* Index the vtable array: slot TOKEN, each slot SIZE bits wide.  */
3134 offset += token * size;
3135 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3136 offset, size, vtable);
3137 if (!fn || integer_zerop (fn))
3139 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3140 || TREE_CODE (fn) == FDESC_EXPR);
3141 fn = TREE_OPERAND (fn, 0);
3142 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3144 /* When cgraph node is missing and function is not public, we cannot
3145 devirtualize.  This can happen in WHOPR when the actual method
3146 ends up in other partition, because we found devirtualization
3147 possibility too late.  */
3148 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3151 /* Make sure we create a cgraph node for functions we'll reference.
3152 They can be non-existent if the reference comes from an entry
3153 of an external vtable for example.  */
3154 cgraph_get_create_node (fn);
3159 /* Return true iff VAL is a gimple expression that is known to be
3160 non-negative. Restricted to floating-point inputs. */
3163 gimple_val_nonnegative_real_p (tree val)
3167 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3169 /* Use existing logic for non-gimple trees. */
3170 if (tree_expr_nonnegative_p (val))
3173 if (TREE_CODE (val) != SSA_NAME)
3176 /* Currently we look only at the immediately defining statement
3177 to make this determination, since recursion on defining
3178 statements of operands can lead to quadratic behavior in the
3179 worst case. This is expected to catch almost all occurrences
3180 in practice. It would be possible to implement limited-depth
3181 recursion if important cases are lost. Alternatively, passes
3182 that need this information (such as the pow/powi lowering code
3183 in the cse_sincos pass) could be revised to provide it through
3184 dataflow propagation. */
3186 def_stmt = SSA_NAME_DEF_STMT (val);
3188 if (is_gimple_assign (def_stmt))
3192 /* See fold-const.c:tree_expr_nonnegative_p for additional
3193 cases that could be handled with recursion. */
3195 switch (gimple_assign_rhs_code (def_stmt))
3198 /* Always true for floating-point operands. */
3202 /* True if the two operands are identical (since we are
3203 restricted to floating-point inputs). */
3204 op0 = gimple_assign_rhs1 (def_stmt);
3205 op1 = gimple_assign_rhs2 (def_stmt);
3208 || operand_equal_p (op0, op1, 0))
3215 else if (is_gimple_call (def_stmt))
3217 tree fndecl = gimple_call_fndecl (def_stmt);
3219 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3223 switch (DECL_FUNCTION_CODE (fndecl))
3225 CASE_FLT_FN (BUILT_IN_ACOS):
3226 CASE_FLT_FN (BUILT_IN_ACOSH):
3227 CASE_FLT_FN (BUILT_IN_CABS):
3228 CASE_FLT_FN (BUILT_IN_COSH):
3229 CASE_FLT_FN (BUILT_IN_ERFC):
3230 CASE_FLT_FN (BUILT_IN_EXP):
3231 CASE_FLT_FN (BUILT_IN_EXP10):
3232 CASE_FLT_FN (BUILT_IN_EXP2):
3233 CASE_FLT_FN (BUILT_IN_FABS):
3234 CASE_FLT_FN (BUILT_IN_FDIM):
3235 CASE_FLT_FN (BUILT_IN_HYPOT):
3236 CASE_FLT_FN (BUILT_IN_POW10):
3239 CASE_FLT_FN (BUILT_IN_SQRT):
3240 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3241 nonnegative inputs. */
3242 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3247 CASE_FLT_FN (BUILT_IN_POWI):
3248 /* True if the second argument is an even integer. */
3249 arg1 = gimple_call_arg (def_stmt, 1);
3251 if (TREE_CODE (arg1) == INTEGER_CST
3252 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3257 CASE_FLT_FN (BUILT_IN_POW):
3258 /* True if the second argument is an even integer-valued
3260 arg1 = gimple_call_arg (def_stmt, 1);
3262 if (TREE_CODE (arg1) == REAL_CST)
3267 c = TREE_REAL_CST (arg1);
3268 n = real_to_integer (&c);
3272 REAL_VALUE_TYPE cint;
3273 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3274 if (real_identical (&c, &cint))