1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "gimple-iterator.h"
32 #include "gimple-ssa.h"
33 #include "tree-ssanames.h"
34 #include "tree-into-ssa.h"
37 #include "tree-ssa-propagate.h"
39 #include "ipa-utils.h"
40 #include "gimple-pretty-print.h"
41 #include "tree-ssa-address.h"
42 #include "langhooks.h"
44 /* Return true when DECL can be referenced from current unit.
45 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
46 We can get declarations that are not possible to reference for various
49 1) When analyzing C++ virtual tables.
50 C++ virtual tables do have known constructors even
51 when they are keyed to other compilation unit.
52 Those tables can contain pointers to methods and vars
53 in other units. Those methods have both STATIC and EXTERNAL
55 2) In WHOPR mode devirtualization might lead to reference
56 to method that was partitioned elsewhere.
57 In this case we have static VAR_DECL or FUNCTION_DECL
58 that has no corresponding callgraph/varpool node
60 3) COMDAT functions referred by external vtables that
61 we devirtualize only during final compilation stage.
62 At this time we already decided that we will not output
63 the function body and thus we can't reference the symbol
/* NOTE(review): this chunk has elided lines (the embedded line numbers
   skip), including the return-type line of this definition and several
   braces/returns below -- verify against upstream gimple-fold.c.  */
67 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
69 struct varpool_node *vnode;
70 struct cgraph_node *node;
/* Abstract decls exist only for debug info and can never be referenced.  */
73 if (DECL_ABSTRACT (decl))
76 /* We are concerned only about static/external vars and functions. */
77 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
78 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
81 /* Static objects can be referred only if they were not optimized out yet. */
82 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
84 snode = symtab_get_node (decl);
/* A decl without a symbol-table node, or a function body already inlined
   into all its callers, cannot be referenced anymore.  */
87 node = dyn_cast <cgraph_node> (snode);
88 return !node || !node->global.inlined_to;
91 /* We will later output the initializer, so we can refer to it.
92 So we are concerned only when DECL comes from initializer of
95 || TREE_CODE (from_decl) != VAR_DECL
96 || !DECL_EXTERNAL (from_decl)
98 && symtab_get_node (from_decl)->in_other_partition))
100 /* We are folding reference from external vtable. The vtable may refer
101 to a symbol keyed to other compilation unit. The other compilation
102 unit may be in separate DSO and the symbol may be hidden. */
103 if (DECL_VISIBILITY_SPECIFIED (decl)
104 && DECL_EXTERNAL (decl)
105 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
107 /* When function is public, we always can introduce new reference.
108 Exception are the COMDAT functions where introducing a direct
109 reference implies the need to include function body in the current unit. */
110 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
112 /* We are not at ltrans stage; so don't worry about WHOPR.
113 Also when still gimplifying all referred comdat functions will be
116 As observed in PR20991 for already optimized out comdat virtual functions
117 it may be tempting to not necessarily give up because the copy will be
118 output elsewhere when corresponding vtable is output.
119 This is however not possible - ABI specify that COMDATs are output in
120 units where they are used and when the other unit was compiled with LTO
121 it is possible that vtable was kept public while the function itself
123 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
126 /* OK we are seeing either COMDAT or static variable. In this case we must
127 check that the definition is still around so we can refer it. */
128 if (TREE_CODE (decl) == FUNCTION_DECL)
130 node = cgraph_get_node (decl);
131 /* Check that we still have function body and that we didn't take
132 the decision to eliminate offline copy of the function yet.
133 The second is important when devirtualization happens during final
134 compilation stage when making a new reference no longer makes callee
136 if (!node || !node->definition || node->global.inlined_to)
138 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
142 else if (TREE_CODE (decl) == VAR_DECL)
144 vnode = varpool_get_node (decl);
/* Same check for variables: the varpool node must still hold a definition.  */
145 if (!vnode || !vnode->definition)
147 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
154 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
155 acceptable form for is_gimple_min_invariant.
156 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or (presumably, given the elided lines
   in this chunk) NULL_TREE when CVAL cannot be referenced -- TODO confirm
   against upstream.  */
/* NOTE(review): the return-type line appears elided here.  */
159 canonicalize_constructor_val (tree cval, tree from_decl)
161 tree orig_cval = cval;
/* Turn PTR p+ CST into the canonical &MEM_REF[PTR, CST] form.  */
163 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
164 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
166 tree ptr = TREE_OPERAND (cval, 0);
167 if (is_gimple_min_invariant (ptr))
168 cval = build1_loc (EXPR_LOCATION (cval),
169 ADDR_EXPR, TREE_TYPE (ptr),
170 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
172 fold_convert (ptr_type_node,
173 TREE_OPERAND (cval, 1))));
175 if (TREE_CODE (cval) == ADDR_EXPR)
177 tree base = NULL_TREE;
/* Replace a compound literal by its underlying decl.  */
178 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
180 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
182 TREE_OPERAND (cval, 0) = base;
185 base = get_base_address (TREE_OPERAND (cval, 0));
/* Addresses of decls we cannot refer to in this unit are not invariants.  */
189 if ((TREE_CODE (base) == VAR_DECL
190 || TREE_CODE (base) == FUNCTION_DECL)
191 && !can_refer_decl_in_current_unit_p (base, from_decl))
/* Taking the variable's address makes it addressable.  */
193 if (TREE_CODE (base) == VAR_DECL)
194 TREE_ADDRESSABLE (base) = 1;
195 else if (TREE_CODE (base) == FUNCTION_DECL)
197 /* Make sure we create a cgraph node for functions we'll reference.
198 They can be non-existent if the reference comes from an entry
199 of an external vtable for example. */
200 cgraph_get_create_node (base);
202 /* Fixup types in global initializers. */
203 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
204 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
206 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
207 cval = fold_convert (TREE_TYPE (orig_cval), cval);
/* Constants coming from initializers must not carry TREE_OVERFLOW.  */
210 if (TREE_OVERFLOW_P (cval))
211 return drop_tree_overflow (cval);
215 /* If SYM is a constant variable with known value, return the value.
216 NULL_TREE is returned otherwise.
   SYM is looked up via ctor_for_folding; only values that canonicalize
   to a gimple minimal invariant are usable as replacements.  */
/* NOTE(review): some lines of this function appear elided in this
   chunk (embedded numbering skips) -- verify against upstream.  */
219 get_symbol_constant_value (tree sym)
221 tree val = ctor_for_folding (sym);
222 if (val != error_mark_node)
226 val = canonicalize_constructor_val (unshare_expr (val), sym);
227 if (val && is_gimple_min_invariant (val))
232 /* Variables declared 'const' without an initializer
233 have zero as the initializer if they may not be
234 overridden at link or run time. */
236 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
237 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
238 return build_zero_cst (TREE_TYPE (sym));
246 /* Subroutine of fold_stmt. We perform several simplifications of the
247 memory reference tree EXPR and make sure to re-gimplify them properly
248 after propagation of constant addresses. IS_LHS is true if the
249 reference is supposed to be an lvalue.
   Returns the simplified reference, or (presumably, given elided lines)
   NULL_TREE when nothing was folded -- TODO confirm against upstream.  */
/* NOTE(review): declarations of 't' and 'result' and several braces are
   elided from this chunk; verify against upstream gimple-fold.c.  */
252 maybe_fold_reference (tree expr, bool is_lhs)
/* Fold constant-operand VIEW_CONVERT/REALPART/IMAGPART immediately.  */
257 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
258 || TREE_CODE (expr) == REALPART_EXPR
259 || TREE_CODE (expr) == IMAGPART_EXPR)
260 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
261 return fold_unary_loc (EXPR_LOCATION (expr),
264 TREE_OPERAND (expr, 0));
265 else if (TREE_CODE (expr) == BIT_FIELD_REF
266 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
267 return fold_ternary_loc (EXPR_LOCATION (expr),
270 TREE_OPERAND (expr, 0),
271 TREE_OPERAND (expr, 1),
272 TREE_OPERAND (expr, 2));
/* Walk down to the base of the handled-component chain.  */
274 while (handled_component_p (*t))
275 t = &TREE_OPERAND (*t, 0);
277 /* Canonicalize MEM_REFs invariant address operand. Do this first
278 to avoid feeding non-canonical MEM_REFs elsewhere. */
279 if (TREE_CODE (*t) == MEM_REF
280 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
/* Preserve volatility across the re-fold of the MEM_REF.  */
282 bool volatile_p = TREE_THIS_VOLATILE (*t);
283 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
284 TREE_OPERAND (*t, 0),
285 TREE_OPERAND (*t, 1));
288 TREE_THIS_VOLATILE (tem) = volatile_p;
/* Retry folding on the canonicalized form.  */
290 tem = maybe_fold_reference (expr, is_lhs);
/* Rvalue references into constant aggregates may fold to a constant.  */
298 && (result = fold_const_aggregate_ref (expr))
299 && is_gimple_min_invariant (result))
302 /* Fold back MEM_REFs to reference trees. */
303 if (TREE_CODE (*t) == MEM_REF
304 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
305 && integer_zerop (TREE_OPERAND (*t, 1))
306 && (TREE_THIS_VOLATILE (*t)
307 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
308 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
309 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
310 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
311 /* We have to look out here to not drop a required conversion
312 from the rhs to the lhs if is_lhs, but we don't have the
313 rhs here to verify that. Thus require strict type
315 && types_compatible_p (TREE_TYPE (*t),
316 TREE_TYPE (TREE_OPERAND
317 (TREE_OPERAND (*t, 0), 0))))
320 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
321 tem = maybe_fold_reference (expr, is_lhs);
/* TARGET_MEM_REFs get their own folder.  */
326 else if (TREE_CODE (*t) == TARGET_MEM_REF)
328 tree tem = maybe_fold_tmr (*t);
332 tem = maybe_fold_reference (expr, is_lhs);
343 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
344 replacement rhs for the statement or NULL_TREE if no simplification
345 could be made. It is assumed that the operands have been previously
   folded.  Dispatches on the RHS class of the assignment.  */
/* NOTE(review): braces, return statements and some declarations are
   elided from this chunk (embedded numbering skips); verify against
   upstream gimple-fold.c.  */
349 fold_gimple_assign (gimple_stmt_iterator *si)
351 gimple stmt = gsi_stmt (*si);
352 enum tree_code subcode = gimple_assign_rhs_code (stmt);
353 location_t loc = gimple_location (stmt);
355 tree result = NULL_TREE;
357 switch (get_gimple_rhs_class (subcode))
359 case GIMPLE_SINGLE_RHS:
361 tree rhs = gimple_assign_rhs1 (stmt);
/* Try folding *&-style memory references on the RHS.  */
363 if (REFERENCE_CLASS_P (rhs))
364 return maybe_fold_reference (rhs, false);
366 else if (TREE_CODE (rhs) == ADDR_EXPR)
368 tree ref = TREE_OPERAND (rhs, 0);
369 tree tem = maybe_fold_reference (ref, true);
/* &MEM_REF[p, 0] folds back to p (with a possible conversion).  */
371 && TREE_CODE (tem) == MEM_REF
372 && integer_zerop (TREE_OPERAND (tem, 1)))
373 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
375 result = fold_convert (TREE_TYPE (rhs),
376 build_fold_addr_expr_loc (loc, tem));
377 else if (TREE_CODE (ref) == MEM_REF
378 && integer_zerop (TREE_OPERAND (ref, 1)))
379 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
/* A fully-populated vector CONSTRUCTOR of constants becomes a VECTOR_CST.  */
382 else if (TREE_CODE (rhs) == CONSTRUCTOR
383 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
384 && (CONSTRUCTOR_NELTS (rhs)
385 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
387 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
391 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
392 if (TREE_CODE (val) != INTEGER_CST
393 && TREE_CODE (val) != REAL_CST
394 && TREE_CODE (val) != FIXED_CST)
397 return build_vector_from_ctor (TREE_TYPE (rhs),
398 CONSTRUCTOR_ELTS (rhs));
/* A plain decl may fold to its known constant value.  */
401 else if (DECL_P (rhs))
402 return get_symbol_constant_value (rhs);
404 /* If we couldn't fold the RHS, hand over to the generic
406 if (result == NULL_TREE)
409 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
410 that may have been added by fold, and "useless" type
411 conversions that might now be apparent due to propagation. */
412 STRIP_USELESS_TYPE_CONVERSION (result);
414 if (result != rhs && valid_gimple_rhs_p (result))
421 case GIMPLE_UNARY_RHS:
423 tree rhs = gimple_assign_rhs1 (stmt);
425 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
428 /* If the operation was a conversion do _not_ mark a
429 resulting constant with TREE_OVERFLOW if the original
430 constant was not. These conversions have implementation
431 defined behavior and retaining the TREE_OVERFLOW flag
432 here would confuse later passes such as VRP. */
433 if (CONVERT_EXPR_CODE_P (subcode)
434 && TREE_CODE (result) == INTEGER_CST
435 && TREE_CODE (rhs) == INTEGER_CST)
436 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
438 STRIP_USELESS_TYPE_CONVERSION (result);
439 if (valid_gimple_rhs_p (result))
445 case GIMPLE_BINARY_RHS:
446 /* Try to canonicalize for boolean-typed X the comparisons
447 X == 0, X == 1, X != 0, and X != 1. */
448 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
449 || gimple_assign_rhs_code (stmt) == NE_EXPR)
451 tree lhs = gimple_assign_lhs (stmt);
452 tree op1 = gimple_assign_rhs1 (stmt);
453 tree op2 = gimple_assign_rhs2 (stmt);
454 tree type = TREE_TYPE (op1);
456 /* Check whether the comparison operands are of the same boolean
457 type as the result type is.
458 Check that second operand is an integer-constant with value
460 if (TREE_CODE (op2) == INTEGER_CST
461 && (integer_zerop (op2) || integer_onep (op2))
462 && useless_type_conversion_p (TREE_TYPE (lhs), type))
464 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
465 bool is_logical_not = false;
467 /* X == 0 and X != 1 is a logical-not of X
468 X == 1 and X != 0 is X */
469 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
470 || (cmp_code == NE_EXPR && integer_onep (op2)))
471 is_logical_not = true;
473 if (is_logical_not == false)
475 /* Only for one-bit precision typed X the transformation
476 !X -> ~X is valid. */
477 else if (TYPE_PRECISION (type) == 1)
478 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
480 /* Otherwise we use !X -> X ^ 1. */
482 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
483 type, op1, build_int_cst (type, 1));
/* Fall back to generic binary folding.  */
489 result = fold_binary_loc (loc, subcode,
490 TREE_TYPE (gimple_assign_lhs (stmt)),
491 gimple_assign_rhs1 (stmt),
492 gimple_assign_rhs2 (stmt));
496 STRIP_USELESS_TYPE_CONVERSION (result);
497 if (valid_gimple_rhs_p (result))
502 case GIMPLE_TERNARY_RHS:
503 /* Try to fold a conditional expression. */
504 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
506 tree op0 = gimple_assign_rhs1 (stmt);
509 location_t cond_loc = gimple_location (stmt);
/* Fold the comparison condition first, deferring overflow warnings
   until we know whether the fold result is actually usable.  */
511 if (COMPARISON_CLASS_P (op0))
513 fold_defer_overflow_warnings ();
514 tem = fold_binary_loc (cond_loc,
515 TREE_CODE (op0), TREE_TYPE (op0),
516 TREE_OPERAND (op0, 0),
517 TREE_OPERAND (op0, 1));
518 /* This is actually a conditional expression, not a GIMPLE
519 conditional statement, however, the valid_gimple_rhs_p
520 test still applies. */
521 set = (tem && is_gimple_condexpr (tem)
522 && valid_gimple_rhs_p (tem));
523 fold_undefer_overflow_warnings (set, stmt, 0);
525 else if (is_gimple_min_invariant (op0))
534 result = fold_build3_loc (cond_loc, COND_EXPR,
535 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
536 gimple_assign_rhs2 (stmt),
537 gimple_assign_rhs3 (stmt));
/* Non-COND_EXPR ternaries go through generic ternary folding.  */
541 result = fold_ternary_loc (loc, subcode,
542 TREE_TYPE (gimple_assign_lhs (stmt)),
543 gimple_assign_rhs1 (stmt),
544 gimple_assign_rhs2 (stmt),
545 gimple_assign_rhs3 (stmt));
549 STRIP_USELESS_TYPE_CONVERSION (result);
550 if (valid_gimple_rhs_p (result))
555 case GIMPLE_INVALID_RHS:
562 /* Attempt to fold a conditional statement. Return true if any changes were
563 made. We only attempt to fold the condition expression, and do not perform
564 any transformation that would require alteration of the cfg. It is
565 assumed that the operands have been previously folded. */
/* NOTE(review): the return-type line and closing lines of this function
   appear elided in this chunk; verify against upstream.  */
568 fold_gimple_cond (gimple stmt)
570 tree result = fold_binary_loc (gimple_location (stmt),
571 gimple_cond_code (stmt),
573 gimple_cond_lhs (stmt),
574 gimple_cond_rhs (stmt));
578 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form if it is still a valid GIMPLE condition.  */
579 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
581 gimple_cond_set_condition_from_tree (stmt, result);
589 /* Convert EXPR into a GIMPLE value suitable for substitution on the
590 RHS of an assignment. Insert the necessary statements before
591 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
592 is replaced. If the call is expected to produce a result, then it
593 is replaced by an assignment of the new RHS to the result variable.
594 If the result is to be ignored, then the call is replaced by a
595 GIMPLE_NOP. A proper VDEF chain is retained by making the first
596 VUSE and the last VDEF of the whole sequence be the same as the replaced
597 statement and using new SSA names for stores in between. */
/* NOTE(review): several lines (declarations of lhs/laststore/reaching_vuse,
   braces, returns) are elided from this chunk; verify against upstream.  */
600 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
603 gimple stmt, new_stmt;
604 gimple_stmt_iterator i;
605 gimple_seq stmts = NULL;
606 struct gimplify_ctx gctx;
610 stmt = gsi_stmt (*si_p);
612 gcc_assert (is_gimple_call (stmt));
614 push_gimplify_context (&gctx);
615 gctx.into_ssa = gimple_in_ssa_p (cfun);
617 lhs = gimple_call_lhs (stmt);
/* No LHS: gimplify EXPR purely for its side effects.  */
618 if (lhs == NULL_TREE)
620 gimplify_and_add (expr, &stmts);
621 /* We can end up with folding a memcpy of an empty class assignment
622 which gets optimized away by C++ gimplification. */
623 if (gimple_seq_empty_p (stmts))
625 pop_gimplify_context (NULL);
626 if (gimple_in_ssa_p (cfun))
628 unlink_stmt_vdef (stmt);
631 gsi_replace (si_p, gimple_build_nop (), true);
/* LHS present: gimplify EXPR into a temporary, then assign it to LHS.  */
637 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
638 new_stmt = gimple_build_assign (lhs, tmp);
639 i = gsi_last (stmts);
640 gsi_insert_after_without_update (&i, new_stmt,
641 GSI_CONTINUE_LINKING);
644 pop_gimplify_context (NULL);
646 if (gimple_has_location (stmt))
647 annotate_all_with_location (stmts, gimple_location (stmt));
649 /* First iterate over the replacement statements backward, assigning
650 virtual operands to their defining statements. */
652 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
654 new_stmt = gsi_stmt (i);
/* Statements that store to memory (or may clobber it) need a VDEF.  */
655 if ((gimple_assign_single_p (new_stmt)
656 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
657 || (is_gimple_call (new_stmt)
658 && (gimple_call_flags (new_stmt)
659 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
/* The last store in the sequence reuses the replaced statement's VDEF;
   earlier stores get fresh SSA names.  */
663 vdef = gimple_vdef (stmt);
665 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
666 gimple_set_vdef (new_stmt, vdef);
667 if (vdef && TREE_CODE (vdef) == SSA_NAME)
668 SSA_NAME_DEF_STMT (vdef) = new_stmt;
669 laststore = new_stmt;
673 /* Second iterate over the statements forward, assigning virtual
674 operands to their uses. */
675 reaching_vuse = gimple_vuse (stmt);
676 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
678 new_stmt = gsi_stmt (i);
679 /* If the new statement possibly has a VUSE, update it with exact SSA
680 name we know will reach this one. */
681 if (gimple_has_mem_ops (new_stmt))
682 gimple_set_vuse (new_stmt, reaching_vuse);
683 gimple_set_modified (new_stmt, true);
684 if (gimple_vdef (new_stmt))
685 reaching_vuse = gimple_vdef (new_stmt);
688 /* If the new sequence does not do a store release the virtual
689 definition of the original statement. */
691 && reaching_vuse == gimple_vuse (stmt))
693 tree vdef = gimple_vdef (stmt);
695 && TREE_CODE (vdef) == SSA_NAME)
697 unlink_stmt_vdef (stmt);
698 release_ssa_name (vdef);
702 /* Finally replace the original statement with the sequence. */
703 gsi_replace_with_seq (si_p, stmts, false);
706 /* Return the string length, maximum string length or maximum value of
708 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
709 is not NULL and, for TYPE == 0, its value is not equal to the length
710 we determine or if we are unable to determine the length or value,
711 return false. VISITED is a bitmap of visited variables.
712 TYPE is 0 if string length should be returned, 1 for maximum string
713 length and 2 for maximum value ARG can have. */
/* NOTE(review): the return-type line, some declarations (val, var,
   def_stmt) and several braces/returns are elided from this chunk;
   verify against upstream gimple-fold.c.  */
716 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* Non-SSA arguments: compute the length/value directly.  */
721 if (TREE_CODE (arg) != SSA_NAME)
723 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
724 if (TREE_CODE (arg) == ADDR_EXPR
725 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
726 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
728 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
729 if (TREE_CODE (aop0) == INDIRECT_REF
730 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
731 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
732 length, visited, type);
/* Only non-negative integer constants are acceptable values.  */
738 if (TREE_CODE (val) != INTEGER_CST
739 || tree_int_cst_sgn (val) < 0)
743 val = c_strlen (arg, 1);
/* Merge with a previously-recorded length: take the maximum for
   TYPE != 0, require exact equality for TYPE == 0.  */
751 if (TREE_CODE (*length) != INTEGER_CST
752 || TREE_CODE (val) != INTEGER_CST)
755 if (tree_int_cst_lt (*length, val))
759 else if (simple_cst_equal (val, *length) != 1)
767 /* If ARG is registered for SSA update we cannot look at its defining
769 if (name_registered_for_update_p (arg))
772 /* If we were already here, break the infinite cycle. */
773 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
777 def_stmt = SSA_NAME_DEF_STMT (var);
779 switch (gimple_code (def_stmt))
782 /* The RHS of the statement defining VAR must either have a
783 constant length or come from another SSA_NAME with a constant
785 if (gimple_assign_single_p (def_stmt)
786 || gimple_assign_unary_nop_p (def_stmt))
788 tree rhs = gimple_assign_rhs1 (def_stmt);
789 return get_maxval_strlen (rhs, length, visited, type);
/* For COND_EXPR both arms must yield a usable length.  */
791 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
793 tree op2 = gimple_assign_rhs2 (def_stmt);
794 tree op3 = gimple_assign_rhs3 (def_stmt);
795 return get_maxval_strlen (op2, length, visited, type)
796 && get_maxval_strlen (op3, length, visited, type);
802 /* All the arguments of the PHI node must have the same constant
806 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
808 tree arg = gimple_phi_arg (def_stmt, i)->def;
810 /* If this PHI has itself as an argument, we cannot
811 determine the string length of this argument. However,
812 if we can find a constant string length for the other
813 PHI args then we can still be sure that this is a
814 constant string length. So be optimistic and just
815 continue with the next argument. */
816 if (arg == gimple_phi_result (def_stmt))
819 if (!get_maxval_strlen (arg, length, visited, type))
831 /* Fold builtin call in statement STMT. Returns a simplified tree.
832 We may return a non-constant expression, including another call
833 to a different function and with different arguments, e.g.,
834 substituting memcpy for strcpy when the string length is known.
835 Note that some builtins expand into inline code that may not
836 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): declarations (callee, result, val[], a, arg_idx, type,
   nargs, visited) and several braces/returns are elided from this chunk;
   verify against upstream gimple-fold.c.  */
839 gimple_fold_builtin (gimple stmt)
847 location_t loc = gimple_location (stmt);
849 gcc_assert (is_gimple_call (stmt));
/* IGNORE is true when the call's value is unused.  */
851 ignore = (gimple_call_lhs (stmt) == NULL);
853 /* First try the generic builtin folder. If that succeeds, return the
855 result = fold_call_stmt (stmt, ignore);
863 /* Ignore MD builtins. */
864 callee = gimple_call_fndecl (stmt);
865 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
868 /* Give up for always_inline inline builtins until they are
870 if (avoid_folding_inline_builtin (callee))
873 /* If the builtin could not be folded, and it has no argument list,
875 nargs = gimple_call_num_args (stmt);
879 /* Limit the work only for builtins we know how to simplify.
   The first switch (presumably) selects which argument index carries the
   string/length operand and what TYPE to pass to get_maxval_strlen --
   the assignments are elided in this chunk; confirm upstream.  */
880 switch (DECL_FUNCTION_CODE (callee))
882 case BUILT_IN_STRLEN:
884 case BUILT_IN_FPUTS_UNLOCKED:
888 case BUILT_IN_STRCPY:
889 case BUILT_IN_STRNCPY:
893 case BUILT_IN_MEMCPY_CHK:
894 case BUILT_IN_MEMPCPY_CHK:
895 case BUILT_IN_MEMMOVE_CHK:
896 case BUILT_IN_MEMSET_CHK:
897 case BUILT_IN_STRNCPY_CHK:
898 case BUILT_IN_STPNCPY_CHK:
902 case BUILT_IN_STRCPY_CHK:
903 case BUILT_IN_STPCPY_CHK:
907 case BUILT_IN_SNPRINTF_CHK:
908 case BUILT_IN_VSNPRINTF_CHK:
916 if (arg_idx >= nargs)
919 /* Try to use the dataflow information gathered by the CCP process. */
920 visited = BITMAP_ALLOC (NULL);
921 bitmap_clear (visited);
923 memset (val, 0, sizeof (val));
924 a = gimple_call_arg (stmt, arg_idx);
925 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
926 val[arg_idx] = NULL_TREE;
928 BITMAP_FREE (visited);
/* Second switch: perform the per-builtin simplification using the
   lengths/values discovered above.  */
931 switch (DECL_FUNCTION_CODE (callee))
933 case BUILT_IN_STRLEN:
934 if (val[0] && nargs == 1)
937 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
939 /* If the result is not a valid gimple value, or not a cast
940 of a valid gimple value, then we cannot use the result. */
941 if (is_gimple_val (new_val)
942 || (CONVERT_EXPR_P (new_val)
943 && is_gimple_val (TREE_OPERAND (new_val, 0))))
948 case BUILT_IN_STRCPY:
949 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
950 result = fold_builtin_strcpy (loc, callee,
951 gimple_call_arg (stmt, 0),
952 gimple_call_arg (stmt, 1),
956 case BUILT_IN_STRNCPY:
957 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
958 result = fold_builtin_strncpy (loc, callee,
959 gimple_call_arg (stmt, 0),
960 gimple_call_arg (stmt, 1),
961 gimple_call_arg (stmt, 2),
/* fputs / fputs_unlocked: fold using the known string length val[0].  */
967 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
968 gimple_call_arg (stmt, 1),
969 ignore, false, val[0]);
972 case BUILT_IN_FPUTS_UNLOCKED:
974 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
975 gimple_call_arg (stmt, 1),
976 ignore, true, val[0]);
979 case BUILT_IN_MEMCPY_CHK:
980 case BUILT_IN_MEMPCPY_CHK:
981 case BUILT_IN_MEMMOVE_CHK:
982 case BUILT_IN_MEMSET_CHK:
983 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
984 result = fold_builtin_memory_chk (loc, callee,
985 gimple_call_arg (stmt, 0),
986 gimple_call_arg (stmt, 1),
987 gimple_call_arg (stmt, 2),
988 gimple_call_arg (stmt, 3),
990 DECL_FUNCTION_CODE (callee));
993 case BUILT_IN_STRCPY_CHK:
994 case BUILT_IN_STPCPY_CHK:
995 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
996 result = fold_builtin_stxcpy_chk (loc, callee,
997 gimple_call_arg (stmt, 0),
998 gimple_call_arg (stmt, 1),
999 gimple_call_arg (stmt, 2),
1001 DECL_FUNCTION_CODE (callee));
1004 case BUILT_IN_STRNCPY_CHK:
1005 case BUILT_IN_STPNCPY_CHK:
1006 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1007 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1008 gimple_call_arg (stmt, 1),
1009 gimple_call_arg (stmt, 2),
1010 gimple_call_arg (stmt, 3),
1012 DECL_FUNCTION_CODE (callee));
1015 case BUILT_IN_SNPRINTF_CHK:
1016 case BUILT_IN_VSNPRINTF_CHK:
1017 if (val[1] && is_gimple_val (val[1]))
1018 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1019 DECL_FUNCTION_CODE (callee));
/* Discard a result the caller does not use.  */
1026 if (result && ignore)
1027 result = fold_ignored_result (result);
1032 /* Return a binfo to be used for devirtualization of calls based on an object
1033 represented by a declaration (i.e. a global or automatically allocated one)
1034 or NULL if it cannot be found or is not safe. CST is expected to be an
1035 ADDR_EXPR of such object or the function will return NULL. Currently it is
1036 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1037 EXPECTED_TYPE is type of the class the virtual call belongs to. */
/* NOTE(review): the return-type line and several braces/returns are elided
   from this chunk; verify against upstream gimple-fold.c.  */
1040 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1042 HOST_WIDE_INT offset, size, max_size;
1043 tree base, type, binfo;
1044 bool last_artificial = false;
1046 if (!flag_devirtualize
1047 || TREE_CODE (cst) != ADDR_EXPR
1048 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1051 cst = TREE_OPERAND (cst, 0);
1052 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1053 type = TREE_TYPE (base);
1057 || TREE_CODE (type) != RECORD_TYPE)
1060 /* Find the sub-object the constant actually refers to and mark whether it is
1061 an artificial one (as opposed to a user-defined one). */
1064 HOST_WIDE_INT pos, size;
/* Stop descending once we reach the expected class type.  */
1067 if (types_same_for_odr (type, expected_type))
/* Locate the field whose bit range covers OFFSET.  */
1072 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1074 if (TREE_CODE (fld) != FIELD_DECL)
1077 pos = int_bit_position (fld);
1078 size = tree_low_cst (DECL_SIZE (fld), 1);
1079 if (pos <= offset && (pos + size) > offset)
1082 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1085 last_artificial = DECL_ARTIFICIAL (fld);
1086 type = TREE_TYPE (fld);
1089 /* Artificial sub-objects are ancestors, we do not want to use them for
1090 devirtualization, at least not here. */
1091 if (last_artificial)
/* Only a binfo with no base binfos (no ancestors) is safe to use.  */
1093 binfo = TYPE_BINFO (type);
1094 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1100 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1101 The statement may be replaced by another statement, e.g., if the call
1102 simplifies to a constant value. Return true if any changes were made.
1103 It is assumed that the operands have been previously folded.
   INPLACE restricts folding to changes that keep the statement in place.  */
/* NOTE(review): the return-type line, some declarations and several
   braces/returns are elided from this chunk; verify against upstream.  */
1106 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1108 gimple stmt = gsi_stmt (*gsi);
1110 bool changed = false;
1113 /* Fold *& in call arguments. */
1114 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1115 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1117 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1120 gimple_call_set_arg (stmt, i, tmp);
1125 /* Check for virtual calls that became direct calls. */
1126 callee = gimple_call_fn (stmt);
1127 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1129 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* Diagnose devirtualizations that contradict the type-inheritance graph.  */
1131 if (dump_file && virtual_method_call_p (callee)
1132 && !possible_polymorphic_call_target_p
1133 (callee, cgraph_get_node (gimple_call_addr_fndecl
1134 (OBJ_TYPE_REF_EXPR (callee)))))
/* NOTE(review): the dump string below misspells "inheritance"; left
   unchanged here since it is runtime output, not a comment.  */
1137 "Type inheritnace inconsistent devirtualization of ");
1138 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1139 fprintf (dump_file, " to ");
1140 print_generic_expr (dump_file, callee, TDF_SLIM);
1141 fprintf (dump_file, "\n");
/* Strip the OBJ_TYPE_REF wrapper, making the call direct.  */
1144 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1147 else if (virtual_method_call_p (callee))
1149 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1150 tree binfo = gimple_extract_devirt_binfo_from_cst
1151 (obj, obj_type_ref_class (callee));
/* Look up the target method by its vtable token in the binfo.  */
1155 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1156 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1159 #ifdef ENABLE_CHECKING
1160 gcc_assert (possible_polymorphic_call_target_p
1161 (callee, cgraph_get_node (fndecl)));
1164 gimple_call_set_fndecl (stmt, fndecl);
1174 /* Check for builtins that CCP can handle using information not
1175 available in the generic fold routines. */
1176 callee = gimple_call_fndecl (stmt);
1177 if (callee && DECL_BUILT_IN (callee))
1179 tree result = gimple_fold_builtin (stmt);
/* Install the folded result; regimplify if it is not a valid GIMPLE RHS.  */
1182 if (!update_call_from_tree (gsi, result))
1183 gimplify_and_update_call_from_tree (gsi, result);
1186 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1187 changed |= targetm.gimple_fold_builtin (gsi);
1193 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1194 distinguishes both cases.
   Returns true (presumably, given the elided lines in this chunk) when
   any folding changed the statement -- TODO confirm against upstream.  */
/* NOTE(review): the return-type line, some declarations and several
   braces/returns are elided from this chunk; verify against upstream.  */
1197 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1199 bool changed = false;
1200 gimple stmt = gsi_stmt (*gsi);
1203 /* Fold the main computation performed by the statement. */
1204 switch (gimple_code (stmt))
1208 unsigned old_num_ops = gimple_num_ops (stmt);
1209 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1210 tree lhs = gimple_assign_lhs (stmt);
1212 /* First canonicalize operand order. This avoids building new
1213 trees if this is the only thing fold would later do. */
1214 if ((commutative_tree_code (subcode)
1215 || commutative_ternary_tree_code (subcode))
1216 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1217 gimple_assign_rhs2 (stmt), false))
1219 tree tem = gimple_assign_rhs1 (stmt);
1220 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1221 gimple_assign_set_rhs2 (stmt, tem);
1224 new_rhs = fold_gimple_assign (gsi);
/* Convert the folded RHS back to the LHS type if needed.  */
1226 && !useless_type_conversion_p (TREE_TYPE (lhs),
1227 TREE_TYPE (new_rhs)))
1228 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding may not grow the number of operands.  */
1231 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1233 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1240 changed |= fold_gimple_cond (stmt);
1244 changed |= gimple_fold_call (gsi, inplace);
1248 /* Fold *& in asm operands. */
1251 const char **oconstraints;
1252 const char *constraint;
1253 bool allows_mem, allows_reg;
1255 noutputs = gimple_asm_noutputs (stmt);
1256 oconstraints = XALLOCAVEC (const char *, noutputs);
1258 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1260 tree link = gimple_asm_output_op (stmt, i);
1261 tree op = TREE_VALUE (link);
1263 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1264 if (REFERENCE_CLASS_P (op)
1265 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1267 TREE_VALUE (link) = op;
1271 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1273 tree link = gimple_asm_input_op (stmt, i);
1274 tree op = TREE_VALUE (link);
1276 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Inputs may fold to a memory reference only when the constraint
   allows memory but not a register.  */
1277 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1278 oconstraints, &allows_mem, &allows_reg);
1279 if (REFERENCE_CLASS_P (op)
1280 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1283 TREE_VALUE (link) = op;
/* Fold references inside debug-bind values without changing semantics.  */
1291 if (gimple_debug_bind_p (stmt))
1293 tree val = gimple_debug_bind_get_value (stmt);
1295 && REFERENCE_CLASS_P (val))
1297 tree tem = maybe_fold_reference (val, false);
1300 gimple_debug_bind_set_value (stmt, tem);
1305 && TREE_CODE (val) == ADDR_EXPR)
1307 tree ref = TREE_OPERAND (val, 0);
1308 tree tem = maybe_fold_reference (ref, false);
1311 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1312 gimple_debug_bind_set_value (stmt, tem);
/* Re-fetch the statement: folding above may have replaced it.  */
1322 stmt = gsi_stmt (*gsi);
1324 /* Fold *& on the lhs. */
1325 if (gimple_has_lhs (stmt))
1327 tree lhs = gimple_get_lhs (stmt);
1328 if (lhs && REFERENCE_CLASS_P (lhs))
1330 tree new_lhs = maybe_fold_reference (lhs, true);
1333 gimple_set_lhs (stmt, new_lhs);
1342 /* Fold the statement pointed to by GSI. In some cases, this function may
1343 replace the whole statement with a new one. Returns true iff folding
1345 The statement pointed to by GSI should be in valid gimple form but may
1346 be in unfolded state as resulting from for example constant propagation
1347 which can produce *&x = 0. */
1350 fold_stmt (gimple_stmt_iterator *gsi)
/* Thin wrapper: full (non-inplace) folding; the statement may be replaced.  */
1352 return fold_stmt_1 (gsi, false);
1355 /* Perform the minimal folding on statement *GSI. Only operations like
1356 *&x created by constant propagation are handled. The statement cannot
1357 be replaced with a new one. Return true if the statement was
1358 changed, false otherwise.
1359 The statement *GSI should be in valid gimple form but may
1360 be in unfolded state as resulting from for example constant propagation
1361 which can produce *&x = 0. */
1364 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1366 gimple stmt = gsi_stmt (*gsi);
1367 bool changed = fold_stmt_1 (gsi, true);
/* Inplace folding must not replace the statement; assert that the iterator
   still points at the same gimple object.  */
1368 gcc_assert (gsi_stmt (*gsi) == stmt);
1372 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1373 if EXPR is null or we don't know how.
1374 If non-null, the result always has boolean type. */
1377 canonicalize_bool (tree expr, bool invert)
/* NOTE(review): the branch guard (presumably `if (invert)') is missing from
   this extraction; the first arm below is the inverted path, where a nonzero
   constant canonicalizes to false and zero to true.  */
1383 if (integer_nonzerop (expr))
1384 return boolean_false_node;
1385 else if (integer_zerop (expr))
1386 return boolean_true_node;
/* Inverted SSA name: build (expr == 0) of boolean type.  */
1387 else if (TREE_CODE (expr) == SSA_NAME)
1388 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1389 build_int_cst (TREE_TYPE (expr), 0))
1390 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1391 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1393 TREE_OPERAND (expr, 0),
1394 TREE_OPERAND (expr, 1));
/* Non-inverted path: only expressions of BOOLEAN_TYPE are handled.  */
1400 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1402 if (integer_nonzerop (expr))
1403 return boolean_true_node;
1404 else if (integer_zerop (expr))
1405 return boolean_false_node;
/* SSA name: canonicalize to (expr != 0) of boolean type.  */
1406 else if (TREE_CODE (expr) == SSA_NAME)
1407 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1408 build_int_cst (TREE_TYPE (expr), 0));
1409 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1410 return fold_build2 (TREE_CODE (expr),
1412 TREE_OPERAND (expr, 0),
1413 TREE_OPERAND (expr, 1));
1419 /* Check to see if a boolean expression EXPR is logically equivalent to the
1420 comparison (OP1 CODE OP2). Check for various identities involving
1424 same_bool_comparison_p (const_tree expr, enum tree_code code,
1425 const_tree op1, const_tree op2)
1429 /* The obvious case. */
1430 if (TREE_CODE (expr) == code
1431 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1432 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1435 /* Check for comparing (name, name != 0) and the case where expr
1436 is an SSA_NAME with a definition matching the comparison. */
1437 if (TREE_CODE (expr) == SSA_NAME
1438 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* (expr != 0) and (expr == nonzero) are both equivalent to expr itself
   when expr is boolean.  */
1440 if (operand_equal_p (expr, op1, 0))
1441 return ((code == NE_EXPR && integer_zerop (op2))
1442 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look at the defining assignment of EXPR and match it
   structurally against (op1 CODE op2).  */
1443 s = SSA_NAME_DEF_STMT (expr);
1444 if (is_gimple_assign (s)
1445 && gimple_assign_rhs_code (s) == code
1446 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1447 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1451 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1452 of name is a comparison, recurse. */
1453 if (TREE_CODE (op1) == SSA_NAME
1454 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1456 s = SSA_NAME_DEF_STMT (op1);
1457 if (is_gimple_assign (s)
1458 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1460 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): recurse with the comparison as-is.  */
1461 if ((c == NE_EXPR && integer_zerop (op2))
1462 || (c == EQ_EXPR && integer_nonzerop (op2)))
1463 return same_bool_comparison_p (expr, c,
1464 gimple_assign_rhs1 (s),
1465 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): recurse with the inverted comparison.  */
1466 if ((c == EQ_EXPR && integer_zerop (op2))
1467 || (c == NE_EXPR && integer_nonzerop (op2)))
1468 return same_bool_comparison_p (expr,
1469 invert_tree_comparison (c, false),
1470 gimple_assign_rhs1 (s),
1471 gimple_assign_rhs2 (s));
1477 /* Check to see if two boolean expressions OP1 and OP2 are logically
1481 same_bool_result_p (const_tree op1, const_tree op2)
1483 /* Simple cases first. */
1484 if (operand_equal_p (op1, op2, 0))
1487 /* Check the cases where at least one of the operands is a comparison.
1488 These are a bit smarter than operand_equal_p in that they apply some
1489 identities on SSA_NAMEs. */
/* Try each direction: match OP1 against the comparison OP2 ...  */
1490 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1491 && same_bool_comparison_p (op1, TREE_CODE (op2),
1492 TREE_OPERAND (op2, 0),
1493 TREE_OPERAND (op2, 1)))
/* ... and OP2 against the comparison OP1.  */
1495 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1496 && same_bool_comparison_p (op2, TREE_CODE (op1),
1497 TREE_OPERAND (op1, 0),
1498 TREE_OPERAND (op1, 1)))
1505 /* Forward declarations for some mutually recursive functions. */
/* These six helpers implement AND/OR simplification of boolean comparisons;
   the *_comparisons_1 and *_var_with_comparison* routines recurse into each
   other, hence the forward declarations.  */
1508 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1509 enum tree_code code2, tree op2a, tree op2b);
1511 and_var_with_comparison (tree var, bool invert,
1512 enum tree_code code2, tree op2a, tree op2b);
1514 and_var_with_comparison_1 (gimple stmt,
1515 enum tree_code code2, tree op2a, tree op2b);
1517 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1518 enum tree_code code2, tree op2a, tree op2b);
1520 or_var_with_comparison (tree var, bool invert,
1521 enum tree_code code2, tree op2a, tree op2b);
1523 or_var_with_comparison_1 (gimple stmt,
1524 enum tree_code code2, tree op2a, tree op2b);
1526 /* Helper function for and_comparisons_1: try to simplify the AND of the
1527 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1528 If INVERT is true, invert the value of the VAR before doing the AND.
1529 Return NULL_EXPR if we can't simplify this to a single expression. */
1532 and_var_with_comparison (tree var, bool invert,
1533 enum tree_code code2, tree op2a, tree op2b)
1536 gimple stmt = SSA_NAME_DEF_STMT (var);
1538 /* We can only deal with variables whose definitions are assignments. */
1539 if (!is_gimple_assign (stmt))
1542 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1543 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1544 Then we only have to consider the simpler non-inverted cases. */
1546 t = or_var_with_comparison_1 (stmt,
1547 invert_tree_comparison (code2, false),
1550 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion (if any) and guarantees a
   boolean-typed result or NULL_TREE.  */
1551 return canonicalize_bool (t, invert);
1554 /* Try to simplify the AND of the ssa variable defined by the assignment
1555 STMT with the comparison specified by (OP2A CODE2 OP2B).
1556 Return NULL_EXPR if we can't simplify this to a single expression. */
1559 and_var_with_comparison_1 (gimple stmt,
1560 enum tree_code code2, tree op2a, tree op2b)
1562 tree var = gimple_assign_lhs (stmt);
1563 tree true_test_var = NULL_TREE;
1564 tree false_test_var = NULL_TREE;
1565 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1567 /* Check for identities like (var AND (var == 0)) => false. */
1568 if (TREE_CODE (op2a) == SSA_NAME
1569 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) or (op2a == nonzero) tests op2a for true.  */
1571 if ((code2 == NE_EXPR && integer_zerop (op2b))
1572 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1574 true_test_var = op2a;
1575 if (var == true_test_var)
/* (op2a == 0) or (op2a != nonzero) tests op2a for false, so
   var AND !var simplifies to false.  */
1578 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1579 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1581 false_test_var = op2a;
1582 if (var == false_test_var)
1583 return boolean_false_node;
1587 /* If the definition is a comparison, recurse on it. */
1588 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1590 tree t = and_comparisons_1 (innercode,
1591 gimple_assign_rhs1 (stmt),
1592 gimple_assign_rhs2 (stmt),
1600 /* If the definition is an AND or OR expression, we may be able to
1601 simplify by reassociating. */
1602 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1603 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1605 tree inner1 = gimple_assign_rhs1 (stmt);
1606 tree inner2 = gimple_assign_rhs2 (stmt);
1609 tree partial = NULL_TREE;
1610 bool is_and = (innercode == BIT_AND_EXPR);
1612 /* Check for boolean identities that don't require recursive examination
1614 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1615 inner1 AND (inner1 OR inner2) => inner1
1616 !inner1 AND (inner1 AND inner2) => false
1617 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1618 Likewise for similar cases involving inner2. */
1619 if (inner1 == true_test_var)
1620 return (is_and ? var : inner1);
1621 else if (inner2 == true_test_var)
1622 return (is_and ? var : inner2);
1623 else if (inner1 == false_test_var)
1625 ? boolean_false_node
1626 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1627 else if (inner2 == false_test_var)
1629 ? boolean_false_node
1630 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1632 /* Next, redistribute/reassociate the AND across the inner tests.
1633 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1634 if (TREE_CODE (inner1) == SSA_NAME
1635 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1636 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1637 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1638 gimple_assign_rhs1 (s),
1639 gimple_assign_rhs2 (s),
1640 code2, op2a, op2b)))
1642 /* Handle the AND case, where we are reassociating:
1643 (inner1 AND inner2) AND (op2a code2 op2b)
1645 If the partial result t is a constant, we win. Otherwise
1646 continue on to try reassociating with the other inner test. */
1649 if (integer_onep (t))
1651 else if (integer_zerop (t))
1652 return boolean_false_node;
1655 /* Handle the OR case, where we are redistributing:
1656 (inner1 OR inner2) AND (op2a code2 op2b)
1657 => (t OR (inner2 AND (op2a code2 op2b))) */
1658 else if (integer_onep (t))
1659 return boolean_true_node;
1661 /* Save partial result for later. */
1665 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1666 if (TREE_CODE (inner2) == SSA_NAME
1667 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1668 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1669 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1670 gimple_assign_rhs1 (s),
1671 gimple_assign_rhs2 (s),
1672 code2, op2a, op2b)))
1674 /* Handle the AND case, where we are reassociating:
1675 (inner1 AND inner2) AND (op2a code2 op2b)
1676 => (inner1 AND t) */
1679 if (integer_onep (t))
1681 else if (integer_zerop (t))
1682 return boolean_false_node;
1683 /* If both are the same, we can apply the identity
1685 else if (partial && same_bool_result_p (t, partial))
1689 /* Handle the OR case. where we are redistributing:
1690 (inner1 OR inner2) AND (op2a code2 op2b)
1691 => (t OR (inner1 AND (op2a code2 op2b)))
1692 => (t OR partial) */
1695 if (integer_onep (t))
1696 return boolean_true_node;
1699 /* We already got a simplification for the other
1700 operand to the redistributed OR expression. The
1701 interesting case is when at least one is false.
1702 Or, if both are the same, we can apply the identity
1704 if (integer_zerop (partial))
1706 else if (integer_zerop (t))
1708 else if (same_bool_result_p (t, partial))
1717 /* Try to simplify the AND of two comparisons defined by
1718 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1719 If this can be done without constructing an intermediate value,
1720 return the resulting tree; otherwise NULL_TREE is returned.
1721 This function is deliberately asymmetric as it recurses on SSA_DEFs
1722 in the first comparison but not the second. */
1725 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1726 enum tree_code code2, tree op2a, tree op2b)
1728 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1730 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1731 if (operand_equal_p (op1a, op2a, 0)
1732 && operand_equal_p (op1b, op2b, 0))
1734 /* Result will be either NULL_TREE, or a combined comparison. */
1735 tree t = combine_comparisons (UNKNOWN_LOCATION,
1736 TRUTH_ANDIF_EXPR, code1, code2,
1737 truth_type, op1a, op1b);
1742 /* Likewise the swapped case of the above. */
1743 if (operand_equal_p (op1a, op2b, 0)
1744 && operand_equal_p (op1b, op2a, 0))
1746 /* Result will be either NULL_TREE, or a combined comparison. */
1747 tree t = combine_comparisons (UNKNOWN_LOCATION,
1748 TRUTH_ANDIF_EXPR, code1,
1749 swap_tree_comparison (code2),
1750 truth_type, op1a, op1b);
1755 /* If both comparisons are of the same value against constants, we might
1756 be able to merge them. */
1757 if (operand_equal_p (op1a, op2a, 0)
1758 && TREE_CODE (op1b) == INTEGER_CST
1759 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the three-way comparison of the two constant bounds; the switch
   tables below decide whether one comparison subsumes the other.  */
1761 int cmp = tree_int_cst_compare (op1b, op2b);
1763 /* If we have (op1a == op1b), we should either be able to
1764 return that or FALSE, depending on whether the constant op1b
1765 also satisfies the other comparison against op2b. */
1766 if (code1 == EQ_EXPR)
1772 case EQ_EXPR: val = (cmp == 0); break;
1773 case NE_EXPR: val = (cmp != 0); break;
1774 case LT_EXPR: val = (cmp < 0); break;
1775 case GT_EXPR: val = (cmp > 0); break;
1776 case LE_EXPR: val = (cmp <= 0); break;
1777 case GE_EXPR: val = (cmp >= 0); break;
1778 default: done = false;
1783 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1785 return boolean_false_node;
1788 /* Likewise if the second comparison is an == comparison. */
1789 else if (code2 == EQ_EXPR)
1795 case EQ_EXPR: val = (cmp == 0); break;
1796 case NE_EXPR: val = (cmp != 0); break;
1797 case LT_EXPR: val = (cmp > 0); break;
1798 case GT_EXPR: val = (cmp < 0); break;
1799 case LE_EXPR: val = (cmp >= 0); break;
1800 case GE_EXPR: val = (cmp <= 0); break;
1801 default: done = false;
1806 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1808 return boolean_false_node;
1812 /* Same business with inequality tests. */
1813 else if (code1 == NE_EXPR)
1818 case EQ_EXPR: val = (cmp != 0); break;
1819 case NE_EXPR: val = (cmp == 0); break;
1820 case LT_EXPR: val = (cmp >= 0); break;
1821 case GT_EXPR: val = (cmp <= 0); break;
1822 case LE_EXPR: val = (cmp > 0); break;
1823 case GE_EXPR: val = (cmp < 0); break;
1828 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1830 else if (code2 == NE_EXPR)
1835 case EQ_EXPR: val = (cmp == 0); break;
1836 case NE_EXPR: val = (cmp != 0); break;
1837 case LT_EXPR: val = (cmp <= 0); break;
1838 case GT_EXPR: val = (cmp >= 0); break;
1839 case LE_EXPR: val = (cmp < 0); break;
1840 case GE_EXPR: val = (cmp > 0); break;
1845 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1848 /* Choose the more restrictive of two < or <= comparisons. */
1849 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1850 && (code2 == LT_EXPR || code2 == LE_EXPR))
1852 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1853 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1855 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1858 /* Likewise choose the more restrictive of two > or >= comparisons. */
1859 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1860 && (code2 == GT_EXPR || code2 == GE_EXPR))
1862 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1863 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1865 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1868 /* Check for singleton ranges. */
1870 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1871 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1872 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1874 /* Check for disjoint ranges. */
1876 && (code1 == LT_EXPR || code1 == LE_EXPR)
1877 && (code2 == GT_EXPR || code2 == GE_EXPR))
1878 return boolean_false_node;
1880 && (code1 == GT_EXPR || code1 == GE_EXPR)
1881 && (code2 == LT_EXPR || code2 == LE_EXPR))
1882 return boolean_false_node;
1885 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1886 NAME's definition is a truth value. See if there are any simplifications
1887 that can be done against the NAME's definition. */
1888 if (TREE_CODE (op1a) == SSA_NAME
1889 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1890 && (integer_zerop (op1b) || integer_onep (op1b)))
1892 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1893 || (code1 == NE_EXPR && integer_onep (op1b)));
1894 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1895 switch (gimple_code (stmt))
1898 /* Try to simplify by copy-propagating the definition. */
1899 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1902 /* If every argument to the PHI produces the same result when
1903 ANDed with the second comparison, we win.
1904 Do not do this unless the type is bool since we need a bool
1905 result here anyway. */
1906 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1908 tree result = NULL_TREE;
1910 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1912 tree arg = gimple_phi_arg_def (stmt, i);
1914 /* If this PHI has itself as an argument, ignore it.
1915 If all the other args produce the same result,
1917 if (arg == gimple_phi_result (stmt))
1919 else if (TREE_CODE (arg) == INTEGER_CST)
1921 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1924 result = boolean_false_node;
1925 else if (!integer_zerop (result))
1929 result = fold_build2 (code2, boolean_type_node,
1931 else if (!same_bool_comparison_p (result,
1935 else if (TREE_CODE (arg) == SSA_NAME
1936 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1939 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1940 /* In simple cases we can look through PHI nodes,
1941 but we have to be careful with loops.
1943 if (! dom_info_available_p (CDI_DOMINATORS)
1944 || gimple_bb (def_stmt) == gimple_bb (stmt)
1945 || dominated_by_p (CDI_DOMINATORS,
1946 gimple_bb (def_stmt),
1949 temp = and_var_with_comparison (arg, invert, code2,
1955 else if (!same_bool_result_p (result, temp))
1971 /* Try to simplify the AND of two comparisons, specified by
1972 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1973 If this can be simplified to a single expression (without requiring
1974 introducing more SSA variables to hold intermediate values),
1975 return the resulting tree. Otherwise return NULL_TREE.
1976 If the result expression is non-null, it has boolean type. */
1979 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1980 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 only recurses on the first comparison's SSA defs,
   so try both argument orders.  */
1982 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1986 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1989 /* Helper function for or_comparisons_1: try to simplify the OR of the
1990 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1991 If INVERT is true, invert the value of VAR before doing the OR.
1992 Return NULL_EXPR if we can't simplify this to a single expression. */
1995 or_var_with_comparison (tree var, bool invert,
1996 enum tree_code code2, tree op2a, tree op2b)
1999 gimple stmt = SSA_NAME_DEF_STMT (var);
2001 /* We can only deal with variables whose definitions are assignments. */
2002 if (!is_gimple_assign (stmt))
2005 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2006 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2007 Then we only have to consider the simpler non-inverted cases. */
2009 t = and_var_with_comparison_1 (stmt,
2010 invert_tree_comparison (code2, false),
2013 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion (if any) and guarantees a
   boolean-typed result or NULL_TREE.  */
2014 return canonicalize_bool (t, invert);
2017 /* Try to simplify the OR of the ssa variable defined by the assignment
2018 STMT with the comparison specified by (OP2A CODE2 OP2B).
2019 Return NULL_EXPR if we can't simplify this to a single expression. */
2022 or_var_with_comparison_1 (gimple stmt,
2023 enum tree_code code2, tree op2a, tree op2b)
2025 tree var = gimple_assign_lhs (stmt);
2026 tree true_test_var = NULL_TREE;
2027 tree false_test_var = NULL_TREE;
2028 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2030 /* Check for identities like (var OR (var != 0)) => true . */
2031 if (TREE_CODE (op2a) == SSA_NAME
2032 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) or (op2a == nonzero) tests op2a for true.  */
2034 if ((code2 == NE_EXPR && integer_zerop (op2b))
2035 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2037 true_test_var = op2a;
2038 if (var == true_test_var)
/* (op2a == 0) or (op2a != nonzero) tests op2a for false, so
   var OR !var simplifies to true.  */
2041 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2042 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2044 false_test_var = op2a;
2045 if (var == false_test_var)
2046 return boolean_true_node;
2050 /* If the definition is a comparison, recurse on it. */
2051 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2053 tree t = or_comparisons_1 (innercode,
2054 gimple_assign_rhs1 (stmt),
2055 gimple_assign_rhs2 (stmt),
2063 /* If the definition is an AND or OR expression, we may be able to
2064 simplify by reassociating. */
2065 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2066 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2068 tree inner1 = gimple_assign_rhs1 (stmt);
2069 tree inner2 = gimple_assign_rhs2 (stmt);
2072 tree partial = NULL_TREE;
2073 bool is_or = (innercode == BIT_IOR_EXPR);
2075 /* Check for boolean identities that don't require recursive examination
2077 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2078 inner1 OR (inner1 AND inner2) => inner1
2079 !inner1 OR (inner1 OR inner2) => true
2080 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2082 if (inner1 == true_test_var)
2083 return (is_or ? var : inner1);
2084 else if (inner2 == true_test_var)
2085 return (is_or ? var : inner2);
2086 else if (inner1 == false_test_var)
2089 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2090 else if (inner2 == false_test_var)
2093 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2095 /* Next, redistribute/reassociate the OR across the inner tests.
2096 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2097 if (TREE_CODE (inner1) == SSA_NAME
2098 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2099 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2100 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2101 gimple_assign_rhs1 (s),
2102 gimple_assign_rhs2 (s),
2103 code2, op2a, op2b)))
2105 /* Handle the OR case, where we are reassociating:
2106 (inner1 OR inner2) OR (op2a code2 op2b)
2108 If the partial result t is a constant, we win. Otherwise
2109 continue on to try reassociating with the other inner test. */
2112 if (integer_onep (t))
2113 return boolean_true_node;
2114 else if (integer_zerop (t))
2118 /* Handle the AND case, where we are redistributing:
2119 (inner1 AND inner2) OR (op2a code2 op2b)
2120 => (t AND (inner2 OR (op2a code op2b))) */
2121 else if (integer_zerop (t))
2122 return boolean_false_node;
2124 /* Save partial result for later. */
2128 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2129 if (TREE_CODE (inner2) == SSA_NAME
2130 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2131 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2132 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2133 gimple_assign_rhs1 (s),
2134 gimple_assign_rhs2 (s),
2135 code2, op2a, op2b)))
2137 /* Handle the OR case, where we are reassociating:
2138 (inner1 OR inner2) OR (op2a code2 op2b)
2140 => (t OR partial) */
2143 if (integer_zerop (t))
2145 else if (integer_onep (t))
2146 return boolean_true_node;
2147 /* If both are the same, we can apply the identity
2149 else if (partial && same_bool_result_p (t, partial))
2153 /* Handle the AND case, where we are redistributing:
2154 (inner1 AND inner2) OR (op2a code2 op2b)
2155 => (t AND (inner1 OR (op2a code2 op2b)))
2156 => (t AND partial) */
2159 if (integer_zerop (t))
2160 return boolean_false_node;
2163 /* We already got a simplification for the other
2164 operand to the redistributed AND expression. The
2165 interesting case is when at least one is true.
2166 Or, if both are the same, we can apply the identity
2168 if (integer_onep (partial))
2170 else if (integer_onep (t))
2172 else if (same_bool_result_p (t, partial))
2181 /* Try to simplify the OR of two comparisons defined by
2182 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2183 If this can be done without constructing an intermediate value,
2184 return the resulting tree; otherwise NULL_TREE is returned.
2185 This function is deliberately asymmetric as it recurses on SSA_DEFs
2186 in the first comparison but not the second. */
2189 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2190 enum tree_code code2, tree op2a, tree op2b)
2192 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2194 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2195 if (operand_equal_p (op1a, op2a, 0)
2196 && operand_equal_p (op1b, op2b, 0))
2198 /* Result will be either NULL_TREE, or a combined comparison. */
2199 tree t = combine_comparisons (UNKNOWN_LOCATION,
2200 TRUTH_ORIF_EXPR, code1, code2,
2201 truth_type, op1a, op1b);
2206 /* Likewise the swapped case of the above. */
2207 if (operand_equal_p (op1a, op2b, 0)
2208 && operand_equal_p (op1b, op2a, 0))
2210 /* Result will be either NULL_TREE, or a combined comparison. */
2211 tree t = combine_comparisons (UNKNOWN_LOCATION,
2212 TRUTH_ORIF_EXPR, code1,
2213 swap_tree_comparison (code2),
2214 truth_type, op1a, op1b);
2219 /* If both comparisons are of the same value against constants, we might
2220 be able to merge them. */
2221 if (operand_equal_p (op1a, op2a, 0)
2222 && TREE_CODE (op1b) == INTEGER_CST
2223 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the three-way comparison of the two constant bounds; the switch
   tables below decide whether one comparison subsumes the other.  */
2225 int cmp = tree_int_cst_compare (op1b, op2b);
2227 /* If we have (op1a != op1b), we should either be able to
2228 return that or TRUE, depending on whether the constant op1b
2229 also satisfies the other comparison against op2b. */
2230 if (code1 == NE_EXPR)
2236 case EQ_EXPR: val = (cmp == 0); break;
2237 case NE_EXPR: val = (cmp != 0); break;
2238 case LT_EXPR: val = (cmp < 0); break;
2239 case GT_EXPR: val = (cmp > 0); break;
2240 case LE_EXPR: val = (cmp <= 0); break;
2241 case GE_EXPR: val = (cmp >= 0); break;
2242 default: done = false;
2247 return boolean_true_node;
2249 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2252 /* Likewise if the second comparison is a != comparison. */
2253 else if (code2 == NE_EXPR)
2259 case EQ_EXPR: val = (cmp == 0); break;
2260 case NE_EXPR: val = (cmp != 0); break;
2261 case LT_EXPR: val = (cmp > 0); break;
2262 case GT_EXPR: val = (cmp < 0); break;
2263 case LE_EXPR: val = (cmp >= 0); break;
2264 case GE_EXPR: val = (cmp <= 0); break;
2265 default: done = false;
2270 return boolean_true_node;
2272 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2276 /* See if an equality test is redundant with the other comparison. */
2277 else if (code1 == EQ_EXPR)
2282 case EQ_EXPR: val = (cmp == 0); break;
2283 case NE_EXPR: val = (cmp != 0); break;
2284 case LT_EXPR: val = (cmp < 0); break;
2285 case GT_EXPR: val = (cmp > 0); break;
2286 case LE_EXPR: val = (cmp <= 0); break;
2287 case GE_EXPR: val = (cmp >= 0); break;
2292 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2294 else if (code2 == EQ_EXPR)
2299 case EQ_EXPR: val = (cmp == 0); break;
2300 case NE_EXPR: val = (cmp != 0); break;
2301 case LT_EXPR: val = (cmp > 0); break;
2302 case GT_EXPR: val = (cmp < 0); break;
2303 case LE_EXPR: val = (cmp >= 0); break;
2304 case GE_EXPR: val = (cmp <= 0); break;
2309 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2312 /* Choose the less restrictive of two < or <= comparisons. */
2313 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2314 && (code2 == LT_EXPR || code2 == LE_EXPR))
2316 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2317 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2319 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2322 /* Likewise choose the less restrictive of two > or >= comparisons. */
2323 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2324 && (code2 == GT_EXPR || code2 == GE_EXPR))
2326 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2327 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2329 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2332 /* Check for singleton ranges. */
2334 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2335 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2336 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2338 /* Check for less/greater pairs that don't restrict the range at all. */
2340 && (code1 == LT_EXPR || code1 == LE_EXPR)
2341 && (code2 == GT_EXPR || code2 == GE_EXPR))
2342 return boolean_true_node;
2344 && (code1 == GT_EXPR || code1 == GE_EXPR)
2345 && (code2 == LT_EXPR || code2 == LE_EXPR))
2346 return boolean_true_node;
2349 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2350 NAME's definition is a truth value. See if there are any simplifications
2351 that can be done against the NAME's definition. */
2352 if (TREE_CODE (op1a) == SSA_NAME
2353 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2354 && (integer_zerop (op1b) || integer_onep (op1b)))
2356 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2357 || (code1 == NE_EXPR && integer_onep (op1b)));
2358 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2359 switch (gimple_code (stmt))
2362 /* Try to simplify by copy-propagating the definition. */
2363 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2366 /* If every argument to the PHI produces the same result when
2367 ORed with the second comparison, we win.
2368 Do not do this unless the type is bool since we need a bool
2369 result here anyway. */
2370 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2372 tree result = NULL_TREE;
2374 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2376 tree arg = gimple_phi_arg_def (stmt, i);
2378 /* If this PHI has itself as an argument, ignore it.
2379 If all the other args produce the same result,
2381 if (arg == gimple_phi_result (stmt))
2383 else if (TREE_CODE (arg) == INTEGER_CST)
2385 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2388 result = boolean_true_node;
2389 else if (!integer_onep (result))
2393 result = fold_build2 (code2, boolean_type_node,
2395 else if (!same_bool_comparison_p (result,
2399 else if (TREE_CODE (arg) == SSA_NAME
2400 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2403 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2404 /* In simple cases we can look through PHI nodes,
2405 but we have to be careful with loops.
2407 if (! dom_info_available_p (CDI_DOMINATORS)
2408 || gimple_bb (def_stmt) == gimple_bb (stmt)
2409 || dominated_by_p (CDI_DOMINATORS,
2410 gimple_bb (def_stmt),
2413 temp = or_var_with_comparison (arg, invert, code2,
2419 else if (!same_bool_result_p (result, temp))
2435 /* Try to simplify the OR of two comparisons, specified by
2436 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2437 If this can be simplified to a single expression (without requiring
2438 introducing more SSA variables to hold intermediate values),
2439 return the resulting tree. Otherwise return NULL_TREE.
2440 If the result expression is non-null, it has boolean type. */
2443 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2444 enum tree_code code2, tree op2a, tree op2b)
/* Try folding in the given operand order first; OR is commutative, so
   on failure retry with the two comparisons swapped. */
2446 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2450 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2454 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2456 Either NULL_TREE, a simplified but non-constant or a constant
2459 ??? This should go into a gimple-fold-inline.h file to be eventually
2460 privatized with the single valueize function used in the various TUs
2461 to avoid the indirect function call overhead. */
2464 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2466 location_t loc = gimple_location (stmt);
/* Dispatch on the statement kind; assignments are further dispatched
   on their right-hand-side class below. */
2467 switch (gimple_code (stmt))
2471 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2473 switch (get_gimple_rhs_class (subcode))
2475 case GIMPLE_SINGLE_RHS:
2477 tree rhs = gimple_assign_rhs1 (stmt);
2478 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2480 if (TREE_CODE (rhs) == SSA_NAME)
2482 /* If the RHS is an SSA_NAME, return its known constant value,
2484 return (*valueize) (rhs);
2486 /* Handle propagating invariant addresses into address
2488 else if (TREE_CODE (rhs) == ADDR_EXPR
2489 && !is_gimple_min_invariant (rhs))
2491 HOST_WIDE_INT offset = 0;
2493 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2497 && (CONSTANT_CLASS_P (base)
2498 || decl_address_invariant_p (base)))
2499 return build_invariant_address (TREE_TYPE (rhs),
/* A CONSTRUCTOR with one value per vector lane: valueize each
   element and build a VECTOR_CST if every element is constant. */
2502 else if (TREE_CODE (rhs) == CONSTRUCTOR
2503 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2504 && (CONSTRUCTOR_NELTS (rhs)
2505 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2510 vec = XALLOCAVEC (tree,
2511 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2512 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2514 val = (*valueize) (val);
2515 if (TREE_CODE (val) == INTEGER_CST
2516 || TREE_CODE (val) == REAL_CST
2517 || TREE_CODE (val) == FIXED_CST)
2523 return build_vector (TREE_TYPE (rhs), vec);
/* References: fold through the outermost wrapper when its operand
   is an SSA name that valueizes to a constant. */
2526 if (kind == tcc_reference)
2528 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2529 || TREE_CODE (rhs) == REALPART_EXPR
2530 || TREE_CODE (rhs) == IMAGPART_EXPR)
2531 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2533 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2534 return fold_unary_loc (EXPR_LOCATION (rhs),
2536 TREE_TYPE (rhs), val);
2538 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2539 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2541 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2542 return fold_ternary_loc (EXPR_LOCATION (rhs),
2544 TREE_TYPE (rhs), val,
2545 TREE_OPERAND (rhs, 1),
2546 TREE_OPERAND (rhs, 2));
2548 else if (TREE_CODE (rhs) == MEM_REF
2549 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2551 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2552 if (TREE_CODE (val) == ADDR_EXPR
2553 && is_gimple_min_invariant (val))
2555 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2557 TREE_OPERAND (rhs, 1));
/* Fall back to folding the reference against a constant
   initializer, if one can be found. */
2562 return fold_const_aggregate_ref_1 (rhs, valueize);
2564 else if (kind == tcc_declaration)
2565 return get_symbol_constant_value (rhs);
2569 case GIMPLE_UNARY_RHS:
2571 /* Handle unary operators that can appear in GIMPLE form.
2572 Note that we know the single operand must be a constant,
2573 so this should almost always return a simplified RHS. */
2574 tree lhs = gimple_assign_lhs (stmt);
2575 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2577 /* Conversions are useless for CCP purposes if they are
2578 value-preserving. Thus the restrictions that
2579 useless_type_conversion_p places for restrict qualification
2580 of pointer types should not apply here.
2581 Substitution later will only substitute to allowed places. */
2582 if (CONVERT_EXPR_CODE_P (subcode)
2583 && POINTER_TYPE_P (TREE_TYPE (lhs))
2584 && POINTER_TYPE_P (TREE_TYPE (op0))
2585 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2586 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2587 && TYPE_MODE (TREE_TYPE (lhs))
2588 == TYPE_MODE (TREE_TYPE (op0)))
2592 fold_unary_ignore_overflow_loc (loc, subcode,
2593 gimple_expr_type (stmt), op0)
2596 case GIMPLE_BINARY_RHS:
2598 /* Handle binary operators that can appear in GIMPLE form. */
2599 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2600 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2602 /* Translate &x + CST into an invariant form suitable for
2603 further propagation. */
2604 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2605 && TREE_CODE (op0) == ADDR_EXPR
2606 && TREE_CODE (op1) == INTEGER_CST)
2608 tree off = fold_convert (ptr_type_node, op1);
2609 return build_fold_addr_expr_loc
2611 fold_build2 (MEM_REF,
2612 TREE_TYPE (TREE_TYPE (op0)),
2613 unshare_expr (op0), off));
2616 return fold_binary_loc (loc, subcode,
2617 gimple_expr_type (stmt), op0, op1);
2620 case GIMPLE_TERNARY_RHS:
2622 /* Handle ternary operators that can appear in GIMPLE form. */
2623 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2624 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2625 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2627 /* Fold embedded expressions in ternary codes. */
2628 if ((subcode == COND_EXPR
2629 || subcode == VEC_COND_EXPR)
2630 && COMPARISON_CLASS_P (op0))
2632 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2633 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2634 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2635 TREE_TYPE (op0), op00, op01);
2640 return fold_ternary_loc (loc, subcode,
2641 gimple_expr_type (stmt), op0, op1, op2);
/* Call statements: fold calls to built-in functions whose arguments
   all valueize to constants. */
2653 if (gimple_call_internal_p (stmt))
2654 /* No folding yet for these functions. */
2657 fn = (*valueize) (gimple_call_fn (stmt));
2658 if (TREE_CODE (fn) == ADDR_EXPR
2659 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2660 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2662 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2665 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2666 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2667 call = build_call_array_loc (loc,
2668 gimple_call_return_type (stmt),
2669 fn, gimple_call_num_args (stmt), args);
2670 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2672 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2673 STRIP_NOPS (retval);
2684 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2685 Returns NULL_TREE if folding to a constant is not possible, otherwise
2686 returns a constant according to is_gimple_min_invariant. */
2689 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
/* Delegate to the _1 worker, then accept the result only when it is
   a gimple minimal invariant, i.e. a genuine constant. */
2691 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2692 if (res && is_gimple_min_invariant (res))
2698 /* The following set of functions are supposed to fold references using
2699 their constant initializers. */
2701 static tree fold_ctor_reference (tree type, tree ctor,
2702 unsigned HOST_WIDE_INT offset,
2703 unsigned HOST_WIDE_INT size, tree);
2705 /* See if we can find constructor defining value of BASE.
2706 When we know the constructor with constant offset (such as
2707 base is array[40] and we do know constructor of array), then
2708 BIT_OFFSET is adjusted accordingly.
2710 As a special case, return error_mark_node when constructor
2711 is not explicitly available, but it is known to be zero
2712 such as 'static const int a;'. */
2714 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2715 tree (*valueize)(tree))
2717 HOST_WIDE_INT bit_offset2, size, max_size;
/* Strip a MEM_REF wrapper: fold its constant byte offset into
   *BIT_OFFSET (in bits) and valueize the SSA-name address so we can
   look through it to the underlying declaration. */
2718 if (TREE_CODE (base) == MEM_REF)
2720 if (!integer_zerop (TREE_OPERAND (base, 1)))
2722 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2724 *bit_offset += (mem_ref_offset (base).low
2729 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2730 base = valueize (TREE_OPERAND (base, 0));
2731 if (!base || TREE_CODE (base) != ADDR_EXPR)
2733 base = TREE_OPERAND (base, 0);
2736 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2737 DECL_INITIAL. If BASE is a nested reference into another
2738 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2739 the inner reference. */
2740 switch (TREE_CODE (base))
2745 tree init = ctor_for_folding (base);
2747 /* Our semantic is exact opposite of ctor_for_folding;
2748 NULL means unknown, while error_mark_node is 0. */
2749 if (init == error_mark_node)
2752 return error_mark_node;
/* Nested reference: resolve the outer base recursively, but only
   when the access has a known constant extent. */
2758 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2759 if (max_size == -1 || size != max_size)
2761 *bit_offset += bit_offset2;
2762 return get_base_constructor (base, bit_offset, valueize);
2773 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2774 to the memory at bit OFFSET.
2776 We only do the simple job of folding byte accesses. */
2779 fold_string_cst_ctor_reference (tree type, tree ctor,
2780 unsigned HOST_WIDE_INT offset,
2781 unsigned HOST_WIDE_INT size)
/* Only handle a single-byte, byte-aligned access whose mode matches
   the string's element mode. */
2783 if (INTEGRAL_TYPE_P (type)
2784 && (TYPE_MODE (type)
2785 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2786 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2788 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2789 && size == BITS_PER_UNIT
2790 && !(offset % BITS_PER_UNIT))
2792 offset /= BITS_PER_UNIT;
2793 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2794 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2797 const char a[20]="hello";
2800 might lead to offset greater than string length. In this case we
2801 know value is either initialized to 0 or out of bounds. Return 0
2803 return build_zero_cst (type);
2808 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2809 SIZE to the memory at bit OFFSET. */
2812 fold_array_ctor_reference (tree type, tree ctor,
2813 unsigned HOST_WIDE_INT offset,
2814 unsigned HOST_WIDE_INT size,
2817 unsigned HOST_WIDE_INT cnt;
2819 double_int low_bound, elt_size;
2820 double_int index, max_index;
2821 double_int access_index;
2822 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2823 HOST_WIDE_INT inner_offset;
2825 /* Compute low bound and elt size. */
2826 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2827 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2828 if (domain_type && TYPE_MIN_VALUE (domain_type))
2830 /* Static constructors for variably sized objects make no sense. */
2831 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2832 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2833 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2836 low_bound = double_int_zero;
2837 /* Static constructors for variably sized objects make no sense. */
2838 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2841 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2844 /* We can handle only constantly sized accesses that are known to not
2845 be larger than size of array element. */
2846 if (!TYPE_SIZE_UNIT (type)
2847 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2848 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2851 /* Compute the array index we look for. */
2852 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2853 .udiv (elt_size, TRUNC_DIV_EXPR)
2854 access_index += low_bound;
/* Truncate/extend the index to the domain's precision and signedness
   so the comparisons below match the constructor's indices. */
2856 access_index = access_index.ext (TYPE_PRECISION (index_type),
2857 TYPE_UNSIGNED (index_type));
2859 /* And offset within the access. */
2860 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2862 /* See if the array field is large enough to span whole access. We do not
2863 care to fold accesses spanning multiple array indexes. */
2864 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2867 index = low_bound - double_int_one;
2869 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2871 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2873 /* Array constructor might explicitly set index, or specify range
2874 or leave index NULL meaning that it is next index after previous
2878 if (TREE_CODE (cfield) == INTEGER_CST)
2879 max_index = index = tree_to_double_int (cfield);
2882 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2883 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2884 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2889 index += double_int_one;
2891 index = index.ext (TYPE_PRECISION (index_type),
2892 TYPE_UNSIGNED (index_type));
2896 /* Do we have match? */
2897 if (access_index.cmp (index, 1) >= 0
2898 && access_index.cmp (max_index, 1) <= 0)
2899 return fold_ctor_reference (type, cval, inner_offset, size,
2902 /* When memory is not explicitly mentioned in constructor,
2903 it is 0 (or out of range). */
2904 return build_zero_cst (type);
2907 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2908 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2911 fold_nonarray_ctor_reference (tree type, tree ctor,
2912 unsigned HOST_WIDE_INT offset,
2913 unsigned HOST_WIDE_INT size,
2916 unsigned HOST_WIDE_INT cnt;
/* Walk the constructor's fields looking for one whose bit range
   overlaps [OFFSET, OFFSET+SIZE). */
2919 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2922 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2923 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2924 tree field_size = DECL_SIZE (cfield);
2925 double_int bitoffset;
2926 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2927 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2928 double_int bitoffset_end, access_end;
2930 /* Variable sized objects in static constructors makes no sense,
2931 but field_size can be NULL for flexible array members. */
2932 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2933 && TREE_CODE (byte_offset) == INTEGER_CST
2934 && (field_size != NULL_TREE
2935 ? TREE_CODE (field_size) == INTEGER_CST
2936 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2938 /* Compute bit offset of the field. */
2939 bitoffset = tree_to_double_int (field_offset)
2940 + byte_offset_cst * bits_per_unit_cst;
2941 /* Compute bit offset where the field ends. */
2942 if (field_size != NULL_TREE)
2943 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2945 bitoffset_end = double_int_zero;
2947 access_end = double_int::from_uhwi (offset)
2948 + double_int::from_uhwi (size);
2950 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2951 [BITOFFSET, BITOFFSET_END)? */
2952 if (access_end.cmp (bitoffset, 0) > 0
2953 && (field_size == NULL_TREE
2954 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2956 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2957 /* We do have overlap. Now see if field is large enough to
2958 cover the access. Give up for accesses spanning multiple
2960 if (access_end.cmp (bitoffset_end, 0) > 0)
2962 if (double_int::from_uhwi (offset).slt (bitoffset))
2964 return fold_ctor_reference (type, cval,
2965 inner_offset.to_uhwi (), size,
2969 /* When memory is not explicitly mentioned in constructor, it is 0. */
2970 return build_zero_cst (type);
2973 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2974 to the memory at bit OFFSET. */
2977 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2978 unsigned HOST_WIDE_INT size, tree from_decl)
2982 /* We found the field with exact match. */
2983 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2985 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2987 /* We are at the end of walk, see if we can view convert the
2989 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2990 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2991 && operand_equal_p (TYPE_SIZE (type),
2992 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2994 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2995 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
/* Otherwise dispatch on the kind of initializer: STRING_CST,
   array/vector CONSTRUCTOR, or record/union CONSTRUCTOR. */
3000 if (TREE_CODE (ctor) == STRING_CST)
3001 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3002 if (TREE_CODE (ctor) == CONSTRUCTOR)
3005 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3006 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3007 return fold_array_ctor_reference (type, ctor, offset, size,
3010 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3017 /* Return the tree representing the element referenced by T if T is an
3018 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
3019 names using VALUEIZE. Return NULL_TREE otherwise. */
3022 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3024 tree ctor, idx, base;
3025 HOST_WIDE_INT offset, size, max_size;
/* Never fold volatile accesses. */
3028 if (TREE_THIS_VOLATILE (t))
3031 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3032 return get_symbol_constant_value (t);
3034 tem = fold_read_from_constant_string (t);
3038 switch (TREE_CODE (t))
3041 case ARRAY_RANGE_REF:
3042 /* Constant indexes are handled well by get_base_constructor.
3043 Only special case variable offsets.
3044 FIXME: This code can't handle nested references with variable indexes
3045 (they will be handled only by iteration of ccp). Perhaps we can bring
3046 get_ref_base_and_extent here and make it use a valueize callback. */
3047 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3049 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3050 && TREE_CODE (idx) == INTEGER_CST)
3052 tree low_bound, unit_size;
3055 /* If the resulting bit-offset is constant, track it. */
3056 if ((low_bound = array_ref_low_bound (t),
3057 TREE_CODE (low_bound) == INTEGER_CST)
3058 && (unit_size = array_ref_element_size (t),
3059 host_integerp (unit_size, 1))
3060 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3061 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3062 doffset.fits_shwi ()))
/* Convert the element index into a bit offset from the base. */
3064 offset = doffset.to_shwi ();
3065 offset *= TREE_INT_CST_LOW (unit_size);
3066 offset *= BITS_PER_UNIT;
3068 base = TREE_OPERAND (t, 0);
3069 ctor = get_base_constructor (base, &offset, valueize);
3070 /* Empty constructor. Always fold to 0. */
3071 if (ctor == error_mark_node)
3072 return build_zero_cst (TREE_TYPE (t));
3073 /* Out of bound array access. Value is undefined,
3077 /* We can not determine ctor. */
3080 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3081 TREE_INT_CST_LOW (unit_size)
3090 case TARGET_MEM_REF:
3092 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3093 ctor = get_base_constructor (base, &offset, valueize);
3095 /* Empty constructor. Always fold to 0. */
3096 if (ctor == error_mark_node)
3097 return build_zero_cst (TREE_TYPE (t));
3098 /* We do not know precise address. */
3099 if (max_size == -1 || max_size != size)
3101 /* We can not determine ctor. */
3105 /* Out of bound array access. Value is undefined, but don't fold. */
3109 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
/* REALPART_EXPR / IMAGPART_EXPR: fold the inner reference first,
   then extract the requested part from a COMPLEX_CST. */
3115 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3116 if (c && TREE_CODE (c) == COMPLEX_CST)
3117 return fold_build1_loc (EXPR_LOCATION (t),
3118 TREE_CODE (t), TREE_TYPE (t), c);
/* Fold a constant aggregate reference T without SSA valueization;
   simply delegates to the _1 worker with a NULL valueize callback. */
3130 fold_const_aggregate_ref (tree t)
3132 return fold_const_aggregate_ref_1 (t, NULL);
3135 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3136 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3137 KNOWN_BINFO carries the binfo describing the true type of
3138 OBJ_TYPE_REF_OBJECT(REF). */
3141 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3143 unsigned HOST_WIDE_INT offset, size;
3144 tree v, fn, vtable, init;
3146 vtable = v = BINFO_VTABLE (known_binfo);
3147 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* BINFO_VTABLE may be &vtable or &vtable plus a byte offset; strip
   the POINTER_PLUS_EXPR and fold the offset into OFFSET (in bits). */
3151 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3153 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3154 v = TREE_OPERAND (v, 0);
3159 if (TREE_CODE (v) != ADDR_EXPR)
3161 v = TREE_OPERAND (v, 0);
3163 if (TREE_CODE (v) != VAR_DECL
3164 || !DECL_VIRTUAL_P (v))
3166 init = ctor_for_folding (v);
3168 /* The virtual tables should always be born with constructors,
3169 and we always should assume that they are available for
3170 folding. At the moment we do not stream them in all cases,
3171 but it should never happen that ctor seem unreachable. */
3173 if (init == error_mark_node)
3175 gcc_assert (in_lto_p);
3178 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3179 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* Index into the vtable array: each entry is SIZE bits wide. */
3180 offset += token * size;
3181 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3183 if (!fn || integer_zerop (fn))
3185 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3186 || TREE_CODE (fn) == FDESC_EXPR);
3187 fn = TREE_OPERAND (fn, 0);
3188 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3190 /* When cgraph node is missing and function is not public, we cannot
3191 devirtualize. This can happen in WHOPR when the actual method
3192 ends up in other partition, because we found devirtualization
3193 possibility too late. */
3194 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3197 /* Make sure we create a cgraph node for functions we'll reference.
3198 They can be non-existent if the reference comes from an entry
3199 of an external vtable for example. */
3200 cgraph_get_create_node (fn);
3205 /* Return true iff VAL is a gimple expression that is known to be
3206 non-negative. Restricted to floating-point inputs. */
3209 gimple_val_nonnegative_real_p (tree val)
3213 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3215 /* Use existing logic for non-gimple trees. */
3216 if (tree_expr_nonnegative_p (val))
3219 if (TREE_CODE (val) != SSA_NAME)
3222 /* Currently we look only at the immediately defining statement
3223 to make this determination, since recursion on defining
3224 statements of operands can lead to quadratic behavior in the
3225 worst case. This is expected to catch almost all occurrences
3226 in practice. It would be possible to implement limited-depth
3227 recursion if important cases are lost. Alternatively, passes
3228 that need this information (such as the pow/powi lowering code
3229 in the cse_sincos pass) could be revised to provide it through
3230 dataflow propagation. */
3232 def_stmt = SSA_NAME_DEF_STMT (val);
3234 if (is_gimple_assign (def_stmt))
3238 /* See fold-const.c:tree_expr_nonnegative_p for additional
3239 cases that could be handled with recursion. */
3241 switch (gimple_assign_rhs_code (def_stmt))
3244 /* Always true for floating-point operands. */
3248 /* True if the two operands are identical (since we are
3249 restricted to floating-point inputs). */
3250 op0 = gimple_assign_rhs1 (def_stmt);
3251 op1 = gimple_assign_rhs2 (def_stmt);
3254 || operand_equal_p (op0, op1, 0))
3261 else if (is_gimple_call (def_stmt))
3263 tree fndecl = gimple_call_fndecl (def_stmt);
3265 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3269 switch (DECL_FUNCTION_CODE (fndecl))
/* Built-ins whose result is non-negative for all inputs. */
3271 CASE_FLT_FN (BUILT_IN_ACOS):
3272 CASE_FLT_FN (BUILT_IN_ACOSH):
3273 CASE_FLT_FN (BUILT_IN_CABS):
3274 CASE_FLT_FN (BUILT_IN_COSH):
3275 CASE_FLT_FN (BUILT_IN_ERFC):
3276 CASE_FLT_FN (BUILT_IN_EXP):
3277 CASE_FLT_FN (BUILT_IN_EXP10):
3278 CASE_FLT_FN (BUILT_IN_EXP2):
3279 CASE_FLT_FN (BUILT_IN_FABS):
3280 CASE_FLT_FN (BUILT_IN_FDIM):
3281 CASE_FLT_FN (BUILT_IN_HYPOT):
3282 CASE_FLT_FN (BUILT_IN_POW10):
3285 CASE_FLT_FN (BUILT_IN_SQRT):
3286 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3287 nonnegative inputs. */
3288 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3293 CASE_FLT_FN (BUILT_IN_POWI):
3294 /* True if the second argument is an even integer. */
3295 arg1 = gimple_call_arg (def_stmt, 1);
3297 if (TREE_CODE (arg1) == INTEGER_CST
3298 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3303 CASE_FLT_FN (BUILT_IN_POW):
3304 /* True if the second argument is an even integer-valued
3306 arg1 = gimple_call_arg (def_stmt, 1);
3308 if (TREE_CODE (arg1) == REAL_CST)
/* Round-trip the real exponent through an integer; if it is
   identical to its integer image and even, pow is non-negative. */
3313 c = TREE_REAL_CST (arg1);
3314 n = real_to_integer (&c);
3318 REAL_VALUE_TYPE cint;
3319 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3320 if (real_identical (&c, &cint))
3336 /* Given a pointer value OP0, return a simplified version of an
3337 indirection through OP0, or NULL_TREE if no simplification is
3338 possible. Note that the resulting type may be different from
3339 the type pointed to in the sense that it is still compatible
3340 from the langhooks point of view. */
3343 gimple_fold_indirect_ref (tree t)
3345 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3350 subtype = TREE_TYPE (sub);
3351 if (!POINTER_TYPE_P (subtype))
3354 if (TREE_CODE (sub) == ADDR_EXPR)
3356 tree op = TREE_OPERAND (sub, 0);
3357 tree optype = TREE_TYPE (op);
3359 if (useless_type_conversion_p (type, optype))
3362 /* *(foo *)&fooarray => fooarray[0] */
3363 if (TREE_CODE (optype) == ARRAY_TYPE
3364 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3365 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3367 tree type_domain = TYPE_DOMAIN (optype);
3368 tree min_val = size_zero_node;
3369 if (type_domain && TYPE_MIN_VALUE (type_domain))
3370 min_val = TYPE_MIN_VALUE (type_domain);
3371 if (TREE_CODE (min_val) == INTEGER_CST)
3372 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3374 /* *(foo *)&complexfoo => __real__ complexfoo */
3375 else if (TREE_CODE (optype) == COMPLEX_TYPE
3376 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3377 return fold_build1 (REALPART_EXPR, type, op);
3378 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3379 else if (TREE_CODE (optype) == VECTOR_TYPE
3380 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3382 tree part_width = TYPE_SIZE (type);
3383 tree index = bitsize_int (0);
3384 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3388 /* *(p + CST) -> ... */
3389 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3390 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3392 tree addr = TREE_OPERAND (sub, 0);
3393 tree off = TREE_OPERAND (sub, 1);
3397 addrtype = TREE_TYPE (addr);
3399 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3400 if (TREE_CODE (addr) == ADDR_EXPR
3401 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3402 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3403 && host_integerp (off, 1))
3405 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
3406 tree part_width = TYPE_SIZE (type);
3407 unsigned HOST_WIDE_INT part_widthi
3408 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
3409 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3410 tree index = bitsize_int (indexi);
3411 if (offset / part_widthi
3412 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3413 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3417 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3418 if (TREE_CODE (addr) == ADDR_EXPR
3419 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3420 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3422 tree size = TYPE_SIZE_UNIT (type);
3423 if (tree_int_cst_equal (size, off))
3424 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3427 /* *(p + CST) -> MEM_REF <p, CST>. */
3428 if (TREE_CODE (addr) != ADDR_EXPR
3429 || DECL_P (TREE_OPERAND (addr, 0)))
3430 return fold_build2 (MEM_REF, type,
3432 build_int_cst_wide (ptype,
3433 TREE_INT_CST_LOW (off),
3434 TREE_INT_CST_HIGH (off)));
3437 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3438 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3439 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3440 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3443 tree min_val = size_zero_node;
3445 sub = gimple_fold_indirect_ref (sub);
3447 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3448 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3449 if (type_domain && TYPE_MIN_VALUE (type_domain))
3450 min_val = TYPE_MIN_VALUE (type_domain);
3451 if (TREE_CODE (min_val) == INTEGER_CST)
3452 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);