1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "gimple-ssa.h"
32 #include "tree-ssanames.h"
33 #include "tree-into-ssa.h"
36 #include "tree-ssa-propagate.h"
38 #include "ipa-utils.h"
39 #include "gimple-pretty-print.h"
40 #include "tree-ssa-address.h"
42 /* Return true when DECL can be referenced from current unit.
43 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
44 We can get declarations that are not possible to reference for various
47 1) When analyzing C++ virtual tables.
48 C++ virtual tables do have known constructors even
49 when they are keyed to other compilation unit.
50 Those tables can contain pointers to methods and vars
51 in other units. Those methods have both STATIC and EXTERNAL
53 2) In WHOPR mode devirtualization might lead to reference
54 to method that was partitioned elsehwere.
55 In this case we have static VAR_DECL or FUNCTION_DECL
56 that has no corresponding callgraph/varpool node
58 3) COMDAT functions referred by external vtables that
59 we devirtualize only during final compilation stage.
60 At this time we already decided that we will not output
61 the function body and thus we can't reference the symbol
65 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
67 struct varpool_node *vnode;
68 struct cgraph_node *node;
71 if (DECL_ABSTRACT (decl))
74 /* We are concerned only about static/external vars and functions. */
75 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
76 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
79 /* Static objects can be referred only if they was not optimized out yet. */
80 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
82 snode = symtab_get_node (decl);
85 node = dyn_cast <cgraph_node> (snode);
86 return !node || !node->global.inlined_to;
89 /* We will later output the initializer, so we can refer to it.
90 So we are concerned only when DECL comes from initializer of
93 || TREE_CODE (from_decl) != VAR_DECL
94 || !DECL_EXTERNAL (from_decl)
96 && symtab_get_node (from_decl)->in_other_partition))
98 /* We are folding reference from external vtable. The vtable may reffer
99 to a symbol keyed to other compilation unit. The other compilation
100 unit may be in separate DSO and the symbol may be hidden. */
101 if (DECL_VISIBILITY_SPECIFIED (decl)
102 && DECL_EXTERNAL (decl)
103 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
105 /* When function is public, we always can introduce new reference.
106 Exception are the COMDAT functions where introducing a direct
107 reference imply need to include function body in the curren tunit. */
108 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
110 /* We are not at ltrans stage; so don't worry about WHOPR.
111 Also when still gimplifying all referred comdat functions will be
114 As observed in PR20991 for already optimized out comdat virtual functions
115 it may be tempting to not necessarily give up because the copy will be
116 output elsewhere when corresponding vtable is output.
117 This is however not possible - ABI specify that COMDATs are output in
118 units where they are used and when the other unit was compiled with LTO
119 it is possible that vtable was kept public while the function itself
121 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
124 /* OK we are seeing either COMDAT or static variable. In this case we must
125 check that the definition is still around so we can refer it. */
126 if (TREE_CODE (decl) == FUNCTION_DECL)
128 node = cgraph_get_node (decl);
129 /* Check that we still have function body and that we didn't took
130 the decision to eliminate offline copy of the function yet.
131 The second is important when devirtualization happens during final
132 compilation stage when making a new reference no longer makes callee
134 if (!node || !node->definition || node->global.inlined_to)
136 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
140 else if (TREE_CODE (decl) == VAR_DECL)
142 vnode = varpool_get_node (decl);
143 if (!vnode || !vnode->definition)
145 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
152 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
153 acceptable form for is_gimple_min_invariant.
154 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
157 canonicalize_constructor_val (tree cval, tree from_decl)
159 tree orig_cval = cval;
161 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
162 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
164 tree ptr = TREE_OPERAND (cval, 0);
165 if (is_gimple_min_invariant (ptr))
166 cval = build1_loc (EXPR_LOCATION (cval),
167 ADDR_EXPR, TREE_TYPE (ptr),
168 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
170 fold_convert (ptr_type_node,
171 TREE_OPERAND (cval, 1))));
173 if (TREE_CODE (cval) == ADDR_EXPR)
175 tree base = NULL_TREE;
176 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
178 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
180 TREE_OPERAND (cval, 0) = base;
183 base = get_base_address (TREE_OPERAND (cval, 0));
187 if ((TREE_CODE (base) == VAR_DECL
188 || TREE_CODE (base) == FUNCTION_DECL)
189 && !can_refer_decl_in_current_unit_p (base, from_decl))
191 if (TREE_CODE (base) == VAR_DECL)
192 TREE_ADDRESSABLE (base) = 1;
193 else if (TREE_CODE (base) == FUNCTION_DECL)
195 /* Make sure we create a cgraph node for functions we'll reference.
196 They can be non-existent if the reference comes from an entry
197 of an external vtable for example. */
198 cgraph_get_create_real_symbol_node (base);
200 /* Fixup types in global initializers. */
201 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
202 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
204 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
205 cval = fold_convert (TREE_TYPE (orig_cval), cval);
211 /* If SYM is a constant variable with known value, return the value.
212 NULL_TREE is returned otherwise. */
215 get_symbol_constant_value (tree sym)
217 tree val = ctor_for_folding (sym);
218 if (val != error_mark_node)
222 val = canonicalize_constructor_val (unshare_expr (val), sym);
223 if (val && is_gimple_min_invariant (val))
228 /* Variables declared 'const' without an initializer
229 have zero as the initializer if they may not be
230 overridden at link or run time. */
232 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
233 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
234 return build_zero_cst (TREE_TYPE (sym));
242 /* Subroutine of fold_stmt. We perform several simplifications of the
243 memory reference tree EXPR and make sure to re-gimplify them properly
244 after propagation of constant addresses. IS_LHS is true if the
245 reference is supposed to be an lvalue. */
248 maybe_fold_reference (tree expr, bool is_lhs)
253 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
254 || TREE_CODE (expr) == REALPART_EXPR
255 || TREE_CODE (expr) == IMAGPART_EXPR)
256 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
257 return fold_unary_loc (EXPR_LOCATION (expr),
260 TREE_OPERAND (expr, 0));
261 else if (TREE_CODE (expr) == BIT_FIELD_REF
262 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
263 return fold_ternary_loc (EXPR_LOCATION (expr),
266 TREE_OPERAND (expr, 0),
267 TREE_OPERAND (expr, 1),
268 TREE_OPERAND (expr, 2));
270 while (handled_component_p (*t))
271 t = &TREE_OPERAND (*t, 0);
273 /* Canonicalize MEM_REFs invariant address operand. Do this first
274 to avoid feeding non-canonical MEM_REFs elsewhere. */
275 if (TREE_CODE (*t) == MEM_REF
276 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
278 bool volatile_p = TREE_THIS_VOLATILE (*t);
279 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
280 TREE_OPERAND (*t, 0),
281 TREE_OPERAND (*t, 1));
284 TREE_THIS_VOLATILE (tem) = volatile_p;
286 tem = maybe_fold_reference (expr, is_lhs);
294 && (result = fold_const_aggregate_ref (expr))
295 && is_gimple_min_invariant (result))
298 /* Fold back MEM_REFs to reference trees. */
299 if (TREE_CODE (*t) == MEM_REF
300 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
301 && integer_zerop (TREE_OPERAND (*t, 1))
302 && (TREE_THIS_VOLATILE (*t)
303 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
304 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
305 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
306 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
307 /* We have to look out here to not drop a required conversion
308 from the rhs to the lhs if is_lhs, but we don't have the
309 rhs here to verify that. Thus require strict type
311 && types_compatible_p (TREE_TYPE (*t),
312 TREE_TYPE (TREE_OPERAND
313 (TREE_OPERAND (*t, 0), 0))))
316 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
317 tem = maybe_fold_reference (expr, is_lhs);
322 else if (TREE_CODE (*t) == TARGET_MEM_REF)
324 tree tem = maybe_fold_tmr (*t);
328 tem = maybe_fold_reference (expr, is_lhs);
339 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
340 replacement rhs for the statement or NULL_TREE if no simplification
341 could be made. It is assumed that the operands have been previously
345 fold_gimple_assign (gimple_stmt_iterator *si)
347 gimple stmt = gsi_stmt (*si);
348 enum tree_code subcode = gimple_assign_rhs_code (stmt);
349 location_t loc = gimple_location (stmt);
351 tree result = NULL_TREE;
353 switch (get_gimple_rhs_class (subcode))
355 case GIMPLE_SINGLE_RHS:
357 tree rhs = gimple_assign_rhs1 (stmt);
359 if (REFERENCE_CLASS_P (rhs))
360 return maybe_fold_reference (rhs, false);
362 else if (TREE_CODE (rhs) == ADDR_EXPR)
364 tree ref = TREE_OPERAND (rhs, 0);
365 tree tem = maybe_fold_reference (ref, true);
367 && TREE_CODE (tem) == MEM_REF
368 && integer_zerop (TREE_OPERAND (tem, 1)))
369 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
371 result = fold_convert (TREE_TYPE (rhs),
372 build_fold_addr_expr_loc (loc, tem));
373 else if (TREE_CODE (ref) == MEM_REF
374 && integer_zerop (TREE_OPERAND (ref, 1)))
375 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
378 else if (TREE_CODE (rhs) == CONSTRUCTOR
379 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
380 && (CONSTRUCTOR_NELTS (rhs)
381 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
383 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
388 if (TREE_CODE (val) != INTEGER_CST
389 && TREE_CODE (val) != REAL_CST
390 && TREE_CODE (val) != FIXED_CST)
393 return build_vector_from_ctor (TREE_TYPE (rhs),
394 CONSTRUCTOR_ELTS (rhs));
397 else if (DECL_P (rhs))
398 return get_symbol_constant_value (rhs);
400 /* If we couldn't fold the RHS, hand over to the generic
402 if (result == NULL_TREE)
405 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
406 that may have been added by fold, and "useless" type
407 conversions that might now be apparent due to propagation. */
408 STRIP_USELESS_TYPE_CONVERSION (result);
410 if (result != rhs && valid_gimple_rhs_p (result))
417 case GIMPLE_UNARY_RHS:
419 tree rhs = gimple_assign_rhs1 (stmt);
421 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
424 /* If the operation was a conversion do _not_ mark a
425 resulting constant with TREE_OVERFLOW if the original
426 constant was not. These conversions have implementation
427 defined behavior and retaining the TREE_OVERFLOW flag
428 here would confuse later passes such as VRP. */
429 if (CONVERT_EXPR_CODE_P (subcode)
430 && TREE_CODE (result) == INTEGER_CST
431 && TREE_CODE (rhs) == INTEGER_CST)
432 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
441 case GIMPLE_BINARY_RHS:
442 /* Try to canonicalize for boolean-typed X the comparisons
443 X == 0, X == 1, X != 0, and X != 1. */
444 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
445 || gimple_assign_rhs_code (stmt) == NE_EXPR)
447 tree lhs = gimple_assign_lhs (stmt);
448 tree op1 = gimple_assign_rhs1 (stmt);
449 tree op2 = gimple_assign_rhs2 (stmt);
450 tree type = TREE_TYPE (op1);
452 /* Check whether the comparison operands are of the same boolean
453 type as the result type is.
454 Check that second operand is an integer-constant with value
456 if (TREE_CODE (op2) == INTEGER_CST
457 && (integer_zerop (op2) || integer_onep (op2))
458 && useless_type_conversion_p (TREE_TYPE (lhs), type))
460 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
461 bool is_logical_not = false;
463 /* X == 0 and X != 1 is a logical-not.of X
464 X == 1 and X != 0 is X */
465 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
466 || (cmp_code == NE_EXPR && integer_onep (op2)))
467 is_logical_not = true;
469 if (is_logical_not == false)
471 /* Only for one-bit precision typed X the transformation
472 !X -> ~X is valied. */
473 else if (TYPE_PRECISION (type) == 1)
474 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
476 /* Otherwise we use !X -> X ^ 1. */
478 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
479 type, op1, build_int_cst (type, 1));
485 result = fold_binary_loc (loc, subcode,
486 TREE_TYPE (gimple_assign_lhs (stmt)),
487 gimple_assign_rhs1 (stmt),
488 gimple_assign_rhs2 (stmt));
492 STRIP_USELESS_TYPE_CONVERSION (result);
493 if (valid_gimple_rhs_p (result))
498 case GIMPLE_TERNARY_RHS:
499 /* Try to fold a conditional expression. */
500 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
502 tree op0 = gimple_assign_rhs1 (stmt);
505 location_t cond_loc = gimple_location (stmt);
507 if (COMPARISON_CLASS_P (op0))
509 fold_defer_overflow_warnings ();
510 tem = fold_binary_loc (cond_loc,
511 TREE_CODE (op0), TREE_TYPE (op0),
512 TREE_OPERAND (op0, 0),
513 TREE_OPERAND (op0, 1));
514 /* This is actually a conditional expression, not a GIMPLE
515 conditional statement, however, the valid_gimple_rhs_p
516 test still applies. */
517 set = (tem && is_gimple_condexpr (tem)
518 && valid_gimple_rhs_p (tem));
519 fold_undefer_overflow_warnings (set, stmt, 0);
521 else if (is_gimple_min_invariant (op0))
530 result = fold_build3_loc (cond_loc, COND_EXPR,
531 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
532 gimple_assign_rhs2 (stmt),
533 gimple_assign_rhs3 (stmt));
537 result = fold_ternary_loc (loc, subcode,
538 TREE_TYPE (gimple_assign_lhs (stmt)),
539 gimple_assign_rhs1 (stmt),
540 gimple_assign_rhs2 (stmt),
541 gimple_assign_rhs3 (stmt));
545 STRIP_USELESS_TYPE_CONVERSION (result);
546 if (valid_gimple_rhs_p (result))
551 case GIMPLE_INVALID_RHS:
558 /* Attempt to fold a conditional statement. Return true if any changes were
559 made. We only attempt to fold the condition expression, and do not perform
560 any transformation that would require alteration of the cfg. It is
561 assumed that the operands have been previously folded. */
564 fold_gimple_cond (gimple stmt)
566 tree result = fold_binary_loc (gimple_location (stmt),
567 gimple_cond_code (stmt),
569 gimple_cond_lhs (stmt),
570 gimple_cond_rhs (stmt));
574 STRIP_USELESS_TYPE_CONVERSION (result);
575 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
577 gimple_cond_set_condition_from_tree (stmt, result);
585 /* Convert EXPR into a GIMPLE value suitable for substitution on the
586 RHS of an assignment. Insert the necessary statements before
587 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
588 is replaced. If the call is expected to produces a result, then it
589 is replaced by an assignment of the new RHS to the result variable.
590 If the result is to be ignored, then the call is replaced by a
591 GIMPLE_NOP. A proper VDEF chain is retained by making the first
592 VUSE and the last VDEF of the whole sequence be the same as the replaced
593 statement and using new SSA names for stores in between. */
596 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
599 gimple stmt, new_stmt;
600 gimple_stmt_iterator i;
601 gimple_seq stmts = NULL;
602 struct gimplify_ctx gctx;
606 stmt = gsi_stmt (*si_p);
608 gcc_assert (is_gimple_call (stmt));
610 push_gimplify_context (&gctx);
611 gctx.into_ssa = gimple_in_ssa_p (cfun);
613 lhs = gimple_call_lhs (stmt);
614 if (lhs == NULL_TREE)
616 gimplify_and_add (expr, &stmts);
617 /* We can end up with folding a memcpy of an empty class assignment
618 which gets optimized away by C++ gimplification. */
619 if (gimple_seq_empty_p (stmts))
621 pop_gimplify_context (NULL);
622 if (gimple_in_ssa_p (cfun))
624 unlink_stmt_vdef (stmt);
627 gsi_replace (si_p, gimple_build_nop (), true);
633 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
634 new_stmt = gimple_build_assign (lhs, tmp);
635 i = gsi_last (stmts);
636 gsi_insert_after_without_update (&i, new_stmt,
637 GSI_CONTINUE_LINKING);
640 pop_gimplify_context (NULL);
642 if (gimple_has_location (stmt))
643 annotate_all_with_location (stmts, gimple_location (stmt));
645 /* First iterate over the replacement statements backward, assigning
646 virtual operands to their defining statements. */
648 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
650 new_stmt = gsi_stmt (i);
651 if ((gimple_assign_single_p (new_stmt)
652 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
653 || (is_gimple_call (new_stmt)
654 && (gimple_call_flags (new_stmt)
655 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
659 vdef = gimple_vdef (stmt);
661 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
662 gimple_set_vdef (new_stmt, vdef);
663 if (vdef && TREE_CODE (vdef) == SSA_NAME)
664 SSA_NAME_DEF_STMT (vdef) = new_stmt;
665 laststore = new_stmt;
669 /* Second iterate over the statements forward, assigning virtual
670 operands to their uses. */
671 reaching_vuse = gimple_vuse (stmt);
672 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
674 new_stmt = gsi_stmt (i);
675 /* If the new statement possibly has a VUSE, update it with exact SSA
676 name we know will reach this one. */
677 if (gimple_has_mem_ops (new_stmt))
678 gimple_set_vuse (new_stmt, reaching_vuse);
679 gimple_set_modified (new_stmt, true);
680 if (gimple_vdef (new_stmt))
681 reaching_vuse = gimple_vdef (new_stmt);
684 /* If the new sequence does not do a store release the virtual
685 definition of the original statement. */
687 && reaching_vuse == gimple_vuse (stmt))
689 tree vdef = gimple_vdef (stmt);
691 && TREE_CODE (vdef) == SSA_NAME)
693 unlink_stmt_vdef (stmt);
694 release_ssa_name (vdef);
698 /* Finally replace the original statement with the sequence. */
699 gsi_replace_with_seq (si_p, stmts, false);
702 /* Return the string length, maximum string length or maximum value of
704 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
705 is not NULL and, for TYPE == 0, its value is not equal to the length
706 we determine or if we are unable to determine the length or value,
707 return false. VISITED is a bitmap of visited variables.
708 TYPE is 0 if string length should be returned, 1 for maximum string
709 length and 2 for maximum value ARG can have. */
712 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
717 if (TREE_CODE (arg) != SSA_NAME)
719 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
720 if (TREE_CODE (arg) == ADDR_EXPR
721 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
722 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
724 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
725 if (TREE_CODE (aop0) == INDIRECT_REF
726 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
727 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
728 length, visited, type);
734 if (TREE_CODE (val) != INTEGER_CST
735 || tree_int_cst_sgn (val) < 0)
739 val = c_strlen (arg, 1);
747 if (TREE_CODE (*length) != INTEGER_CST
748 || TREE_CODE (val) != INTEGER_CST)
751 if (tree_int_cst_lt (*length, val))
755 else if (simple_cst_equal (val, *length) != 1)
763 /* If ARG is registered for SSA update we cannot look at its defining
765 if (name_registered_for_update_p (arg))
768 /* If we were already here, break the infinite cycle. */
769 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
773 def_stmt = SSA_NAME_DEF_STMT (var);
775 switch (gimple_code (def_stmt))
778 /* The RHS of the statement defining VAR must either have a
779 constant length or come from another SSA_NAME with a constant
781 if (gimple_assign_single_p (def_stmt)
782 || gimple_assign_unary_nop_p (def_stmt))
784 tree rhs = gimple_assign_rhs1 (def_stmt);
785 return get_maxval_strlen (rhs, length, visited, type);
787 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
789 tree op2 = gimple_assign_rhs2 (def_stmt);
790 tree op3 = gimple_assign_rhs3 (def_stmt);
791 return get_maxval_strlen (op2, length, visited, type)
792 && get_maxval_strlen (op3, length, visited, type);
798 /* All the arguments of the PHI node must have the same constant
802 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
804 tree arg = gimple_phi_arg (def_stmt, i)->def;
806 /* If this PHI has itself as an argument, we cannot
807 determine the string length of this argument. However,
808 if we can find a constant string length for the other
809 PHI args then we can still be sure that this is a
810 constant string length. So be optimistic and just
811 continue with the next argument. */
812 if (arg == gimple_phi_result (def_stmt))
815 if (!get_maxval_strlen (arg, length, visited, type))
827 /* Fold builtin call in statement STMT. Returns a simplified tree.
828 We may return a non-constant expression, including another call
829 to a different function and with different arguments, e.g.,
830 substituting memcpy for strcpy when the string length is known.
831 Note that some builtins expand into inline code that may not
832 be valid in GIMPLE. Callers must take care. */
835 gimple_fold_builtin (gimple stmt)
843 location_t loc = gimple_location (stmt);
845 gcc_assert (is_gimple_call (stmt));
847 ignore = (gimple_call_lhs (stmt) == NULL);
849 /* First try the generic builtin folder. If that succeeds, return the
851 result = fold_call_stmt (stmt, ignore);
859 /* Ignore MD builtins. */
860 callee = gimple_call_fndecl (stmt);
861 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
864 /* Give up for always_inline inline builtins until they are
866 if (avoid_folding_inline_builtin (callee))
869 /* If the builtin could not be folded, and it has no argument list,
871 nargs = gimple_call_num_args (stmt);
875 /* Limit the work only for builtins we know how to simplify. */
876 switch (DECL_FUNCTION_CODE (callee))
878 case BUILT_IN_STRLEN:
880 case BUILT_IN_FPUTS_UNLOCKED:
884 case BUILT_IN_STRCPY:
885 case BUILT_IN_STRNCPY:
889 case BUILT_IN_MEMCPY_CHK:
890 case BUILT_IN_MEMPCPY_CHK:
891 case BUILT_IN_MEMMOVE_CHK:
892 case BUILT_IN_MEMSET_CHK:
893 case BUILT_IN_STRNCPY_CHK:
894 case BUILT_IN_STPNCPY_CHK:
898 case BUILT_IN_STRCPY_CHK:
899 case BUILT_IN_STPCPY_CHK:
903 case BUILT_IN_SNPRINTF_CHK:
904 case BUILT_IN_VSNPRINTF_CHK:
912 if (arg_idx >= nargs)
915 /* Try to use the dataflow information gathered by the CCP process. */
916 visited = BITMAP_ALLOC (NULL);
917 bitmap_clear (visited);
919 memset (val, 0, sizeof (val));
920 a = gimple_call_arg (stmt, arg_idx);
921 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
922 val[arg_idx] = NULL_TREE;
924 BITMAP_FREE (visited);
927 switch (DECL_FUNCTION_CODE (callee))
929 case BUILT_IN_STRLEN:
930 if (val[0] && nargs == 1)
933 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
935 /* If the result is not a valid gimple value, or not a cast
936 of a valid gimple value, then we cannot use the result. */
937 if (is_gimple_val (new_val)
938 || (CONVERT_EXPR_P (new_val)
939 && is_gimple_val (TREE_OPERAND (new_val, 0))))
944 case BUILT_IN_STRCPY:
945 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
946 result = fold_builtin_strcpy (loc, callee,
947 gimple_call_arg (stmt, 0),
948 gimple_call_arg (stmt, 1),
952 case BUILT_IN_STRNCPY:
953 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
954 result = fold_builtin_strncpy (loc, callee,
955 gimple_call_arg (stmt, 0),
956 gimple_call_arg (stmt, 1),
957 gimple_call_arg (stmt, 2),
963 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
964 gimple_call_arg (stmt, 1),
965 ignore, false, val[0]);
968 case BUILT_IN_FPUTS_UNLOCKED:
970 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
971 gimple_call_arg (stmt, 1),
972 ignore, true, val[0]);
975 case BUILT_IN_MEMCPY_CHK:
976 case BUILT_IN_MEMPCPY_CHK:
977 case BUILT_IN_MEMMOVE_CHK:
978 case BUILT_IN_MEMSET_CHK:
979 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
980 result = fold_builtin_memory_chk (loc, callee,
981 gimple_call_arg (stmt, 0),
982 gimple_call_arg (stmt, 1),
983 gimple_call_arg (stmt, 2),
984 gimple_call_arg (stmt, 3),
986 DECL_FUNCTION_CODE (callee));
989 case BUILT_IN_STRCPY_CHK:
990 case BUILT_IN_STPCPY_CHK:
991 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
992 result = fold_builtin_stxcpy_chk (loc, callee,
993 gimple_call_arg (stmt, 0),
994 gimple_call_arg (stmt, 1),
995 gimple_call_arg (stmt, 2),
997 DECL_FUNCTION_CODE (callee));
1000 case BUILT_IN_STRNCPY_CHK:
1001 case BUILT_IN_STPNCPY_CHK:
1002 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1003 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1004 gimple_call_arg (stmt, 1),
1005 gimple_call_arg (stmt, 2),
1006 gimple_call_arg (stmt, 3),
1008 DECL_FUNCTION_CODE (callee));
1011 case BUILT_IN_SNPRINTF_CHK:
1012 case BUILT_IN_VSNPRINTF_CHK:
1013 if (val[1] && is_gimple_val (val[1]))
1014 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1015 DECL_FUNCTION_CODE (callee));
1022 if (result && ignore)
1023 result = fold_ignored_result (result);
1028 /* Return a binfo to be used for devirtualization of calls based on an object
1029 represented by a declaration (i.e. a global or automatically allocated one)
1030 or NULL if it cannot be found or is not safe. CST is expected to be an
1031 ADDR_EXPR of such object or the function will return NULL. Currently it is
1032 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1033 EXPECTED_TYPE is type of the class virtual belongs to. */
1036 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1038 HOST_WIDE_INT offset, size, max_size;
1039 tree base, type, binfo;
1040 bool last_artificial = false;
1042 if (!flag_devirtualize
1043 || TREE_CODE (cst) != ADDR_EXPR
1044 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1047 cst = TREE_OPERAND (cst, 0);
1048 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1049 type = TREE_TYPE (base);
1053 || TREE_CODE (type) != RECORD_TYPE)
1056 /* Find the sub-object the constant actually refers to and mark whether it is
1057 an artificial one (as opposed to a user-defined one). */
1060 HOST_WIDE_INT pos, size;
1063 if (types_same_for_odr (type, expected_type))
1068 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1070 if (TREE_CODE (fld) != FIELD_DECL)
1073 pos = int_bit_position (fld);
1074 size = tree_low_cst (DECL_SIZE (fld), 1);
1075 if (pos <= offset && (pos + size) > offset)
1078 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1081 last_artificial = DECL_ARTIFICIAL (fld);
1082 type = TREE_TYPE (fld);
1085 /* Artificial sub-objects are ancestors, we do not want to use them for
1086 devirtualization, at least not here. */
1087 if (last_artificial)
1089 binfo = TYPE_BINFO (type);
1090 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1096 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1097 The statement may be replaced by another statement, e.g., if the call
1098 simplifies to a constant value. Return true if any changes were made.
1099 It is assumed that the operands have been previously folded. */
1102 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1104 gimple stmt = gsi_stmt (*gsi);
1106 bool changed = false;
1109 /* Fold *& in call arguments. */
1110 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1111 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1113 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1116 gimple_call_set_arg (stmt, i, tmp);
1121 /* Check for virtual calls that became direct calls. */
1122 callee = gimple_call_fn (stmt);
1123 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1125 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1127 if (dump_file && virtual_method_call_p (callee)
1128 && !possible_polymorphic_call_target_p
1129 (callee, cgraph_get_node (gimple_call_addr_fndecl
1130 (OBJ_TYPE_REF_EXPR (callee)))))
1133 "Type inheritnace inconsistent devirtualization of ");
1134 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1135 fprintf (dump_file, " to ");
1136 print_generic_expr (dump_file, callee, TDF_SLIM);
1137 fprintf (dump_file, "\n");
1140 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1143 else if (virtual_method_call_p (callee))
1145 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1146 tree binfo = gimple_extract_devirt_binfo_from_cst
1147 (obj, obj_type_ref_class (callee));
1151 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1152 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1155 #ifdef ENABLE_CHECKING
1156 gcc_assert (possible_polymorphic_call_target_p
1157 (callee, cgraph_get_node (fndecl)));
1160 gimple_call_set_fndecl (stmt, fndecl);
1170 /* Check for builtins that CCP can handle using information not
1171 available in the generic fold routines. */
1172 callee = gimple_call_fndecl (stmt);
1173 if (callee && DECL_BUILT_IN (callee))
1175 tree result = gimple_fold_builtin (stmt);
1178 if (!update_call_from_tree (gsi, result))
1179 gimplify_and_update_call_from_tree (gsi, result);
1182 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1183 changed |= targetm.gimple_fold_builtin (gsi);
1189 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1190 distinguishes both cases. */
1193 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1195 bool changed = false;
1196 gimple stmt = gsi_stmt (*gsi);
1199 /* Fold the main computation performed by the statement. */
1200 switch (gimple_code (stmt))
1204 unsigned old_num_ops = gimple_num_ops (stmt);
1205 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1206 tree lhs = gimple_assign_lhs (stmt);
1208 /* First canonicalize operand order. This avoids building new
1209 trees if this is the only thing fold would later do. */
1210 if ((commutative_tree_code (subcode)
1211 || commutative_ternary_tree_code (subcode))
1212 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1213 gimple_assign_rhs2 (stmt), false))
1215 tree tem = gimple_assign_rhs1 (stmt);
1216 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1217 gimple_assign_set_rhs2 (stmt, tem);
1220 new_rhs = fold_gimple_assign (gsi);
1222 && !useless_type_conversion_p (TREE_TYPE (lhs),
1223 TREE_TYPE (new_rhs)))
1224 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1227 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1229 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1236 changed |= fold_gimple_cond (stmt);
1240 changed |= gimple_fold_call (gsi, inplace);
1244 /* Fold *& in asm operands. */
1247 const char **oconstraints;
1248 const char *constraint;
1249 bool allows_mem, allows_reg;
1251 noutputs = gimple_asm_noutputs (stmt);
1252 oconstraints = XALLOCAVEC (const char *, noutputs);
1254 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1256 tree link = gimple_asm_output_op (stmt, i);
1257 tree op = TREE_VALUE (link);
1259 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1260 if (REFERENCE_CLASS_P (op)
1261 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1263 TREE_VALUE (link) = op;
1267 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1269 tree link = gimple_asm_input_op (stmt, i);
1270 tree op = TREE_VALUE (link);
1272 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1273 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1274 oconstraints, &allows_mem, &allows_reg);
1275 if (REFERENCE_CLASS_P (op)
1276 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1279 TREE_VALUE (link) = op;
1287 if (gimple_debug_bind_p (stmt))
1289 tree val = gimple_debug_bind_get_value (stmt);
1291 && REFERENCE_CLASS_P (val))
1293 tree tem = maybe_fold_reference (val, false);
1296 gimple_debug_bind_set_value (stmt, tem);
1301 && TREE_CODE (val) == ADDR_EXPR)
1303 tree ref = TREE_OPERAND (val, 0);
1304 tree tem = maybe_fold_reference (ref, false);
1307 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1308 gimple_debug_bind_set_value (stmt, tem);
1318 stmt = gsi_stmt (*gsi);
1320 /* Fold *& on the lhs. */
1321 if (gimple_has_lhs (stmt))
1323 tree lhs = gimple_get_lhs (stmt);
1324 if (lhs && REFERENCE_CLASS_P (lhs))
1326 tree new_lhs = maybe_fold_reference (lhs, true);
1329 gimple_set_lhs (stmt, new_lhs);
1338 /* Fold the statement pointed to by GSI. In some cases, this function may
1339 replace the whole statement with a new one. Returns true iff folding
1341 The statement pointed to by GSI should be in valid gimple form but may
1342 be in unfolded state as resulting from for example constant propagation
1343 which can produce *&x = 0. */
1346 fold_stmt (gimple_stmt_iterator *gsi)
1348 return fold_stmt_1 (gsi, false);
1351 /* Perform the minimal folding on statement *GSI. Only operations like
1352 *&x created by constant propagation are handled. The statement cannot
1353 be replaced with a new one. Return true if the statement was
1354 changed, false otherwise.
1355 The statement *GSI should be in valid gimple form but may
1356 be in unfolded state as resulting from for example constant propagation
1357 which can produce *&x = 0. */
1360 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1362 gimple stmt = gsi_stmt (*gsi);
1363 bool changed = fold_stmt_1 (gsi, true);
1364 gcc_assert (gsi_stmt (*gsi) == stmt);
1368 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1369 if EXPR is null or we don't know how.
1370 If non-null, the result always has boolean type. */
1373 canonicalize_bool (tree expr, bool invert)
1379 if (integer_nonzerop (expr))
1380 return boolean_false_node;
1381 else if (integer_zerop (expr))
1382 return boolean_true_node;
1383 else if (TREE_CODE (expr) == SSA_NAME)
1384 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1385 build_int_cst (TREE_TYPE (expr), 0));
1386 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1387 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1389 TREE_OPERAND (expr, 0),
1390 TREE_OPERAND (expr, 1));
1396 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1398 if (integer_nonzerop (expr))
1399 return boolean_true_node;
1400 else if (integer_zerop (expr))
1401 return boolean_false_node;
1402 else if (TREE_CODE (expr) == SSA_NAME)
1403 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1404 build_int_cst (TREE_TYPE (expr), 0));
1405 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1406 return fold_build2 (TREE_CODE (expr),
1408 TREE_OPERAND (expr, 0),
1409 TREE_OPERAND (expr, 1));
1415 /* Check to see if a boolean expression EXPR is logically equivalent to the
1416 comparison (OP1 CODE OP2). Check for various identities involving
1420 same_bool_comparison_p (const_tree expr, enum tree_code code,
1421 const_tree op1, const_tree op2)
1425 /* The obvious case. */
1426 if (TREE_CODE (expr) == code
1427 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1428 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1431 /* Check for comparing (name, name != 0) and the case where expr
1432 is an SSA_NAME with a definition matching the comparison. */
1433 if (TREE_CODE (expr) == SSA_NAME
1434 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1436 if (operand_equal_p (expr, op1, 0))
1437 return ((code == NE_EXPR && integer_zerop (op2))
1438 || (code == EQ_EXPR && integer_nonzerop (op2)));
1439 s = SSA_NAME_DEF_STMT (expr);
1440 if (is_gimple_assign (s)
1441 && gimple_assign_rhs_code (s) == code
1442 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1443 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1447 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1448 of name is a comparison, recurse. */
1449 if (TREE_CODE (op1) == SSA_NAME
1450 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1452 s = SSA_NAME_DEF_STMT (op1);
1453 if (is_gimple_assign (s)
1454 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1456 enum tree_code c = gimple_assign_rhs_code (s);
1457 if ((c == NE_EXPR && integer_zerop (op2))
1458 || (c == EQ_EXPR && integer_nonzerop (op2)))
1459 return same_bool_comparison_p (expr, c,
1460 gimple_assign_rhs1 (s),
1461 gimple_assign_rhs2 (s));
1462 if ((c == EQ_EXPR && integer_zerop (op2))
1463 || (c == NE_EXPR && integer_nonzerop (op2)))
1464 return same_bool_comparison_p (expr,
1465 invert_tree_comparison (c, false),
1466 gimple_assign_rhs1 (s),
1467 gimple_assign_rhs2 (s));
1473 /* Check to see if two boolean expressions OP1 and OP2 are logically
1477 same_bool_result_p (const_tree op1, const_tree op2)
1479 /* Simple cases first. */
1480 if (operand_equal_p (op1, op2, 0))
1483 /* Check the cases where at least one of the operands is a comparison.
1484 These are a bit smarter than operand_equal_p in that they apply some
1485 identifies on SSA_NAMEs. */
1486 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1487 && same_bool_comparison_p (op1, TREE_CODE (op2),
1488 TREE_OPERAND (op2, 0),
1489 TREE_OPERAND (op2, 1)))
1491 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1492 && same_bool_comparison_p (op2, TREE_CODE (op1),
1493 TREE_OPERAND (op1, 0),
1494 TREE_OPERAND (op1, 1)))
1501 /* Forward declarations for some mutually recursive functions. */
1504 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1505 enum tree_code code2, tree op2a, tree op2b);
1507 and_var_with_comparison (tree var, bool invert,
1508 enum tree_code code2, tree op2a, tree op2b);
1510 and_var_with_comparison_1 (gimple stmt,
1511 enum tree_code code2, tree op2a, tree op2b);
1513 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1514 enum tree_code code2, tree op2a, tree op2b);
1516 or_var_with_comparison (tree var, bool invert,
1517 enum tree_code code2, tree op2a, tree op2b);
1519 or_var_with_comparison_1 (gimple stmt,
1520 enum tree_code code2, tree op2a, tree op2b);
1522 /* Helper function for and_comparisons_1: try to simplify the AND of the
1523 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1524 If INVERT is true, invert the value of the VAR before doing the AND.
1525 Return NULL_EXPR if we can't simplify this to a single expression. */
1528 and_var_with_comparison (tree var, bool invert,
1529 enum tree_code code2, tree op2a, tree op2b)
1532 gimple stmt = SSA_NAME_DEF_STMT (var);
1534 /* We can only deal with variables whose definitions are assignments. */
1535 if (!is_gimple_assign (stmt))
1538 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1539 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1540 Then we only have to consider the simpler non-inverted cases. */
1542 t = or_var_with_comparison_1 (stmt,
1543 invert_tree_comparison (code2, false),
1546 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1547 return canonicalize_bool (t, invert);
1550 /* Try to simplify the AND of the ssa variable defined by the assignment
1551 STMT with the comparison specified by (OP2A CODE2 OP2B).
1552 Return NULL_EXPR if we can't simplify this to a single expression. */
1555 and_var_with_comparison_1 (gimple stmt,
1556 enum tree_code code2, tree op2a, tree op2b)
1558 tree var = gimple_assign_lhs (stmt);
1559 tree true_test_var = NULL_TREE;
1560 tree false_test_var = NULL_TREE;
1561 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1563 /* Check for identities like (var AND (var == 0)) => false. */
1564 if (TREE_CODE (op2a) == SSA_NAME
1565 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1567 if ((code2 == NE_EXPR && integer_zerop (op2b))
1568 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1570 true_test_var = op2a;
1571 if (var == true_test_var)
1574 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1575 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1577 false_test_var = op2a;
1578 if (var == false_test_var)
1579 return boolean_false_node;
1583 /* If the definition is a comparison, recurse on it. */
1584 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1586 tree t = and_comparisons_1 (innercode,
1587 gimple_assign_rhs1 (stmt),
1588 gimple_assign_rhs2 (stmt),
1596 /* If the definition is an AND or OR expression, we may be able to
1597 simplify by reassociating. */
1598 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1599 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1601 tree inner1 = gimple_assign_rhs1 (stmt);
1602 tree inner2 = gimple_assign_rhs2 (stmt);
1605 tree partial = NULL_TREE;
1606 bool is_and = (innercode == BIT_AND_EXPR);
1608 /* Check for boolean identities that don't require recursive examination
1610 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1611 inner1 AND (inner1 OR inner2) => inner1
1612 !inner1 AND (inner1 AND inner2) => false
1613 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1614 Likewise for similar cases involving inner2. */
1615 if (inner1 == true_test_var)
1616 return (is_and ? var : inner1);
1617 else if (inner2 == true_test_var)
1618 return (is_and ? var : inner2);
1619 else if (inner1 == false_test_var)
1621 ? boolean_false_node
1622 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1623 else if (inner2 == false_test_var)
1625 ? boolean_false_node
1626 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1628 /* Next, redistribute/reassociate the AND across the inner tests.
1629 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1630 if (TREE_CODE (inner1) == SSA_NAME
1631 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1632 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1633 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1634 gimple_assign_rhs1 (s),
1635 gimple_assign_rhs2 (s),
1636 code2, op2a, op2b)))
1638 /* Handle the AND case, where we are reassociating:
1639 (inner1 AND inner2) AND (op2a code2 op2b)
1641 If the partial result t is a constant, we win. Otherwise
1642 continue on to try reassociating with the other inner test. */
1645 if (integer_onep (t))
1647 else if (integer_zerop (t))
1648 return boolean_false_node;
1651 /* Handle the OR case, where we are redistributing:
1652 (inner1 OR inner2) AND (op2a code2 op2b)
1653 => (t OR (inner2 AND (op2a code2 op2b))) */
1654 else if (integer_onep (t))
1655 return boolean_true_node;
1657 /* Save partial result for later. */
1661 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1662 if (TREE_CODE (inner2) == SSA_NAME
1663 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1664 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1665 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1666 gimple_assign_rhs1 (s),
1667 gimple_assign_rhs2 (s),
1668 code2, op2a, op2b)))
1670 /* Handle the AND case, where we are reassociating:
1671 (inner1 AND inner2) AND (op2a code2 op2b)
1672 => (inner1 AND t) */
1675 if (integer_onep (t))
1677 else if (integer_zerop (t))
1678 return boolean_false_node;
1679 /* If both are the same, we can apply the identity
1681 else if (partial && same_bool_result_p (t, partial))
1685 /* Handle the OR case. where we are redistributing:
1686 (inner1 OR inner2) AND (op2a code2 op2b)
1687 => (t OR (inner1 AND (op2a code2 op2b)))
1688 => (t OR partial) */
1691 if (integer_onep (t))
1692 return boolean_true_node;
1695 /* We already got a simplification for the other
1696 operand to the redistributed OR expression. The
1697 interesting case is when at least one is false.
1698 Or, if both are the same, we can apply the identity
1700 if (integer_zerop (partial))
1702 else if (integer_zerop (t))
1704 else if (same_bool_result_p (t, partial))
1713 /* Try to simplify the AND of two comparisons defined by
1714 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1715 If this can be done without constructing an intermediate value,
1716 return the resulting tree; otherwise NULL_TREE is returned.
1717 This function is deliberately asymmetric as it recurses on SSA_DEFs
1718 in the first comparison but not the second. */
1721 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1722 enum tree_code code2, tree op2a, tree op2b)
1724 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1726 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1727 if (operand_equal_p (op1a, op2a, 0)
1728 && operand_equal_p (op1b, op2b, 0))
1730 /* Result will be either NULL_TREE, or a combined comparison. */
1731 tree t = combine_comparisons (UNKNOWN_LOCATION,
1732 TRUTH_ANDIF_EXPR, code1, code2,
1733 truth_type, op1a, op1b);
1738 /* Likewise the swapped case of the above. */
1739 if (operand_equal_p (op1a, op2b, 0)
1740 && operand_equal_p (op1b, op2a, 0))
1742 /* Result will be either NULL_TREE, or a combined comparison. */
1743 tree t = combine_comparisons (UNKNOWN_LOCATION,
1744 TRUTH_ANDIF_EXPR, code1,
1745 swap_tree_comparison (code2),
1746 truth_type, op1a, op1b);
1751 /* If both comparisons are of the same value against constants, we might
1752 be able to merge them. */
1753 if (operand_equal_p (op1a, op2a, 0)
1754 && TREE_CODE (op1b) == INTEGER_CST
1755 && TREE_CODE (op2b) == INTEGER_CST)
1757 int cmp = tree_int_cst_compare (op1b, op2b);
1759 /* If we have (op1a == op1b), we should either be able to
1760 return that or FALSE, depending on whether the constant op1b
1761 also satisfies the other comparison against op2b. */
1762 if (code1 == EQ_EXPR)
1768 case EQ_EXPR: val = (cmp == 0); break;
1769 case NE_EXPR: val = (cmp != 0); break;
1770 case LT_EXPR: val = (cmp < 0); break;
1771 case GT_EXPR: val = (cmp > 0); break;
1772 case LE_EXPR: val = (cmp <= 0); break;
1773 case GE_EXPR: val = (cmp >= 0); break;
1774 default: done = false;
1779 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1781 return boolean_false_node;
1784 /* Likewise if the second comparison is an == comparison. */
1785 else if (code2 == EQ_EXPR)
1791 case EQ_EXPR: val = (cmp == 0); break;
1792 case NE_EXPR: val = (cmp != 0); break;
1793 case LT_EXPR: val = (cmp > 0); break;
1794 case GT_EXPR: val = (cmp < 0); break;
1795 case LE_EXPR: val = (cmp >= 0); break;
1796 case GE_EXPR: val = (cmp <= 0); break;
1797 default: done = false;
1802 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1804 return boolean_false_node;
1808 /* Same business with inequality tests. */
1809 else if (code1 == NE_EXPR)
1814 case EQ_EXPR: val = (cmp != 0); break;
1815 case NE_EXPR: val = (cmp == 0); break;
1816 case LT_EXPR: val = (cmp >= 0); break;
1817 case GT_EXPR: val = (cmp <= 0); break;
1818 case LE_EXPR: val = (cmp > 0); break;
1819 case GE_EXPR: val = (cmp < 0); break;
1824 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1826 else if (code2 == NE_EXPR)
1831 case EQ_EXPR: val = (cmp == 0); break;
1832 case NE_EXPR: val = (cmp != 0); break;
1833 case LT_EXPR: val = (cmp <= 0); break;
1834 case GT_EXPR: val = (cmp >= 0); break;
1835 case LE_EXPR: val = (cmp < 0); break;
1836 case GE_EXPR: val = (cmp > 0); break;
1841 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1844 /* Chose the more restrictive of two < or <= comparisons. */
1845 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1846 && (code2 == LT_EXPR || code2 == LE_EXPR))
1848 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1849 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1851 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1854 /* Likewise chose the more restrictive of two > or >= comparisons. */
1855 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1856 && (code2 == GT_EXPR || code2 == GE_EXPR))
1858 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1859 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1861 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1864 /* Check for singleton ranges. */
1866 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1867 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1868 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1870 /* Check for disjoint ranges. */
1872 && (code1 == LT_EXPR || code1 == LE_EXPR)
1873 && (code2 == GT_EXPR || code2 == GE_EXPR))
1874 return boolean_false_node;
1876 && (code1 == GT_EXPR || code1 == GE_EXPR)
1877 && (code2 == LT_EXPR || code2 == LE_EXPR))
1878 return boolean_false_node;
1881 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1882 NAME's definition is a truth value. See if there are any simplifications
1883 that can be done against the NAME's definition. */
1884 if (TREE_CODE (op1a) == SSA_NAME
1885 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1886 && (integer_zerop (op1b) || integer_onep (op1b)))
1888 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1889 || (code1 == NE_EXPR && integer_onep (op1b)));
1890 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1891 switch (gimple_code (stmt))
1894 /* Try to simplify by copy-propagating the definition. */
1895 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1898 /* If every argument to the PHI produces the same result when
1899 ANDed with the second comparison, we win.
1900 Do not do this unless the type is bool since we need a bool
1901 result here anyway. */
1902 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1904 tree result = NULL_TREE;
1906 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1908 tree arg = gimple_phi_arg_def (stmt, i);
1910 /* If this PHI has itself as an argument, ignore it.
1911 If all the other args produce the same result,
1913 if (arg == gimple_phi_result (stmt))
1915 else if (TREE_CODE (arg) == INTEGER_CST)
1917 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1920 result = boolean_false_node;
1921 else if (!integer_zerop (result))
1925 result = fold_build2 (code2, boolean_type_node,
1927 else if (!same_bool_comparison_p (result,
1931 else if (TREE_CODE (arg) == SSA_NAME
1932 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1935 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1936 /* In simple cases we can look through PHI nodes,
1937 but we have to be careful with loops.
1939 if (! dom_info_available_p (CDI_DOMINATORS)
1940 || gimple_bb (def_stmt) == gimple_bb (stmt)
1941 || dominated_by_p (CDI_DOMINATORS,
1942 gimple_bb (def_stmt),
1945 temp = and_var_with_comparison (arg, invert, code2,
1951 else if (!same_bool_result_p (result, temp))
1967 /* Try to simplify the AND of two comparisons, specified by
1968 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1969 If this can be simplified to a single expression (without requiring
1970 introducing more SSA variables to hold intermediate values),
1971 return the resulting tree. Otherwise return NULL_TREE.
1972 If the result expression is non-null, it has boolean type. */
1975 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1976 enum tree_code code2, tree op2a, tree op2b)
1978 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1982 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1985 /* Helper function for or_comparisons_1: try to simplify the OR of the
1986 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1987 If INVERT is true, invert the value of VAR before doing the OR.
1988 Return NULL_EXPR if we can't simplify this to a single expression. */
1991 or_var_with_comparison (tree var, bool invert,
1992 enum tree_code code2, tree op2a, tree op2b)
1995 gimple stmt = SSA_NAME_DEF_STMT (var);
1997 /* We can only deal with variables whose definitions are assignments. */
1998 if (!is_gimple_assign (stmt))
2001 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2002 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2003 Then we only have to consider the simpler non-inverted cases. */
2005 t = and_var_with_comparison_1 (stmt,
2006 invert_tree_comparison (code2, false),
2009 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2010 return canonicalize_bool (t, invert);
2013 /* Try to simplify the OR of the ssa variable defined by the assignment
2014 STMT with the comparison specified by (OP2A CODE2 OP2B).
2015 Return NULL_EXPR if we can't simplify this to a single expression. */
2018 or_var_with_comparison_1 (gimple stmt,
2019 enum tree_code code2, tree op2a, tree op2b)
2021 tree var = gimple_assign_lhs (stmt);
2022 tree true_test_var = NULL_TREE;
2023 tree false_test_var = NULL_TREE;
2024 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2026 /* Check for identities like (var OR (var != 0)) => true . */
2027 if (TREE_CODE (op2a) == SSA_NAME
2028 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2030 if ((code2 == NE_EXPR && integer_zerop (op2b))
2031 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2033 true_test_var = op2a;
2034 if (var == true_test_var)
2037 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2038 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2040 false_test_var = op2a;
2041 if (var == false_test_var)
2042 return boolean_true_node;
2046 /* If the definition is a comparison, recurse on it. */
2047 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2049 tree t = or_comparisons_1 (innercode,
2050 gimple_assign_rhs1 (stmt),
2051 gimple_assign_rhs2 (stmt),
2059 /* If the definition is an AND or OR expression, we may be able to
2060 simplify by reassociating. */
2061 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2062 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2064 tree inner1 = gimple_assign_rhs1 (stmt);
2065 tree inner2 = gimple_assign_rhs2 (stmt);
2068 tree partial = NULL_TREE;
2069 bool is_or = (innercode == BIT_IOR_EXPR);
2071 /* Check for boolean identities that don't require recursive examination
2073 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2074 inner1 OR (inner1 AND inner2) => inner1
2075 !inner1 OR (inner1 OR inner2) => true
2076 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2078 if (inner1 == true_test_var)
2079 return (is_or ? var : inner1);
2080 else if (inner2 == true_test_var)
2081 return (is_or ? var : inner2);
2082 else if (inner1 == false_test_var)
2085 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2086 else if (inner2 == false_test_var)
2089 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2091 /* Next, redistribute/reassociate the OR across the inner tests.
2092 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2093 if (TREE_CODE (inner1) == SSA_NAME
2094 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2095 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2096 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2097 gimple_assign_rhs1 (s),
2098 gimple_assign_rhs2 (s),
2099 code2, op2a, op2b)))
2101 /* Handle the OR case, where we are reassociating:
2102 (inner1 OR inner2) OR (op2a code2 op2b)
2104 If the partial result t is a constant, we win. Otherwise
2105 continue on to try reassociating with the other inner test. */
2108 if (integer_onep (t))
2109 return boolean_true_node;
2110 else if (integer_zerop (t))
2114 /* Handle the AND case, where we are redistributing:
2115 (inner1 AND inner2) OR (op2a code2 op2b)
2116 => (t AND (inner2 OR (op2a code op2b))) */
2117 else if (integer_zerop (t))
2118 return boolean_false_node;
2120 /* Save partial result for later. */
2124 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2125 if (TREE_CODE (inner2) == SSA_NAME
2126 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2127 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2128 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2129 gimple_assign_rhs1 (s),
2130 gimple_assign_rhs2 (s),
2131 code2, op2a, op2b)))
2133 /* Handle the OR case, where we are reassociating:
2134 (inner1 OR inner2) OR (op2a code2 op2b)
2136 => (t OR partial) */
2139 if (integer_zerop (t))
2141 else if (integer_onep (t))
2142 return boolean_true_node;
2143 /* If both are the same, we can apply the identity
2145 else if (partial && same_bool_result_p (t, partial))
2149 /* Handle the AND case, where we are redistributing:
2150 (inner1 AND inner2) OR (op2a code2 op2b)
2151 => (t AND (inner1 OR (op2a code2 op2b)))
2152 => (t AND partial) */
2155 if (integer_zerop (t))
2156 return boolean_false_node;
2159 /* We already got a simplification for the other
2160 operand to the redistributed AND expression. The
2161 interesting case is when at least one is true.
2162 Or, if both are the same, we can apply the identity
2164 if (integer_onep (partial))
2166 else if (integer_onep (t))
2168 else if (same_bool_result_p (t, partial))
2177 /* Try to simplify the OR of two comparisons defined by
2178 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2179 If this can be done without constructing an intermediate value,
2180 return the resulting tree; otherwise NULL_TREE is returned.
2181 This function is deliberately asymmetric as it recurses on SSA_DEFs
2182 in the first comparison but not the second. */
2185 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2186 enum tree_code code2, tree op2a, tree op2b)
2188 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2190 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2191 if (operand_equal_p (op1a, op2a, 0)
2192 && operand_equal_p (op1b, op2b, 0))
2194 /* Result will be either NULL_TREE, or a combined comparison. */
2195 tree t = combine_comparisons (UNKNOWN_LOCATION,
2196 TRUTH_ORIF_EXPR, code1, code2,
2197 truth_type, op1a, op1b);
2202 /* Likewise the swapped case of the above. */
2203 if (operand_equal_p (op1a, op2b, 0)
2204 && operand_equal_p (op1b, op2a, 0))
2206 /* Result will be either NULL_TREE, or a combined comparison. */
2207 tree t = combine_comparisons (UNKNOWN_LOCATION,
2208 TRUTH_ORIF_EXPR, code1,
2209 swap_tree_comparison (code2),
2210 truth_type, op1a, op1b);
2215 /* If both comparisons are of the same value against constants, we might
2216 be able to merge them. */
2217 if (operand_equal_p (op1a, op2a, 0)
2218 && TREE_CODE (op1b) == INTEGER_CST
2219 && TREE_CODE (op2b) == INTEGER_CST)
2221 int cmp = tree_int_cst_compare (op1b, op2b);
2223 /* If we have (op1a != op1b), we should either be able to
2224 return that or TRUE, depending on whether the constant op1b
2225 also satisfies the other comparison against op2b. */
2226 if (code1 == NE_EXPR)
2232 case EQ_EXPR: val = (cmp == 0); break;
2233 case NE_EXPR: val = (cmp != 0); break;
2234 case LT_EXPR: val = (cmp < 0); break;
2235 case GT_EXPR: val = (cmp > 0); break;
2236 case LE_EXPR: val = (cmp <= 0); break;
2237 case GE_EXPR: val = (cmp >= 0); break;
2238 default: done = false;
2243 return boolean_true_node;
2245 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2248 /* Likewise if the second comparison is a != comparison. */
2249 else if (code2 == NE_EXPR)
2255 case EQ_EXPR: val = (cmp == 0); break;
2256 case NE_EXPR: val = (cmp != 0); break;
2257 case LT_EXPR: val = (cmp > 0); break;
2258 case GT_EXPR: val = (cmp < 0); break;
2259 case LE_EXPR: val = (cmp >= 0); break;
2260 case GE_EXPR: val = (cmp <= 0); break;
2261 default: done = false;
2266 return boolean_true_node;
2268 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2272 /* See if an equality test is redundant with the other comparison. */
2273 else if (code1 == EQ_EXPR)
2278 case EQ_EXPR: val = (cmp == 0); break;
2279 case NE_EXPR: val = (cmp != 0); break;
2280 case LT_EXPR: val = (cmp < 0); break;
2281 case GT_EXPR: val = (cmp > 0); break;
2282 case LE_EXPR: val = (cmp <= 0); break;
2283 case GE_EXPR: val = (cmp >= 0); break;
2288 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2290 else if (code2 == EQ_EXPR)
2295 case EQ_EXPR: val = (cmp == 0); break;
2296 case NE_EXPR: val = (cmp != 0); break;
2297 case LT_EXPR: val = (cmp > 0); break;
2298 case GT_EXPR: val = (cmp < 0); break;
2299 case LE_EXPR: val = (cmp >= 0); break;
2300 case GE_EXPR: val = (cmp <= 0); break;
2305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2308 /* Chose the less restrictive of two < or <= comparisons. */
2309 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2310 && (code2 == LT_EXPR || code2 == LE_EXPR))
2312 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2313 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2315 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2318 /* Likewise chose the less restrictive of two > or >= comparisons. */
2319 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2320 && (code2 == GT_EXPR || code2 == GE_EXPR))
2322 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2323 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2325 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2328 /* Check for singleton ranges. */
2330 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2331 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2332 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2334 /* Check for less/greater pairs that don't restrict the range at all. */
2336 && (code1 == LT_EXPR || code1 == LE_EXPR)
2337 && (code2 == GT_EXPR || code2 == GE_EXPR))
2338 return boolean_true_node;
2340 && (code1 == GT_EXPR || code1 == GE_EXPR)
2341 && (code2 == LT_EXPR || code2 == LE_EXPR))
2342 return boolean_true_node;
2345 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2346 NAME's definition is a truth value. See if there are any simplifications
2347 that can be done against the NAME's definition. */
2348 if (TREE_CODE (op1a) == SSA_NAME
2349 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2350 && (integer_zerop (op1b) || integer_onep (op1b)))
2352 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2353 || (code1 == NE_EXPR && integer_onep (op1b)));
2354 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2355 switch (gimple_code (stmt))
2358 /* Try to simplify by copy-propagating the definition. */
2359 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2362 /* If every argument to the PHI produces the same result when
2363 ORed with the second comparison, we win.
2364 Do not do this unless the type is bool since we need a bool
2365 result here anyway. */
2366 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2368 tree result = NULL_TREE;
2370 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2372 tree arg = gimple_phi_arg_def (stmt, i);
2374 /* If this PHI has itself as an argument, ignore it.
2375 If all the other args produce the same result,
2377 if (arg == gimple_phi_result (stmt))
2379 else if (TREE_CODE (arg) == INTEGER_CST)
2381 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2384 result = boolean_true_node;
2385 else if (!integer_onep (result))
2389 result = fold_build2 (code2, boolean_type_node,
2391 else if (!same_bool_comparison_p (result,
2395 else if (TREE_CODE (arg) == SSA_NAME
2396 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2399 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2400 /* In simple cases we can look through PHI nodes,
2401 but we have to be careful with loops.
2403 if (! dom_info_available_p (CDI_DOMINATORS)
2404 || gimple_bb (def_stmt) == gimple_bb (stmt)
2405 || dominated_by_p (CDI_DOMINATORS,
2406 gimple_bb (def_stmt),
2409 temp = or_var_with_comparison (arg, invert, code2,
2415 else if (!same_bool_result_p (result, temp))
2431 /* Try to simplify the OR of two comparisons, specified by
2432 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2433 If this can be simplified to a single expression (without requiring
2434 introducing more SSA variables to hold intermediate values),
2435 return the resulting tree. Otherwise return NULL_TREE.
2436 If the result expression is non-null, it has boolean type. */
2439 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2440 enum tree_code code2, tree op2a, tree op2b)
2442 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2446 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2450 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2452 Either NULL_TREE, a simplified but non-constant or a constant
2455 ??? This should go into a gimple-fold-inline.h file to be eventually
2456 privatized with the single valueize function used in the various TUs
2457 to avoid the indirect function call overhead. */
2460 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2462 location_t loc = gimple_location (stmt);
2463 switch (gimple_code (stmt))
2467 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2469 switch (get_gimple_rhs_class (subcode))
2471 case GIMPLE_SINGLE_RHS:
2473 tree rhs = gimple_assign_rhs1 (stmt);
2474 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2476 if (TREE_CODE (rhs) == SSA_NAME)
2478 /* If the RHS is an SSA_NAME, return its known constant value,
2480 return (*valueize) (rhs);
2482 /* Handle propagating invariant addresses into address
2484 else if (TREE_CODE (rhs) == ADDR_EXPR
2485 && !is_gimple_min_invariant (rhs))
2487 HOST_WIDE_INT offset = 0;
2489 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2493 && (CONSTANT_CLASS_P (base)
2494 || decl_address_invariant_p (base)))
2495 return build_invariant_address (TREE_TYPE (rhs),
2498 else if (TREE_CODE (rhs) == CONSTRUCTOR
2499 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2500 && (CONSTRUCTOR_NELTS (rhs)
2501 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2506 vec = XALLOCAVEC (tree,
2507 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2508 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2510 val = (*valueize) (val);
2511 if (TREE_CODE (val) == INTEGER_CST
2512 || TREE_CODE (val) == REAL_CST
2513 || TREE_CODE (val) == FIXED_CST)
2519 return build_vector (TREE_TYPE (rhs), vec);
2522 if (kind == tcc_reference)
2524 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2525 || TREE_CODE (rhs) == REALPART_EXPR
2526 || TREE_CODE (rhs) == IMAGPART_EXPR)
2527 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2529 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2530 return fold_unary_loc (EXPR_LOCATION (rhs),
2532 TREE_TYPE (rhs), val);
2534 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2535 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2537 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2538 return fold_ternary_loc (EXPR_LOCATION (rhs),
2540 TREE_TYPE (rhs), val,
2541 TREE_OPERAND (rhs, 1),
2542 TREE_OPERAND (rhs, 2));
2544 else if (TREE_CODE (rhs) == MEM_REF
2545 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2547 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2548 if (TREE_CODE (val) == ADDR_EXPR
2549 && is_gimple_min_invariant (val))
2551 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2553 TREE_OPERAND (rhs, 1));
2558 return fold_const_aggregate_ref_1 (rhs, valueize);
2560 else if (kind == tcc_declaration)
2561 return get_symbol_constant_value (rhs);
2565 case GIMPLE_UNARY_RHS:
2567 /* Handle unary operators that can appear in GIMPLE form.
2568 Note that we know the single operand must be a constant,
2569 so this should almost always return a simplified RHS. */
2570 tree lhs = gimple_assign_lhs (stmt);
2571 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2573 /* Conversions are useless for CCP purposes if they are
2574 value-preserving. Thus the restrictions that
2575 useless_type_conversion_p places for restrict qualification
2576 of pointer types should not apply here.
2577 Substitution later will only substitute to allowed places. */
2578 if (CONVERT_EXPR_CODE_P (subcode)
2579 && POINTER_TYPE_P (TREE_TYPE (lhs))
2580 && POINTER_TYPE_P (TREE_TYPE (op0))
2581 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2582 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2583 && TYPE_MODE (TREE_TYPE (lhs))
2584 == TYPE_MODE (TREE_TYPE (op0)))
2588 fold_unary_ignore_overflow_loc (loc, subcode,
2589 gimple_expr_type (stmt), op0);
2592 case GIMPLE_BINARY_RHS:
2594 /* Handle binary operators that can appear in GIMPLE form. */
2595 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2596 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2598 /* Translate &x + CST into an invariant form suitable for
2599 further propagation. */
2600 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2601 && TREE_CODE (op0) == ADDR_EXPR
2602 && TREE_CODE (op1) == INTEGER_CST)
2604 tree off = fold_convert (ptr_type_node, op1);
2605 return build_fold_addr_expr_loc
2607 fold_build2 (MEM_REF,
2608 TREE_TYPE (TREE_TYPE (op0)),
2609 unshare_expr (op0), off));
2612 return fold_binary_loc (loc, subcode,
2613 gimple_expr_type (stmt), op0, op1);
2616 case GIMPLE_TERNARY_RHS:
2618 /* Handle ternary operators that can appear in GIMPLE form. */
2619 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2620 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2621 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2623 /* Fold embedded expressions in ternary codes. */
2624 if ((subcode == COND_EXPR
2625 || subcode == VEC_COND_EXPR)
2626 && COMPARISON_CLASS_P (op0))
2628 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2629 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2630 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2631 TREE_TYPE (op0), op00, op01);
2636 return fold_ternary_loc (loc, subcode,
2637 gimple_expr_type (stmt), op0, op1, op2);
2649 if (gimple_call_internal_p (stmt))
2650 /* No folding yet for these functions. */
2653 fn = (*valueize) (gimple_call_fn (stmt));
2654 if (TREE_CODE (fn) == ADDR_EXPR
2655 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2656 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2658 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2661 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2662 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2663 call = build_call_array_loc (loc,
2664 gimple_call_return_type (stmt),
2665 fn, gimple_call_num_args (stmt), args);
2666 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2668 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2669 STRIP_NOPS (retval);
2680 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2681 Returns NULL_TREE if folding to a constant is not possible, otherwise
2682 returns a constant according to is_gimple_min_invariant. */
2685 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2687 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2688 if (res && is_gimple_min_invariant (res))
2694 /* The following set of functions are supposed to fold references using
2695 their constant initializers. */
2697 static tree fold_ctor_reference (tree type, tree ctor,
2698 unsigned HOST_WIDE_INT offset,
2699 unsigned HOST_WIDE_INT size, tree);
2701 /* See if we can find constructor defining value of BASE.
2702 When we know the consructor with constant offset (such as
2703 base is array[40] and we do know constructor of array), then
2704 BIT_OFFSET is adjusted accordingly.
2706 As a special case, return error_mark_node when constructor
2707 is not explicitly available, but it is known to be zero
2708 such as 'static const int a;'. */
2710 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2711 tree (*valueize)(tree))
2713 HOST_WIDE_INT bit_offset2, size, max_size;
2714 if (TREE_CODE (base) == MEM_REF)
2716 if (!integer_zerop (TREE_OPERAND (base, 1)))
2718 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2720 *bit_offset += (mem_ref_offset (base).low
2725 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2726 base = valueize (TREE_OPERAND (base, 0));
2727 if (!base || TREE_CODE (base) != ADDR_EXPR)
2729 base = TREE_OPERAND (base, 0);
2732 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2733 DECL_INITIAL. If BASE is a nested reference into another
2734 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2735 the inner reference. */
2736 switch (TREE_CODE (base))
2741 tree init = ctor_for_folding (base);
2743 /* Our semantic is exact opposite of ctor_for_folding;
2744 NULL means unknown, while error_mark_node is 0. */
2745 if (init == error_mark_node)
2748 return error_mark_node;
2754 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2755 if (max_size == -1 || size != max_size)
2757 *bit_offset += bit_offset2;
2758 return get_base_constructor (base, bit_offset, valueize);
2769 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2770 to the memory at bit OFFSET.
2772 We do only simple job of folding byte accesses. */
2775 fold_string_cst_ctor_reference (tree type, tree ctor,
2776 unsigned HOST_WIDE_INT offset,
2777 unsigned HOST_WIDE_INT size)
2779 if (INTEGRAL_TYPE_P (type)
2780 && (TYPE_MODE (type)
2781 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2782 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2784 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2785 && size == BITS_PER_UNIT
2786 && !(offset % BITS_PER_UNIT))
2788 offset /= BITS_PER_UNIT;
2789 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2790 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2793 const char a[20]="hello";
2796 might lead to offset greater than string length. In this case we
2797 know value is either initialized to 0 or out of bounds. Return 0
2799 return build_zero_cst (type);
2804 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2805 SIZE to the memory at bit OFFSET. */
2808 fold_array_ctor_reference (tree type, tree ctor,
2809 unsigned HOST_WIDE_INT offset,
2810 unsigned HOST_WIDE_INT size,
2813 unsigned HOST_WIDE_INT cnt;
2815 double_int low_bound, elt_size;
2816 double_int index, max_index;
2817 double_int access_index;
2818 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2819 HOST_WIDE_INT inner_offset;
2821 /* Compute low bound and elt size. */
2822 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2823 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2824 if (domain_type && TYPE_MIN_VALUE (domain_type))
2826 /* Static constructors for variably sized objects makes no sense. */
2827 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2828 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2829 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2832 low_bound = double_int_zero;
2833 /* Static constructors for variably sized objects makes no sense. */
2834 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2837 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2840 /* We can handle only constantly sized accesses that are known to not
2841 be larger than size of array element. */
2842 if (!TYPE_SIZE_UNIT (type)
2843 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2844 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2847 /* Compute the array index we look for. */
2848 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2849 .udiv (elt_size, TRUNC_DIV_EXPR);
2850 access_index += low_bound;
2852 access_index = access_index.ext (TYPE_PRECISION (index_type),
2853 TYPE_UNSIGNED (index_type));
2855 /* And offset within the access. */
2856 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2858 /* See if the array field is large enough to span whole access. We do not
2859 care to fold accesses spanning multiple array indexes. */
2860 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2863 index = low_bound - double_int_one;
2865 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2867 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2869 /* Array constructor might explicitely set index, or specify range
2870 or leave index NULL meaning that it is next index after previous
2874 if (TREE_CODE (cfield) == INTEGER_CST)
2875 max_index = index = tree_to_double_int (cfield);
2878 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2879 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2880 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2885 index += double_int_one;
2887 index = index.ext (TYPE_PRECISION (index_type),
2888 TYPE_UNSIGNED (index_type));
2892 /* Do we have match? */
2893 if (access_index.cmp (index, 1) >= 0
2894 && access_index.cmp (max_index, 1) <= 0)
2895 return fold_ctor_reference (type, cval, inner_offset, size,
2898 /* When memory is not explicitely mentioned in constructor,
2899 it is 0 (or out of range). */
2900 return build_zero_cst (type);
2903 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2904 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2907 fold_nonarray_ctor_reference (tree type, tree ctor,
2908 unsigned HOST_WIDE_INT offset,
2909 unsigned HOST_WIDE_INT size,
2912 unsigned HOST_WIDE_INT cnt;
2915 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2918 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2919 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2920 tree field_size = DECL_SIZE (cfield);
2921 double_int bitoffset;
2922 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2923 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2924 double_int bitoffset_end, access_end;
2926 /* Variable sized objects in static constructors makes no sense,
2927 but field_size can be NULL for flexible array members. */
2928 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2929 && TREE_CODE (byte_offset) == INTEGER_CST
2930 && (field_size != NULL_TREE
2931 ? TREE_CODE (field_size) == INTEGER_CST
2932 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2934 /* Compute bit offset of the field. */
2935 bitoffset = tree_to_double_int (field_offset)
2936 + byte_offset_cst * bits_per_unit_cst;
2937 /* Compute bit offset where the field ends. */
2938 if (field_size != NULL_TREE)
2939 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2941 bitoffset_end = double_int_zero;
2943 access_end = double_int::from_uhwi (offset)
2944 + double_int::from_uhwi (size);
2946 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2947 [BITOFFSET, BITOFFSET_END)? */
2948 if (access_end.cmp (bitoffset, 0) > 0
2949 && (field_size == NULL_TREE
2950 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2952 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2953 /* We do have overlap. Now see if field is large enough to
2954 cover the access. Give up for accesses spanning multiple
2956 if (access_end.cmp (bitoffset_end, 0) > 0)
2958 if (double_int::from_uhwi (offset).slt (bitoffset))
2960 return fold_ctor_reference (type, cval,
2961 inner_offset.to_uhwi (), size,
2965 /* When memory is not explicitely mentioned in constructor, it is 0. */
2966 return build_zero_cst (type);
2969 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2970 to the memory at bit OFFSET. */
2973 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2974 unsigned HOST_WIDE_INT size, tree from_decl)
2978 /* We found the field with exact match. */
2979 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2981 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2983 /* We are at the end of walk, see if we can view convert the
2985 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2986 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2987 && operand_equal_p (TYPE_SIZE (type),
2988 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2990 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2991 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2996 if (TREE_CODE (ctor) == STRING_CST)
2997 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2998 if (TREE_CODE (ctor) == CONSTRUCTOR)
3001 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3002 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3003 return fold_array_ctor_reference (type, ctor, offset, size,
3006 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3013 /* Return the tree representing the element referenced by T if T is an
3014 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
3015 names using VALUEIZE. Return NULL_TREE otherwise. */
3018 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3020 tree ctor, idx, base;
3021 HOST_WIDE_INT offset, size, max_size;
3024 if (TREE_THIS_VOLATILE (t))
3027 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3028 return get_symbol_constant_value (t);
3030 tem = fold_read_from_constant_string (t);
3034 switch (TREE_CODE (t))
3037 case ARRAY_RANGE_REF:
3038 /* Constant indexes are handled well by get_base_constructor.
3039 Only special case variable offsets.
3040 FIXME: This code can't handle nested references with variable indexes
3041 (they will be handled only by iteration of ccp). Perhaps we can bring
3042 get_ref_base_and_extent here and make it use a valueize callback. */
3043 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3045 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3046 && TREE_CODE (idx) == INTEGER_CST)
3048 tree low_bound, unit_size;
3051 /* If the resulting bit-offset is constant, track it. */
3052 if ((low_bound = array_ref_low_bound (t),
3053 TREE_CODE (low_bound) == INTEGER_CST)
3054 && (unit_size = array_ref_element_size (t),
3055 host_integerp (unit_size, 1))
3056 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3057 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3058 doffset.fits_shwi ()))
3060 offset = doffset.to_shwi ();
3061 offset *= TREE_INT_CST_LOW (unit_size);
3062 offset *= BITS_PER_UNIT;
3064 base = TREE_OPERAND (t, 0);
3065 ctor = get_base_constructor (base, &offset, valueize);
3066 /* Empty constructor. Always fold to 0. */
3067 if (ctor == error_mark_node)
3068 return build_zero_cst (TREE_TYPE (t));
3069 /* Out of bound array access. Value is undefined,
3073 /* We can not determine ctor. */
3076 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3077 TREE_INT_CST_LOW (unit_size)
3086 case TARGET_MEM_REF:
3088 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3089 ctor = get_base_constructor (base, &offset, valueize);
3091 /* Empty constructor. Always fold to 0. */
3092 if (ctor == error_mark_node)
3093 return build_zero_cst (TREE_TYPE (t));
3094 /* We do not know precise address. */
3095 if (max_size == -1 || max_size != size)
3097 /* We can not determine ctor. */
3101 /* Out of bound array access. Value is undefined, but don't fold. */
3105 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3111 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3112 if (c && TREE_CODE (c) == COMPLEX_CST)
3113 return fold_build1_loc (EXPR_LOCATION (t),
3114 TREE_CODE (t), TREE_TYPE (t), c);
3126 fold_const_aggregate_ref (tree t)
3128 return fold_const_aggregate_ref_1 (t, NULL);
3131 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3132 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3133 KNOWN_BINFO carries the binfo describing the true type of
3134 OBJ_TYPE_REF_OBJECT(REF). */
3137 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3139 unsigned HOST_WIDE_INT offset, size;
3140 tree v, fn, vtable, init;
3142 vtable = v = BINFO_VTABLE (known_binfo);
3143 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3147 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3149 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3150 v = TREE_OPERAND (v, 0);
3155 if (TREE_CODE (v) != ADDR_EXPR)
3157 v = TREE_OPERAND (v, 0);
3159 if (TREE_CODE (v) != VAR_DECL
3160 || !DECL_VIRTUAL_P (v))
3162 init = ctor_for_folding (v);
3164 /* The virtual tables should always be born with constructors.
3165 and we always should assume that they are avaialble for
3166 folding. At the moment we do not stream them in all cases,
3167 but it should never happen that ctor seem unreachable. */
3169 if (init == error_mark_node)
3171 gcc_assert (in_lto_p);
3174 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3175 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3176 offset += token * size;
3177 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3179 if (!fn || integer_zerop (fn))
3181 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3182 || TREE_CODE (fn) == FDESC_EXPR);
3183 fn = TREE_OPERAND (fn, 0);
3184 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3186 /* When cgraph node is missing and function is not public, we cannot
3187 devirtualize. This can happen in WHOPR when the actual method
3188 ends up in other partition, because we found devirtualization
3189 possibility too late. */
3190 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3193 /* Make sure we create a cgraph node for functions we'll reference.
3194 They can be non-existent if the reference comes from an entry
3195 of an external vtable for example. */
3196 cgraph_get_create_node (fn);
3201 /* Return true iff VAL is a gimple expression that is known to be
3202 non-negative. Restricted to floating-point inputs. */
3205 gimple_val_nonnegative_real_p (tree val)
3209 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3211 /* Use existing logic for non-gimple trees. */
3212 if (tree_expr_nonnegative_p (val))
3215 if (TREE_CODE (val) != SSA_NAME)
3218 /* Currently we look only at the immediately defining statement
3219 to make this determination, since recursion on defining
3220 statements of operands can lead to quadratic behavior in the
3221 worst case. This is expected to catch almost all occurrences
3222 in practice. It would be possible to implement limited-depth
3223 recursion if important cases are lost. Alternatively, passes
3224 that need this information (such as the pow/powi lowering code
3225 in the cse_sincos pass) could be revised to provide it through
3226 dataflow propagation. */
3228 def_stmt = SSA_NAME_DEF_STMT (val);
3230 if (is_gimple_assign (def_stmt))
3234 /* See fold-const.c:tree_expr_nonnegative_p for additional
3235 cases that could be handled with recursion. */
3237 switch (gimple_assign_rhs_code (def_stmt))
3240 /* Always true for floating-point operands. */
3244 /* True if the two operands are identical (since we are
3245 restricted to floating-point inputs). */
3246 op0 = gimple_assign_rhs1 (def_stmt);
3247 op1 = gimple_assign_rhs2 (def_stmt);
3250 || operand_equal_p (op0, op1, 0))
3257 else if (is_gimple_call (def_stmt))
3259 tree fndecl = gimple_call_fndecl (def_stmt);
3261 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3265 switch (DECL_FUNCTION_CODE (fndecl))
3267 CASE_FLT_FN (BUILT_IN_ACOS):
3268 CASE_FLT_FN (BUILT_IN_ACOSH):
3269 CASE_FLT_FN (BUILT_IN_CABS):
3270 CASE_FLT_FN (BUILT_IN_COSH):
3271 CASE_FLT_FN (BUILT_IN_ERFC):
3272 CASE_FLT_FN (BUILT_IN_EXP):
3273 CASE_FLT_FN (BUILT_IN_EXP10):
3274 CASE_FLT_FN (BUILT_IN_EXP2):
3275 CASE_FLT_FN (BUILT_IN_FABS):
3276 CASE_FLT_FN (BUILT_IN_FDIM):
3277 CASE_FLT_FN (BUILT_IN_HYPOT):
3278 CASE_FLT_FN (BUILT_IN_POW10):
3281 CASE_FLT_FN (BUILT_IN_SQRT):
3282 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3283 nonnegative inputs. */
3284 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3289 CASE_FLT_FN (BUILT_IN_POWI):
3290 /* True if the second argument is an even integer. */
3291 arg1 = gimple_call_arg (def_stmt, 1);
3293 if (TREE_CODE (arg1) == INTEGER_CST
3294 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3299 CASE_FLT_FN (BUILT_IN_POW):
3300 /* True if the second argument is an even integer-valued
3302 arg1 = gimple_call_arg (def_stmt, 1);
3304 if (TREE_CODE (arg1) == REAL_CST)
3309 c = TREE_REAL_CST (arg1);
3310 n = real_to_integer (&c);
3314 REAL_VALUE_TYPE cint;
3315 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3316 if (real_identical (&c, &cint))
3332 /* Given a pointer value OP0, return a simplified version of an
3333 indirection through OP0, or NULL_TREE if no simplification is
3334 possible. Note that the resulting type may be different from
3335 the type pointed to in the sense that it is still compatible
3336 from the langhooks point of view. */
3339 gimple_fold_indirect_ref (tree t)
3341 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3346 subtype = TREE_TYPE (sub);
3347 if (!POINTER_TYPE_P (subtype))
3350 if (TREE_CODE (sub) == ADDR_EXPR)
3352 tree op = TREE_OPERAND (sub, 0);
3353 tree optype = TREE_TYPE (op);
3355 if (useless_type_conversion_p (type, optype))
3358 /* *(foo *)&fooarray => fooarray[0] */
3359 if (TREE_CODE (optype) == ARRAY_TYPE
3360 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3361 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3363 tree type_domain = TYPE_DOMAIN (optype);
3364 tree min_val = size_zero_node;
3365 if (type_domain && TYPE_MIN_VALUE (type_domain))
3366 min_val = TYPE_MIN_VALUE (type_domain);
3367 if (TREE_CODE (min_val) == INTEGER_CST)
3368 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3370 /* *(foo *)&complexfoo => __real__ complexfoo */
3371 else if (TREE_CODE (optype) == COMPLEX_TYPE
3372 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3373 return fold_build1 (REALPART_EXPR, type, op);
3374 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3375 else if (TREE_CODE (optype) == VECTOR_TYPE
3376 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3378 tree part_width = TYPE_SIZE (type);
3379 tree index = bitsize_int (0);
3380 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3384 /* *(p + CST) -> ... */
3385 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3386 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3388 tree addr = TREE_OPERAND (sub, 0);
3389 tree off = TREE_OPERAND (sub, 1);
3393 addrtype = TREE_TYPE (addr);
3395 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3396 if (TREE_CODE (addr) == ADDR_EXPR
3397 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3398 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3399 && host_integerp (off, 1))
3401 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
3402 tree part_width = TYPE_SIZE (type);
3403 unsigned HOST_WIDE_INT part_widthi
3404 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
3405 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3406 tree index = bitsize_int (indexi);
3407 if (offset / part_widthi
3408 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3409 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3413 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3414 if (TREE_CODE (addr) == ADDR_EXPR
3415 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3416 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3418 tree size = TYPE_SIZE_UNIT (type);
3419 if (tree_int_cst_equal (size, off))
3420 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3423 /* *(p + CST) -> MEM_REF <p, CST>. */
3424 if (TREE_CODE (addr) != ADDR_EXPR
3425 || DECL_P (TREE_OPERAND (addr, 0)))
3426 return fold_build2 (MEM_REF, type,
3428 build_int_cst_wide (ptype,
3429 TREE_INT_CST_LOW (off),
3430 TREE_INT_CST_HIGH (off)));
3433 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3434 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3435 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3436 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3439 tree min_val = size_zero_node;
3441 sub = gimple_fold_indirect_ref (sub);
3443 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3444 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3445 if (type_domain && TYPE_MIN_VALUE (type_domain))
3446 min_val = TYPE_MIN_VALUE (type_domain);
3447 if (TREE_CODE (min_val) == INTEGER_CST)
3448 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);