1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
/* NOTE(review): this extract elides interleaved lines (return type, braces,
   returns) of the original function; annotations below cover the visible
   logic only.  Returns true when DECL may safely be referenced from the
   current compilation unit (see the lead comment above for the cases).  */
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
/* Automatic (non-static, non-external) decls are always local and
   referencable.  */
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file.
66 We also may see weakref that is always safe. */
67 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
68 && TREE_CODE (decl) == VAR_DECL)
69 return lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)) != NULL;
70 /* When function is public, we always can introduce new reference.
71 Exception are the COMDAT functions where introducing a direct
72 reference imply need to include function body in the current unit. */
73 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
75 /* We are not at ltrans stage; so don't worry about WHOPR.
76 Also when still gimplifying all referred comdat functions will be
78 ??? as observed in PR20991 for already optimized out comdat virtual functions
79 we may not necessarily give up because the copy will be output elsewhere when
80 corresponding vtable is output. */
81 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
83 /* If we already output the function body, we are safe. */
84 if (TREE_ASM_WRITTEN (decl))
86 if (TREE_CODE (decl) == FUNCTION_DECL)
88 node = cgraph_get_node (decl);
89 /* Check that we still have function body and that we didn't take
90 the decision to eliminate offline copy of the function yet.
91 The second is important when devirtualization happens during final
92 compilation stage when making a new reference no longer makes callee
94 if (!node || !node->analyzed || node->global.inlined_to)
97 else if (TREE_CODE (decl) == VAR_DECL)
/* For variables, require a varpool node whose initializer has been
   finalized; otherwise the reference may be unresolvable.  */
99 vnode = varpool_get_node (decl);
100 if (!vnode || !vnode->finalized)
106 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
107 acceptable form for is_gimple_min_invariant.
   NOTE(review): this extract elides interleaved lines of the original
   function; comments annotate the visible logic only. */
110 canonicalize_constructor_val (tree cval)
/* Rewrite PTR p+ CST into &MEM_REF[ptr, cst] so the whole expression
   becomes a single invariant address.  */
113 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
114 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
116 tree ptr = TREE_OPERAND (cval, 0);
117 if (is_gimple_min_invariant (ptr))
118 cval = build1_loc (EXPR_LOCATION (cval),
119 ADDR_EXPR, TREE_TYPE (ptr),
120 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
122 fold_convert (ptr_type_node,
123 TREE_OPERAND (cval, 1))));
125 if (TREE_CODE (cval) == ADDR_EXPR)
127 tree base = get_base_address (TREE_OPERAND (cval, 0));
/* Refuse addresses of decls that cannot be referenced from this unit
   (e.g. optimized-away COMDAT bodies); see
   can_refer_decl_in_current_unit_p.  */
131 if ((TREE_CODE (base) == VAR_DECL
132 || TREE_CODE (base) == FUNCTION_DECL)
133 && !can_refer_decl_in_current_unit_p (base))
135 if (TREE_CODE (base) == VAR_DECL)
/* Taking the address makes the variable addressable; register it with
   the function's referenced-vars set when one exists.  */
137 TREE_ADDRESSABLE (base) = 1;
138 if (cfun && gimple_referenced_vars (cfun))
139 add_referenced_var (base);
141 else if (TREE_CODE (base) == FUNCTION_DECL)
143 /* Make sure we create a cgraph node for functions we'll reference.
144 They can be non-existent if the reference comes from an entry
145 of an external vtable for example. */
146 cgraph_get_create_node (base);
148 /* Fixup types in global initializers. */
149 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
150 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
155 /* If SYM is a constant variable with known value, return the value.
156 NULL_TREE is returned otherwise.
   NOTE(review): this extract elides interleaved lines of the original
   function. */
159 get_symbol_constant_value (tree sym)
/* const_value_known_p guards that DECL_INITIAL is the value SYM will
   have at runtime (no link/run-time override possible).  */
161 if (const_value_known_p (sym))
163 tree val = DECL_INITIAL (sym);
166 val = canonicalize_constructor_val (val);
167 if (val && is_gimple_min_invariant (val))
172 /* Variables declared 'const' without an initializer
173 have zero as the initializer if they may not be
174 overridden at link or run time. */
176 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
177 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
178 return build_zero_cst (TREE_TYPE (sym));
186 /* Subroutine of fold_stmt. We perform several simplifications of the
187 memory reference tree EXPR and make sure to re-gimplify them properly
188 after propagation of constant addresses. IS_LHS is true if the
189 reference is supposed to be an lvalue.
   NOTE(review): this extract elides interleaved lines (declarations of T,
   TEM, RESULT, braces, returns) of the original function. */
192 maybe_fold_reference (tree expr, bool is_lhs)
/* Fold wrappers around constants directly.  */
197 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
198 || TREE_CODE (expr) == REALPART_EXPR
199 || TREE_CODE (expr) == IMAGPART_EXPR)
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_unary_loc (EXPR_LOCATION (expr),
204 TREE_OPERAND (expr, 0));
205 else if (TREE_CODE (expr) == BIT_FIELD_REF
206 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
207 return fold_ternary_loc (EXPR_LOCATION (expr),
210 TREE_OPERAND (expr, 0),
211 TREE_OPERAND (expr, 1),
212 TREE_OPERAND (expr, 2));
/* Walk down to the base reference (past component/array refs etc.).  */
214 while (handled_component_p (*t))
215 t = &TREE_OPERAND (*t, 0);
217 /* Canonicalize MEM_REFs invariant address operand. Do this first
218 to avoid feeding non-canonical MEM_REFs elsewhere. */
219 if (TREE_CODE (*t) == MEM_REF
220 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
/* Preserve volatility across the fold; fold_binary does not.  */
222 bool volatile_p = TREE_THIS_VOLATILE (*t);
223 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
224 TREE_OPERAND (*t, 0),
225 TREE_OPERAND (*t, 1));
228 TREE_THIS_VOLATILE (tem) = volatile_p;
/* Recurse: the canonicalized tree may enable further folding.  */
230 tem = maybe_fold_reference (expr, is_lhs);
238 && (result = fold_const_aggregate_ref (expr))
239 && is_gimple_min_invariant (result))
242 /* Fold back MEM_REFs to reference trees. */
243 if (TREE_CODE (*t) == MEM_REF
244 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
245 && integer_zerop (TREE_OPERAND (*t, 1))
246 && (TREE_THIS_VOLATILE (*t)
247 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
248 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
249 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
250 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
251 /* We have to look out here to not drop a required conversion
252 from the rhs to the lhs if is_lhs, but we don't have the
253 rhs here to verify that. Thus require strict type
255 && types_compatible_p (TREE_TYPE (*t),
256 TREE_TYPE (TREE_OPERAND
257 (TREE_OPERAND (*t, 0), 0))))
/* *&decl folds to plain decl.  */
260 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
261 tem = maybe_fold_reference (expr, is_lhs);
266 else if (TREE_CODE (*t) == TARGET_MEM_REF)
268 tree tem = maybe_fold_tmr (*t);
272 tem = maybe_fold_reference (expr, is_lhs);
283 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
284 replacement rhs for the statement or NULL_TREE if no simplification
285 could be made. It is assumed that the operands have been previously
   folded.
   NOTE(review): this extract elides interleaved lines (braces, returns,
   some declarations) of the original function; comments annotate the
   visible logic only. */
289 fold_gimple_assign (gimple_stmt_iterator *si)
291 gimple stmt = gsi_stmt (*si);
292 enum tree_code subcode = gimple_assign_rhs_code (stmt);
293 location_t loc = gimple_location (stmt);
295 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS (single operand, unary, binary,
   ternary).  */
297 switch (get_gimple_rhs_class (subcode))
299 case GIMPLE_SINGLE_RHS:
301 tree rhs = gimple_assign_rhs1 (stmt);
303 if (REFERENCE_CLASS_P (rhs))
304 return maybe_fold_reference (rhs, false);
306 else if (TREE_CODE (rhs) == ADDR_EXPR)
/* Try to fold &*ref; a folded MEM_REF with zero offset collapses to
   its address operand.  */
308 tree ref = TREE_OPERAND (rhs, 0);
309 tree tem = maybe_fold_reference (ref, true);
311 && TREE_CODE (tem) == MEM_REF
312 && integer_zerop (TREE_OPERAND (tem, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
315 result = fold_convert (TREE_TYPE (rhs),
316 build_fold_addr_expr_loc (loc, tem));
317 else if (TREE_CODE (ref) == MEM_REF
318 && integer_zerop (TREE_OPERAND (ref, 1)))
319 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
322 else if (TREE_CODE (rhs) == CONSTRUCTOR
323 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
324 && (CONSTRUCTOR_NELTS (rhs)
325 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
327 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
331 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
332 if (TREE_CODE (val) != INTEGER_CST
333 && TREE_CODE (val) != REAL_CST
334 && TREE_CODE (val) != FIXED_CST)
337 return build_vector_from_ctor (TREE_TYPE (rhs),
338 CONSTRUCTOR_ELTS (rhs));
341 else if (DECL_P (rhs))
342 return unshare_expr (get_symbol_constant_value (rhs));
344 /* If we couldn't fold the RHS, hand over to the generic
346 if (result == NULL_TREE)
349 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
350 that may have been added by fold, and "useless" type
351 conversions that might now be apparent due to propagation. */
352 STRIP_USELESS_TYPE_CONVERSION (result);
354 if (result != rhs && valid_gimple_rhs_p (result))
361 case GIMPLE_UNARY_RHS:
363 tree rhs = gimple_assign_rhs1 (stmt);
365 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
368 /* If the operation was a conversion do _not_ mark a
369 resulting constant with TREE_OVERFLOW if the original
370 constant was not. These conversions have implementation
371 defined behavior and retaining the TREE_OVERFLOW flag
372 here would confuse later passes such as VRP. */
373 if (CONVERT_EXPR_CODE_P (subcode)
374 && TREE_CODE (result) == INTEGER_CST
375 && TREE_CODE (rhs) == INTEGER_CST)
376 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
378 STRIP_USELESS_TYPE_CONVERSION (result);
379 if (valid_gimple_rhs_p (result))
385 case GIMPLE_BINARY_RHS:
386 /* Try to canonicalize for boolean-typed X the comparisons
387 X == 0, X == 1, X != 0, and X != 1. */
388 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
389 || gimple_assign_rhs_code (stmt) == NE_EXPR)
391 tree lhs = gimple_assign_lhs (stmt);
392 tree op1 = gimple_assign_rhs1 (stmt);
393 tree op2 = gimple_assign_rhs2 (stmt);
394 tree type = TREE_TYPE (op1);
396 /* Check whether the comparison operands are of the same boolean
397 type as the result type is.
398 Check that second operand is an integer-constant with value
400 if (TREE_CODE (op2) == INTEGER_CST
401 && (integer_zerop (op2) || integer_onep (op2))
402 && useless_type_conversion_p (TREE_TYPE (lhs), type))
404 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
405 bool is_logical_not = false;
407 /* X == 0 and X != 1 is a logical-not of X
408 X == 1 and X != 0 is X */
409 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
410 || (cmp_code == NE_EXPR && integer_onep (op2)))
411 is_logical_not = true;
413 if (is_logical_not == false)
415 /* Only for one-bit precision typed X the transformation
416 !X -> ~X is valid. */
417 else if (TYPE_PRECISION (type) == 1)
418 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
420 /* Otherwise we use !X -> X ^ 1. */
422 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
423 type, op1, build_int_cst (type, 1));
/* Generic binary fold when the boolean canonicalization did not
   apply.  */
429 result = fold_binary_loc (loc, subcode,
430 TREE_TYPE (gimple_assign_lhs (stmt)),
431 gimple_assign_rhs1 (stmt),
432 gimple_assign_rhs2 (stmt));
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
442 case GIMPLE_TERNARY_RHS:
443 /* Try to fold a conditional expression. */
444 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
446 tree op0 = gimple_assign_rhs1 (stmt);
449 location_t cond_loc = gimple_location (stmt);
451 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings: only emit them if the fold actually
   produced a usable condition.  */
453 fold_defer_overflow_warnings ();
454 tem = fold_binary_loc (cond_loc,
455 TREE_CODE (op0), TREE_TYPE (op0),
456 TREE_OPERAND (op0, 0),
457 TREE_OPERAND (op0, 1));
458 /* This is actually a conditional expression, not a GIMPLE
459 conditional statement, however, the valid_gimple_rhs_p
460 test still applies. */
461 set = (tem && is_gimple_condexpr (tem)
462 && valid_gimple_rhs_p (tem));
463 fold_undefer_overflow_warnings (set, stmt, 0);
465 else if (is_gimple_min_invariant (op0))
474 result = fold_build3_loc (cond_loc, COND_EXPR,
475 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
476 gimple_assign_rhs2 (stmt),
477 gimple_assign_rhs3 (stmt));
/* Generic ternary fold for non-COND_EXPR codes.  */
481 result = fold_ternary_loc (loc, subcode,
482 TREE_TYPE (gimple_assign_lhs (stmt)),
483 gimple_assign_rhs1 (stmt),
484 gimple_assign_rhs2 (stmt),
485 gimple_assign_rhs3 (stmt));
489 STRIP_USELESS_TYPE_CONVERSION (result);
490 if (valid_gimple_rhs_p (result))
495 case GIMPLE_INVALID_RHS:
502 /* Attempt to fold a conditional statement. Return true if any changes were
503 made. We only attempt to fold the condition expression, and do not perform
504 any transformation that would require alteration of the cfg. It is
505 assumed that the operands have been previously folded.
   NOTE(review): this extract elides interleaved lines (braces, returns)
   of the original function. */
508 fold_gimple_cond (gimple stmt)
/* Fold the comparison "lhs <code> rhs" as a tree expression.  */
510 tree result = fold_binary_loc (gimple_location (stmt),
511 gimple_cond_code (stmt),
513 gimple_cond_lhs (stmt),
514 gimple_cond_rhs (stmt));
518 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form if it is still a valid GIMPLE
   condition.  */
519 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
521 gimple_cond_set_condition_from_tree (stmt, result);
529 /* Convert EXPR into a GIMPLE value suitable for substitution on the
530 RHS of an assignment. Insert the necessary statements before
531 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
532 is replaced. If the call is expected to produces a result, then it
533 is replaced by an assignment of the new RHS to the result variable.
534 If the result is to be ignored, then the call is replaced by a
535 GIMPLE_NOP. A proper VDEF chain is retained by making the first
536 VUSE and the last VDEF of the whole sequence be the same as the replaced
537 statement and using new SSA names for stores in between.
   NOTE(review): this extract elides interleaved lines (braces, some
   declarations, returns) of the original function. */
540 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
543 gimple stmt, new_stmt;
544 gimple_stmt_iterator i;
545 gimple_seq stmts = NULL;
546 struct gimplify_ctx gctx;
550 stmt = gsi_stmt (*si_p);
552 gcc_assert (is_gimple_call (stmt));
554 push_gimplify_context (&gctx);
555 gctx.into_ssa = gimple_in_ssa_p (cfun);
557 lhs = gimple_call_lhs (stmt);
558 if (lhs == NULL_TREE)
/* No LHS: gimplify EXPR purely for its side effects.  */
560 gimplify_and_add (expr, &stmts);
561 /* We can end up with folding a memcpy of an empty class assignment
562 which gets optimized away by C++ gimplification. */
563 if (gimple_seq_empty_p (stmts))
565 pop_gimplify_context (NULL);
566 if (gimple_in_ssa_p (cfun))
568 unlink_stmt_vdef (stmt);
571 gsi_remove (si_p, true);
/* LHS present: gimplify EXPR into a temporary and assign it.  */
577 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
578 new_stmt = gimple_build_assign (lhs, tmp);
579 i = gsi_last (stmts);
580 gsi_insert_after_without_update (&i, new_stmt,
581 GSI_CONTINUE_LINKING);
584 pop_gimplify_context (NULL);
586 if (gimple_has_location (stmt))
587 annotate_all_with_location (stmts, gimple_location (stmt));
589 /* First iterate over the replacement statements backward, assigning
590 virtual operands to their defining statements. */
592 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
594 new_stmt = gsi_stmt (i);
/* A statement needs a VDEF if it stores to memory or is a call with
   unknown side effects.  */
595 if ((gimple_assign_single_p (new_stmt)
596 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
597 || (is_gimple_call (new_stmt)
598 && (gimple_call_flags (new_stmt)
599 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
/* The last store reuses the replaced statement's VDEF; earlier stores
   get fresh SSA names.  */
603 vdef = gimple_vdef (stmt);
605 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
606 gimple_set_vdef (new_stmt, vdef);
607 if (vdef && TREE_CODE (vdef) == SSA_NAME)
608 SSA_NAME_DEF_STMT (vdef) = new_stmt;
609 laststore = new_stmt;
613 /* Second iterate over the statements forward, assigning virtual
614 operands to their uses. */
615 reaching_vuse = gimple_vuse (stmt);
616 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
618 new_stmt = gsi_stmt (i);
619 /* The replacement can expose previously unreferenced variables. */
620 if (gimple_in_ssa_p (cfun))
621 find_referenced_vars_in (new_stmt);
622 /* If the new statement possibly has a VUSE, update it with exact SSA
623 name we know will reach this one. */
624 if (gimple_has_mem_ops (new_stmt))
625 gimple_set_vuse (new_stmt, reaching_vuse);
626 gimple_set_modified (new_stmt, true);
627 if (gimple_vdef (new_stmt))
628 reaching_vuse = gimple_vdef (new_stmt);
631 /* If the new sequence does not do a store release the virtual
632 definition of the original statement. */
634 && reaching_vuse == gimple_vuse (stmt))
636 tree vdef = gimple_vdef (stmt);
638 && TREE_CODE (vdef) == SSA_NAME)
640 unlink_stmt_vdef (stmt);
641 release_ssa_name (vdef);
645 /* Finally replace the original statement with the sequence. */
646 gsi_replace_with_seq (si_p, stmts, false);
649 /* Return the string length, maximum string length or maximum value of
651 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
652 is not NULL and, for TYPE == 0, its value is not equal to the length
653 we determine or if we are unable to determine the length or value,
654 return false. VISITED is a bitmap of visited variables.
655 TYPE is 0 if string length should be returned, 1 for maximum string
656 length and 2 for maximum value ARG can have.
   NOTE(review): this extract elides interleaved lines (braces, returns,
   some declarations) of the original function. */
659 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* Non-SSA arguments: compute the length directly.  */
664 if (TREE_CODE (arg) != SSA_NAME)
666 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
667 if (TREE_CODE (arg) == ADDR_EXPR
668 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
669 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
671 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
672 if (TREE_CODE (aop0) == INDIRECT_REF
673 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
674 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
675 length, visited, type);
/* Reject unknown or negative values.  */
681 if (TREE_CODE (val) != INTEGER_CST
682 || tree_int_cst_sgn (val) < 0)
686 val = c_strlen (arg, 1);
/* Merge VAL into *LENGTH: keep the maximum for TYPE != 0, require
   equality for TYPE == 0.  */
694 if (TREE_CODE (*length) != INTEGER_CST
695 || TREE_CODE (val) != INTEGER_CST)
698 if (tree_int_cst_lt (*length, val))
702 else if (simple_cst_equal (val, *length) != 1)
710 /* If we were already here, break the infinite cycle. */
711 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
715 def_stmt = SSA_NAME_DEF_STMT (var);
717 switch (gimple_code (def_stmt))
720 /* The RHS of the statement defining VAR must either have a
721 constant length or come from another SSA_NAME with a constant
723 if (gimple_assign_single_p (def_stmt)
724 || gimple_assign_unary_nop_p (def_stmt))
726 tree rhs = gimple_assign_rhs1 (def_stmt);
727 return get_maxval_strlen (rhs, length, visited, type);
729 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
/* COND_EXPR: both arms must have a determinable length.  */
731 tree op2 = gimple_assign_rhs2 (def_stmt);
732 tree op3 = gimple_assign_rhs3 (def_stmt);
733 return get_maxval_strlen (op2, length, visited, type)
734 && get_maxval_strlen (op3, length, visited, type);
740 /* All the arguments of the PHI node must have the same constant
744 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
746 tree arg = gimple_phi_arg (def_stmt, i)->def;
748 /* If this PHI has itself as an argument, we cannot
749 determine the string length of this argument. However,
750 if we can find a constant string length for the other
751 PHI args then we can still be sure that this is a
752 constant string length. So be optimistic and just
753 continue with the next argument. */
754 if (arg == gimple_phi_result (def_stmt))
757 if (!get_maxval_strlen (arg, length, visited, type))
769 /* Fold builtin call in statement STMT. Returns a simplified tree.
770 We may return a non-constant expression, including another call
771 to a different function and with different arguments, e.g.,
772 substituting memcpy for strcpy when the string length is known.
773 Note that some builtins expand into inline code that may not
774 be valid in GIMPLE. Callers must take care.
   NOTE(review): this extract elides interleaved lines (braces, returns,
   the ARG_IDX/TYPE setup per builtin) of the original function. */
777 gimple_fold_builtin (gimple stmt)
785 location_t loc = gimple_location (stmt);
787 gcc_assert (is_gimple_call (stmt));
/* IGNORE is true when the call's value is unused (no LHS).  */
789 ignore = (gimple_call_lhs (stmt) == NULL);
791 /* First try the generic builtin folder. If that succeeds, return the
793 result = fold_call_stmt (stmt, ignore);
801 /* Ignore MD builtins. */
802 callee = gimple_call_fndecl (stmt);
803 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
806 /* Give up for always_inline inline builtins until they are
808 if (avoid_folding_inline_builtin (callee))
811 /* If the builtin could not be folded, and it has no argument list,
813 nargs = gimple_call_num_args (stmt);
817 /* Limit the work only for builtins we know how to simplify. */
/* First switch: pick which argument carries the string-length
   information (ARG_IDX/TYPE — assignments elided in this extract).  */
818 switch (DECL_FUNCTION_CODE (callee))
820 case BUILT_IN_STRLEN:
822 case BUILT_IN_FPUTS_UNLOCKED:
826 case BUILT_IN_STRCPY:
827 case BUILT_IN_STRNCPY:
831 case BUILT_IN_MEMCPY_CHK:
832 case BUILT_IN_MEMPCPY_CHK:
833 case BUILT_IN_MEMMOVE_CHK:
834 case BUILT_IN_MEMSET_CHK:
835 case BUILT_IN_STRNCPY_CHK:
836 case BUILT_IN_STPNCPY_CHK:
840 case BUILT_IN_STRCPY_CHK:
841 case BUILT_IN_STPCPY_CHK:
845 case BUILT_IN_SNPRINTF_CHK:
846 case BUILT_IN_VSNPRINTF_CHK:
/* Guard against calls with fewer arguments than expected.  */
854 if (arg_idx >= nargs)
857 /* Try to use the dataflow information gathered by the CCP process. */
858 visited = BITMAP_ALLOC (NULL);
859 bitmap_clear (visited);
861 memset (val, 0, sizeof (val));
862 a = gimple_call_arg (stmt, arg_idx);
863 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
864 val[arg_idx] = NULL_TREE;
866 BITMAP_FREE (visited);
/* Second switch: perform the per-builtin simplification using the
   discovered length/value.  */
869 switch (DECL_FUNCTION_CODE (callee))
871 case BUILT_IN_STRLEN:
872 if (val[0] && nargs == 1)
875 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
877 /* If the result is not a valid gimple value, or not a cast
878 of a valid gimple value, then we cannot use the result. */
879 if (is_gimple_val (new_val)
880 || (CONVERT_EXPR_P (new_val)
881 && is_gimple_val (TREE_OPERAND (new_val, 0))))
886 case BUILT_IN_STRCPY:
887 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
888 result = fold_builtin_strcpy (loc, callee,
889 gimple_call_arg (stmt, 0),
890 gimple_call_arg (stmt, 1),
894 case BUILT_IN_STRNCPY:
895 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
896 result = fold_builtin_strncpy (loc, callee,
897 gimple_call_arg (stmt, 0),
898 gimple_call_arg (stmt, 1),
899 gimple_call_arg (stmt, 2),
905 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
906 gimple_call_arg (stmt, 1),
907 ignore, false, val[0]);
910 case BUILT_IN_FPUTS_UNLOCKED:
912 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
913 gimple_call_arg (stmt, 1),
914 ignore, true, val[0]);
917 case BUILT_IN_MEMCPY_CHK:
918 case BUILT_IN_MEMPCPY_CHK:
919 case BUILT_IN_MEMMOVE_CHK:
920 case BUILT_IN_MEMSET_CHK:
921 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
922 result = fold_builtin_memory_chk (loc, callee,
923 gimple_call_arg (stmt, 0),
924 gimple_call_arg (stmt, 1),
925 gimple_call_arg (stmt, 2),
926 gimple_call_arg (stmt, 3),
928 DECL_FUNCTION_CODE (callee));
931 case BUILT_IN_STRCPY_CHK:
932 case BUILT_IN_STPCPY_CHK:
933 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
934 result = fold_builtin_stxcpy_chk (loc, callee,
935 gimple_call_arg (stmt, 0),
936 gimple_call_arg (stmt, 1),
937 gimple_call_arg (stmt, 2),
939 DECL_FUNCTION_CODE (callee));
942 case BUILT_IN_STRNCPY_CHK:
943 case BUILT_IN_STPNCPY_CHK:
944 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
945 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
946 gimple_call_arg (stmt, 1),
947 gimple_call_arg (stmt, 2),
948 gimple_call_arg (stmt, 3),
950 DECL_FUNCTION_CODE (callee));
953 case BUILT_IN_SNPRINTF_CHK:
954 case BUILT_IN_VSNPRINTF_CHK:
955 if (val[1] && is_gimple_val (val[1]))
956 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
957 DECL_FUNCTION_CODE (callee));
/* If the value is unused, drop any wrapper fold added around it.  */
964 if (result && ignore)
965 result = fold_ignored_result (result);
970 /* Return a binfo to be used for devirtualization of calls based on an object
971 represented by a declaration (i.e. a global or automatically allocated one)
972 or NULL if it cannot be found or is not safe. CST is expected to be an
973 ADDR_EXPR of such object or the function will return NULL. Currently it is
974 safe to use such binfo only if it has no base binfo (i.e. no ancestors).
   NOTE(review): this extract elides interleaved lines (braces, returns,
   the sub-object search loop header) of the original function. */
977 gimple_extract_devirt_binfo_from_cst (tree cst)
979 HOST_WIDE_INT offset, size, max_size;
980 tree base, type, expected_type, binfo;
981 bool last_artificial = false;
/* Only &object of RECORD_TYPE qualifies, and only when devirtualization
   is enabled.  */
983 if (!flag_devirtualize
984 || TREE_CODE (cst) != ADDR_EXPR
985 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
988 cst = TREE_OPERAND (cst, 0);
989 expected_type = TREE_TYPE (cst);
990 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
991 type = TREE_TYPE (base);
995 || TREE_CODE (type) != RECORD_TYPE)
998 /* Find the sub-object the constant actually refers to and mark whether it is
999 an artificial one (as opposed to a user-defined one). */
1002 HOST_WIDE_INT pos, size;
/* Done descending once we reach the expected type.  */
1005 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
/* Locate the field covering OFFSET (bit positions).  */
1010 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1012 if (TREE_CODE (fld) != FIELD_DECL)
1015 pos = int_bit_position (fld);
1016 size = tree_low_cst (DECL_SIZE (fld), 1);
1017 if (pos <= offset && (pos + size) > offset)
1020 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1023 last_artificial = DECL_ARTIFICIAL (fld);
1024 type = TREE_TYPE (fld);
1027 /* Artificial sub-objects are ancestors, we do not want to use them for
1028 devirtualization, at least not here. */
1029 if (last_artificial)
/* Only binfos with no bases (no ancestors) are safe to use.  */
1031 binfo = TYPE_BINFO (type);
1032 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1038 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1039 The statement may be replaced by another statement, e.g., if the call
1040 simplifies to a constant value. Return true if any changes were made.
1041 It is assumed that the operands have been previously folded.
   NOTE(review): this extract elides interleaved lines (braces, returns)
   of the original function. */
1044 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1046 gimple stmt = gsi_stmt (*gsi);
1048 bool changed = false;
1051 /* Fold *& in call arguments. */
1052 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1053 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1055 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1058 gimple_call_set_arg (stmt, i, tmp);
1063 /* Check for virtual calls that became direct calls. */
1064 callee = gimple_call_fn (stmt);
1065 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
/* The OBJ_TYPE_REF may already resolve to a known function.  */
1067 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1069 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
/* Otherwise try to devirtualize through the object's static type.  */
1074 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1075 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1079 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1080 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1083 gimple_call_set_fndecl (stmt, fndecl);
1093 /* Check for builtins that CCP can handle using information not
1094 available in the generic fold routines. */
1095 callee = gimple_call_fndecl (stmt);
1096 if (callee && DECL_BUILT_IN (callee))
1098 tree result = gimple_fold_builtin (stmt);
/* Prefer the cheap tree update; fall back to full gimplification when
   the folded form is not a valid GIMPLE RHS.  */
1101 if (!update_call_from_tree (gsi, result))
1102 gimplify_and_update_call_from_tree (gsi, result);
1110 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1111 distinguishes both cases.
   NOTE(review): this extract elides interleaved lines (braces, returns,
   some declarations) of the original function. */
1114 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1116 bool changed = false;
1117 gimple stmt = gsi_stmt (*gsi);
/* Remember the following statement so we can detect whether folding
   removed STMT entirely (see the lhs handling at the end).  */
1119 gimple_stmt_iterator gsinext = *gsi;
1122 gsi_next (&gsinext);
1123 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1125 /* Fold the main computation performed by the statement. */
1126 switch (gimple_code (stmt))
1130 unsigned old_num_ops = gimple_num_ops (stmt);
1131 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1132 tree lhs = gimple_assign_lhs (stmt);
1134 /* First canonicalize operand order. This avoids building new
1135 trees if this is the only thing fold would later do. */
1136 if ((commutative_tree_code (subcode)
1137 || commutative_ternary_tree_code (subcode))
1138 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1139 gimple_assign_rhs2 (stmt), false))
1141 tree tem = gimple_assign_rhs1 (stmt);
1142 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1143 gimple_assign_set_rhs2 (stmt, tem);
1146 new_rhs = fold_gimple_assign (gsi);
1148 && !useless_type_conversion_p (TREE_TYPE (lhs),
1149 TREE_TYPE (new_rhs)))
1150 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding must not grow the statement's operand count.  */
1153 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1155 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1162 changed |= fold_gimple_cond (stmt);
1166 changed |= gimple_fold_call (gsi, inplace);
1170 /* Fold *& in asm operands. */
1173 const char **oconstraints;
1174 const char *constraint;
1175 bool allows_mem, allows_reg;
1177 noutputs = gimple_asm_noutputs (stmt);
1178 oconstraints = XALLOCAVEC (const char *, noutputs);
1180 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1182 tree link = gimple_asm_output_op (stmt, i);
1183 tree op = TREE_VALUE (link);
1185 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Outputs are lvalues, so fold as LHS references.  */
1186 if (REFERENCE_CLASS_P (op)
1187 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1189 TREE_VALUE (link) = op;
1193 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1195 tree link = gimple_asm_input_op (stmt, i);
1196 tree op = TREE_VALUE (link);
1198 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Inputs fold as lvalues only when the constraint requires memory.  */
1199 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1200 oconstraints, &allows_mem, &allows_reg);
1201 if (REFERENCE_CLASS_P (op)
1202 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1205 TREE_VALUE (link) = op;
1213 if (gimple_debug_bind_p (stmt))
1215 tree val = gimple_debug_bind_get_value (stmt);
1217 && REFERENCE_CLASS_P (val))
1219 tree tem = maybe_fold_reference (val, false);
1222 gimple_debug_bind_set_value (stmt, tem);
1227 && TREE_CODE (val) == ADDR_EXPR)
1229 tree ref = TREE_OPERAND (val, 0);
1230 tree tem = maybe_fold_reference (ref, false);
1233 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1234 gimple_debug_bind_set_value (stmt, tem);
1244 /* If stmt folds into nothing and it was the last stmt in a bb,
1245 don't call gsi_stmt. */
1246 if (gsi_end_p (*gsi))
1248 gcc_assert (next_stmt == NULL);
1252 stmt = gsi_stmt (*gsi);
1254 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1255 as we'd changing the next stmt. */
1256 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1258 tree lhs = gimple_get_lhs (stmt);
1259 if (lhs && REFERENCE_CLASS_P (lhs))
1261 tree new_lhs = maybe_fold_reference (lhs, true);
1264 gimple_set_lhs (stmt, new_lhs);
1273 /* Fold the statement pointed to by GSI. In some cases, this function may
1274 replace the whole statement with a new one. Returns true iff folding
1276 The statement pointed to by GSI should be in valid gimple form but may
1277 be in unfolded state as resulting from for example constant propagation
1278 which can produce *&x = 0. */
1281 fold_stmt (gimple_stmt_iterator *gsi)
/* Non-inplace variant: folding may replace the statement.  */
1283 return fold_stmt_1 (gsi, false);
1286 /* Perform the minimal folding on statement *GSI. Only operations like
1287 *&x created by constant propagation are handled. The statement cannot
1288 be replaced with a new one. Return true if the statement was
1289 changed, false otherwise.
1290 The statement *GSI should be in valid gimple form but may
1291 be in unfolded state as resulting from for example constant propagation
1292 which can produce *&x = 0. */
1295 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1297 gimple stmt = gsi_stmt (*gsi);
1298 bool changed = fold_stmt_1 (gsi, true);
/* Inplace folding must never have replaced the statement object.  */
1299 gcc_assert (gsi_stmt (*gsi) == stmt);
1303 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1304 if EXPR is null or we don't know how.
1305 If non-null, the result always has boolean type.  */
1308 canonicalize_bool (tree expr, bool invert)
/* This first group maps nonzero -> false and zero -> true, and builds
   EQ_EXPR / inverted comparisons: it is the INVERT path.  NOTE(review):
   the `if (invert)` guard itself appears elided from this chunk --
   confirm against upstream gimple-fold.c.  */
1314 if (integer_nonzerop (expr))
1315 return boolean_false_node;
1316 else if (integer_zerop (expr))
1317 return boolean_true_node;
1318 else if (TREE_CODE (expr) == SSA_NAME)
/* !name is canonicalized as (name == 0).  */
1319 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1320 build_int_cst (TREE_TYPE (expr), 0));
1321 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
/* Invert the comparison code; `false` = not honoring NaNs here.  */
1322 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1324 TREE_OPERAND (expr, 0),
1325 TREE_OPERAND (expr, 1));
/* Non-inverted path: EXPR already of boolean type passes through in
   canonical form.  */
1331 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1333 if (integer_nonzerop (expr))
1334 return boolean_true_node;
1335 else if (integer_zerop (expr))
1336 return boolean_false_node;
1337 else if (TREE_CODE (expr) == SSA_NAME)
/* A bare name is canonicalized as (name != 0).  */
1338 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1339 build_int_cst (TREE_TYPE (expr), 0));
1340 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1341 return fold_build2 (TREE_CODE (expr),
1343 TREE_OPERAND (expr, 0),
1344 TREE_OPERAND (expr, 1));
1350 /* Check to see if a boolean expression EXPR is logically equivalent to the
1351 comparison (OP1 CODE OP2).  Check for various identities involving
1355 same_bool_comparison_p (const_tree expr, enum tree_code code,
1356 const_tree op1, const_tree op2)
1360 /* The obvious case.  */
1361 if (TREE_CODE (expr) == code
1362 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1363 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1366 /* Check for comparing (name, name != 0) and the case where expr
1367 is an SSA_NAME with a definition matching the comparison.  */
1368 if (TREE_CODE (expr) == SSA_NAME
1369 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean name is equivalent to (name != 0) and to (name == 1).  */
1371 if (operand_equal_p (expr, op1, 0))
1372 return ((code == NE_EXPR && integer_zerop (op2))
1373 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through the name's defining assignment.  */
1374 s = SSA_NAME_DEF_STMT (expr);
1375 if (is_gimple_assign (s)
1376 && gimple_assign_rhs_code (s) == code
1377 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1378 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1382 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1383 of name is a comparison, recurse.  */
1384 if (TREE_CODE (op1) == SSA_NAME
1385 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1387 s = SSA_NAME_DEF_STMT (op1);
1388 if (is_gimple_assign (s)
1389 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1391 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): compare EXPR against the comparison
   defining NAME directly.  */
1392 if ((c == NE_EXPR && integer_zerop (op2))
1393 || (c == EQ_EXPR && integer_nonzerop (op2)))
1394 return same_bool_comparison_p (expr, c,
1395 gimple_assign_rhs1 (s),
1396 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): compare against the inverse of that
   comparison.  `false` = not honoring NaNs in the inversion.  */
1397 if ((c == EQ_EXPR && integer_zerop (op2))
1398 || (c == NE_EXPR && integer_nonzerop (op2)))
1399 return same_bool_comparison_p (expr,
1400 invert_tree_comparison (c, false),
1401 gimple_assign_rhs1 (s),
1402 gimple_assign_rhs2 (s));
1408 /* Check to see if two boolean expressions OP1 and OP2 are logically
1412 same_bool_result_p (const_tree op1, const_tree op2)
1414 /* Simple cases first.  */
1415 if (operand_equal_p (op1, op2, 0))
1418 /* Check the cases where at least one of the operands is a comparison.
1419 These are a bit smarter than operand_equal_p in that they apply some
1420 identifies on SSA_NAMEs.  */
/* Try each direction: OP1 against the comparison OP2 ...  */
1421 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1422 && same_bool_comparison_p (op1, TREE_CODE (op2),
1423 TREE_OPERAND (op2, 0),
1424 TREE_OPERAND (op2, 1)))
/* ... and OP2 against the comparison OP1.  */
1426 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1427 && same_bool_comparison_p (op2, TREE_CODE (op1),
1428 TREE_OPERAND (op1, 0),
1429 TREE_OPERAND (op1, 1)))
1436 /* Forward declarations for some mutually recursive functions. */
1439 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1440 enum tree_code code2, tree op2a, tree op2b);
1442 and_var_with_comparison (tree var, bool invert,
1443 enum tree_code code2, tree op2a, tree op2b);
1445 and_var_with_comparison_1 (gimple stmt,
1446 enum tree_code code2, tree op2a, tree op2b);
1448 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1449 enum tree_code code2, tree op2a, tree op2b);
1451 or_var_with_comparison (tree var, bool invert,
1452 enum tree_code code2, tree op2a, tree op2b);
1454 or_var_with_comparison_1 (gimple stmt,
1455 enum tree_code code2, tree op2a, tree op2b);
1457 /* Helper function for and_comparisons_1: try to simplify the AND of the
1458 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1459 If INVERT is true, invert the value of the VAR before doing the AND.
1460 Return NULL_EXPR if we can't simplify this to a single expression.  */
1463 and_var_with_comparison (tree var, bool invert,
1464 enum tree_code code2, tree op2a, tree op2b)
1467 gimple stmt = SSA_NAME_DEF_STMT (var);
1469 /* We can only deal with variables whose definitions are assignments.  */
1470 if (!is_gimple_assign (stmt))
1473 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1474 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1475 Then we only have to consider the simpler non-inverted cases.  */
/* NOTE(review): the `if (invert)` guard selecting between these two
   calls appears elided in this chunk -- confirm against upstream.  */
1477 t = or_var_with_comparison_1 (stmt,
1478 invert_tree_comparison (code2, false),
1481 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* Canonicalize (and re-invert if DeMorgan was applied above).  */
1482 return canonicalize_bool (t, invert);
1485 /* Try to simplify the AND of the ssa variable defined by the assignment
1486 STMT with the comparison specified by (OP2A CODE2 OP2B).
1487 Return NULL_EXPR if we can't simplify this to a single expression.  */
1490 and_var_with_comparison_1 (gimple stmt,
1491 enum tree_code code2, tree op2a, tree op2b)
1493 tree var = gimple_assign_lhs (stmt);
1494 tree true_test_var = NULL_TREE;
1495 tree false_test_var = NULL_TREE;
1496 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1498 /* Check for identities like (var AND (var == 0)) => false.  */
1499 if (TREE_CODE (op2a) == SSA_NAME
1500 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) or (op2a == 1) tests op2a for truth ...  */
1502 if ((code2 == NE_EXPR && integer_zerop (op2b))
1503 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1505 true_test_var = op2a;
1506 if (var == true_test_var)
/* ... while (op2a == 0) or (op2a != 1) tests it for falsehood,
   so (var AND !var) folds to false.  */
1509 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1510 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1512 false_test_var = op2a;
1513 if (var == false_test_var)
1514 return boolean_false_node;
1518 /* If the definition is a comparison, recurse on it.  */
1519 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1521 tree t = and_comparisons_1 (innercode,
1522 gimple_assign_rhs1 (stmt),
1523 gimple_assign_rhs2 (stmt),
1531 /* If the definition is an AND or OR expression, we may be able to
1532 simplify by reassociating.  */
1533 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1534 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1536 tree inner1 = gimple_assign_rhs1 (stmt);
1537 tree inner2 = gimple_assign_rhs2 (stmt);
1540 tree partial = NULL_TREE;
1541 bool is_and = (innercode == BIT_AND_EXPR);
1543 /* Check for boolean identities that don't require recursive examination
1545 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1546 inner1 AND (inner1 OR inner2) => inner1
1547 !inner1 AND (inner1 AND inner2) => false
1548 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1549 Likewise for similar cases involving inner2.  */
1550 if (inner1 == true_test_var)
1551 return (is_and ? var : inner1);
1552 else if (inner2 == true_test_var)
1553 return (is_and ? var : inner2);
1554 else if (inner1 == false_test_var)
1556 ? boolean_false_node
1557 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1558 else if (inner2 == false_test_var)
1560 ? boolean_false_node
1561 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1563 /* Next, redistribute/reassociate the AND across the inner tests.
1564 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1565 if (TREE_CODE (inner1) == SSA_NAME
1566 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1567 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1568 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1569 gimple_assign_rhs1 (s),
1570 gimple_assign_rhs2 (s),
1571 code2, op2a, op2b)))
1573 /* Handle the AND case, where we are reassociating:
1574 (inner1 AND inner2) AND (op2a code2 op2b)
1576 If the partial result t is a constant, we win.  Otherwise
1577 continue on to try reassociating with the other inner test.  */
1580 if (integer_onep (t))
1582 else if (integer_zerop (t))
/* AND with a false term is false.  */
1583 return boolean_false_node;
1586 /* Handle the OR case, where we are redistributing:
1587 (inner1 OR inner2) AND (op2a code2 op2b)
1588 => (t OR (inner2 AND (op2a code2 op2b))) */
1589 else if (integer_onep (t))
/* OR with a true term is true.  */
1590 return boolean_true_node;
1592 /* Save partial result for later.  */
1596 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1597 if (TREE_CODE (inner2) == SSA_NAME
1598 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1599 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1600 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1601 gimple_assign_rhs1 (s),
1602 gimple_assign_rhs2 (s),
1603 code2, op2a, op2b)))
1605 /* Handle the AND case, where we are reassociating:
1606 (inner1 AND inner2) AND (op2a code2 op2b)
1607 => (inner1 AND t) */
1610 if (integer_onep (t))
1612 else if (integer_zerop (t))
1613 return boolean_false_node;
1614 /* If both are the same, we can apply the identity
1616 else if (partial && same_bool_result_p (t, partial))
1620 /* Handle the OR case. where we are redistributing:
1621 (inner1 OR inner2) AND (op2a code2 op2b)
1622 => (t OR (inner1 AND (op2a code2 op2b)))
1623 => (t OR partial) */
1626 if (integer_onep (t))
1627 return boolean_true_node;
1630 /* We already got a simplification for the other
1631 operand to the redistributed OR expression.  The
1632 interesting case is when at least one is false.
1633 Or, if both are the same, we can apply the identity
1635 if (integer_zerop (partial))
1637 else if (integer_zerop (t))
1639 else if (same_bool_result_p (t, partial))
1648 /* Try to simplify the AND of two comparisons defined by
1649 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1650 If this can be done without constructing an intermediate value,
1651 return the resulting tree; otherwise NULL_TREE is returned.
1652 This function is deliberately asymmetric as it recurses on SSA_DEFs
1653 in the first comparison but not the second.  */
1656 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1657 enum tree_code code2, tree op2a, tree op2b)
1659 /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
1660 if (operand_equal_p (op1a, op2a, 0)
1661 && operand_equal_p (op1b, op2b, 0))
1663 /* Result will be either NULL_TREE, or a combined comparison.  */
1664 tree t = combine_comparisons (UNKNOWN_LOCATION,
1665 TRUTH_ANDIF_EXPR, code1, code2,
1666 boolean_type_node, op1a, op1b);
1671 /* Likewise the swapped case of the above.  */
1672 if (operand_equal_p (op1a, op2b, 0)
1673 && operand_equal_p (op1b, op2a, 0))
1675 /* Result will be either NULL_TREE, or a combined comparison.  */
1676 tree t = combine_comparisons (UNKNOWN_LOCATION,
1677 TRUTH_ANDIF_EXPR, code1,
1678 swap_tree_comparison (code2),
1679 boolean_type_node, op1a, op1b);
1684 /* If both comparisons are of the same value against constants, we might
1685 be able to merge them.  */
1686 if (operand_equal_p (op1a, op2a, 0)
1687 && TREE_CODE (op1b) == INTEGER_CST
1688 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants: <0 if op1b < op2b, 0 if equal, >0 if
   op1b > op2b.  All cases below key off this single comparison.  */
1690 int cmp = tree_int_cst_compare (op1b, op2b);
1692 /* If we have (op1a == op1b), we should either be able to
1693 return that or FALSE, depending on whether the constant op1b
1694 also satisfies the other comparison against op2b.  */
1695 if (code1 == EQ_EXPR)
1701 case EQ_EXPR: val = (cmp == 0); break;
1702 case NE_EXPR: val = (cmp != 0); break;
1703 case LT_EXPR: val = (cmp < 0); break;
1704 case GT_EXPR: val = (cmp > 0); break;
1705 case LE_EXPR: val = (cmp <= 0); break;
1706 case GE_EXPR: val = (cmp >= 0); break;
1707 default: done = false;
1712 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1714 return boolean_false_node;
1717 /* Likewise if the second comparison is an == comparison.  */
1718 else if (code2 == EQ_EXPR)
/* Note the comparison senses flip here: we test whether op2b
   satisfies (op2b CODE1 op1b), so cmp is used mirrored.  */
1724 case EQ_EXPR: val = (cmp == 0); break;
1725 case NE_EXPR: val = (cmp != 0); break;
1726 case LT_EXPR: val = (cmp > 0); break;
1727 case GT_EXPR: val = (cmp < 0); break;
1728 case LE_EXPR: val = (cmp >= 0); break;
1729 case GE_EXPR: val = (cmp <= 0); break;
1730 default: done = false;
1735 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1737 return boolean_false_node;
1741 /* Same business with inequality tests.  */
1742 else if (code1 == NE_EXPR)
1747 case EQ_EXPR: val = (cmp != 0); break;
1748 case NE_EXPR: val = (cmp == 0); break;
1749 case LT_EXPR: val = (cmp >= 0); break;
1750 case GT_EXPR: val = (cmp <= 0); break;
1751 case LE_EXPR: val = (cmp > 0); break;
1752 case GE_EXPR: val = (cmp < 0); break;
1757 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1759 else if (code2 == NE_EXPR)
1764 case EQ_EXPR: val = (cmp == 0); break;
1765 case NE_EXPR: val = (cmp != 0); break;
1766 case LT_EXPR: val = (cmp <= 0); break;
1767 case GT_EXPR: val = (cmp >= 0); break;
1768 case LE_EXPR: val = (cmp < 0); break;
1769 case GE_EXPR: val = (cmp > 0); break;
1774 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1777 /* Chose the more restrictive of two < or <= comparisons.  */
1778 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1779 && (code2 == LT_EXPR || code2 == LE_EXPR))
1781 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1782 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1784 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1787 /* Likewise chose the more restrictive of two > or >= comparisons.  */
1788 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1789 && (code2 == GT_EXPR || code2 == GE_EXPR))
1791 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1792 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1794 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1797 /* Check for singleton ranges.  */
/* (x <= c AND x >= c) collapses to (x == c); guard on cmp == 0
   appears elided here.  */
1799 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1800 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1801 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1803 /* Check for disjoint ranges. */
1805 && (code1 == LT_EXPR || code1 == LE_EXPR)
1806 && (code2 == GT_EXPR || code2 == GE_EXPR))
1807 return boolean_false_node;
1809 && (code1 == GT_EXPR || code1 == GE_EXPR)
1810 && (code2 == LT_EXPR || code2 == LE_EXPR))
1811 return boolean_false_node;
1814 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1815 NAME's definition is a truth value.  See if there are any simplifications
1816 that can be done against the NAME's definition.  */
1817 if (TREE_CODE (op1a) == SSA_NAME
1818 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1819 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true for the falsehood tests (NAME == 0) / (NAME != 1).  */
1821 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1822 || (code1 == NE_EXPR && integer_onep (op1b)));
1823 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1824 switch (gimple_code (stmt))
1827 /* Try to simplify by copy-propagating the definition.  */
1828 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1831 /* If every argument to the PHI produces the same result when
1832 ANDed with the second comparison, we win.
1833 Do not do this unless the type is bool since we need a bool
1834 result here anyway.  */
1835 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1837 tree result = NULL_TREE;
1839 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1841 tree arg = gimple_phi_arg_def (stmt, i);
1843 /* If this PHI has itself as an argument, ignore it.
1844 If all the other args produce the same result,
1846 if (arg == gimple_phi_result (stmt))
1848 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant false arg makes the whole AND false for this edge.  */
1850 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1853 result = boolean_false_node;
1854 else if (!integer_zerop (result))
1858 result = fold_build2 (code2, boolean_type_node,
1860 else if (!same_bool_comparison_p (result,
1864 else if (TREE_CODE (arg) == SSA_NAME
1865 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1868 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1869 /* In simple cases we can look through PHI nodes,
1870 but we have to be careful with loops.
/* Only recurse when the arg's definition dominates (or shares a
   block with) the PHI, so we don't chase back-edge values.  */
1872 if (! dom_info_available_p (CDI_DOMINATORS)
1873 || gimple_bb (def_stmt) == gimple_bb (stmt)
1874 || dominated_by_p (CDI_DOMINATORS,
1875 gimple_bb (def_stmt),
1878 temp = and_var_with_comparison (arg, invert, code2,
1884 else if (!same_bool_result_p (result, temp))
1900 /* Try to simplify the AND of two comparisons, specified by
1901 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1902 If this can be simplified to a single expression (without requiring
1903 introducing more SSA variables to hold intermediate values),
1904 return the resulting tree.  Otherwise return NULL_TREE.
1905 If the result expression is non-null, it has boolean type.  */
1908 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1909 enum tree_code code2, tree op2a, tree op2b)
/* and_comparisons_1 is asymmetric (it only recurses on the first
   comparison's SSA defs), so try both argument orders.
   NOTE(review): the `if (t) return t;` short-circuit between these two
   calls appears elided in this chunk -- confirm against upstream.  */
1911 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1915 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1918 /* Helper function for or_comparisons_1: try to simplify the OR of the
1919 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1920 If INVERT is true, invert the value of VAR before doing the OR.
1921 Return NULL_EXPR if we can't simplify this to a single expression.  */
1924 or_var_with_comparison (tree var, bool invert,
1925 enum tree_code code2, tree op2a, tree op2b)
1928 gimple stmt = SSA_NAME_DEF_STMT (var);
1930 /* We can only deal with variables whose definitions are assignments.  */
1931 if (!is_gimple_assign (stmt))
1934 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1935 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1936 Then we only have to consider the simpler non-inverted cases.  */
/* NOTE(review): the `if (invert)` guard selecting between these two
   calls appears elided in this chunk -- confirm against upstream.  */
1938 t = and_var_with_comparison_1 (stmt,
1939 invert_tree_comparison (code2, false),
1942 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* Canonicalize (and re-invert if DeMorgan was applied above).  */
1943 return canonicalize_bool (t, invert);
1946 /* Try to simplify the OR of the ssa variable defined by the assignment
1947 STMT with the comparison specified by (OP2A CODE2 OP2B).
1948 Return NULL_EXPR if we can't simplify this to a single expression.  */
1951 or_var_with_comparison_1 (gimple stmt,
1952 enum tree_code code2, tree op2a, tree op2b)
1954 tree var = gimple_assign_lhs (stmt);
1955 tree true_test_var = NULL_TREE;
1956 tree false_test_var = NULL_TREE;
1957 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1959 /* Check for identities like (var OR (var != 0)) => true .  */
1960 if (TREE_CODE (op2a) == SSA_NAME
1961 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) or (op2a == 1) tests op2a for truth ...  */
1963 if ((code2 == NE_EXPR && integer_zerop (op2b))
1964 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1966 true_test_var = op2a;
1967 if (var == true_test_var)
/* ... while (op2a == 0) or (op2a != 1) tests falsehood, so
   (var OR !var) folds to true.  */
1970 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1971 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1973 false_test_var = op2a;
1974 if (var == false_test_var)
1975 return boolean_true_node;
1979 /* If the definition is a comparison, recurse on it.  */
1980 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1982 tree t = or_comparisons_1 (innercode,
1983 gimple_assign_rhs1 (stmt),
1984 gimple_assign_rhs2 (stmt),
1992 /* If the definition is an AND or OR expression, we may be able to
1993 simplify by reassociating.  */
1994 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1995 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1997 tree inner1 = gimple_assign_rhs1 (stmt);
1998 tree inner2 = gimple_assign_rhs2 (stmt);
2001 tree partial = NULL_TREE;
2002 bool is_or = (innercode == BIT_IOR_EXPR);
2004 /* Check for boolean identities that don't require recursive examination
2006 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2007 inner1 OR (inner1 AND inner2) => inner1
2008 !inner1 OR (inner1 OR inner2) => true
2009 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2011 if (inner1 == true_test_var)
2012 return (is_or ? var : inner1);
2013 else if (inner2 == true_test_var)
2014 return (is_or ? var : inner2);
2015 else if (inner1 == false_test_var)
2018 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2019 else if (inner2 == false_test_var)
2022 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2024 /* Next, redistribute/reassociate the OR across the inner tests.
2025 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2026 if (TREE_CODE (inner1) == SSA_NAME
2027 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2028 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2029 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2030 gimple_assign_rhs1 (s),
2031 gimple_assign_rhs2 (s),
2032 code2, op2a, op2b)))
2034 /* Handle the OR case, where we are reassociating:
2035 (inner1 OR inner2) OR (op2a code2 op2b)
2037 If the partial result t is a constant, we win.  Otherwise
2038 continue on to try reassociating with the other inner test.  */
2041 if (integer_onep (t))
/* OR with a true term is true.  */
2042 return boolean_true_node;
2043 else if (integer_zerop (t))
2047 /* Handle the AND case, where we are redistributing:
2048 (inner1 AND inner2) OR (op2a code2 op2b)
2049 => (t AND (inner2 OR (op2a code op2b))) */
2050 else if (integer_zerop (t))
/* AND with a false term is false.  */
2051 return boolean_false_node;
2053 /* Save partial result for later.  */
2057 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2058 if (TREE_CODE (inner2) == SSA_NAME
2059 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2060 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2061 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2062 gimple_assign_rhs1 (s),
2063 gimple_assign_rhs2 (s),
2064 code2, op2a, op2b)))
2066 /* Handle the OR case, where we are reassociating:
2067 (inner1 OR inner2) OR (op2a code2 op2b)
2069 => (t OR partial) */
2072 if (integer_zerop (t))
2074 else if (integer_onep (t))
2075 return boolean_true_node;
2076 /* If both are the same, we can apply the identity
2078 else if (partial && same_bool_result_p (t, partial))
2082 /* Handle the AND case, where we are redistributing:
2083 (inner1 AND inner2) OR (op2a code2 op2b)
2084 => (t AND (inner1 OR (op2a code2 op2b)))
2085 => (t AND partial) */
2088 if (integer_zerop (t))
2089 return boolean_false_node;
2092 /* We already got a simplification for the other
2093 operand to the redistributed AND expression.  The
2094 interesting case is when at least one is true.
2095 Or, if both are the same, we can apply the identity
2097 if (integer_onep (partial))
2099 else if (integer_onep (t))
2101 else if (same_bool_result_p (t, partial))
2110 /* Try to simplify the OR of two comparisons defined by
2111 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2112 If this can be done without constructing an intermediate value,
2113 return the resulting tree; otherwise NULL_TREE is returned.
2114 This function is deliberately asymmetric as it recurses on SSA_DEFs
2115 in the first comparison but not the second.  */
2118 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2119 enum tree_code code2, tree op2a, tree op2b)
2121 /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
2122 if (operand_equal_p (op1a, op2a, 0)
2123 && operand_equal_p (op1b, op2b, 0))
2125 /* Result will be either NULL_TREE, or a combined comparison.  */
2126 tree t = combine_comparisons (UNKNOWN_LOCATION,
2127 TRUTH_ORIF_EXPR, code1, code2,
2128 boolean_type_node, op1a, op1b);
2133 /* Likewise the swapped case of the above.  */
2134 if (operand_equal_p (op1a, op2b, 0)
2135 && operand_equal_p (op1b, op2a, 0))
2137 /* Result will be either NULL_TREE, or a combined comparison.  */
2138 tree t = combine_comparisons (UNKNOWN_LOCATION,
2139 TRUTH_ORIF_EXPR, code1,
2140 swap_tree_comparison (code2),
2141 boolean_type_node, op1a, op1b);
2146 /* If both comparisons are of the same value against constants, we might
2147 be able to merge them.  */
2148 if (operand_equal_p (op1a, op2a, 0)
2149 && TREE_CODE (op1b) == INTEGER_CST
2150 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants: <0 if op1b < op2b, 0 if equal, >0 if
   op1b > op2b.  All cases below key off this single comparison.  */
2152 int cmp = tree_int_cst_compare (op1b, op2b);
2154 /* If we have (op1a != op1b), we should either be able to
2155 return that or TRUE, depending on whether the constant op1b
2156 also satisfies the other comparison against op2b.  */
2157 if (code1 == NE_EXPR)
2163 case EQ_EXPR: val = (cmp == 0); break;
2164 case NE_EXPR: val = (cmp != 0); break;
2165 case LT_EXPR: val = (cmp < 0); break;
2166 case GT_EXPR: val = (cmp > 0); break;
2167 case LE_EXPR: val = (cmp <= 0); break;
2168 case GE_EXPR: val = (cmp >= 0); break;
2169 default: done = false;
2174 return boolean_true_node;
2176 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2179 /* Likewise if the second comparison is a != comparison.  */
2180 else if (code2 == NE_EXPR)
/* Note the comparison senses flip here: cmp is used mirrored
   because we test op2b against CODE1/op1b.  */
2186 case EQ_EXPR: val = (cmp == 0); break;
2187 case NE_EXPR: val = (cmp != 0); break;
2188 case LT_EXPR: val = (cmp > 0); break;
2189 case GT_EXPR: val = (cmp < 0); break;
2190 case LE_EXPR: val = (cmp >= 0); break;
2191 case GE_EXPR: val = (cmp <= 0); break;
2192 default: done = false;
2197 return boolean_true_node;
2199 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2203 /* See if an equality test is redundant with the other comparison.  */
2204 else if (code1 == EQ_EXPR)
2209 case EQ_EXPR: val = (cmp == 0); break;
2210 case NE_EXPR: val = (cmp != 0); break;
2211 case LT_EXPR: val = (cmp < 0); break;
2212 case GT_EXPR: val = (cmp > 0); break;
2213 case LE_EXPR: val = (cmp <= 0); break;
2214 case GE_EXPR: val = (cmp >= 0); break;
2219 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2221 else if (code2 == EQ_EXPR)
2226 case EQ_EXPR: val = (cmp == 0); break;
2227 case NE_EXPR: val = (cmp != 0); break;
2228 case LT_EXPR: val = (cmp > 0); break;
2229 case GT_EXPR: val = (cmp < 0); break;
2230 case LE_EXPR: val = (cmp >= 0); break;
2231 case GE_EXPR: val = (cmp <= 0); break;
2236 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2239 /* Chose the less restrictive of two < or <= comparisons.  */
2240 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2241 && (code2 == LT_EXPR || code2 == LE_EXPR))
2243 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2244 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2246 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2249 /* Likewise chose the less restrictive of two > or >= comparisons.  */
2250 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2251 && (code2 == GT_EXPR || code2 == GE_EXPR))
2253 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2254 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2256 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2259 /* Check for singleton ranges.  */
/* (x < c OR x > c) is (x != c); guard on cmp == 0 appears elided.  */
2261 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2262 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2263 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2265 /* Check for less/greater pairs that don't restrict the range at all.  */
2267 && (code1 == LT_EXPR || code1 == LE_EXPR)
2268 && (code2 == GT_EXPR || code2 == GE_EXPR))
2269 return boolean_true_node;
2271 && (code1 == GT_EXPR || code1 == GE_EXPR)
2272 && (code2 == LT_EXPR || code2 == LE_EXPR))
2273 return boolean_true_node;
2276 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2277 NAME's definition is a truth value.  See if there are any simplifications
2278 that can be done against the NAME's definition.  */
2279 if (TREE_CODE (op1a) == SSA_NAME
2280 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2281 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true for the falsehood tests (NAME == 0) / (NAME != 1).  */
2283 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2284 || (code1 == NE_EXPR && integer_onep (op1b)));
2285 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2286 switch (gimple_code (stmt))
2289 /* Try to simplify by copy-propagating the definition.  */
2290 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2293 /* If every argument to the PHI produces the same result when
2294 ORed with the second comparison, we win.
2295 Do not do this unless the type is bool since we need a bool
2296 result here anyway.  */
2297 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2299 tree result = NULL_TREE;
2301 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2303 tree arg = gimple_phi_arg_def (stmt, i);
2305 /* If this PHI has itself as an argument, ignore it.
2306 If all the other args produce the same result,
2308 if (arg == gimple_phi_result (stmt))
2310 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant true arg makes the whole OR true for this edge.  */
2312 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2315 result = boolean_true_node;
2316 else if (!integer_onep (result))
2320 result = fold_build2 (code2, boolean_type_node,
2322 else if (!same_bool_comparison_p (result,
2326 else if (TREE_CODE (arg) == SSA_NAME
2327 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2330 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2331 /* In simple cases we can look through PHI nodes,
2332 but we have to be careful with loops.
/* Only recurse when the arg's definition dominates (or shares a
   block with) the PHI, so we don't chase back-edge values.  */
2334 if (! dom_info_available_p (CDI_DOMINATORS)
2335 || gimple_bb (def_stmt) == gimple_bb (stmt)
2336 || dominated_by_p (CDI_DOMINATORS,
2337 gimple_bb (def_stmt),
2340 temp = or_var_with_comparison (arg, invert, code2,
2346 else if (!same_bool_result_p (result, temp))
2362 /* Try to simplify the OR of two comparisons, specified by
2363 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2364 If this can be simplified to a single expression (without requiring
2365 introducing more SSA variables to hold intermediate values),
2366 return the resulting tree.  Otherwise return NULL_TREE.
2367 If the result expression is non-null, it has boolean type.  */
2370 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2371 enum tree_code code2, tree op2a, tree op2b)
/* or_comparisons_1 is asymmetric (it only recurses on the first
   comparison's SSA defs), so try both argument orders.
   NOTE(review): the `if (t) return t;` short-circuit between these two
   calls appears elided in this chunk -- confirm against upstream.  */
2373 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2377 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2381 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2383 Either NULL_TREE, a simplified but non-constant or a constant
2386 ??? This should go into a gimple-fold-inline.h file to be eventually
2387 privatized with the single valueize function used in the various TUs
2388 to avoid the indirect function call overhead. */
2391 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2393 location_t loc = gimple_location (stmt);
/* Dispatch on the statement kind; assignments and calls are the only
   statements folded to a constant here.  */
2394 switch (gimple_code (stmt))
2398 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2400 switch (get_gimple_rhs_class (subcode))
2402 case GIMPLE_SINGLE_RHS:
2404 tree rhs = gimple_assign_rhs1 (stmt);
2405 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2407 if (TREE_CODE (rhs) == SSA_NAME)
2409 /* If the RHS is an SSA_NAME, return its known constant value,
2411 return (*valueize) (rhs);
2413 /* Handle propagating invariant addresses into address
2415 else if (TREE_CODE (rhs) == ADDR_EXPR
2416 && !is_gimple_min_invariant (rhs))
2418 HOST_WIDE_INT offset = 0;
2420 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2424 && (CONSTANT_CLASS_P (base)
2425 || decl_address_invariant_p (base)))
2426 return build_invariant_address (TREE_TYPE (rhs),
/* A vector CONSTRUCTOR with exactly one value per subpart may fold to
   a VECTOR_CST when every element valueizes to a constant.  */
2429 else if (TREE_CODE (rhs) == CONSTRUCTOR
2430 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2431 && (CONSTRUCTOR_NELTS (rhs)
2432 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2437 vec = XALLOCAVEC (tree,
2438 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2441 val = (*valueize) (val);
/* Only integral, real and fixed-point constants are valid
   VECTOR_CST elements.  */
2442 if (TREE_CODE (val) == INTEGER_CST
2443 || TREE_CODE (val) == REAL_CST
2444 || TREE_CODE (val) == FIXED_CST)
2450 return build_vector (TREE_TYPE (rhs), vec);
/* References whose base is an SSA name: fold by valueizing the
   base operand first.  */
2453 if (kind == tcc_reference)
2455 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2456 || TREE_CODE (rhs) == REALPART_EXPR
2457 || TREE_CODE (rhs) == IMAGPART_EXPR)
2458 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2460 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2461 return fold_unary_loc (EXPR_LOCATION (rhs),
2463 TREE_TYPE (rhs), val);
2465 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2466 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2468 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2469 return fold_ternary_loc (EXPR_LOCATION (rhs),
2471 TREE_TYPE (rhs), val,
2472 TREE_OPERAND (rhs, 1),
2473 TREE_OPERAND (rhs, 2));
/* A MEM_REF through an SSA pointer folds like a direct reference
   once the pointer valueizes to an invariant address.  */
2475 else if (TREE_CODE (rhs) == MEM_REF
2476 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2478 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2479 if (TREE_CODE (val) == ADDR_EXPR
2480 && is_gimple_min_invariant (val))
2482 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2484 TREE_OPERAND (rhs, 1));
2489 return fold_const_aggregate_ref_1 (rhs, valueize);
2491 else if (kind == tcc_declaration)
2492 return get_symbol_constant_value (rhs);
2496 case GIMPLE_UNARY_RHS:
2498 /* Handle unary operators that can appear in GIMPLE form.
2499 Note that we know the single operand must be a constant,
2500 so this should almost always return a simplified RHS. */
2501 tree lhs = gimple_assign_lhs (stmt);
2502 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2504 /* Conversions are useless for CCP purposes if they are
2505 value-preserving. Thus the restrictions that
2506 useless_type_conversion_p places for restrict qualification
2507 of pointer types should not apply here.
2508 Substitution later will only substitute to allowed places. */
2509 if (CONVERT_EXPR_CODE_P (subcode)
2510 && POINTER_TYPE_P (TREE_TYPE (lhs))
2511 && POINTER_TYPE_P (TREE_TYPE (op0))
2512 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2513 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2514 && TYPE_MODE (TREE_TYPE (lhs))
2515 == TYPE_MODE (TREE_TYPE (op0)))
2519 fold_unary_ignore_overflow_loc (loc, subcode,
2520 gimple_expr_type (stmt), op0);
2523 case GIMPLE_BINARY_RHS:
2525 /* Handle binary operators that can appear in GIMPLE form. */
2526 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2527 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2529 /* Translate &x + CST into an invariant form suitable for
2530 further propagation. */
2531 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2532 && TREE_CODE (op0) == ADDR_EXPR
2533 && TREE_CODE (op1) == INTEGER_CST)
2535 tree off = fold_convert (ptr_type_node, op1);
2536 return build_fold_addr_expr_loc
2538 fold_build2 (MEM_REF,
2539 TREE_TYPE (TREE_TYPE (op0)),
2540 unshare_expr (op0), off));
2543 return fold_binary_loc (loc, subcode,
2544 gimple_expr_type (stmt), op0, op1);
2547 case GIMPLE_TERNARY_RHS:
2549 /* Handle ternary operators that can appear in GIMPLE form. */
2550 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2551 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2552 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2554 /* Fold embedded expressions in ternary codes. */
2555 if ((subcode == COND_EXPR
2556 || subcode == VEC_COND_EXPR)
2557 && COMPARISON_CLASS_P (op0))
2559 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2560 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2561 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2562 TREE_TYPE (op0), op00, op01);
2567 return fold_ternary_loc (loc, subcode,
2568 gimple_expr_type (stmt), op0, op1, op2);
/* Calls: only built-in functions with valueized arguments fold;
   internal calls are deliberately left alone.  */
2580 if (gimple_call_internal_p (stmt))
2581 /* No folding yet for these functions. */
2584 fn = (*valueize) (gimple_call_fn (stmt));
2585 if (TREE_CODE (fn) == ADDR_EXPR
2586 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2587 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2589 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2592 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2593 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2594 call = build_call_array_loc (loc,
2595 gimple_call_return_type (stmt),
2596 fn, gimple_call_num_args (stmt), args);
2597 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2599 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2600 STRIP_NOPS (retval);
2611 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2612 Returns NULL_TREE if folding to a constant is not possible, otherwise
2613 returns a constant according to is_gimple_min_invariant. */
2616 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2618 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2619 if (res && is_gimple_min_invariant (res))
2625 /* The following set of functions are supposed to fold references using
2626 their constant initializers. */
2628 static tree fold_ctor_reference (tree type, tree ctor,
2629 unsigned HOST_WIDE_INT offset,
2630 unsigned HOST_WIDE_INT size);
2632 /* See if we can find constructor defining value of BASE.
2633 When we know the constructor with constant offset (such as
2634 base is array[40] and we do know constructor of array), then
2635 BIT_OFFSET is adjusted accordingly.
2637 As a special case, return error_mark_node when constructor
2638 is not explicitly available, but it is known to be zero
2639 such as 'static const int a;'. */
2641 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2642 tree (*valueize)(tree))
2644 HOST_WIDE_INT bit_offset2, size, max_size;
/* For a MEM_REF, fold its constant byte offset into *BIT_OFFSET and
   look through the (possibly valueized) address to the base decl.  */
2645 if (TREE_CODE (base) == MEM_REF)
2647 if (!integer_zerop (TREE_OPERAND (base, 1)))
2649 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2651 *bit_offset += (mem_ref_offset (base).low
2656 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2657 base = valueize (TREE_OPERAND (base, 0));
2658 if (!base || TREE_CODE (base) != ADDR_EXPR)
2660 base = TREE_OPERAND (base, 0);
2663 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2664 DECL_INITIAL. If BASE is a nested reference into another
2665 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2666 the inner reference. */
2667 switch (TREE_CODE (base))
2670 if (!const_value_known_p (base))
/* A static or external decl with no explicit initializer is known to
   be zero-initialized; error_mark_node encodes that special case.  */
2675 if (!DECL_INITIAL (base)
2676 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2677 return error_mark_node;
2678 return DECL_INITIAL (base);
/* Handled component: peel the outermost reference, accumulate its
   constant bit offset and recurse on the remaining base.  */
2682 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2683 if (max_size == -1 || size != max_size)
2685 *bit_offset += bit_offset2;
2686 return get_base_constructor (base, bit_offset, valueize);
2697 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2698 to the memory at bit OFFSET.
2700 We do only simple job of folding byte accesses. */
2703 fold_string_cst_ctor_reference (tree type, tree ctor,
2704 unsigned HOST_WIDE_INT offset,
2705 unsigned HOST_WIDE_INT size)
/* Only a single-byte (BITS_PER_UNIT) read at a byte-aligned offset,
   with TYPE's mode matching the string's element mode, is handled.  */
2707 if (INTEGRAL_TYPE_P (type)
2708 && (TYPE_MODE (type)
2709 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2710 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2712 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2713 && size == BITS_PER_UNIT
2714 && !(offset % BITS_PER_UNIT))
2716 offset /= BITS_PER_UNIT;
2717 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2718 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2721 const char a[20]="hello";
2724 might lead to offset greater than string length. In this case we
2725 know value is either initialized to 0 or out of bounds. Return 0
2727 return build_zero_cst (type);
2732 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2733 SIZE to the memory at bit OFFSET. */
2736 fold_array_ctor_reference (tree type, tree ctor,
2737 unsigned HOST_WIDE_INT offset,
2738 unsigned HOST_WIDE_INT size)
2740 unsigned HOST_WIDE_INT cnt;
2742 double_int low_bound, elt_size;
2743 double_int index, max_index;
2744 double_int access_index;
2745 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2746 HOST_WIDE_INT inner_offset;
2748 /* Compute low bound and elt size. */
2749 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2750 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2751 if (domain_type && TYPE_MIN_VALUE (domain_type))
2753 /* Static constructors for variably sized objects makes no sense. */
2754 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2755 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2756 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2759 low_bound = double_int_zero;
2760 /* Static constructors for variably sized objects makes no sense. */
2761 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2764 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2767 /* We can handle only constantly sized accesses that are known to not
2768 be larger than size of array element. */
2769 if (!TYPE_SIZE_UNIT (type)
2770 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2771 || double_int_cmp (elt_size,
2772 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2775 /* Compute the array index we look for. */
2776 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2777 elt_size, TRUNC_DIV_EXPR)
2778 access_index = double_int_add (access_index, low_bound);
2780 access_index = double_int_ext (access_index,
2781 TYPE_PRECISION (index_type),
2782 TYPE_UNSIGNED (index_type));
2784 /* And offset within the access. */
2785 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2787 /* See if the array field is large enough to span whole access. We do not
2788 care to fold accesses spanning multiple array indexes. */
2789 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
/* Start scanning one below the low bound so the first implicit
   (NULL-index) element increments to LOW_BOUND itself.  */
2792 index = double_int_sub (low_bound, double_int_one);
2794 index = double_int_ext (index,
2795 TYPE_PRECISION (index_type),
2796 TYPE_UNSIGNED (index_type));
2798 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2800 /* Array constructor might explicitly set index, or specify range
2801 or leave index NULL meaning that it is next index after previous
2805 if (TREE_CODE (cfield) == INTEGER_CST)
2806 max_index = index = tree_to_double_int (cfield);
2809 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2810 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2811 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2816 index = double_int_add (index, double_int_one);
2818 index = double_int_ext (index,
2819 TYPE_PRECISION (index_type),
2820 TYPE_UNSIGNED (index_type));
2824 /* Do we have match? */
2825 if (double_int_cmp (access_index, index, 1) >= 0
2826 && double_int_cmp (access_index, max_index, 1) <= 0)
2827 return fold_ctor_reference (type, cval, inner_offset, size);
2829 /* When memory is not explicitly mentioned in constructor,
2830 it is 0 (or out of range). */
2831 return build_zero_cst (type);
2834 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2835 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2838 fold_nonarray_ctor_reference (tree type, tree ctor,
2839 unsigned HOST_WIDE_INT offset,
2840 unsigned HOST_WIDE_INT size)
2842 unsigned HOST_WIDE_INT cnt;
2845 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2848 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2849 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2850 tree field_size = DECL_SIZE (cfield);
2851 double_int bitoffset;
2852 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2853 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2854 double_int bitoffset_end, access_end;
2856 /* Variable sized objects in static constructors makes no sense,
2857 but field_size can be NULL for flexible array members. */
2858 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2859 && TREE_CODE (byte_offset) == INTEGER_CST
2860 && (field_size != NULL_TREE
2861 ? TREE_CODE (field_size) == INTEGER_CST
2862 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2864 /* Compute bit offset of the field. */
2865 bitoffset = double_int_add (tree_to_double_int (field_offset),
2866 double_int_mul (byte_offset_cst,
2867 bits_per_unit_cst));
2868 /* Compute bit offset where the field ends. */
2869 if (field_size != NULL_TREE)
2870 bitoffset_end = double_int_add (bitoffset,
2871 tree_to_double_int (field_size));
2873 bitoffset_end = double_int_zero;
2875 access_end = double_int_add (uhwi_to_double_int (offset),
2876 uhwi_to_double_int (size));
2878 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2879 [BITOFFSET, BITOFFSET_END)? */
2880 if (double_int_cmp (access_end, bitoffset, 0) > 0
2881 && (field_size == NULL_TREE
2882 || double_int_cmp (uhwi_to_double_int (offset),
2883 bitoffset_end, 0) < 0))
2885 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2887 /* We do have overlap. Now see if field is large enough to
2888 cover the access. Give up for accesses spanning multiple
2890 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2892 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2894 return fold_ctor_reference (type, cval,
2895 double_int_to_uhwi (inner_offset), size);
2898 /* When memory is not explicitly mentioned in constructor, it is 0. */
2899 return build_zero_cst (type);
2902 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2903 to the memory at bit OFFSET. */
2906 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2907 unsigned HOST_WIDE_INT size)
2911 /* We found the field with exact match. */
2912 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2914 return canonicalize_constructor_val (ctor);
2916 /* We are at the end of walk, see if we can view convert the
2918 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2919 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2920 && operand_equal_p (TYPE_SIZE (type),
2921 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2923 ret = canonicalize_constructor_val (ctor);
2924 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
/* Otherwise dispatch on the kind of initializer: string constant,
   array/vector constructor, or record/union constructor.  */
2929 if (TREE_CODE (ctor) == STRING_CST)
2930 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2931 if (TREE_CODE (ctor) == CONSTRUCTOR)
2934 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2935 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2936 return fold_array_ctor_reference (type, ctor, offset, size);
2938 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2944 /* Return the tree representing the element referenced by T if T is an
2945 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2946 names using VALUEIZE. Return NULL_TREE otherwise. */
2949 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2951 tree ctor, idx, base;
2952 HOST_WIDE_INT offset, size, max_size;
/* Never fold away a volatile access.  */
2955 if (TREE_THIS_VOLATILE (t))
2958 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2959 return get_symbol_constant_value (t);
2961 tem = fold_read_from_constant_string (t);
2965 switch (TREE_CODE (t))
2968 case ARRAY_RANGE_REF:
2969 /* Constant indexes are handled well by get_base_constructor.
2970 Only special case variable offsets.
2971 FIXME: This code can't handle nested references with variable indexes
2972 (they will be handled only by iteration of ccp). Perhaps we can bring
2973 get_ref_base_and_extent here and make it use a valueize callback. */
2974 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2976 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2977 && TREE_CODE (idx) == INTEGER_CST)
2979 tree low_bound, unit_size;
2982 /* If the resulting bit-offset is constant, track it. */
2983 if ((low_bound = array_ref_low_bound (t),
2984 TREE_CODE (low_bound) == INTEGER_CST)
2985 && (unit_size = array_ref_element_size (t),
2986 host_integerp (unit_size, 1))
2987 && (doffset = double_int_sext
2988 (double_int_sub (TREE_INT_CST (idx),
2989 TREE_INT_CST (low_bound)),
2990 TYPE_PRECISION (TREE_TYPE (idx))),
2991 double_int_fits_in_shwi_p (doffset)))
/* Convert the zero-based element index to a bit offset.  */
2993 offset = double_int_to_shwi (doffset);
2994 offset *= TREE_INT_CST_LOW (unit_size);
2995 offset *= BITS_PER_UNIT;
2997 base = TREE_OPERAND (t, 0);
2998 ctor = get_base_constructor (base, &offset, valueize);
2999 /* Empty constructor. Always fold to 0. */
3000 if (ctor == error_mark_node)
3001 return build_zero_cst (TREE_TYPE (t));
3002 /* Out of bound array access. Value is undefined,
3006 /* We can not determine ctor. */
3009 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3010 TREE_INT_CST_LOW (unit_size)
3018 case TARGET_MEM_REF:
3020 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3021 ctor = get_base_constructor (base, &offset, valueize);
3023 /* Empty constructor. Always fold to 0. */
3024 if (ctor == error_mark_node)
3025 return build_zero_cst (TREE_TYPE (t));
3026 /* We do not know precise address. */
3027 if (max_size == -1 || max_size != size)
3029 /* We can not determine ctor. */
3033 /* Out of bound array access. Value is undefined, but don't fold. */
3037 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
/* REALPART/IMAGPART: fold the operand and extract the half from a
   COMPLEX_CST result.  */
3042 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3043 if (c && TREE_CODE (c) == COMPLEX_CST)
3044 return fold_build1_loc (EXPR_LOCATION (t),
3045 TREE_CODE (t), TREE_TYPE (t), c);
3057 fold_const_aggregate_ref (tree t)
3059 return fold_const_aggregate_ref_1 (t, NULL);
3062 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3063 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3064 KNOWN_BINFO carries the binfo describing the true type of
3065 OBJ_TYPE_REF_OBJECT(REF). */
3068 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3070 unsigned HOST_WIDE_INT offset, size;
3073 v = BINFO_VTABLE (known_binfo);
3074 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* BINFO_VTABLE may be &vtable or &vtable + CST; strip the constant
   displacement into OFFSET first.  */
3078 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3080 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3081 v = TREE_OPERAND (v, 0);
3086 if (TREE_CODE (v) != ADDR_EXPR)
3088 v = TREE_OPERAND (v, 0);
/* Give up unless V is a virtual table decl with a usable initializer.  */
3090 if (TREE_CODE (v) != VAR_DECL
3091 || !DECL_VIRTUAL_P (v)
3092 || !DECL_INITIAL (v)
3093 || DECL_INITIAL (v) == error_mark_node)
3095 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE)
3096 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* Index into the vtable initializer by TOKEN slots of SIZE bits.  */
3097 offset += token * size;
3098 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3100 if (!fn || integer_zerop (fn))
3102 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3103 || TREE_CODE (fn) == FDESC_EXPR);
3104 fn = TREE_OPERAND (fn, 0);
3105 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3107 /* When cgraph node is missing and function is not public, we cannot
3108 devirtualize. This can happen in WHOPR when the actual method
3109 ends up in other partition, because we found devirtualization
3110 possibility too late. */
3111 if (!can_refer_decl_in_current_unit_p (fn))
3114 /* Make sure we create a cgraph node for functions we'll reference.
3115 They can be non-existent if the reference comes from an entry
3116 of an external vtable for example. */
3117 cgraph_get_create_node (fn);
3122 /* Return true iff VAL is a gimple expression that is known to be
3123 non-negative. Restricted to floating-point inputs. */
3126 gimple_val_nonnegative_real_p (tree val)
3130 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3132 /* Use existing logic for non-gimple trees. */
3133 if (tree_expr_nonnegative_p (val))
3136 if (TREE_CODE (val) != SSA_NAME)
3139 /* Currently we look only at the immediately defining statement
3140 to make this determination, since recursion on defining
3141 statements of operands can lead to quadratic behavior in the
3142 worst case. This is expected to catch almost all occurrences
3143 in practice. It would be possible to implement limited-depth
3144 recursion if important cases are lost. Alternatively, passes
3145 that need this information (such as the pow/powi lowering code
3146 in the cse_sincos pass) could be revised to provide it through
3147 dataflow propagation. */
3149 def_stmt = SSA_NAME_DEF_STMT (val);
3151 if (is_gimple_assign (def_stmt))
3155 /* See fold-const.c:tree_expr_nonnegative_p for additional
3156 cases that could be handled with recursion. */
3158 switch (gimple_assign_rhs_code (def_stmt))
3161 /* Always true for floating-point operands. */
3165 /* True if the two operands are identical (since we are
3166 restricted to floating-point inputs). */
3167 op0 = gimple_assign_rhs1 (def_stmt);
3168 op1 = gimple_assign_rhs2 (def_stmt);
3171 || operand_equal_p (op0, op1, 0))
3178 else if (is_gimple_call (def_stmt))
3180 tree fndecl = gimple_call_fndecl (def_stmt);
3182 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3186 switch (DECL_FUNCTION_CODE (fndecl))
3188 CASE_FLT_FN (BUILT_IN_ACOS):
3189 CASE_FLT_FN (BUILT_IN_ACOSH):
3190 CASE_FLT_FN (BUILT_IN_CABS):
3191 CASE_FLT_FN (BUILT_IN_COSH):
3192 CASE_FLT_FN (BUILT_IN_ERFC):
3193 CASE_FLT_FN (BUILT_IN_EXP):
3194 CASE_FLT_FN (BUILT_IN_EXP10):
3195 CASE_FLT_FN (BUILT_IN_EXP2):
3196 CASE_FLT_FN (BUILT_IN_FABS):
3197 CASE_FLT_FN (BUILT_IN_FDIM):
3198 CASE_FLT_FN (BUILT_IN_HYPOT):
3199 CASE_FLT_FN (BUILT_IN_POW10):
3202 CASE_FLT_FN (BUILT_IN_SQRT):
3203 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3204 nonnegative inputs. */
3205 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3210 CASE_FLT_FN (BUILT_IN_POWI):
3211 /* True if the second argument is an even integer. */
3212 arg1 = gimple_call_arg (def_stmt, 1);
3214 if (TREE_CODE (arg1) == INTEGER_CST
3215 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3220 CASE_FLT_FN (BUILT_IN_POW):
3221 /* True if the second argument is an even integer-valued
3223 arg1 = gimple_call_arg (def_stmt, 1);
3225 if (TREE_CODE (arg1) == REAL_CST)
3230 c = TREE_REAL_CST (arg1);
3231 n = real_to_integer (&c);
3235 REAL_VALUE_TYPE cint;
3236 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3237 if (real_identical (&c, &cint))