1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node || !node->analyzed || node->global.inlined_to)
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
116 canonicalize_constructor_val (tree cval)
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
137 if ((TREE_CODE (base) == VAR_DECL
138 || TREE_CODE (base) == FUNCTION_DECL)
139 && !can_refer_decl_in_current_unit_p (base))
141 if (TREE_CODE (base) == VAR_DECL)
143 TREE_ADDRESSABLE (base) = 1;
144 if (cfun && gimple_referenced_vars (cfun))
145 add_referenced_var (base);
147 else if (TREE_CODE (base) == FUNCTION_DECL)
149 /* Make sure we create a cgraph node for functions we'll reference.
150 They can be non-existent if the reference comes from an entry
151 of an external vtable for example. */
152 cgraph_get_create_node (base);
154 /* Fixup types in global initializers. */
155 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
156 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
161 /* If SYM is a constant variable with known value, return the value.
162 NULL_TREE is returned otherwise. */
165 get_symbol_constant_value (tree sym)
167 if (const_value_known_p (sym))
169 tree val = DECL_INITIAL (sym);
172 val = canonicalize_constructor_val (val);
173 if (val && is_gimple_min_invariant (val))
178 /* Variables declared 'const' without an initializer
179 have zero as the initializer if they may not be
180 overridden at link or run time. */
182 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
183 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
184 return build_zero_cst (TREE_TYPE (sym));
192 /* Subroutine of fold_stmt. We perform several simplifications of the
193 memory reference tree EXPR and make sure to re-gimplify them properly
194 after propagation of constant addresses. IS_LHS is true if the
195 reference is supposed to be an lvalue. */
198 maybe_fold_reference (tree expr, bool is_lhs)
203 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
204 || TREE_CODE (expr) == REALPART_EXPR
205 || TREE_CODE (expr) == IMAGPART_EXPR)
206 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
207 return fold_unary_loc (EXPR_LOCATION (expr),
210 TREE_OPERAND (expr, 0));
211 else if (TREE_CODE (expr) == BIT_FIELD_REF
212 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
213 return fold_ternary_loc (EXPR_LOCATION (expr),
216 TREE_OPERAND (expr, 0),
217 TREE_OPERAND (expr, 1),
218 TREE_OPERAND (expr, 2));
220 while (handled_component_p (*t))
221 t = &TREE_OPERAND (*t, 0);
223 /* Canonicalize MEM_REFs invariant address operand. Do this first
224 to avoid feeding non-canonical MEM_REFs elsewhere. */
225 if (TREE_CODE (*t) == MEM_REF
226 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
228 bool volatile_p = TREE_THIS_VOLATILE (*t);
229 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
230 TREE_OPERAND (*t, 0),
231 TREE_OPERAND (*t, 1));
234 TREE_THIS_VOLATILE (tem) = volatile_p;
236 tem = maybe_fold_reference (expr, is_lhs);
244 && (result = fold_const_aggregate_ref (expr))
245 && is_gimple_min_invariant (result))
248 /* Fold back MEM_REFs to reference trees. */
249 if (TREE_CODE (*t) == MEM_REF
250 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
251 && integer_zerop (TREE_OPERAND (*t, 1))
252 && (TREE_THIS_VOLATILE (*t)
253 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
254 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
255 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
256 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
257 /* We have to look out here to not drop a required conversion
258 from the rhs to the lhs if is_lhs, but we don't have the
259 rhs here to verify that. Thus require strict type
261 && types_compatible_p (TREE_TYPE (*t),
262 TREE_TYPE (TREE_OPERAND
263 (TREE_OPERAND (*t, 0), 0))))
266 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
267 tem = maybe_fold_reference (expr, is_lhs);
272 else if (TREE_CODE (*t) == TARGET_MEM_REF)
274 tree tem = maybe_fold_tmr (*t);
278 tem = maybe_fold_reference (expr, is_lhs);
289 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
290 replacement rhs for the statement or NULL_TREE if no simplification
291 could be made. It is assumed that the operands have been previously
295 fold_gimple_assign (gimple_stmt_iterator *si)
297 gimple stmt = gsi_stmt (*si);
298 enum tree_code subcode = gimple_assign_rhs_code (stmt);
299 location_t loc = gimple_location (stmt);
301 tree result = NULL_TREE;
303 switch (get_gimple_rhs_class (subcode))
305 case GIMPLE_SINGLE_RHS:
307 tree rhs = gimple_assign_rhs1 (stmt);
309 if (REFERENCE_CLASS_P (rhs))
310 return maybe_fold_reference (rhs, false);
312 else if (TREE_CODE (rhs) == ADDR_EXPR)
314 tree ref = TREE_OPERAND (rhs, 0);
315 tree tem = maybe_fold_reference (ref, true);
317 && TREE_CODE (tem) == MEM_REF
318 && integer_zerop (TREE_OPERAND (tem, 1)))
319 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
321 result = fold_convert (TREE_TYPE (rhs),
322 build_fold_addr_expr_loc (loc, tem));
323 else if (TREE_CODE (ref) == MEM_REF
324 && integer_zerop (TREE_OPERAND (ref, 1)))
325 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
328 else if (TREE_CODE (rhs) == CONSTRUCTOR
329 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
330 && (CONSTRUCTOR_NELTS (rhs)
331 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
333 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
337 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
338 if (TREE_CODE (val) != INTEGER_CST
339 && TREE_CODE (val) != REAL_CST
340 && TREE_CODE (val) != FIXED_CST)
343 return build_vector_from_ctor (TREE_TYPE (rhs),
344 CONSTRUCTOR_ELTS (rhs));
347 else if (DECL_P (rhs))
348 return unshare_expr (get_symbol_constant_value (rhs));
350 /* If we couldn't fold the RHS, hand over to the generic
352 if (result == NULL_TREE)
355 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
356 that may have been added by fold, and "useless" type
357 conversions that might now be apparent due to propagation. */
358 STRIP_USELESS_TYPE_CONVERSION (result);
360 if (result != rhs && valid_gimple_rhs_p (result))
367 case GIMPLE_UNARY_RHS:
369 tree rhs = gimple_assign_rhs1 (stmt);
371 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
374 /* If the operation was a conversion do _not_ mark a
375 resulting constant with TREE_OVERFLOW if the original
376 constant was not. These conversions have implementation
377 defined behavior and retaining the TREE_OVERFLOW flag
378 here would confuse later passes such as VRP. */
379 if (CONVERT_EXPR_CODE_P (subcode)
380 && TREE_CODE (result) == INTEGER_CST
381 && TREE_CODE (rhs) == INTEGER_CST)
382 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
384 STRIP_USELESS_TYPE_CONVERSION (result);
385 if (valid_gimple_rhs_p (result))
391 case GIMPLE_BINARY_RHS:
392 /* Try to canonicalize for boolean-typed X the comparisons
393 X == 0, X == 1, X != 0, and X != 1. */
394 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
395 || gimple_assign_rhs_code (stmt) == NE_EXPR)
397 tree lhs = gimple_assign_lhs (stmt);
398 tree op1 = gimple_assign_rhs1 (stmt);
399 tree op2 = gimple_assign_rhs2 (stmt);
400 tree type = TREE_TYPE (op1);
402 /* Check whether the comparison operands are of the same boolean
403 type as the result type is.
404 Check that second operand is an integer-constant with value
406 if (TREE_CODE (op2) == INTEGER_CST
407 && (integer_zerop (op2) || integer_onep (op2))
408 && useless_type_conversion_p (TREE_TYPE (lhs), type))
410 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
411 bool is_logical_not = false;
413 /* X == 0 and X != 1 is a logical-not.of X
414 X == 1 and X != 0 is X */
415 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
416 || (cmp_code == NE_EXPR && integer_onep (op2)))
417 is_logical_not = true;
419 if (is_logical_not == false)
421 /* Only for one-bit precision typed X the transformation
422 !X -> ~X is valied. */
423 else if (TYPE_PRECISION (type) == 1)
424 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
426 /* Otherwise we use !X -> X ^ 1. */
428 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
429 type, op1, build_int_cst (type, 1));
435 result = fold_binary_loc (loc, subcode,
436 TREE_TYPE (gimple_assign_lhs (stmt)),
437 gimple_assign_rhs1 (stmt),
438 gimple_assign_rhs2 (stmt));
442 STRIP_USELESS_TYPE_CONVERSION (result);
443 if (valid_gimple_rhs_p (result))
448 case GIMPLE_TERNARY_RHS:
449 /* Try to fold a conditional expression. */
450 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
452 tree op0 = gimple_assign_rhs1 (stmt);
455 location_t cond_loc = gimple_location (stmt);
457 if (COMPARISON_CLASS_P (op0))
459 fold_defer_overflow_warnings ();
460 tem = fold_binary_loc (cond_loc,
461 TREE_CODE (op0), TREE_TYPE (op0),
462 TREE_OPERAND (op0, 0),
463 TREE_OPERAND (op0, 1));
464 /* This is actually a conditional expression, not a GIMPLE
465 conditional statement, however, the valid_gimple_rhs_p
466 test still applies. */
467 set = (tem && is_gimple_condexpr (tem)
468 && valid_gimple_rhs_p (tem));
469 fold_undefer_overflow_warnings (set, stmt, 0);
471 else if (is_gimple_min_invariant (op0))
480 result = fold_build3_loc (cond_loc, COND_EXPR,
481 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
482 gimple_assign_rhs2 (stmt),
483 gimple_assign_rhs3 (stmt));
487 result = fold_ternary_loc (loc, subcode,
488 TREE_TYPE (gimple_assign_lhs (stmt)),
489 gimple_assign_rhs1 (stmt),
490 gimple_assign_rhs2 (stmt),
491 gimple_assign_rhs3 (stmt));
495 STRIP_USELESS_TYPE_CONVERSION (result);
496 if (valid_gimple_rhs_p (result))
501 case GIMPLE_INVALID_RHS:
508 /* Attempt to fold a conditional statement. Return true if any changes were
509 made. We only attempt to fold the condition expression, and do not perform
510 any transformation that would require alteration of the cfg. It is
511 assumed that the operands have been previously folded. */
514 fold_gimple_cond (gimple stmt)
516 tree result = fold_binary_loc (gimple_location (stmt),
517 gimple_cond_code (stmt),
519 gimple_cond_lhs (stmt),
520 gimple_cond_rhs (stmt));
524 STRIP_USELESS_TYPE_CONVERSION (result);
525 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
527 gimple_cond_set_condition_from_tree (stmt, result);
535 /* Convert EXPR into a GIMPLE value suitable for substitution on the
536 RHS of an assignment. Insert the necessary statements before
537 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
538 is replaced. If the call is expected to produces a result, then it
539 is replaced by an assignment of the new RHS to the result variable.
540 If the result is to be ignored, then the call is replaced by a
541 GIMPLE_NOP. A proper VDEF chain is retained by making the first
542 VUSE and the last VDEF of the whole sequence be the same as the replaced
543 statement and using new SSA names for stores in between. */
546 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
549 gimple stmt, new_stmt;
550 gimple_stmt_iterator i;
551 gimple_seq stmts = gimple_seq_alloc();
552 struct gimplify_ctx gctx;
557 stmt = gsi_stmt (*si_p);
559 gcc_assert (is_gimple_call (stmt));
561 push_gimplify_context (&gctx);
562 gctx.into_ssa = gimple_in_ssa_p (cfun);
564 lhs = gimple_call_lhs (stmt);
565 if (lhs == NULL_TREE)
567 gimplify_and_add (expr, &stmts);
568 /* We can end up with folding a memcpy of an empty class assignment
569 which gets optimized away by C++ gimplification. */
570 if (gimple_seq_empty_p (stmts))
572 pop_gimplify_context (NULL);
573 if (gimple_in_ssa_p (cfun))
575 unlink_stmt_vdef (stmt);
578 gsi_remove (si_p, true);
584 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
585 new_stmt = gimple_build_assign (lhs, tmp);
586 i = gsi_last (stmts);
587 gsi_insert_after_without_update (&i, new_stmt,
588 GSI_CONTINUE_LINKING);
591 pop_gimplify_context (NULL);
593 if (gimple_has_location (stmt))
594 annotate_all_with_location (stmts, gimple_location (stmt));
596 /* First iterate over the replacement statements backward, assigning
597 virtual operands to their defining statements. */
599 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
601 new_stmt = gsi_stmt (i);
602 if ((gimple_assign_single_p (new_stmt)
603 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
604 || (is_gimple_call (new_stmt)
605 && (gimple_call_flags (new_stmt)
606 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
610 vdef = gimple_vdef (stmt);
612 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
613 gimple_set_vdef (new_stmt, vdef);
614 if (vdef && TREE_CODE (vdef) == SSA_NAME)
615 SSA_NAME_DEF_STMT (vdef) = new_stmt;
616 laststore = new_stmt;
620 /* Second iterate over the statements forward, assigning virtual
621 operands to their uses. */
623 reaching_vuse = gimple_vuse (stmt);
624 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
626 /* Do not insert the last stmt in this loop but remember it
627 for replacing the original statement. */
630 gsi_insert_before (si_p, last, GSI_NEW_STMT);
633 new_stmt = gsi_stmt (i);
634 /* The replacement can expose previously unreferenced variables. */
635 if (gimple_in_ssa_p (cfun))
636 find_new_referenced_vars (new_stmt);
637 /* If the new statement possibly has a VUSE, update it with exact SSA
638 name we know will reach this one. */
639 if (gimple_has_mem_ops (new_stmt))
640 gimple_set_vuse (new_stmt, reaching_vuse);
641 gimple_set_modified (new_stmt, true);
642 if (gimple_vdef (new_stmt))
643 reaching_vuse = gimple_vdef (new_stmt);
647 /* If the new sequence does not do a store release the virtual
648 definition of the original statement. */
650 && reaching_vuse == gimple_vuse (stmt))
652 tree vdef = gimple_vdef (stmt);
654 && TREE_CODE (vdef) == SSA_NAME)
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
661 /* Finally replace rhe original statement with the last. */
662 gsi_replace (si_p, last, false);
665 /* Return the string length, maximum string length or maximum value of
667 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
668 is not NULL and, for TYPE == 0, its value is not equal to the length
669 we determine or if we are unable to determine the length or value,
670 return false. VISITED is a bitmap of visited variables.
671 TYPE is 0 if string length should be returned, 1 for maximum string
672 length and 2 for maximum value ARG can have. */
675 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
680 if (TREE_CODE (arg) != SSA_NAME)
682 if (TREE_CODE (arg) == COND_EXPR)
683 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
684 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
685 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
686 else if (TREE_CODE (arg) == ADDR_EXPR
687 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
688 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
690 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
691 if (TREE_CODE (aop0) == INDIRECT_REF
692 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
693 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
694 length, visited, type);
700 if (TREE_CODE (val) != INTEGER_CST
701 || tree_int_cst_sgn (val) < 0)
705 val = c_strlen (arg, 1);
713 if (TREE_CODE (*length) != INTEGER_CST
714 || TREE_CODE (val) != INTEGER_CST)
717 if (tree_int_cst_lt (*length, val))
721 else if (simple_cst_equal (val, *length) != 1)
729 /* If we were already here, break the infinite cycle. */
730 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
734 def_stmt = SSA_NAME_DEF_STMT (var);
736 switch (gimple_code (def_stmt))
739 /* The RHS of the statement defining VAR must either have a
740 constant length or come from another SSA_NAME with a constant
742 if (gimple_assign_single_p (def_stmt)
743 || gimple_assign_unary_nop_p (def_stmt))
745 tree rhs = gimple_assign_rhs1 (def_stmt);
746 return get_maxval_strlen (rhs, length, visited, type);
752 /* All the arguments of the PHI node must have the same constant
756 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
758 tree arg = gimple_phi_arg (def_stmt, i)->def;
760 /* If this PHI has itself as an argument, we cannot
761 determine the string length of this argument. However,
762 if we can find a constant string length for the other
763 PHI args then we can still be sure that this is a
764 constant string length. So be optimistic and just
765 continue with the next argument. */
766 if (arg == gimple_phi_result (def_stmt))
769 if (!get_maxval_strlen (arg, length, visited, type))
781 /* Fold builtin call in statement STMT. Returns a simplified tree.
782 We may return a non-constant expression, including another call
783 to a different function and with different arguments, e.g.,
784 substituting memcpy for strcpy when the string length is known.
785 Note that some builtins expand into inline code that may not
786 be valid in GIMPLE. Callers must take care. */
789 gimple_fold_builtin (gimple stmt)
797 location_t loc = gimple_location (stmt);
799 gcc_assert (is_gimple_call (stmt));
801 ignore = (gimple_call_lhs (stmt) == NULL);
803 /* First try the generic builtin folder. If that succeeds, return the
805 result = fold_call_stmt (stmt, ignore);
813 /* Ignore MD builtins. */
814 callee = gimple_call_fndecl (stmt);
815 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
818 /* Give up for always_inline inline builtins until they are
820 if (avoid_folding_inline_builtin (callee))
823 /* If the builtin could not be folded, and it has no argument list,
825 nargs = gimple_call_num_args (stmt);
829 /* Limit the work only for builtins we know how to simplify. */
830 switch (DECL_FUNCTION_CODE (callee))
832 case BUILT_IN_STRLEN:
834 case BUILT_IN_FPUTS_UNLOCKED:
838 case BUILT_IN_STRCPY:
839 case BUILT_IN_STRNCPY:
843 case BUILT_IN_MEMCPY_CHK:
844 case BUILT_IN_MEMPCPY_CHK:
845 case BUILT_IN_MEMMOVE_CHK:
846 case BUILT_IN_MEMSET_CHK:
847 case BUILT_IN_STRNCPY_CHK:
848 case BUILT_IN_STPNCPY_CHK:
852 case BUILT_IN_STRCPY_CHK:
853 case BUILT_IN_STPCPY_CHK:
857 case BUILT_IN_SNPRINTF_CHK:
858 case BUILT_IN_VSNPRINTF_CHK:
866 if (arg_idx >= nargs)
869 /* Try to use the dataflow information gathered by the CCP process. */
870 visited = BITMAP_ALLOC (NULL);
871 bitmap_clear (visited);
873 memset (val, 0, sizeof (val));
874 a = gimple_call_arg (stmt, arg_idx);
875 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
876 val[arg_idx] = NULL_TREE;
878 BITMAP_FREE (visited);
881 switch (DECL_FUNCTION_CODE (callee))
883 case BUILT_IN_STRLEN:
884 if (val[0] && nargs == 1)
887 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
889 /* If the result is not a valid gimple value, or not a cast
890 of a valid gimple value, then we cannot use the result. */
891 if (is_gimple_val (new_val)
892 || (CONVERT_EXPR_P (new_val)
893 && is_gimple_val (TREE_OPERAND (new_val, 0))))
898 case BUILT_IN_STRCPY:
899 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
900 result = fold_builtin_strcpy (loc, callee,
901 gimple_call_arg (stmt, 0),
902 gimple_call_arg (stmt, 1),
906 case BUILT_IN_STRNCPY:
907 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
908 result = fold_builtin_strncpy (loc, callee,
909 gimple_call_arg (stmt, 0),
910 gimple_call_arg (stmt, 1),
911 gimple_call_arg (stmt, 2),
917 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
918 gimple_call_arg (stmt, 1),
919 ignore, false, val[0]);
922 case BUILT_IN_FPUTS_UNLOCKED:
924 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
925 gimple_call_arg (stmt, 1),
926 ignore, true, val[0]);
929 case BUILT_IN_MEMCPY_CHK:
930 case BUILT_IN_MEMPCPY_CHK:
931 case BUILT_IN_MEMMOVE_CHK:
932 case BUILT_IN_MEMSET_CHK:
933 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
934 result = fold_builtin_memory_chk (loc, callee,
935 gimple_call_arg (stmt, 0),
936 gimple_call_arg (stmt, 1),
937 gimple_call_arg (stmt, 2),
938 gimple_call_arg (stmt, 3),
940 DECL_FUNCTION_CODE (callee));
943 case BUILT_IN_STRCPY_CHK:
944 case BUILT_IN_STPCPY_CHK:
945 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
946 result = fold_builtin_stxcpy_chk (loc, callee,
947 gimple_call_arg (stmt, 0),
948 gimple_call_arg (stmt, 1),
949 gimple_call_arg (stmt, 2),
951 DECL_FUNCTION_CODE (callee));
954 case BUILT_IN_STRNCPY_CHK:
955 case BUILT_IN_STPNCPY_CHK:
956 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
957 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
958 gimple_call_arg (stmt, 1),
959 gimple_call_arg (stmt, 2),
960 gimple_call_arg (stmt, 3),
962 DECL_FUNCTION_CODE (callee));
965 case BUILT_IN_SNPRINTF_CHK:
966 case BUILT_IN_VSNPRINTF_CHK:
967 if (val[1] && is_gimple_val (val[1]))
968 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
969 DECL_FUNCTION_CODE (callee));
976 if (result && ignore)
977 result = fold_ignored_result (result);
982 /* Return a binfo to be used for devirtualization of calls based on an object
983 represented by a declaration (i.e. a global or automatically allocated one)
984 or NULL if it cannot be found or is not safe. CST is expected to be an
985 ADDR_EXPR of such object or the function will return NULL. Currently it is
986 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
989 gimple_extract_devirt_binfo_from_cst (tree cst)
991 HOST_WIDE_INT offset, size, max_size;
992 tree base, type, expected_type, binfo;
993 bool last_artificial = false;
995 if (!flag_devirtualize
996 || TREE_CODE (cst) != ADDR_EXPR
997 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1000 cst = TREE_OPERAND (cst, 0);
1001 expected_type = TREE_TYPE (cst);
1002 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1003 type = TREE_TYPE (base);
1007 || TREE_CODE (type) != RECORD_TYPE)
1010 /* Find the sub-object the constant actually refers to and mark whether it is
1011 an artificial one (as opposed to a user-defined one). */
1014 HOST_WIDE_INT pos, size;
1017 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1022 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1024 if (TREE_CODE (fld) != FIELD_DECL)
1027 pos = int_bit_position (fld);
1028 size = tree_low_cst (DECL_SIZE (fld), 1);
1029 if (pos <= offset && (pos + size) > offset)
1032 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1035 last_artificial = DECL_ARTIFICIAL (fld);
1036 type = TREE_TYPE (fld);
1039 /* Artifical sub-objects are ancestors, we do not want to use them for
1040 devirtualization, at least not here. */
1041 if (last_artificial)
1043 binfo = TYPE_BINFO (type);
1044 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1050 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1051 The statement may be replaced by another statement, e.g., if the call
1052 simplifies to a constant value. Return true if any changes were made.
1053 It is assumed that the operands have been previously folded. */
1056 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1058 gimple stmt = gsi_stmt (*gsi);
1060 bool changed = false;
1063 /* Fold *& in call arguments. */
1064 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1065 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1067 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1070 gimple_call_set_arg (stmt, i, tmp);
1075 /* Check for virtual calls that became direct calls. */
1076 callee = gimple_call_fn (stmt);
1077 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1079 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1081 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1086 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1087 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1091 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1092 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1095 gimple_call_set_fndecl (stmt, fndecl);
1105 /* Check for builtins that CCP can handle using information not
1106 available in the generic fold routines. */
1107 callee = gimple_call_fndecl (stmt);
1108 if (callee && DECL_BUILT_IN (callee))
1110 tree result = gimple_fold_builtin (stmt);
1113 if (!update_call_from_tree (gsi, result))
1114 gimplify_and_update_call_from_tree (gsi, result);
1122 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1123 distinguishes both cases. */
1126 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1128 bool changed = false;
1129 gimple stmt = gsi_stmt (*gsi);
1131 gimple_stmt_iterator gsinext = *gsi;
1134 gsi_next (&gsinext);
1135 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1137 /* Fold the main computation performed by the statement. */
1138 switch (gimple_code (stmt))
1142 unsigned old_num_ops = gimple_num_ops (stmt);
1143 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1144 tree lhs = gimple_assign_lhs (stmt);
1146 /* First canonicalize operand order. This avoids building new
1147 trees if this is the only thing fold would later do. */
1148 if ((commutative_tree_code (subcode)
1149 || commutative_ternary_tree_code (subcode))
1150 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1151 gimple_assign_rhs2 (stmt), false))
1153 tree tem = gimple_assign_rhs1 (stmt);
1154 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1155 gimple_assign_set_rhs2 (stmt, tem);
1158 new_rhs = fold_gimple_assign (gsi);
1160 && !useless_type_conversion_p (TREE_TYPE (lhs),
1161 TREE_TYPE (new_rhs)))
1162 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1165 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1167 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1174 changed |= fold_gimple_cond (stmt);
1178 changed |= gimple_fold_call (gsi, inplace);
1182 /* Fold *& in asm operands. */
1185 const char **oconstraints;
1186 const char *constraint;
1187 bool allows_mem, allows_reg;
1189 noutputs = gimple_asm_noutputs (stmt);
1190 oconstraints = XALLOCAVEC (const char *, noutputs);
1192 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1194 tree link = gimple_asm_output_op (stmt, i);
1195 tree op = TREE_VALUE (link);
1197 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1198 if (REFERENCE_CLASS_P (op)
1199 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1201 TREE_VALUE (link) = op;
1205 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1207 tree link = gimple_asm_input_op (stmt, i);
1208 tree op = TREE_VALUE (link);
1210 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1211 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1212 oconstraints, &allows_mem, &allows_reg);
1213 if (REFERENCE_CLASS_P (op)
1214 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1217 TREE_VALUE (link) = op;
1225 if (gimple_debug_bind_p (stmt))
1227 tree val = gimple_debug_bind_get_value (stmt);
1229 && REFERENCE_CLASS_P (val))
1231 tree tem = maybe_fold_reference (val, false);
1234 gimple_debug_bind_set_value (stmt, tem);
1239 && TREE_CODE (val) == ADDR_EXPR)
1241 tree ref = TREE_OPERAND (val, 0);
1242 tree tem = maybe_fold_reference (ref, false);
1245 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1246 gimple_debug_bind_set_value (stmt, tem);
1256 /* If stmt folds into nothing and it was the last stmt in a bb,
1257 don't call gsi_stmt. */
1258 if (gsi_end_p (*gsi))
1260 gcc_assert (next_stmt == NULL);
1264 stmt = gsi_stmt (*gsi);
1266 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1267 as we'd changing the next stmt. */
1268 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1270 tree lhs = gimple_get_lhs (stmt);
1271 if (lhs && REFERENCE_CLASS_P (lhs))
1273 tree new_lhs = maybe_fold_reference (lhs, true);
1276 gimple_set_lhs (stmt, new_lhs);
1285 /* Fold the statement pointed to by GSI. In some cases, this function may
1286 replace the whole statement with a new one. Returns true iff folding
1288 The statement pointed to by GSI should be in valid gimple form but may
1289 be in unfolded state as resulting from for example constant propagation
1290 which can produce *&x = 0. */
1293 fold_stmt (gimple_stmt_iterator *gsi)
1295 return fold_stmt_1 (gsi, false);
1298 /* Perform the minimal folding on statement *GSI. Only operations like
1299 *&x created by constant propagation are handled. The statement cannot
1300 be replaced with a new one. Return true if the statement was
1301 changed, false otherwise.
1302 The statement *GSI should be in valid gimple form but may
1303 be in unfolded state as resulting from for example constant propagation
1304 which can produce *&x = 0. */
1307 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1309 gimple stmt = gsi_stmt (*gsi);
1310 bool changed = fold_stmt_1 (gsi, true);
1311 gcc_assert (gsi_stmt (*gsi) == stmt);
1315 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1316 if EXPR is null or we don't know how.
1317 If non-null, the result always has boolean type. */
1320 canonicalize_bool (tree expr, bool invert)
1326 if (integer_nonzerop (expr))
1327 return boolean_false_node;
1328 else if (integer_zerop (expr))
1329 return boolean_true_node;
1330 else if (TREE_CODE (expr) == SSA_NAME)
1331 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1332 build_int_cst (TREE_TYPE (expr), 0));
1333 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1334 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1336 TREE_OPERAND (expr, 0),
1337 TREE_OPERAND (expr, 1));
1343 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1345 if (integer_nonzerop (expr))
1346 return boolean_true_node;
1347 else if (integer_zerop (expr))
1348 return boolean_false_node;
1349 else if (TREE_CODE (expr) == SSA_NAME)
1350 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1351 build_int_cst (TREE_TYPE (expr), 0));
1352 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1353 return fold_build2 (TREE_CODE (expr),
1355 TREE_OPERAND (expr, 0),
1356 TREE_OPERAND (expr, 1));
1362 /* Check to see if a boolean expression EXPR is logically equivalent to the
1363 comparison (OP1 CODE OP2). Check for various identities involving
1367 same_bool_comparison_p (const_tree expr, enum tree_code code,
1368 const_tree op1, const_tree op2)
1372 /* The obvious case. */
1373 if (TREE_CODE (expr) == code
1374 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1375 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1378 /* Check for comparing (name, name != 0) and the case where expr
1379 is an SSA_NAME with a definition matching the comparison. */
1380 if (TREE_CODE (expr) == SSA_NAME
1381 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1383 if (operand_equal_p (expr, op1, 0))
1384 return ((code == NE_EXPR && integer_zerop (op2))
1385 || (code == EQ_EXPR && integer_nonzerop (op2)));
1386 s = SSA_NAME_DEF_STMT (expr);
1387 if (is_gimple_assign (s)
1388 && gimple_assign_rhs_code (s) == code
1389 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1390 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1394 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1395 of name is a comparison, recurse. */
1396 if (TREE_CODE (op1) == SSA_NAME
1397 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1399 s = SSA_NAME_DEF_STMT (op1);
1400 if (is_gimple_assign (s)
1401 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1403 enum tree_code c = gimple_assign_rhs_code (s);
1404 if ((c == NE_EXPR && integer_zerop (op2))
1405 || (c == EQ_EXPR && integer_nonzerop (op2)))
1406 return same_bool_comparison_p (expr, c,
1407 gimple_assign_rhs1 (s),
1408 gimple_assign_rhs2 (s));
1409 if ((c == EQ_EXPR && integer_zerop (op2))
1410 || (c == NE_EXPR && integer_nonzerop (op2)))
1411 return same_bool_comparison_p (expr,
1412 invert_tree_comparison (c, false),
1413 gimple_assign_rhs1 (s),
1414 gimple_assign_rhs2 (s));
1420 /* Check to see if two boolean expressions OP1 and OP2 are logically
1424 same_bool_result_p (const_tree op1, const_tree op2)
1426 /* Simple cases first. */
1427 if (operand_equal_p (op1, op2, 0))
1430 /* Check the cases where at least one of the operands is a comparison.
1431 These are a bit smarter than operand_equal_p in that they apply some
1432 identifies on SSA_NAMEs. */
1433 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1434 && same_bool_comparison_p (op1, TREE_CODE (op2),
1435 TREE_OPERAND (op2, 0),
1436 TREE_OPERAND (op2, 1)))
1438 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1439 && same_bool_comparison_p (op2, TREE_CODE (op1),
1440 TREE_OPERAND (op1, 0),
1441 TREE_OPERAND (op1, 1)))
1448 /* Forward declarations for some mutually recursive functions. */
1451 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1452 enum tree_code code2, tree op2a, tree op2b);
1454 and_var_with_comparison (tree var, bool invert,
1455 enum tree_code code2, tree op2a, tree op2b);
1457 and_var_with_comparison_1 (gimple stmt,
1458 enum tree_code code2, tree op2a, tree op2b);
1460 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1461 enum tree_code code2, tree op2a, tree op2b);
1463 or_var_with_comparison (tree var, bool invert,
1464 enum tree_code code2, tree op2a, tree op2b);
1466 or_var_with_comparison_1 (gimple stmt,
1467 enum tree_code code2, tree op2a, tree op2b);
1469 /* Helper function for and_comparisons_1: try to simplify the AND of the
1470 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1471 If INVERT is true, invert the value of the VAR before doing the AND.
1472 Return NULL_EXPR if we can't simplify this to a single expression. */
1475 and_var_with_comparison (tree var, bool invert,
1476 enum tree_code code2, tree op2a, tree op2b)
1479 gimple stmt = SSA_NAME_DEF_STMT (var);
1481 /* We can only deal with variables whose definitions are assignments. */
1482 if (!is_gimple_assign (stmt))
1485 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1486 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1487 Then we only have to consider the simpler non-inverted cases. */
1489 t = or_var_with_comparison_1 (stmt,
1490 invert_tree_comparison (code2, false),
1493 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1494 return canonicalize_bool (t, invert);
1497 /* Try to simplify the AND of the ssa variable defined by the assignment
1498 STMT with the comparison specified by (OP2A CODE2 OP2B).
1499 Return NULL_EXPR if we can't simplify this to a single expression. */
1502 and_var_with_comparison_1 (gimple stmt,
1503 enum tree_code code2, tree op2a, tree op2b)
1505 tree var = gimple_assign_lhs (stmt);
1506 tree true_test_var = NULL_TREE;
1507 tree false_test_var = NULL_TREE;
1508 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1510 /* Check for identities like (var AND (var == 0)) => false. */
1511 if (TREE_CODE (op2a) == SSA_NAME
1512 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1514 if ((code2 == NE_EXPR && integer_zerop (op2b))
1515 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1517 true_test_var = op2a;
1518 if (var == true_test_var)
1521 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1522 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1524 false_test_var = op2a;
1525 if (var == false_test_var)
1526 return boolean_false_node;
1530 /* If the definition is a comparison, recurse on it. */
1531 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1533 tree t = and_comparisons_1 (innercode,
1534 gimple_assign_rhs1 (stmt),
1535 gimple_assign_rhs2 (stmt),
1543 /* If the definition is an AND or OR expression, we may be able to
1544 simplify by reassociating. */
1545 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1546 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1548 tree inner1 = gimple_assign_rhs1 (stmt);
1549 tree inner2 = gimple_assign_rhs2 (stmt);
1552 tree partial = NULL_TREE;
1553 bool is_and = (innercode == BIT_AND_EXPR);
1555 /* Check for boolean identities that don't require recursive examination
1557 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1558 inner1 AND (inner1 OR inner2) => inner1
1559 !inner1 AND (inner1 AND inner2) => false
1560 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1561 Likewise for similar cases involving inner2. */
1562 if (inner1 == true_test_var)
1563 return (is_and ? var : inner1);
1564 else if (inner2 == true_test_var)
1565 return (is_and ? var : inner2);
1566 else if (inner1 == false_test_var)
1568 ? boolean_false_node
1569 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1570 else if (inner2 == false_test_var)
1572 ? boolean_false_node
1573 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1575 /* Next, redistribute/reassociate the AND across the inner tests.
1576 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1577 if (TREE_CODE (inner1) == SSA_NAME
1578 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1579 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1580 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1581 gimple_assign_rhs1 (s),
1582 gimple_assign_rhs2 (s),
1583 code2, op2a, op2b)))
1585 /* Handle the AND case, where we are reassociating:
1586 (inner1 AND inner2) AND (op2a code2 op2b)
1588 If the partial result t is a constant, we win. Otherwise
1589 continue on to try reassociating with the other inner test. */
1592 if (integer_onep (t))
1594 else if (integer_zerop (t))
1595 return boolean_false_node;
1598 /* Handle the OR case, where we are redistributing:
1599 (inner1 OR inner2) AND (op2a code2 op2b)
1600 => (t OR (inner2 AND (op2a code2 op2b))) */
1601 else if (integer_onep (t))
1602 return boolean_true_node;
1604 /* Save partial result for later. */
1608 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1609 if (TREE_CODE (inner2) == SSA_NAME
1610 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1611 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1612 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1613 gimple_assign_rhs1 (s),
1614 gimple_assign_rhs2 (s),
1615 code2, op2a, op2b)))
1617 /* Handle the AND case, where we are reassociating:
1618 (inner1 AND inner2) AND (op2a code2 op2b)
1619 => (inner1 AND t) */
1622 if (integer_onep (t))
1624 else if (integer_zerop (t))
1625 return boolean_false_node;
1626 /* If both are the same, we can apply the identity
1628 else if (partial && same_bool_result_p (t, partial))
1632 /* Handle the OR case. where we are redistributing:
1633 (inner1 OR inner2) AND (op2a code2 op2b)
1634 => (t OR (inner1 AND (op2a code2 op2b)))
1635 => (t OR partial) */
1638 if (integer_onep (t))
1639 return boolean_true_node;
1642 /* We already got a simplification for the other
1643 operand to the redistributed OR expression. The
1644 interesting case is when at least one is false.
1645 Or, if both are the same, we can apply the identity
1647 if (integer_zerop (partial))
1649 else if (integer_zerop (t))
1651 else if (same_bool_result_p (t, partial))
1660 /* Try to simplify the AND of two comparisons defined by
1661 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1662 If this can be done without constructing an intermediate value,
1663 return the resulting tree; otherwise NULL_TREE is returned.
1664 This function is deliberately asymmetric as it recurses on SSA_DEFs
1665 in the first comparison but not the second. */
1668 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1669 enum tree_code code2, tree op2a, tree op2b)
1671 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1672 if (operand_equal_p (op1a, op2a, 0)
1673 && operand_equal_p (op1b, op2b, 0))
1675 /* Result will be either NULL_TREE, or a combined comparison. */
1676 tree t = combine_comparisons (UNKNOWN_LOCATION,
1677 TRUTH_ANDIF_EXPR, code1, code2,
1678 boolean_type_node, op1a, op1b);
1683 /* Likewise the swapped case of the above. */
1684 if (operand_equal_p (op1a, op2b, 0)
1685 && operand_equal_p (op1b, op2a, 0))
1687 /* Result will be either NULL_TREE, or a combined comparison. */
1688 tree t = combine_comparisons (UNKNOWN_LOCATION,
1689 TRUTH_ANDIF_EXPR, code1,
1690 swap_tree_comparison (code2),
1691 boolean_type_node, op1a, op1b);
1696 /* If both comparisons are of the same value against constants, we might
1697 be able to merge them. */
1698 if (operand_equal_p (op1a, op2a, 0)
1699 && TREE_CODE (op1b) == INTEGER_CST
1700 && TREE_CODE (op2b) == INTEGER_CST)
1702 int cmp = tree_int_cst_compare (op1b, op2b);
1704 /* If we have (op1a == op1b), we should either be able to
1705 return that or FALSE, depending on whether the constant op1b
1706 also satisfies the other comparison against op2b. */
1707 if (code1 == EQ_EXPR)
1713 case EQ_EXPR: val = (cmp == 0); break;
1714 case NE_EXPR: val = (cmp != 0); break;
1715 case LT_EXPR: val = (cmp < 0); break;
1716 case GT_EXPR: val = (cmp > 0); break;
1717 case LE_EXPR: val = (cmp <= 0); break;
1718 case GE_EXPR: val = (cmp >= 0); break;
1719 default: done = false;
1724 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1726 return boolean_false_node;
1729 /* Likewise if the second comparison is an == comparison. */
1730 else if (code2 == EQ_EXPR)
1736 case EQ_EXPR: val = (cmp == 0); break;
1737 case NE_EXPR: val = (cmp != 0); break;
1738 case LT_EXPR: val = (cmp > 0); break;
1739 case GT_EXPR: val = (cmp < 0); break;
1740 case LE_EXPR: val = (cmp >= 0); break;
1741 case GE_EXPR: val = (cmp <= 0); break;
1742 default: done = false;
1747 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1749 return boolean_false_node;
1753 /* Same business with inequality tests. */
1754 else if (code1 == NE_EXPR)
1759 case EQ_EXPR: val = (cmp != 0); break;
1760 case NE_EXPR: val = (cmp == 0); break;
1761 case LT_EXPR: val = (cmp >= 0); break;
1762 case GT_EXPR: val = (cmp <= 0); break;
1763 case LE_EXPR: val = (cmp > 0); break;
1764 case GE_EXPR: val = (cmp < 0); break;
1769 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1771 else if (code2 == NE_EXPR)
1776 case EQ_EXPR: val = (cmp == 0); break;
1777 case NE_EXPR: val = (cmp != 0); break;
1778 case LT_EXPR: val = (cmp <= 0); break;
1779 case GT_EXPR: val = (cmp >= 0); break;
1780 case LE_EXPR: val = (cmp < 0); break;
1781 case GE_EXPR: val = (cmp > 0); break;
1786 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1789 /* Chose the more restrictive of two < or <= comparisons. */
1790 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1791 && (code2 == LT_EXPR || code2 == LE_EXPR))
1793 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1794 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1796 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1799 /* Likewise chose the more restrictive of two > or >= comparisons. */
1800 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1801 && (code2 == GT_EXPR || code2 == GE_EXPR))
1803 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1804 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1806 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1809 /* Check for singleton ranges. */
1811 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1812 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1813 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1815 /* Check for disjoint ranges. */
1817 && (code1 == LT_EXPR || code1 == LE_EXPR)
1818 && (code2 == GT_EXPR || code2 == GE_EXPR))
1819 return boolean_false_node;
1821 && (code1 == GT_EXPR || code1 == GE_EXPR)
1822 && (code2 == LT_EXPR || code2 == LE_EXPR))
1823 return boolean_false_node;
1826 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1827 NAME's definition is a truth value. See if there are any simplifications
1828 that can be done against the NAME's definition. */
1829 if (TREE_CODE (op1a) == SSA_NAME
1830 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1831 && (integer_zerop (op1b) || integer_onep (op1b)))
1833 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1834 || (code1 == NE_EXPR && integer_onep (op1b)));
1835 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1836 switch (gimple_code (stmt))
1839 /* Try to simplify by copy-propagating the definition. */
1840 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1843 /* If every argument to the PHI produces the same result when
1844 ANDed with the second comparison, we win.
1845 Do not do this unless the type is bool since we need a bool
1846 result here anyway. */
1847 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1849 tree result = NULL_TREE;
1851 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1853 tree arg = gimple_phi_arg_def (stmt, i);
1855 /* If this PHI has itself as an argument, ignore it.
1856 If all the other args produce the same result,
1858 if (arg == gimple_phi_result (stmt))
1860 else if (TREE_CODE (arg) == INTEGER_CST)
1862 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1865 result = boolean_false_node;
1866 else if (!integer_zerop (result))
1870 result = fold_build2 (code2, boolean_type_node,
1872 else if (!same_bool_comparison_p (result,
1876 else if (TREE_CODE (arg) == SSA_NAME
1877 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1880 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1881 /* In simple cases we can look through PHI nodes,
1882 but we have to be careful with loops.
1884 if (! dom_info_available_p (CDI_DOMINATORS)
1885 || gimple_bb (def_stmt) == gimple_bb (stmt)
1886 || dominated_by_p (CDI_DOMINATORS,
1887 gimple_bb (def_stmt),
1890 temp = and_var_with_comparison (arg, invert, code2,
1896 else if (!same_bool_result_p (result, temp))
1912 /* Try to simplify the AND of two comparisons, specified by
1913 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1914 If this can be simplified to a single expression (without requiring
1915 introducing more SSA variables to hold intermediate values),
1916 return the resulting tree. Otherwise return NULL_TREE.
1917 If the result expression is non-null, it has boolean type. */
1920 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1921 enum tree_code code2, tree op2a, tree op2b)
1923 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1927 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1930 /* Helper function for or_comparisons_1: try to simplify the OR of the
1931 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1932 If INVERT is true, invert the value of VAR before doing the OR.
1933 Return NULL_EXPR if we can't simplify this to a single expression. */
1936 or_var_with_comparison (tree var, bool invert,
1937 enum tree_code code2, tree op2a, tree op2b)
1940 gimple stmt = SSA_NAME_DEF_STMT (var);
1942 /* We can only deal with variables whose definitions are assignments. */
1943 if (!is_gimple_assign (stmt))
1946 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1947 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1948 Then we only have to consider the simpler non-inverted cases. */
1950 t = and_var_with_comparison_1 (stmt,
1951 invert_tree_comparison (code2, false),
1954 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1955 return canonicalize_bool (t, invert);
1958 /* Try to simplify the OR of the ssa variable defined by the assignment
1959 STMT with the comparison specified by (OP2A CODE2 OP2B).
1960 Return NULL_EXPR if we can't simplify this to a single expression. */
1963 or_var_with_comparison_1 (gimple stmt,
1964 enum tree_code code2, tree op2a, tree op2b)
1966 tree var = gimple_assign_lhs (stmt);
1967 tree true_test_var = NULL_TREE;
1968 tree false_test_var = NULL_TREE;
1969 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1971 /* Check for identities like (var OR (var != 0)) => true . */
1972 if (TREE_CODE (op2a) == SSA_NAME
1973 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1975 if ((code2 == NE_EXPR && integer_zerop (op2b))
1976 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1978 true_test_var = op2a;
1979 if (var == true_test_var)
1982 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1983 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1985 false_test_var = op2a;
1986 if (var == false_test_var)
1987 return boolean_true_node;
1991 /* If the definition is a comparison, recurse on it. */
1992 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1994 tree t = or_comparisons_1 (innercode,
1995 gimple_assign_rhs1 (stmt),
1996 gimple_assign_rhs2 (stmt),
2004 /* If the definition is an AND or OR expression, we may be able to
2005 simplify by reassociating. */
2006 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2007 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2009 tree inner1 = gimple_assign_rhs1 (stmt);
2010 tree inner2 = gimple_assign_rhs2 (stmt);
2013 tree partial = NULL_TREE;
2014 bool is_or = (innercode == BIT_IOR_EXPR);
2016 /* Check for boolean identities that don't require recursive examination
2018 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2019 inner1 OR (inner1 AND inner2) => inner1
2020 !inner1 OR (inner1 OR inner2) => true
2021 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2023 if (inner1 == true_test_var)
2024 return (is_or ? var : inner1);
2025 else if (inner2 == true_test_var)
2026 return (is_or ? var : inner2);
2027 else if (inner1 == false_test_var)
2030 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2031 else if (inner2 == false_test_var)
2034 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2036 /* Next, redistribute/reassociate the OR across the inner tests.
2037 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2038 if (TREE_CODE (inner1) == SSA_NAME
2039 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2040 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2041 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2042 gimple_assign_rhs1 (s),
2043 gimple_assign_rhs2 (s),
2044 code2, op2a, op2b)))
2046 /* Handle the OR case, where we are reassociating:
2047 (inner1 OR inner2) OR (op2a code2 op2b)
2049 If the partial result t is a constant, we win. Otherwise
2050 continue on to try reassociating with the other inner test. */
2053 if (integer_onep (t))
2054 return boolean_true_node;
2055 else if (integer_zerop (t))
2059 /* Handle the AND case, where we are redistributing:
2060 (inner1 AND inner2) OR (op2a code2 op2b)
2061 => (t AND (inner2 OR (op2a code op2b))) */
2062 else if (integer_zerop (t))
2063 return boolean_false_node;
2065 /* Save partial result for later. */
2069 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2070 if (TREE_CODE (inner2) == SSA_NAME
2071 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2072 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2073 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2074 gimple_assign_rhs1 (s),
2075 gimple_assign_rhs2 (s),
2076 code2, op2a, op2b)))
2078 /* Handle the OR case, where we are reassociating:
2079 (inner1 OR inner2) OR (op2a code2 op2b)
2081 => (t OR partial) */
2084 if (integer_zerop (t))
2086 else if (integer_onep (t))
2087 return boolean_true_node;
2088 /* If both are the same, we can apply the identity
2090 else if (partial && same_bool_result_p (t, partial))
2094 /* Handle the AND case, where we are redistributing:
2095 (inner1 AND inner2) OR (op2a code2 op2b)
2096 => (t AND (inner1 OR (op2a code2 op2b)))
2097 => (t AND partial) */
2100 if (integer_zerop (t))
2101 return boolean_false_node;
2104 /* We already got a simplification for the other
2105 operand to the redistributed AND expression. The
2106 interesting case is when at least one is true.
2107 Or, if both are the same, we can apply the identity
2109 if (integer_onep (partial))
2111 else if (integer_onep (t))
2113 else if (same_bool_result_p (t, partial))
2122 /* Try to simplify the OR of two comparisons defined by
2123 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2124 If this can be done without constructing an intermediate value,
2125 return the resulting tree; otherwise NULL_TREE is returned.
2126 This function is deliberately asymmetric as it recurses on SSA_DEFs
2127 in the first comparison but not the second. */
2130 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2131 enum tree_code code2, tree op2a, tree op2b)
2133 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2134 if (operand_equal_p (op1a, op2a, 0)
2135 && operand_equal_p (op1b, op2b, 0))
2137 /* Result will be either NULL_TREE, or a combined comparison. */
2138 tree t = combine_comparisons (UNKNOWN_LOCATION,
2139 TRUTH_ORIF_EXPR, code1, code2,
2140 boolean_type_node, op1a, op1b);
2145 /* Likewise the swapped case of the above. */
2146 if (operand_equal_p (op1a, op2b, 0)
2147 && operand_equal_p (op1b, op2a, 0))
2149 /* Result will be either NULL_TREE, or a combined comparison. */
2150 tree t = combine_comparisons (UNKNOWN_LOCATION,
2151 TRUTH_ORIF_EXPR, code1,
2152 swap_tree_comparison (code2),
2153 boolean_type_node, op1a, op1b);
2158 /* If both comparisons are of the same value against constants, we might
2159 be able to merge them. */
2160 if (operand_equal_p (op1a, op2a, 0)
2161 && TREE_CODE (op1b) == INTEGER_CST
2162 && TREE_CODE (op2b) == INTEGER_CST)
2164 int cmp = tree_int_cst_compare (op1b, op2b);
2166 /* If we have (op1a != op1b), we should either be able to
2167 return that or TRUE, depending on whether the constant op1b
2168 also satisfies the other comparison against op2b. */
2169 if (code1 == NE_EXPR)
2175 case EQ_EXPR: val = (cmp == 0); break;
2176 case NE_EXPR: val = (cmp != 0); break;
2177 case LT_EXPR: val = (cmp < 0); break;
2178 case GT_EXPR: val = (cmp > 0); break;
2179 case LE_EXPR: val = (cmp <= 0); break;
2180 case GE_EXPR: val = (cmp >= 0); break;
2181 default: done = false;
2186 return boolean_true_node;
2188 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2191 /* Likewise if the second comparison is a != comparison. */
2192 else if (code2 == NE_EXPR)
2198 case EQ_EXPR: val = (cmp == 0); break;
2199 case NE_EXPR: val = (cmp != 0); break;
2200 case LT_EXPR: val = (cmp > 0); break;
2201 case GT_EXPR: val = (cmp < 0); break;
2202 case LE_EXPR: val = (cmp >= 0); break;
2203 case GE_EXPR: val = (cmp <= 0); break;
2204 default: done = false;
2209 return boolean_true_node;
2211 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2215 /* See if an equality test is redundant with the other comparison. */
2216 else if (code1 == EQ_EXPR)
2221 case EQ_EXPR: val = (cmp == 0); break;
2222 case NE_EXPR: val = (cmp != 0); break;
2223 case LT_EXPR: val = (cmp < 0); break;
2224 case GT_EXPR: val = (cmp > 0); break;
2225 case LE_EXPR: val = (cmp <= 0); break;
2226 case GE_EXPR: val = (cmp >= 0); break;
2231 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2233 else if (code2 == EQ_EXPR)
2238 case EQ_EXPR: val = (cmp == 0); break;
2239 case NE_EXPR: val = (cmp != 0); break;
2240 case LT_EXPR: val = (cmp > 0); break;
2241 case GT_EXPR: val = (cmp < 0); break;
2242 case LE_EXPR: val = (cmp >= 0); break;
2243 case GE_EXPR: val = (cmp <= 0); break;
2248 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2251 /* Chose the less restrictive of two < or <= comparisons. */
2252 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2253 && (code2 == LT_EXPR || code2 == LE_EXPR))
2255 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2256 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2258 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2261 /* Likewise chose the less restrictive of two > or >= comparisons. */
2262 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2263 && (code2 == GT_EXPR || code2 == GE_EXPR))
2265 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2266 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2268 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2271 /* Check for singleton ranges. */
2273 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2274 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2275 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2277 /* Check for less/greater pairs that don't restrict the range at all. */
2279 && (code1 == LT_EXPR || code1 == LE_EXPR)
2280 && (code2 == GT_EXPR || code2 == GE_EXPR))
2281 return boolean_true_node;
2283 && (code1 == GT_EXPR || code1 == GE_EXPR)
2284 && (code2 == LT_EXPR || code2 == LE_EXPR))
2285 return boolean_true_node;
2288 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2289 NAME's definition is a truth value. See if there are any simplifications
2290 that can be done against the NAME's definition. */
2291 if (TREE_CODE (op1a) == SSA_NAME
2292 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2293 && (integer_zerop (op1b) || integer_onep (op1b)))
2295 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2296 || (code1 == NE_EXPR && integer_onep (op1b)));
2297 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2298 switch (gimple_code (stmt))
2301 /* Try to simplify by copy-propagating the definition. */
2302 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2305 /* If every argument to the PHI produces the same result when
2306 ORed with the second comparison, we win.
2307 Do not do this unless the type is bool since we need a bool
2308 result here anyway. */
2309 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2311 tree result = NULL_TREE;
2313 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2315 tree arg = gimple_phi_arg_def (stmt, i);
2317 /* If this PHI has itself as an argument, ignore it.
2318 If all the other args produce the same result,
2320 if (arg == gimple_phi_result (stmt))
2322 else if (TREE_CODE (arg) == INTEGER_CST)
2324 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2327 result = boolean_true_node;
2328 else if (!integer_onep (result))
2332 result = fold_build2 (code2, boolean_type_node,
2334 else if (!same_bool_comparison_p (result,
2338 else if (TREE_CODE (arg) == SSA_NAME
2339 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2342 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2343 /* In simple cases we can look through PHI nodes,
2344 but we have to be careful with loops.
2346 if (! dom_info_available_p (CDI_DOMINATORS)
2347 || gimple_bb (def_stmt) == gimple_bb (stmt)
2348 || dominated_by_p (CDI_DOMINATORS,
2349 gimple_bb (def_stmt),
2352 temp = or_var_with_comparison (arg, invert, code2,
2358 else if (!same_bool_result_p (result, temp))
2374 /* Try to simplify the OR of two comparisons, specified by
2375 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2376 If this can be simplified to a single expression (without requiring
2377 introducing more SSA variables to hold intermediate values),
2378 return the resulting tree. Otherwise return NULL_TREE.
2379 If the result expression is non-null, it has boolean type. */
2382 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2383 enum tree_code code2, tree op2a, tree op2b)
2385 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2389 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2393 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2395 Either NULL_TREE, a simplified but non-constant or a constant
2398 ??? This should go into a gimple-fold-inline.h file to be eventually
2399 privatized with the single valueize function used in the various TUs
2400 to avoid the indirect function call overhead. */
2403 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2405 location_t loc = gimple_location (stmt);
2406 switch (gimple_code (stmt))
2410 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2412 switch (get_gimple_rhs_class (subcode))
2414 case GIMPLE_SINGLE_RHS:
2416 tree rhs = gimple_assign_rhs1 (stmt);
2417 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2419 if (TREE_CODE (rhs) == SSA_NAME)
2421 /* If the RHS is an SSA_NAME, return its known constant value,
2423 return (*valueize) (rhs);
2425 /* Handle propagating invariant addresses into address
2427 else if (TREE_CODE (rhs) == ADDR_EXPR
2428 && !is_gimple_min_invariant (rhs))
2430 HOST_WIDE_INT offset = 0;
2432 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2436 && (CONSTANT_CLASS_P (base)
2437 || decl_address_invariant_p (base)))
2438 return build_invariant_address (TREE_TYPE (rhs),
2441 else if (TREE_CODE (rhs) == CONSTRUCTOR
2442 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2443 && (CONSTRUCTOR_NELTS (rhs)
2444 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2449 vec = XALLOCAVEC (tree,
2450 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2451 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2453 val = (*valueize) (val);
2454 if (TREE_CODE (val) == INTEGER_CST
2455 || TREE_CODE (val) == REAL_CST
2456 || TREE_CODE (val) == FIXED_CST)
2462 return build_vector (TREE_TYPE (rhs), vec);
2465 if (kind == tcc_reference)
2467 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2468 || TREE_CODE (rhs) == REALPART_EXPR
2469 || TREE_CODE (rhs) == IMAGPART_EXPR)
2470 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2472 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2473 return fold_unary_loc (EXPR_LOCATION (rhs),
2475 TREE_TYPE (rhs), val);
2477 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2478 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2480 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2481 return fold_ternary_loc (EXPR_LOCATION (rhs),
2483 TREE_TYPE (rhs), val,
2484 TREE_OPERAND (rhs, 1),
2485 TREE_OPERAND (rhs, 2));
2487 else if (TREE_CODE (rhs) == MEM_REF
2488 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2490 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2491 if (TREE_CODE (val) == ADDR_EXPR
2492 && is_gimple_min_invariant (val))
2494 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2496 TREE_OPERAND (rhs, 1));
2501 return fold_const_aggregate_ref_1 (rhs, valueize);
2503 else if (kind == tcc_declaration)
2504 return get_symbol_constant_value (rhs);
2508 case GIMPLE_UNARY_RHS:
2510 /* Handle unary operators that can appear in GIMPLE form.
2511 Note that we know the single operand must be a constant,
2512 so this should almost always return a simplified RHS. */
2513 tree lhs = gimple_assign_lhs (stmt);
2514 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2516 /* Conversions are useless for CCP purposes if they are
2517 value-preserving. Thus the restrictions that
2518 useless_type_conversion_p places for restrict qualification
2519 of pointer types should not apply here.
2520 Substitution later will only substitute to allowed places. */
2521 if (CONVERT_EXPR_CODE_P (subcode)
2522 && POINTER_TYPE_P (TREE_TYPE (lhs))
2523 && POINTER_TYPE_P (TREE_TYPE (op0))
2524 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2525 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2526 && TYPE_MODE (TREE_TYPE (lhs))
2527 == TYPE_MODE (TREE_TYPE (op0)))
2531 fold_unary_ignore_overflow_loc (loc, subcode,
2532 gimple_expr_type (stmt), op0);
2535 case GIMPLE_BINARY_RHS:
2537 /* Handle binary operators that can appear in GIMPLE form. */
2538 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2539 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2541 /* Translate &x + CST into an invariant form suitable for
2542 further propagation. */
2543 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2544 && TREE_CODE (op0) == ADDR_EXPR
2545 && TREE_CODE (op1) == INTEGER_CST)
2547 tree off = fold_convert (ptr_type_node, op1);
2548 return build_fold_addr_expr_loc
2550 fold_build2 (MEM_REF,
2551 TREE_TYPE (TREE_TYPE (op0)),
2552 unshare_expr (op0), off));
2555 return fold_binary_loc (loc, subcode,
2556 gimple_expr_type (stmt), op0, op1);
2559 case GIMPLE_TERNARY_RHS:
2561 /* Handle ternary operators that can appear in GIMPLE form. */
2562 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2563 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2564 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2566 /* Fold embedded expressions in ternary codes. */
2567 if ((subcode == COND_EXPR
2568 || subcode == VEC_COND_EXPR)
2569 && COMPARISON_CLASS_P (op0))
2571 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2572 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2573 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2574 TREE_TYPE (op0), op00, op01);
2579 return fold_ternary_loc (loc, subcode,
2580 gimple_expr_type (stmt), op0, op1, op2);
2592 if (gimple_call_internal_p (stmt))
2593 /* No folding yet for these functions. */
2596 fn = (*valueize) (gimple_call_fn (stmt));
2597 if (TREE_CODE (fn) == ADDR_EXPR
2598 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2599 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2601 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2604 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2605 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2606 call = build_call_array_loc (loc,
2607 gimple_call_return_type (stmt),
2608 fn, gimple_call_num_args (stmt), args);
2609 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2611 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2612 STRIP_NOPS (retval);
2623 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2624 Returns NULL_TREE if folding to a constant is not possible, otherwise
2625 returns a constant according to is_gimple_min_invariant. */
2628 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2630 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2631 if (res && is_gimple_min_invariant (res))
2637 /* The following set of functions are supposed to fold references using
2638 their constant initializers. */
2640 static tree fold_ctor_reference (tree type, tree ctor,
2641 unsigned HOST_WIDE_INT offset,
2642 unsigned HOST_WIDE_INT size);
2644 /* See if we can find constructor defining value of BASE.
2645 When we know the consructor with constant offset (such as
2646 base is array[40] and we do know constructor of array), then
2647 BIT_OFFSET is adjusted accordingly.
2649 As a special case, return error_mark_node when constructor
2650 is not explicitly available, but it is known to be zero
2651 such as 'static const int a;'. */
2653 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2654 tree (*valueize)(tree))
2656 HOST_WIDE_INT bit_offset2, size, max_size;
2657 if (TREE_CODE (base) == MEM_REF)
2659 if (!integer_zerop (TREE_OPERAND (base, 1)))
2661 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2663 *bit_offset += (mem_ref_offset (base).low
2668 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2669 base = valueize (TREE_OPERAND (base, 0));
2670 if (!base || TREE_CODE (base) != ADDR_EXPR)
2672 base = TREE_OPERAND (base, 0);
2675 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2676 DECL_INITIAL. If BASE is a nested reference into another
2677 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2678 the inner reference. */
2679 switch (TREE_CODE (base))
2682 if (!const_value_known_p (base))
2687 if (!DECL_INITIAL (base)
2688 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2689 return error_mark_node;
2690 return DECL_INITIAL (base);
2694 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2695 if (max_size == -1 || size != max_size)
2697 *bit_offset += bit_offset2;
2698 return get_base_constructor (base, bit_offset, valueize);
2709 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2710 to the memory at bit OFFSET.
2712 We do only simple job of folding byte accesses. */
2715 fold_string_cst_ctor_reference (tree type, tree ctor,
2716 unsigned HOST_WIDE_INT offset,
2717 unsigned HOST_WIDE_INT size)
2719 if (INTEGRAL_TYPE_P (type)
2720 && (TYPE_MODE (type)
2721 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2722 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2724 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2725 && size == BITS_PER_UNIT
2726 && !(offset % BITS_PER_UNIT))
2728 offset /= BITS_PER_UNIT;
2729 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2730 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2733 const char a[20]="hello";
2736 might lead to offset greater than string length. In this case we
2737 know value is either initialized to 0 or out of bounds. Return 0
2739 return build_zero_cst (type);
2744 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2745 SIZE to the memory at bit OFFSET. */
2748 fold_array_ctor_reference (tree type, tree ctor,
2749 unsigned HOST_WIDE_INT offset,
2750 unsigned HOST_WIDE_INT size)
2752 unsigned HOST_WIDE_INT cnt;
2754 double_int low_bound, elt_size;
2755 double_int index, max_index;
2756 double_int access_index;
2757 tree domain_type = NULL_TREE;
2758 HOST_WIDE_INT inner_offset;
2760 /* Compute low bound and elt size. */
2761 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2762 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2763 if (domain_type && TYPE_MIN_VALUE (domain_type))
2765 /* Static constructors for variably sized objects makes no sense. */
2766 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2767 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2770 low_bound = double_int_zero;
2771 /* Static constructors for variably sized objects makes no sense. */
2772 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2775 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2778 /* We can handle only constantly sized accesses that are known to not
2779 be larger than size of array element. */
2780 if (!TYPE_SIZE_UNIT (type)
2781 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2782 || double_int_cmp (elt_size,
2783 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2786 /* Compute the array index we look for. */
2787 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2788 elt_size, TRUNC_DIV_EXPR);
2789 access_index = double_int_add (access_index, low_bound);
2791 /* And offset within the access. */
2792 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2794 /* See if the array field is large enough to span whole access. We do not
2795 care to fold accesses spanning multiple array indexes. */
2796 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2799 index = double_int_sub (low_bound, double_int_one);
2800 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2802 /* Array constructor might explicitely set index, or specify range
2803 or leave index NULL meaning that it is next index after previous
2807 if (TREE_CODE (cfield) == INTEGER_CST)
2808 max_index = index = tree_to_double_int (cfield);
2811 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2812 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2813 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2817 max_index = index = double_int_add (index, double_int_one);
2819 /* Do we have match? */
2820 if (double_int_cmp (access_index, index, 1) >= 0
2821 && double_int_cmp (access_index, max_index, 1) <= 0)
2822 return fold_ctor_reference (type, cval, inner_offset, size);
2824 /* When memory is not explicitely mentioned in constructor,
2825 it is 0 (or out of range). */
2826 return build_zero_cst (type);
2829 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2830 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2833 fold_nonarray_ctor_reference (tree type, tree ctor,
2834 unsigned HOST_WIDE_INT offset,
2835 unsigned HOST_WIDE_INT size)
2837 unsigned HOST_WIDE_INT cnt;
2840 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2843 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2844 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2845 tree field_size = DECL_SIZE (cfield);
2846 double_int bitoffset;
2847 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2848 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2849 double_int bitoffset_end, access_end;
2851 /* Variable sized objects in static constructors makes no sense,
2852 but field_size can be NULL for flexible array members. */
2853 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2854 && TREE_CODE (byte_offset) == INTEGER_CST
2855 && (field_size != NULL_TREE
2856 ? TREE_CODE (field_size) == INTEGER_CST
2857 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2859 /* Compute bit offset of the field. */
2860 bitoffset = double_int_add (tree_to_double_int (field_offset),
2861 double_int_mul (byte_offset_cst,
2862 bits_per_unit_cst));
2863 /* Compute bit offset where the field ends. */
2864 if (field_size != NULL_TREE)
2865 bitoffset_end = double_int_add (bitoffset,
2866 tree_to_double_int (field_size));
2868 bitoffset_end = double_int_zero;
2870 access_end = double_int_add (uhwi_to_double_int (offset),
2871 uhwi_to_double_int (size));
2873 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2874 [BITOFFSET, BITOFFSET_END)? */
2875 if (double_int_cmp (access_end, bitoffset, 0) > 0
2876 && (field_size == NULL_TREE
2877 || double_int_cmp (uhwi_to_double_int (offset),
2878 bitoffset_end, 0) < 0))
2880 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2882 /* We do have overlap. Now see if field is large enough to
2883 cover the access. Give up for accesses spanning multiple
2885 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2887 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2889 return fold_ctor_reference (type, cval,
2890 double_int_to_uhwi (inner_offset), size);
2893 /* When memory is not explicitely mentioned in constructor, it is 0. */
2894 return build_zero_cst (type);
2897 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2898 to the memory at bit OFFSET. */
2901 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2902 unsigned HOST_WIDE_INT size)
2906 /* We found the field with exact match. */
2907 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2909 return canonicalize_constructor_val (ctor);
2911 /* We are at the end of walk, see if we can view convert the
2913 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2914 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2915 && operand_equal_p (TYPE_SIZE (type),
2916 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2918 ret = canonicalize_constructor_val (ctor);
2919 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2924 if (TREE_CODE (ctor) == STRING_CST)
2925 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2926 if (TREE_CODE (ctor) == CONSTRUCTOR)
2929 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2930 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2931 return fold_array_ctor_reference (type, ctor, offset, size);
2933 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2939 /* Return the tree representing the element referenced by T if T is an
2940 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2941 names using VALUEIZE. Return NULL_TREE otherwise. */
2944 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2946 tree ctor, idx, base;
2947 HOST_WIDE_INT offset, size, max_size;
2950 if (TREE_THIS_VOLATILE (t))
2953 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2954 return get_symbol_constant_value (t);
2956 tem = fold_read_from_constant_string (t);
2960 switch (TREE_CODE (t))
2963 case ARRAY_RANGE_REF:
2964 /* Constant indexes are handled well by get_base_constructor.
2965 Only special case variable offsets.
2966 FIXME: This code can't handle nested references with variable indexes
2967 (they will be handled only by iteration of ccp). Perhaps we can bring
2968 get_ref_base_and_extent here and make it use a valueize callback. */
2969 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2971 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2972 && host_integerp (idx, 0))
2974 tree low_bound, unit_size;
2976 /* If the resulting bit-offset is constant, track it. */
2977 if ((low_bound = array_ref_low_bound (t),
2978 host_integerp (low_bound, 0))
2979 && (unit_size = array_ref_element_size (t),
2980 host_integerp (unit_size, 1)))
2982 offset = TREE_INT_CST_LOW (idx);
2983 offset -= TREE_INT_CST_LOW (low_bound);
2984 offset *= TREE_INT_CST_LOW (unit_size);
2985 offset *= BITS_PER_UNIT;
2987 base = TREE_OPERAND (t, 0);
2988 ctor = get_base_constructor (base, &offset, valueize);
2989 /* Empty constructor. Always fold to 0. */
2990 if (ctor == error_mark_node)
2991 return build_zero_cst (TREE_TYPE (t));
2992 /* Out of bound array access. Value is undefined,
2996 /* We can not determine ctor. */
2999 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3000 TREE_INT_CST_LOW (unit_size)
3008 case TARGET_MEM_REF:
3010 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3011 ctor = get_base_constructor (base, &offset, valueize);
3013 /* Empty constructor. Always fold to 0. */
3014 if (ctor == error_mark_node)
3015 return build_zero_cst (TREE_TYPE (t));
3016 /* We do not know precise address. */
3017 if (max_size == -1 || max_size != size)
3019 /* We can not determine ctor. */
3023 /* Out of bound array access. Value is undefined, but don't fold. */
3027 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3032 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3033 if (c && TREE_CODE (c) == COMPLEX_CST)
3034 return fold_build1_loc (EXPR_LOCATION (t),
3035 TREE_CODE (t), TREE_TYPE (t), c);
3047 fold_const_aggregate_ref (tree t)
3049 return fold_const_aggregate_ref_1 (t, NULL);
3052 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3053 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3054 KNOWN_BINFO carries the binfo describing the true type of
3055 OBJ_TYPE_REF_OBJECT(REF). */
3058 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3060 unsigned HOST_WIDE_INT offset, size;
3063 v = BINFO_VTABLE (known_binfo);
3064 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3068 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3070 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3071 v = TREE_OPERAND (v, 0);
3076 if (TREE_CODE (v) != ADDR_EXPR)
3078 v = TREE_OPERAND (v, 0);
3080 if (TREE_CODE (v) != VAR_DECL
3081 || !DECL_VIRTUAL_P (v)
3082 || !DECL_INITIAL (v)
3083 || DECL_INITIAL (v) == error_mark_node)
3085 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3086 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3087 offset += token * size;
3088 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3090 if (!fn || integer_zerop (fn))
3092 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3093 || TREE_CODE (fn) == FDESC_EXPR);
3094 fn = TREE_OPERAND (fn, 0);
3095 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3097 /* When cgraph node is missing and function is not public, we cannot
3098 devirtualize. This can happen in WHOPR when the actual method
3099 ends up in other partition, because we found devirtualization
3100 possibility too late. */
3101 if (!can_refer_decl_in_current_unit_p (fn))
3104 /* Make sure we create a cgraph node for functions we'll reference.
3105 They can be non-existent if the reference comes from an entry
3106 of an external vtable for example. */
3107 cgraph_get_create_node (fn);
3112 /* Return true iff VAL is a gimple expression that is known to be
3113 non-negative. Restricted to floating-point inputs. */
3116 gimple_val_nonnegative_real_p (tree val)
3120 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3122 /* Use existing logic for non-gimple trees. */
3123 if (tree_expr_nonnegative_p (val))
3126 if (TREE_CODE (val) != SSA_NAME)
3129 /* Currently we look only at the immediately defining statement
3130 to make this determination, since recursion on defining
3131 statements of operands can lead to quadratic behavior in the
3132 worst case. This is expected to catch almost all occurrences
3133 in practice. It would be possible to implement limited-depth
3134 recursion if important cases are lost. Alternatively, passes
3135 that need this information (such as the pow/powi lowering code
3136 in the cse_sincos pass) could be revised to provide it through
3137 dataflow propagation. */
3139 def_stmt = SSA_NAME_DEF_STMT (val);
3141 if (is_gimple_assign (def_stmt))
3145 /* See fold-const.c:tree_expr_nonnegative_p for additional
3146 cases that could be handled with recursion. */
3148 switch (gimple_assign_rhs_code (def_stmt))
3151 /* Always true for floating-point operands. */
3155 /* True if the two operands are identical (since we are
3156 restricted to floating-point inputs). */
3157 op0 = gimple_assign_rhs1 (def_stmt);
3158 op1 = gimple_assign_rhs2 (def_stmt);
3161 || operand_equal_p (op0, op1, 0))
3168 else if (is_gimple_call (def_stmt))
3170 tree fndecl = gimple_call_fndecl (def_stmt);
3172 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3176 switch (DECL_FUNCTION_CODE (fndecl))
3178 CASE_FLT_FN (BUILT_IN_ACOS):
3179 CASE_FLT_FN (BUILT_IN_ACOSH):
3180 CASE_FLT_FN (BUILT_IN_CABS):
3181 CASE_FLT_FN (BUILT_IN_COSH):
3182 CASE_FLT_FN (BUILT_IN_ERFC):
3183 CASE_FLT_FN (BUILT_IN_EXP):
3184 CASE_FLT_FN (BUILT_IN_EXP10):
3185 CASE_FLT_FN (BUILT_IN_EXP2):
3186 CASE_FLT_FN (BUILT_IN_FABS):
3187 CASE_FLT_FN (BUILT_IN_FDIM):
3188 CASE_FLT_FN (BUILT_IN_HYPOT):
3189 CASE_FLT_FN (BUILT_IN_POW10):
3192 CASE_FLT_FN (BUILT_IN_SQRT):
3193 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3194 nonnegative inputs. */
3195 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3200 CASE_FLT_FN (BUILT_IN_POWI):
3201 /* True if the second argument is an even integer. */
3202 arg1 = gimple_call_arg (def_stmt, 1);
3204 if (TREE_CODE (arg1) == INTEGER_CST
3205 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3210 CASE_FLT_FN (BUILT_IN_POW):
3211 /* True if the second argument is an even integer-valued
3213 arg1 = gimple_call_arg (def_stmt, 1);
3215 if (TREE_CODE (arg1) == REAL_CST)
3220 c = TREE_REAL_CST (arg1);
3221 n = real_to_integer (&c);
3225 REAL_VALUE_TYPE cint;
3226 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3227 if (real_identical (&c, &cint))