1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
35 /* If SYM is a constant variable with known value, return the value.
36 NULL_TREE is returned otherwise. */
/* NOTE(review): this file is an elided dump — the embedded line numbers
   jump (e.g. 45 -> 49), so statements, braces and return paths are
   missing throughout.  Comments below describe only the visible lines.  */
39 get_symbol_constant_value (tree sym)
/* Only read-only variables and CONST_DECLs can have a known value.  */
42 && (TREE_READONLY (sym)
43 || TREE_CODE (sym) == CONST_DECL))
45 tree val = DECL_INITIAL (sym);
/* The initializer must itself be a GIMPLE invariant to be usable.  */
49 if (is_gimple_min_invariant (val))
51 if (TREE_CODE (val) == ADDR_EXPR)
53 tree base = get_base_address (TREE_OPERAND (val, 0));
54 if (base && TREE_CODE (base) == VAR_DECL)
/* Propagating the address makes BASE addressable; record that and
   register the variable if referenced-vars info exists for CFUN.  */
56 TREE_ADDRESSABLE (base) = 1;
57 if (gimple_referenced_vars (cfun))
58 add_referenced_var (base);
64 /* Variables declared 'const' without an initializer
65 have zero as the initializer if they may not be
66 overridden at link or run time. */
68 && !DECL_EXTERNAL (sym)
69 && targetm.binds_local_p (sym)
/* Only synthesize a zero for scalar (integral / float) types.  */
70 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
71 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
72 return fold_convert (TREE_TYPE (sym), integer_zero_node);
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
/* NOTE(review): elided dump — lines are missing between the visible
   statements (embedded numbering has gaps); read with the full file.  */
83 may_propagate_address_into_dereference (tree addr, tree deref)
/* DEREF must be an indirect ref and ADDR an ADDR_EXPR by contract.  */
85 gcc_assert (INDIRECT_REF_P (deref)
86 && TREE_CODE (addr) == ADDR_EXPR);
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
110 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
111 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
112 is the desired result type.
114 LOC is the location of the original expression. */
/* NOTE(review): elided dump — early-out returns and several statements
   are missing here (embedded numbering has gaps); annotations below
   cover only the visible logic.  */
117 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
119 bool allow_negative_idx)
121 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
122 tree array_type, elt_type, elt_size;
125 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
126 measured in units of the size of elements type) from that ARRAY_REF).
127 We can't do anything if either is variable.
129 The case we handle here is *(&A[N]+O). */
130 if (TREE_CODE (base) == ARRAY_REF)
132 tree low_bound = array_ref_low_bound (base);
134 elt_offset = TREE_OPERAND (base, 1);
/* Both the existing index and the low bound must be constants.  */
135 if (TREE_CODE (low_bound) != INTEGER_CST
136 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Normalize the existing index to be zero-based.  */
139 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
140 base = TREE_OPERAND (base, 0);
143 /* Ignore stupid user tricks of indexing non-array variables. */
144 array_type = TREE_TYPE (base);
145 if (TREE_CODE (array_type) != ARRAY_TYPE)
147 elt_type = TREE_TYPE (array_type);
148 if (!useless_type_conversion_p (orig_type, elt_type))
151 /* Use signed size type for intermediate computation on the index. */
152 idx_type = ssizetype;
154 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
155 element type (so we can use the alignment if it's not constant).
156 Otherwise, compute the offset as an index by using a division. If the
157 division isn't exact, then don't do anything. */
158 elt_size = TYPE_SIZE_UNIT (elt_type);
161 if (integer_zerop (offset))
163 if (TREE_CODE (elt_size) != INTEGER_CST)
164 elt_size = size_int (TYPE_ALIGN (elt_type));
166 idx = build_int_cst (idx_type, 0);
/* Non-zero offset: divide it by the element size using 128-bit
   (double_int) arithmetic to obtain the array index.  */
170 unsigned HOST_WIDE_INT lquo, lrem;
171 HOST_WIDE_INT hquo, hrem;
174 /* The final array offset should be signed, so we need
175 to sign-extend the (possibly pointer) offset here
176 and use signed division. */
177 soffset = double_int_sext (tree_to_double_int (offset),
178 TYPE_PRECISION (TREE_TYPE (offset)));
179 if (TREE_CODE (elt_size) != INTEGER_CST
180 || div_and_round_double (TRUNC_DIV_EXPR, 0,
181 soffset.low, soffset.high,
182 TREE_INT_CST_LOW (elt_size),
183 TREE_INT_CST_HIGH (elt_size),
184 &lquo, &hquo, &lrem, &hrem)
188 idx = build_int_cst_wide (idx_type, lquo, hquo);
191 /* Assume the low bound is zero. If there is a domain type, get the
192 low bound, if any, convert the index into that type, and add the
194 min_idx = build_int_cst (idx_type, 0);
195 domain_type = TYPE_DOMAIN (array_type);
198 idx_type = domain_type;
199 if (TYPE_MIN_VALUE (idx_type))
200 min_idx = TYPE_MIN_VALUE (idx_type);
202 min_idx = fold_convert (idx_type, min_idx);
/* A non-constant low bound defeats the constant folding below.  */
204 if (TREE_CODE (min_idx) != INTEGER_CST)
207 elt_offset = fold_convert (idx_type, elt_offset);
/* Re-bias the index by the domain minimum and any index picked up
   from an outer ARRAY_REF above.  */
210 if (!integer_zerop (min_idx))
211 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
212 if (!integer_zerop (elt_offset))
213 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
215 /* Make sure to possibly truncate late after offsetting. */
216 idx = fold_convert (idx_type, idx);
218 /* We don't want to construct access past array bounds. For example
221 should not be simplified into (*c)[14] or tree-vrp will
222 give false warnings. The same is true for
223 struct A { long x; char d[0]; } *a;
225 which should be not folded to &a->d[-8]. */
227 && TYPE_MAX_VALUE (domain_type)
228 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
230 tree up_bound = TYPE_MAX_VALUE (domain_type);
232 if (tree_int_cst_lt (up_bound, idx)
233 /* Accesses after the end of arrays of size 0 (gcc
234 extension) and 1 are likely intentional ("struct
236 && compare_tree_int (up_bound, 1) > 0)
/* Guard against negative indexes unless the caller allowed them
   (flexible-array-member style accesses).  */
240 && TYPE_MIN_VALUE (domain_type))
242 if (!allow_negative_idx
243 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
244 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
247 else if (!allow_negative_idx
248 && compare_tree_int (idx, 0) < 0)
/* All checks passed: build the ARRAY_REF replacement.  */
252 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
253 SET_EXPR_LOCATION (t, loc);
259 /* Attempt to fold *(S+O) to S.X.
260 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
261 is the desired result type.
263 LOC is the location of the original expression. */
/* NOTE(review): elided dump — returns/braces missing between visible
   statements; comments describe visible lines only.  */
266 maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
267 tree base, tree offset, tree orig_type)
269 tree f, t, field_type, tail_array_field, field_offset;
/* Only record-like types have fields to match against.  */
273 if (TREE_CODE (record_type) != RECORD_TYPE
274 && TREE_CODE (record_type) != UNION_TYPE
275 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
278 /* Short-circuit silly cases. */
279 if (useless_type_conversion_p (record_type, orig_type))
/* Walk the fields looking for one containing OFFSET; remember a
   trailing array field as a fallback (flexible array members).  */
282 tail_array_field = NULL_TREE;
283 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
287 if (TREE_CODE (f) != FIELD_DECL)
289 if (DECL_BIT_FIELD (f))
292 if (!DECL_FIELD_OFFSET (f))
294 field_offset = byte_position (f);
295 if (TREE_CODE (field_offset) != INTEGER_CST)
298 /* ??? Java creates "interesting" fields for representing base classes.
299 They have no name, and have no context. With no context, we get into
300 trouble with nonoverlapping_component_refs_p. Skip them. */
301 if (!DECL_FIELD_CONTEXT (f))
304 /* The previous array field isn't at the end. */
305 tail_array_field = NULL_TREE;
307 /* Check to see if this offset overlaps with the field. */
308 cmp = tree_int_cst_compare (field_offset, offset);
312 field_type = TREE_TYPE (f);
314 /* Here we exactly match the offset being checked. If the types match,
315 then we can return that field. */
317 && useless_type_conversion_p (orig_type, field_type))
319 t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
323 /* Don't care about offsets into the middle of scalars. */
324 if (!AGGREGATE_TYPE_P (field_type))
327 /* Check for array at the end of the struct. This is often
328 used as for flexible array members. We should be able to
329 turn this into an array access anyway. */
330 if (TREE_CODE (field_type) == ARRAY_TYPE)
331 tail_array_field = f;
333 /* Check the end of the field against the offset. */
334 if (!DECL_SIZE_UNIT (f)
335 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
/* T is the remaining displacement inside this field; the offset
   must land strictly within the field's size.  */
337 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
338 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
341 /* If we matched, then set offset to the displacement into
343 new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
344 SET_EXPR_LOCATION (new_base, loc);
346 /* Recurse to possibly find the match. */
347 ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
348 f == TYPE_FIELDS (record_type));
351 ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
/* No field contained the offset; fall back to a trailing array
   field if one was recorded above.  */
357 if (!tail_array_field)
360 f = tail_array_field;
361 field_type = TREE_TYPE (f);
362 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
364 /* If we get here, we've got an aggregate field, and a possibly
365 nonzero offset into them. Recurse and hope for a valid match. */
366 base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
367 SET_EXPR_LOCATION (base, loc);
369 t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
370 f == TYPE_FIELDS (record_type));
373 return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
377 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
378 or BASE[index] or by combination of those.
380 LOC is the location of original expression.
382 Before attempting the conversion strip off existing ADDR_EXPRs and
383 handled component refs. */
/* NOTE(review): elided dump — some statements missing here.  */
386 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
/* BASE must be an address; strip the ADDR_EXPR wrapper.  */
393 if (TREE_CODE (base) != ADDR_EXPR)
396 base = TREE_OPERAND (base, 0);
398 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
399 so it needs to be removed and new COMPONENT_REF constructed.
400 The wrong COMPONENT_REF are often constructed by folding the
401 (type *)&object within the expression (type *)&object+offset */
402 if (handled_component_p (base))
404 HOST_WIDE_INT sub_offset, size, maxsize;
406 newbase = get_ref_base_and_extent (base, &sub_offset,
408 gcc_assert (newbase);
/* Only usable when the component's bit offset is byte-aligned.  */
411 && !(sub_offset & (BITS_PER_UNIT - 1)))
/* Fold the component's byte offset into OFFSET.  */
415 offset = int_const_binop (PLUS_EXPR, offset,
416 build_int_cst (TREE_TYPE (offset),
417 sub_offset / BITS_PER_UNIT), 1);
/* Trivial case: same type and zero offset — the base itself.  */
420 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
421 && integer_zerop (offset))
423 type = TREE_TYPE (base);
/* Try a component ref first, then an array ref.  */
425 ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
427 ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
432 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
433 or &BASE[index] or by combination of those.
435 LOC is the location of the original expression.
437 Before attempting the conversion strip off existing component refs. */
/* NOTE(review): elided dump — early returns missing between the
   visible statements.  */
440 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
/* Both ADDR and the requested result type must be pointers.  */
445 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
446 && POINTER_TYPE_P (orig_type));
/* Fold the dereferenced form first, then re-take its address.  */
448 t = maybe_fold_offset_to_reference (loc, addr, offset,
449 TREE_TYPE (orig_type));
455 /* For __builtin_object_size to function correctly we need to
456 make sure not to fold address arithmetic so that we change
457 reference from one array to another. This would happen for
460 struct X { char s1[10]; char s2[10] } s;
461 char *foo (void) { return &s.s2[-4]; }
463 where we need to avoid generating &s.s1[6]. As the C and
464 C++ frontends create different initial trees
465 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
466 sophisticated comparisons here. Note that checking for the
467 condition after the fact is easier than trying to avoid doing
470 if (TREE_CODE (orig) == ADDR_EXPR)
471 orig = TREE_OPERAND (orig, 0);
/* Reject foldings that switched to a different underlying array.  */
472 if ((TREE_CODE (orig) == ARRAY_REF
473 || (TREE_CODE (orig) == COMPONENT_REF
474 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
475 && (TREE_CODE (t) == ARRAY_REF
476 || TREE_CODE (t) == COMPONENT_REF)
477 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
478 ? TREE_OPERAND (orig, 0) : orig,
479 TREE_CODE (t) == ARRAY_REF
480 ? TREE_OPERAND (t, 0) : t, 0))
/* Build the address of the folded reference with the right type.  */
483 ptr_type = build_pointer_type (TREE_TYPE (t));
484 if (!useless_type_conversion_p (orig_type, ptr_type))
486 return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
492 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
493 Return the simplified expression, or NULL if nothing could be done. */
/* NOTE(review): elided dump — returns and braces missing between the
   visible statements; comments describe visible lines only.  */
496 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
/* Remember volatility so it can be restored on the folded result.  */
499 bool volatile_p = TREE_THIS_VOLATILE (expr);
500 location_t loc = EXPR_LOCATION (expr);
502 /* We may well have constructed a double-nested PLUS_EXPR via multiple
503 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
504 are sometimes added. */
506 STRIP_TYPE_NOPS (base);
507 TREE_OPERAND (expr, 0) = base;
509 /* One possibility is that the address reduces to a string constant. */
510 t = fold_read_from_constant_string (expr);
514 /* Add in any offset from a POINTER_PLUS_EXPR. */
515 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
519 offset2 = TREE_OPERAND (base, 1);
520 if (TREE_CODE (offset2) != INTEGER_CST)
522 base = TREE_OPERAND (base, 0);
524 offset = fold_convert (sizetype,
525 int_const_binop (PLUS_EXPR, offset, offset2, 1));
528 if (TREE_CODE (base) == ADDR_EXPR)
530 tree base_addr = base;
532 /* Strip the ADDR_EXPR. */
533 base = TREE_OPERAND (base, 0);
535 /* Fold away CONST_DECL to its value, if the type is scalar. */
536 if (TREE_CODE (base) == CONST_DECL
537 && is_gimple_min_invariant (DECL_INITIAL (base)))
538 return DECL_INITIAL (base);
540 /* If there is no offset involved simply return the folded base. */
541 if (integer_zerop (offset))
544 /* Try folding *(&B+O) to B.X. */
545 t = maybe_fold_offset_to_reference (loc, base_addr, offset,
549 /* Preserve volatileness of the original expression.
550 We can end up with a plain decl here which is shared
551 and we shouldn't mess with its flags. */
553 TREE_THIS_VOLATILE (t) = volatile_p;
559 /* We can get here for out-of-range string constant accesses,
560 such as "_"[3]. Bail out of the entire substitution search
561 and arrange for the entire statement to be replaced by a
562 call to __builtin_trap. In all likelihood this will all be
563 constant-folded away, but in the meantime we can't leave with
564 something that get_expr_operands can't understand. */
568 if (TREE_CODE (t) == ADDR_EXPR
569 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
571 /* FIXME: Except that this causes problems elsewhere with dead
572 code not being deleted, and we die in the rtl expanders
573 because we failed to remove some ssa_name. In the meantime,
575 /* FIXME2: This condition should be signaled by
576 fold_read_from_constant_string directly, rather than
577 re-checking for it here. */
578 return integer_zero_node;
581 /* Try folding *(B+O) to B->X. Still an improvement. */
582 if (POINTER_TYPE_P (TREE_TYPE (base)))
584 t = maybe_fold_offset_to_reference (loc, base, offset,
591 /* Otherwise we had an offset that we could not simplify. */
596 /* A quaint feature extant in our address arithmetic is that there
597 can be hidden type changes here. The type of the result need
598 not be the same as the type of the input pointer.
600 What we're after here is an expression of the form
601 (T *)(&array + const)
602 where array is OP0, const is OP1, RES_TYPE is T and
603 the cast doesn't actually exist, but is implicit in the
604 type of the POINTER_PLUS_EXPR. We'd like to turn this into
606 which may be able to propagate further. */
/* NOTE(review): elided dump — returns and braces missing between the
   visible statements.  */
609 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
614 /* The first operand should be an ADDR_EXPR. */
615 if (TREE_CODE (op0) != ADDR_EXPR)
617 op0 = TREE_OPERAND (op0, 0);
619 /* It had better be a constant. */
620 if (TREE_CODE (op1) != INTEGER_CST)
622 /* Or op0 should now be A[0] and the non-constant offset defined
623 via a multiplication by the array element size. */
624 if (TREE_CODE (op0) == ARRAY_REF
625 && integer_zerop (TREE_OPERAND (op0, 1))
626 && TREE_CODE (op1) == SSA_NAME
627 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
/* Chase OP1 to its defining statement to recognize idx * eltsize.  */
629 gimple offset_def = SSA_NAME_DEF_STMT (op1);
630 if (!is_gimple_assign (offset_def))
633 /* As we will end up creating a variable index array access
634 in the outermost array dimension make sure there isn't
635 a more inner array that the index could overflow to. */
636 if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
639 /* Do not build array references of something that we can't
640 see the true number of array dimensions for. */
641 if (!DECL_P (TREE_OPERAND (op0, 0))
642 && !handled_component_p (TREE_OPERAND (op0, 0)))
/* Case 1: offset is idx * element-size — fold to &A[idx].  */
645 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
646 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
647 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
648 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
649 return build_fold_addr_expr
650 (build4 (ARRAY_REF, TREE_TYPE (op0),
651 TREE_OPERAND (op0, 0),
652 gimple_assign_rhs1 (offset_def),
653 TREE_OPERAND (op0, 2),
654 TREE_OPERAND (op0, 3)));
/* Case 2: element size is 1, so any non-MULT offset is the index.  */
655 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
656 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
657 return build_fold_addr_expr
658 (build4 (ARRAY_REF, TREE_TYPE (op0),
659 TREE_OPERAND (op0, 0),
661 TREE_OPERAND (op0, 2),
662 TREE_OPERAND (op0, 3)));
667 /* If the first operand is an ARRAY_REF, expand it so that we can fold
668 the offset into it. */
669 while (TREE_CODE (op0) == ARRAY_REF)
671 tree array_obj = TREE_OPERAND (op0, 0);
672 tree array_idx = TREE_OPERAND (op0, 1);
673 tree elt_type = TREE_TYPE (op0);
674 tree elt_size = TYPE_SIZE_UNIT (elt_type);
/* Only constant indexes and element sizes can be folded here.  */
677 if (TREE_CODE (array_idx) != INTEGER_CST)
679 if (TREE_CODE (elt_size) != INTEGER_CST)
682 /* Un-bias the index by the min index of the array type. */
683 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
686 min_idx = TYPE_MIN_VALUE (min_idx);
689 if (TREE_CODE (min_idx) != INTEGER_CST)
692 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
693 if (!integer_zerop (min_idx))
694 array_idx = int_const_binop (MINUS_EXPR, array_idx,
699 /* Convert the index to a byte offset. */
700 array_idx = fold_convert (sizetype, array_idx);
701 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
703 /* Update the operands for the next round, or for folding. */
704 op1 = int_const_binop (PLUS_EXPR,
709 ptd_type = TREE_TYPE (res_type);
710 /* If we want a pointer to void, reconstruct the reference from the
711 array element type. A pointer to that can be trivially converted
712 to void *. This happens as we fold (void *)(ptr p+ off). */
713 if (VOID_TYPE_P (ptd_type)
714 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
715 ptd_type = TREE_TYPE (TREE_TYPE (op0));
717 /* At which point we can try some of the same things as for indirects. */
718 t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
720 t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
/* Wrap the folded reference back in an address of RES_TYPE.  */
724 t = build1 (ADDR_EXPR, res_type, t);
725 SET_EXPR_LOCATION (t, loc);
731 /* Subroutine of fold_stmt. We perform several simplifications of the
732 memory reference tree EXPR and make sure to re-gimplify them properly
733 after propagation of constant addresses. IS_LHS is true if the
734 reference is supposed to be an lvalue. */
/* NOTE(review): elided dump — declarations (e.g. of T) and several
   statements are missing between visible lines.  */
737 maybe_fold_reference (tree expr, bool is_lhs)
/* Reads from constant strings fold directly (rvalue context only,
   presumably — the full condition is elided here).  */
741 if (TREE_CODE (expr) == ARRAY_REF
744 tree tem = fold_read_from_constant_string (expr);
749 /* ??? We might want to open-code the relevant remaining cases
750 to avoid using the generic fold. */
751 if (handled_component_p (*t)
752 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
754 tree tem = fold (*t);
/* Walk down to the innermost base of the component-ref chain.  */
759 while (handled_component_p (*t))
760 t = &TREE_OPERAND (*t, 0);
762 if (TREE_CODE (*t) == INDIRECT_REF)
764 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
766 /* Avoid folding *"abc" = 5 into 'a' = 5. */
767 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
770 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
771 /* If we had a good reason for propagating the address here,
772 make sure we end up with valid gimple. See PR34989. */
773 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
/* Retry folding on the updated expression.  */
778 tem = maybe_fold_reference (expr, is_lhs);
/* A plain decl base may fold to its constant value.  */
787 tree tem = get_symbol_constant_value (*t);
789 && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
791 *t = unshare_expr (tem);
792 tem = maybe_fold_reference (expr, is_lhs);
803 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
804 replacement rhs for the statement or NULL_TREE if no simplification
805 could be made. It is assumed that the operands have been previously
/* NOTE(review): elided dump — braces, `break`s and some statements are
   missing between visible lines (embedded numbering has gaps).  */
809 fold_gimple_assign (gimple_stmt_iterator *si)
811 gimple stmt = gsi_stmt (*si);
812 enum tree_code subcode = gimple_assign_rhs_code (stmt);
813 location_t loc = gimple_location (stmt);
815 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS: single operand, unary, binary,
   or ternary.  */
817 switch (get_gimple_rhs_class (subcode))
819 case GIMPLE_SINGLE_RHS:
821 tree rhs = gimple_assign_rhs1 (stmt);
823 /* Try to fold a conditional expression. */
824 if (TREE_CODE (rhs) == COND_EXPR)
826 tree op0 = COND_EXPR_COND (rhs);
829 location_t cond_loc = EXPR_LOCATION (rhs);
831 if (COMPARISON_CLASS_P (op0))
/* Defer overflow warnings so we only warn if the folded
   condition is actually kept.  */
833 fold_defer_overflow_warnings ();
834 tem = fold_binary_loc (cond_loc,
835 TREE_CODE (op0), TREE_TYPE (op0),
836 TREE_OPERAND (op0, 0),
837 TREE_OPERAND (op0, 1));
838 /* This is actually a conditional expression, not a GIMPLE
839 conditional statement, however, the valid_gimple_rhs_p
840 test still applies. */
841 set = (tem && is_gimple_condexpr (tem)
842 && valid_gimple_rhs_p (tem));
843 fold_undefer_overflow_warnings (set, stmt, 0);
845 else if (is_gimple_min_invariant (op0))
854 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
855 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
858 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
859 return maybe_fold_tmr (rhs);
861 else if (REFERENCE_CLASS_P (rhs))
862 return maybe_fold_reference (rhs, false);
/* &ref: fold the referenced object, then re-take its address.  */
864 else if (TREE_CODE (rhs) == ADDR_EXPR)
866 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
868 result = fold_convert (TREE_TYPE (rhs),
869 build_fold_addr_expr_loc (loc, tem));
872 else if (TREE_CODE (rhs) == CONSTRUCTOR
873 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
874 && (CONSTRUCTOR_NELTS (rhs)
875 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
877 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
/* Every element must be a literal constant for the conversion.  */
881 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
882 if (TREE_CODE (val) != INTEGER_CST
883 && TREE_CODE (val) != REAL_CST
884 && TREE_CODE (val) != FIXED_CST)
887 return build_vector_from_ctor (TREE_TYPE (rhs),
888 CONSTRUCTOR_ELTS (rhs));
891 else if (DECL_P (rhs))
892 return unshare_expr (get_symbol_constant_value (rhs));
894 /* If we couldn't fold the RHS, hand over to the generic
896 if (result == NULL_TREE)
899 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
900 that may have been added by fold, and "useless" type
901 conversions that might now be apparent due to propagation. */
902 STRIP_USELESS_TYPE_CONVERSION (result);
904 if (result != rhs && valid_gimple_rhs_p (result))
911 case GIMPLE_UNARY_RHS:
913 tree rhs = gimple_assign_rhs1 (stmt);
915 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
918 /* If the operation was a conversion do _not_ mark a
919 resulting constant with TREE_OVERFLOW if the original
920 constant was not. These conversions have implementation
921 defined behavior and retaining the TREE_OVERFLOW flag
922 here would confuse later passes such as VRP. */
923 if (CONVERT_EXPR_CODE_P (subcode)
924 && TREE_CODE (result) == INTEGER_CST
925 && TREE_CODE (rhs) == INTEGER_CST)
926 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
928 STRIP_USELESS_TYPE_CONVERSION (result);
929 if (valid_gimple_rhs_p (result))
/* Pointer conversion: try to fold it into an address form.  */
932 else if (CONVERT_EXPR_CODE_P (subcode)
933 && POINTER_TYPE_P (gimple_expr_type (stmt))
934 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
936 tree type = gimple_expr_type (stmt);
937 tree t = maybe_fold_offset_to_address (loc,
938 gimple_assign_rhs1 (stmt),
939 integer_zero_node, type);
946 case GIMPLE_BINARY_RHS:
947 /* Try to fold pointer addition. */
948 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
950 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* For pointer-to-array, fold toward a pointer to the element
   type when that still converts uselessly to the LHS type.  */
951 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
953 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
954 if (!useless_type_conversion_p
955 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
956 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
958 result = maybe_fold_stmt_addition (gimple_location (stmt),
960 gimple_assign_rhs1 (stmt),
961 gimple_assign_rhs2 (stmt));
965 result = fold_binary_loc (loc, subcode,
966 TREE_TYPE (gimple_assign_lhs (stmt)),
967 gimple_assign_rhs1 (stmt),
968 gimple_assign_rhs2 (stmt));
972 STRIP_USELESS_TYPE_CONVERSION (result);
973 if (valid_gimple_rhs_p (result))
976 /* Fold might have produced non-GIMPLE, so if we trust it blindly
977 we lose canonicalization opportunities. Do not go again
978 through fold here though, or the same non-GIMPLE will be
980 if (commutative_tree_code (subcode)
981 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
982 gimple_assign_rhs2 (stmt), false))
983 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
984 gimple_assign_rhs2 (stmt),
985 gimple_assign_rhs1 (stmt));
989 case GIMPLE_TERNARY_RHS:
990 result = fold_ternary_loc (loc, subcode,
991 TREE_TYPE (gimple_assign_lhs (stmt)),
992 gimple_assign_rhs1 (stmt),
993 gimple_assign_rhs2 (stmt),
994 gimple_assign_rhs3 (stmt));
998 STRIP_USELESS_TYPE_CONVERSION (result);
999 if (valid_gimple_rhs_p (result))
1002 /* Fold might have produced non-GIMPLE, so if we trust it blindly
1003 we lose canonicalization opportunities. Do not go again
1004 through fold here though, or the same non-GIMPLE will be
1006 if (commutative_ternary_tree_code (subcode)
1007 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1008 gimple_assign_rhs2 (stmt), false))
1009 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
1010 gimple_assign_rhs2 (stmt),
1011 gimple_assign_rhs1 (stmt),
1012 gimple_assign_rhs3 (stmt));
1016 case GIMPLE_INVALID_RHS:
1023 /* Attempt to fold a conditional statement. Return true if any changes were
1024 made. We only attempt to fold the condition expression, and do not perform
1025 any transformation that would require alteration of the cfg. It is
1026 assumed that the operands have been previously folded. */
/* NOTE(review): elided dump — the false-return path is missing.  */
1029 fold_gimple_cond (gimple stmt)
/* Fold the comparison LHS <code> RHS as a binary expression.  */
1031 tree result = fold_binary_loc (gimple_location (stmt),
1032 gimple_cond_code (stmt),
1034 gimple_cond_lhs (stmt),
1035 gimple_cond_rhs (stmt));
1039 STRIP_USELESS_TYPE_CONVERSION (result);
/* Install the folded condition only if it is valid GIMPLE.  */
1040 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
1042 gimple_cond_set_condition_from_tree (stmt, result);
1050 /* Convert EXPR into a GIMPLE value suitable for substitution on the
1051 RHS of an assignment. Insert the necessary statements before
1052 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
1053 is replaced. If the call is expected to produces a result, then it
1054 is replaced by an assignment of the new RHS to the result variable.
1055 If the result is to be ignored, then the call is replaced by a
1056 GIMPLE_NOP. A proper VDEF chain is retained by making the first
1057 VUSE and the last VDEF of the whole sequence be the same as the replaced
1058 statement and using new SSA names for stores in between. */
/* NOTE(review): elided dump — loop braces and some statements are
   missing between the visible lines; read with the full file.  */
1061 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
1064 tree tmp = NULL_TREE; /* Silence warning. */
1065 gimple stmt, new_stmt;
1066 gimple_stmt_iterator i;
1067 gimple_seq stmts = gimple_seq_alloc();
1068 struct gimplify_ctx gctx;
1070 gimple laststore = NULL;
1073 stmt = gsi_stmt (*si_p);
1075 gcc_assert (is_gimple_call (stmt));
1077 lhs = gimple_call_lhs (stmt);
/* The replaced call's VUSE seeds the virtual-operand chain of the
   gimplified replacement sequence.  */
1078 reaching_vuse = gimple_vuse (stmt);
1080 push_gimplify_context (&gctx);
/* Without an LHS just gimplify EXPR; otherwise capture its value in
   a temporary to assign to the LHS afterwards.  */
1082 if (lhs == NULL_TREE)
1083 gimplify_and_add (expr, &stmts);
1085 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
1087 pop_gimplify_context (NULL);
1089 if (gimple_has_location (stmt))
1090 annotate_all_with_location (stmts, gimple_location (stmt));
1092 /* The replacement can expose previously unreferenced variables. */
1093 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
1097 gsi_insert_before (si_p, last, GSI_NEW_STMT);
1100 new_stmt = gsi_stmt (i);
1101 find_new_referenced_vars (new_stmt);
1102 mark_symbols_for_renaming (new_stmt);
1103 /* If the new statement has a VUSE, update it with exact SSA name we
1104 know will reach this one. */
1105 if (gimple_vuse (new_stmt))
1107 /* If we've also seen a previous store create a new VDEF for
1108 the latter one, and make that the new reaching VUSE. */
1111 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1112 gimple_set_vdef (laststore, reaching_vuse);
1113 update_stmt (laststore);
1116 gimple_set_vuse (new_stmt, reaching_vuse);
1117 gimple_set_modified (new_stmt, true);
/* Track the most recent memory store in the sequence.  */
1119 if (gimple_assign_single_p (new_stmt)
1120 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
1122 laststore = new_stmt;
1127 if (lhs == NULL_TREE)
1129 /* If we replace a call without LHS that has a VDEF and our new
1130 sequence ends with a store we must make that store have the same
1131 vdef in order not to break the sequencing. This can happen
1132 for instance when folding memcpy calls into assignments. */
1133 if (gimple_vdef (stmt) && laststore)
1135 gimple_set_vdef (laststore, gimple_vdef (stmt));
1136 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1137 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1138 update_stmt (laststore);
1142 unlink_stmt_vdef (stmt);
1143 release_defs (stmt);
1151 gsi_insert_before (si_p, last, GSI_NEW_STMT);
/* With an LHS: rewire the final store / final assignment to carry the
   original call's VDEF so downstream virtual uses stay valid.  */
1154 if (laststore && is_gimple_reg (lhs))
1156 gimple_set_vdef (laststore, gimple_vdef (stmt));
1157 update_stmt (laststore);
1158 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1159 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
1164 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
1165 gimple_set_vdef (laststore, reaching_vuse);
1166 update_stmt (laststore);
/* Replace the call with LHS = TMP.  */
1169 new_stmt = gimple_build_assign (lhs, tmp);
1170 if (!is_gimple_reg (tmp))
1171 gimple_set_vuse (new_stmt, reaching_vuse);
1172 if (!is_gimple_reg (lhs))
1174 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1175 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
1176 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
1180 gimple_set_location (new_stmt, gimple_location (stmt));
1181 gsi_replace (si_p, new_stmt, false);
1184 /* Return the string length, maximum string length or maximum value of
1186 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1187 is not NULL and, for TYPE == 0, its value is not equal to the length
1188 we determine or if we are unable to determine the length or value,
1189 return false. VISITED is a bitmap of visited variables.
1190 TYPE is 0 if string length should be returned, 1 for maximum string
1191 length and 2 for maximum value ARG can have. */
/* NOTE(review): this extraction is non-contiguous (embedded original line
   numbers, missing lines such as closing braces); comments below describe
   only what the visible statements show.  */
1194 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
/* Non-SSA operands: handle COND_EXPR by requiring both arms to agree,
   and the &(*p)[0] address form by recursing on the pointer.  */
1199 if (TREE_CODE (arg) != SSA_NAME)
1201 if (TREE_CODE (arg) == COND_EXPR)
1202 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
1203 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
1204 /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
1205 else if (TREE_CODE (arg) == ADDR_EXPR
1206 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1207 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1209 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1210 if (TREE_CODE (aop0) == INDIRECT_REF
1211 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1212 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1213 length, visited, type);
/* Only non-negative INTEGER_CST values are acceptable here.  */
1219 if (TREE_CODE (val) != INTEGER_CST
1220 || tree_int_cst_sgn (val) < 0)
1224 val = c_strlen (arg, 1);
/* Merge VAL into *LENGTH: for the max-length queries keep the larger
   constant; otherwise the two lengths must be identical.  */
1232 if (TREE_CODE (*length) != INTEGER_CST
1233 || TREE_CODE (val) != INTEGER_CST)
1236 if (tree_int_cst_lt (*length, val))
1240 else if (simple_cst_equal (val, *length) != 1)
1248 /* If we were already here, break the infinite cycle.  */
1249 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
1251 bitmap_set_bit (visited, SSA_NAME_VERSION (arg))
1254 def_stmt = SSA_NAME_DEF_STMT (var);
1256 switch (gimple_code (def_stmt))
1259 /* The RHS of the statement defining VAR must either have a
1260 constant length or come from another SSA_NAME with a constant
/* Copies and no-op conversions: follow the single RHS operand.  */
1262 if (gimple_assign_single_p (def_stmt)
1263 || gimple_assign_unary_nop_p (def_stmt))
1265 tree rhs = gimple_assign_rhs1 (def_stmt);
1266 return get_maxval_strlen (rhs, length, visited, type);
1272 /* All the arguments of the PHI node must have the same constant
1276 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1278 tree arg = gimple_phi_arg (def_stmt, i)->def;
1280 /* If this PHI has itself as an argument, we cannot
1281 determine the string length of this argument. However,
1282 if we can find a constant string length for the other
1283 PHI args then we can still be sure that this is a
1284 constant string length. So be optimistic and just
1285 continue with the next argument.  */
1286 if (arg == gimple_phi_result (def_stmt))
1289 if (!get_maxval_strlen (arg, length, visited, type))
1301 /* Fold builtin call in statement STMT. Returns a simplified tree.
1302 We may return a non-constant expression, including another call
1303 to a different function and with different arguments, e.g.,
1304 substituting memcpy for strcpy when the string length is known.
1305 Note that some builtins expand into inline code that may not
1306 be valid in GIMPLE. Callers must take care. */
/* Fold the builtin call STMT; see the block comment above for the contract.
   NOTE(review): extraction is non-contiguous (missing lines); comments only
   restate what the visible statements establish.  */
1309 gimple_fold_builtin (gimple stmt)
1311 tree result, val[3];
1317 location_t loc = gimple_location (stmt);
1319 gcc_assert (is_gimple_call (stmt));
/* IGNORE is set when the call's value is unused (no LHS).  */
1321 ignore = (gimple_call_lhs (stmt) == NULL);
1323 /* First try the generic builtin folder. If that succeeds, return the
1325 result = fold_call_stmt (stmt, ignore);
1329 STRIP_NOPS (result);
1333 /* Ignore MD builtins.  */
1334 callee = gimple_call_fndecl (stmt);
1335 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1338 /* If the builtin could not be folded, and it has no argument list,
1340 nargs = gimple_call_num_args (stmt);
1344 /* Limit the work only for builtins we know how to simplify.  */
/* First switch: select which argument's string length / value to query
   (ARG_IDX and TYPE for get_maxval_strlen are set in the elided case
   bodies).  */
1345 switch (DECL_FUNCTION_CODE (callee))
1347 case BUILT_IN_STRLEN:
1348 case BUILT_IN_FPUTS:
1349 case BUILT_IN_FPUTS_UNLOCKED:
1353 case BUILT_IN_STRCPY:
1354 case BUILT_IN_STRNCPY:
1358 case BUILT_IN_MEMCPY_CHK:
1359 case BUILT_IN_MEMPCPY_CHK:
1360 case BUILT_IN_MEMMOVE_CHK:
1361 case BUILT_IN_MEMSET_CHK:
1362 case BUILT_IN_STRNCPY_CHK:
1366 case BUILT_IN_STRCPY_CHK:
1367 case BUILT_IN_STPCPY_CHK:
1371 case BUILT_IN_SNPRINTF_CHK:
1372 case BUILT_IN_VSNPRINTF_CHK:
1380 if (arg_idx >= nargs)
1383 /* Try to use the dataflow information gathered by the CCP process.  */
1384 visited = BITMAP_ALLOC (NULL);
1385 bitmap_clear (visited);
1387 memset (val, 0, sizeof (val));
1388 a = gimple_call_arg (stmt, arg_idx);
1389 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1390 val[arg_idx] = NULL_TREE;
1392 BITMAP_FREE (visited);
/* Second switch: actually fold, guarded by the discovered length VAL[]
   being a valid gimple value and by the expected argument count.  */
1395 switch (DECL_FUNCTION_CODE (callee))
1397 case BUILT_IN_STRLEN:
1398 if (val[0] && nargs == 1)
1401 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1403 /* If the result is not a valid gimple value, or not a cast
1404 of a valid gimple value, then we can not use the result.  */
1405 if (is_gimple_val (new_val)
1406 || (is_gimple_cast (new_val)
1407 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1412 case BUILT_IN_STRCPY:
1413 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1414 result = fold_builtin_strcpy (loc, callee,
1415 gimple_call_arg (stmt, 0),
1416 gimple_call_arg (stmt, 1),
1420 case BUILT_IN_STRNCPY:
1421 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1422 result = fold_builtin_strncpy (loc, callee,
1423 gimple_call_arg (stmt, 0),
1424 gimple_call_arg (stmt, 1),
1425 gimple_call_arg (stmt, 2),
1429 case BUILT_IN_FPUTS:
1431 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1432 gimple_call_arg (stmt, 1),
1433 ignore, false, val[0]);
1436 case BUILT_IN_FPUTS_UNLOCKED:
1438 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1439 gimple_call_arg (stmt, 1),
1440 ignore, true, val[0]);
1443 case BUILT_IN_MEMCPY_CHK:
1444 case BUILT_IN_MEMPCPY_CHK:
1445 case BUILT_IN_MEMMOVE_CHK:
1446 case BUILT_IN_MEMSET_CHK:
1447 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1448 result = fold_builtin_memory_chk (loc, callee,
1449 gimple_call_arg (stmt, 0),
1450 gimple_call_arg (stmt, 1),
1451 gimple_call_arg (stmt, 2),
1452 gimple_call_arg (stmt, 3),
1454 DECL_FUNCTION_CODE (callee));
1457 case BUILT_IN_STRCPY_CHK:
1458 case BUILT_IN_STPCPY_CHK:
1459 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1460 result = fold_builtin_stxcpy_chk (loc, callee,
1461 gimple_call_arg (stmt, 0),
1462 gimple_call_arg (stmt, 1),
1463 gimple_call_arg (stmt, 2),
1465 DECL_FUNCTION_CODE (callee));
1468 case BUILT_IN_STRNCPY_CHK:
1469 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1470 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1471 gimple_call_arg (stmt, 1),
1472 gimple_call_arg (stmt, 2),
1473 gimple_call_arg (stmt, 3),
1477 case BUILT_IN_SNPRINTF_CHK:
1478 case BUILT_IN_VSNPRINTF_CHK:
1479 if (val[1] && is_gimple_val (val[1]))
1480 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1481 DECL_FUNCTION_CODE (callee));
/* A folded result whose value is unused can be stripped down.  */
1488 if (result && ignore)
1489 result = fold_ignored_result (result);
1493 /* Return the first of the base binfos of BINFO that has virtual functions. */
/* Iterate over BINFO's direct bases and pick the first one that declares
   virtual functions (BINFO_VIRTUALS non-null); the elided tail presumably
   returns it, or NULL when none is found -- TODO confirm against full
   source.  */
1496 get_first_base_binfo_with_virtuals (tree binfo)
1501 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1502 if (BINFO_VIRTUALS (base_binfo))
1509 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1510 it is found or NULL_TREE if it is not. */
/* Linear scan of BINFO's direct bases for one whose TREE_TYPE is exactly
   TYPE (pointer comparison); RES starts as NULL_TREE so the not-found
   case yields NULL_TREE, matching the comment above.  */
1513 get_base_binfo_for_type (tree binfo, tree type)
1517 tree res = NULL_TREE;
1519 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1520 if (TREE_TYPE (base_binfo) == type)
1529 /* Return a binfo describing the part of object referenced by expression REF.
1530 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1531 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1532 a simple declaration, indirect reference or an SSA_NAME. If the function
1533 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1534 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1535 Otherwise the first non-artificial field declaration or the base declaration
1536 will be examined to get the encapsulating type. */
/* Walk REF (see the contract in the comment above): peel COMPONENT_REFs,
   using field/base binfo information to descend, until a declaration,
   SSA_NAME or INDIRECT_REF is reached.  NOTE(review): several lines are
   missing from this extraction; the loop structure is inferred from the
   visible "ref = TREE_OPERAND (ref, 0)" step.  */
1539 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1543 if (TREE_CODE (ref) == COMPONENT_REF)
1546 tree binfo, base_binfo;
1547 tree field = TREE_OPERAND (ref, 1);
/* A user-declared field of record type directly yields its binfo.  */
1549 if (!DECL_ARTIFICIAL (field))
1551 tree type = TREE_TYPE (field);
1552 if (TREE_CODE (type) == RECORD_TYPE)
1553 return TYPE_BINFO (type);
/* Artificial field: consult the enclosing type's binfo and its bases.  */
1558 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1559 binfo = TYPE_BINFO (par_type);
1561 || BINFO_N_BASE_BINFOS (binfo) == 0)
1564 base_binfo = get_first_base_binfo_with_virtuals (binfo);
1565 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
/* Recurse on the object expression, then select the base binfo that
   corresponds to the field's type.  */
1569 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1571 /* Get descendant binfo.  */
1574 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1577 ref = TREE_OPERAND (ref, 0);
1579 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1580 return TYPE_BINFO (TREE_TYPE (ref));
/* For SSA names and indirect refs we can only trust KNOWN_BINFO.  */
1581 else if (known_binfo
1582 && (TREE_CODE (ref) == SSA_NAME
1583 || TREE_CODE (ref) == INDIRECT_REF))
1590 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1591 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1592 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
/* Look up entry TOKEN in KNOWN_BINFO's virtual-function list and return
   the function's address.  Each vtable slot occupies
   TARGET_VTABLE_USES_DESCRIPTORS entries when descriptors are in use,
   otherwise one (the stepping of V itself is in elided lines).  */
1595 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1600 v = BINFO_VIRTUALS (known_binfo);
1604 i += (TARGET_VTABLE_USES_DESCRIPTORS
1605 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1609 fndecl = TREE_VALUE (v);
1610 return build_fold_addr_expr (fndecl);
1614 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1615 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1616 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1617 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1618 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
/* Devirtualize REF (see contract above): determine the object's binfo,
   then resolve the vtable token to a concrete function address.  */
1621 gimple_fold_obj_type_ref (tree ref, tree known_type)
1623 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1624 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
/* Strip the address-of so the binfo walk sees the object itself.  */
1627 if (TREE_CODE (obj) == ADDR_EXPR)
1628 obj = TREE_OPERAND (obj, 0);
1630 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1633 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1634 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1640 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1641 The statement may be replaced by another statement, e.g., if the call
1642 simplifies to a constant value. Return true if any changes were made.
1643 It is assumed that the operands have been previously folded. */
/* Fold the call at *GSI (see comment above).  Two strategies: builtin
   folding via gimple_fold_builtin, and OBJ_TYPE_REF devirtualization.  */
1646 fold_gimple_call (gimple_stmt_iterator *gsi)
1648 gimple stmt = gsi_stmt (*gsi);
1650 tree callee = gimple_call_fndecl (stmt);
1652 /* Check for builtins that CCP can handle using information not
1653 available in the generic fold routines.  */
1654 if (callee && DECL_BUILT_IN (callee))
1656 tree result = gimple_fold_builtin (stmt);
/* Prefer the simple in-place update; fall back to regimplifying the
   folded tree when it is not directly usable as a call replacement.  */
1660 if (!update_call_from_tree (gsi, result))
1661 gimplify_and_update_call_from_tree (gsi, result);
1667 /* ??? Should perhaps do this in fold proper. However, doing it
1668 there requires that we create a new CALL_EXPR, and that requires
1669 copying EH region info to the new node. Easier to just do it
1670 here where we can just smash the call operand.  */
1671 /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
1672 callee = gimple_call_fn (stmt);
1673 if (TREE_CODE (callee) == OBJ_TYPE_REF
1674 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1678 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1681 gimple_call_set_fn (stmt, t);
1690 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1691 distinguishes both cases. */
/* Shared worker for fold_stmt/fold_stmt_inplace; INPLACE forbids
   replacing the statement wholesale (gating code is in elided lines).  */
1694 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1696 bool changed = false;
1697 gimple stmt = gsi_stmt (*gsi);
1700 /* Fold the main computation performed by the statement.  */
1701 switch (gimple_code (stmt))
/* GIMPLE_ASSIGN: fold the RHS, converting its type back to the LHS type
   if folding changed it, then install it when legal.  */
1705 unsigned old_num_ops = gimple_num_ops (stmt);
1706 tree new_rhs = fold_gimple_assign (gsi);
1707 tree lhs = gimple_assign_lhs (stmt);
1709 && !useless_type_conversion_p (TREE_TYPE (lhs),
1710 TREE_TYPE (new_rhs)))
1711 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding must not need more operand slots than the statement
   already has.  */
1714 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1716 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1723 changed |= fold_gimple_cond (stmt);
1727 /* Fold *& in call arguments.  */
1728 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1729 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1731 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1734 gimple_call_set_arg (stmt, i, tmp);
1738 /* The entire statement may be replaced in this case.  */
1740 changed |= fold_gimple_call (gsi);
1744 /* Fold *& in asm operands.  */
/* Outputs are lvalues (is_lhs = true to maybe_fold_reference).  */
1745 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1747 tree link = gimple_asm_output_op (stmt, i);
1748 tree op = TREE_VALUE (link);
1749 if (REFERENCE_CLASS_P (op)
1750 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1752 TREE_VALUE (link) = op;
/* Inputs are rvalues (is_lhs = false).  */
1756 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1758 tree link = gimple_asm_input_op (stmt, i);
1759 tree op = TREE_VALUE (link);
1760 if (REFERENCE_CLASS_P (op)
1761 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1763 TREE_VALUE (link) = op;
/* Re-fetch: the statement may have been replaced above.  */
1772 stmt = gsi_stmt (*gsi);
1774 /* Fold *& on the lhs.  */
1775 if (gimple_has_lhs (stmt))
1777 tree lhs = gimple_get_lhs (stmt);
1778 if (lhs && REFERENCE_CLASS_P (lhs))
1780 tree new_lhs = maybe_fold_reference (lhs, true);
1783 gimple_set_lhs (stmt, new_lhs);
1792 /* Fold the statement pointed to by GSI. In some cases, this function may
1793 replace the whole statement with a new one. Returns true iff folding
1795 The statement pointed to by GSI should be in valid gimple form but may
1796 be in unfolded state as resulting from for example constant propagation
1797 which can produce *&x = 0. */
/* Public entry point: full (possibly statement-replacing) folding,
   i.e. fold_stmt_1 with INPLACE == false.  */
1800 fold_stmt (gimple_stmt_iterator *gsi)
1802 return fold_stmt_1 (gsi, false);
1805 /* Perform the minimal folding on statement STMT. Only operations like
1806 *&x created by constant propagation are handled. The statement cannot
1807 be replaced with a new one. Return true if the statement was
1808 changed, false otherwise.
1809 The statement STMT should be in valid gimple form but may
1810 be in unfolded state as resulting from for example constant propagation
1811 which can produce *&x = 0. */
/* Public entry point: minimal in-place folding of STMT.  The assert
   checks the worker honored the no-replacement contract.  */
1814 fold_stmt_inplace (gimple stmt)
1816 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1817 bool changed = fold_stmt_1 (&gsi, true);
1818 gcc_assert (gsi_stmt (gsi) == stmt);
1822 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1823 if EXPR is null or we don't know how.
1824 If non-null, the result always has boolean type. */
/* Canonicalize EXPR to boolean type; INVERT additionally negates it.
   The two visible branches are the inverted and non-inverted cases.  */
1827 canonicalize_bool (tree expr, bool invert)
/* Inverted case: constants flip, an SSA name becomes (expr == 0), and a
   comparison gets its code inverted via invert_tree_comparison.  */
1833 if (integer_nonzerop (expr))
1834 return boolean_false_node;
1835 else if (integer_zerop (expr))
1836 return boolean_true_node;
1837 else if (TREE_CODE (expr) == SSA_NAME)
1838 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1839 build_int_cst (TREE_TYPE (expr), 0));
1840 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1841 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1843 TREE_OPERAND (expr, 0),
1844 TREE_OPERAND (expr, 1));
/* Non-inverted case: already-boolean expressions pass through (elided);
   otherwise map constants, wrap SSA names as (expr != 0), and rebuild
   comparisons with boolean type.  */
1850 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1852 if (integer_nonzerop (expr))
1853 return boolean_true_node;
1854 else if (integer_zerop (expr))
1855 return boolean_false_node;
1856 else if (TREE_CODE (expr) == SSA_NAME)
1857 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1858 build_int_cst (TREE_TYPE (expr), 0));
1859 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1860 return fold_build2 (TREE_CODE (expr),
1862 TREE_OPERAND (expr, 0),
1863 TREE_OPERAND (expr, 1));
1869 /* Check to see if a boolean expression EXPR is logically equivalent to the
1870 comparison (OP1 CODE OP2). Check for various identities involving
/* Return true if EXPR is logically equivalent to (OP1 CODE OP2); see
   the comment above.  Checks a direct structural match, SSA-name
   definitions, and recurses through (name == 0)/(name != 0) forms.  */
1874 same_bool_comparison_p (const_tree expr, enum tree_code code,
1875 const_tree op1, const_tree op2)
1879 /* The obvious case.  */
1880 if (TREE_CODE (expr) == code
1881 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1882 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1885 /* Check for comparing (name, name != 0) and the case where expr
1886 is an SSA_NAME with a definition matching the comparison.  */
1887 if (TREE_CODE (expr) == SSA_NAME
1888 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1890 if (operand_equal_p (expr, op1, 0))
1891 return ((code == NE_EXPR && integer_zerop (op2))
1892 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise compare against EXPR's defining assignment.  */
1893 s = SSA_NAME_DEF_STMT (expr);
1894 if (is_gimple_assign (s)
1895 && gimple_assign_rhs_code (s) == code
1896 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1897 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1901 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1902 of name is a comparison, recurse.  */
1903 if (TREE_CODE (op1) == SSA_NAME
1904 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1906 s = SSA_NAME_DEF_STMT (op1);
1907 if (is_gimple_assign (s)
1908 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1910 enum tree_code c = gimple_assign_rhs_code (s);
/* (cmp != 0) and (cmp == 1) preserve the comparison's sense ...  */
1911 if ((c == NE_EXPR && integer_zerop (op2))
1912 || (c == EQ_EXPR && integer_nonzerop (op2)))
1913 return same_bool_comparison_p (expr, c,
1914 gimple_assign_rhs1 (s),
1915 gimple_assign_rhs2 (s));
/* ... while (cmp == 0) and (cmp != 1) invert it.  */
1916 if ((c == EQ_EXPR && integer_zerop (op2))
1917 || (c == NE_EXPR && integer_nonzerop (op2)))
1918 return same_bool_comparison_p (expr,
1919 invert_tree_comparison (c, false),
1920 gimple_assign_rhs1 (s),
1921 gimple_assign_rhs2 (s));
1927 /* Check to see if two boolean expressions OP1 and OP2 are logically
/* Return true if boolean expressions OP1 and OP2 are provably equal;
   symmetric wrapper over same_bool_comparison_p.  */
1931 same_bool_result_p (const_tree op1, const_tree op2)
1933 /* Simple cases first.  */
1934 if (operand_equal_p (op1, op2, 0))
1937 /* Check the cases where at least one of the operands is a comparison.
1938 These are a bit smarter than operand_equal_p in that they apply some
1939 identities on SSA_NAMEs.  */
1940 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1941 && same_bool_comparison_p (op1, TREE_CODE (op2),
1942 TREE_OPERAND (op2, 0),
1943 TREE_OPERAND (op2, 1)))
1945 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1946 && same_bool_comparison_p (op2, TREE_CODE (op1),
1947 TREE_OPERAND (op1, 0),
1948 TREE_OPERAND (op1, 1)))
1955 /* Forward declarations for some mutually recursive functions. */
1958 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1959 enum tree_code code2, tree op2a, tree op2b);
1961 and_var_with_comparison (tree var, bool invert,
1962 enum tree_code code2, tree op2a, tree op2b);
1964 and_var_with_comparison_1 (gimple stmt,
1965 enum tree_code code2, tree op2a, tree op2b);
1967 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1968 enum tree_code code2, tree op2a, tree op2b);
1970 or_var_with_comparison (tree var, bool invert,
1971 enum tree_code code2, tree op2a, tree op2b);
1973 or_var_with_comparison_1 (gimple stmt,
1974 enum tree_code code2, tree op2a, tree op2b);
1976 /* Helper function for and_comparisons_1: try to simplify the AND of the
1977 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1978 If INVERT is true, invert the value of the VAR before doing the AND.
1979 Return NULL_TREE if we can't simplify this to a single expression. */
/* AND the ssa name VAR with (OP2A CODE2 OP2B); INVERT negates VAR first
   (contract in the comment above).  */
1982 and_var_with_comparison (tree var, bool invert,
1983 enum tree_code code2, tree op2a, tree op2b)
1986 gimple stmt = SSA_NAME_DEF_STMT (var);
1988 /* We can only deal with variables whose definitions are assignments.  */
1989 if (!is_gimple_assign (stmt))
1992 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1993 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1994 Then we only have to consider the simpler non-inverted cases.  */
1996 t = or_var_with_comparison_1 (stmt,
1997 invert_tree_comparison (code2, false),
2000 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion from the DeMorgan step.  */
2001 return canonicalize_bool (t, invert);
2004 /* Try to simplify the AND of the ssa variable defined by the assignment
2005 STMT with the comparison specified by (OP2A CODE2 OP2B).
2006 Return NULL_TREE if we can't simplify this to a single expression. */
/* AND the assignment STMT's lhs with (OP2A CODE2 OP2B); see the comment
   above.  NOTE(review): non-contiguous extraction -- several result
   returns live in elided lines.  */
2009 and_var_with_comparison_1 (gimple stmt,
2010 enum tree_code code2, tree op2a, tree op2b)
2012 tree var = gimple_assign_lhs (stmt);
2013 tree true_test_var = NULL_TREE;
2014 tree false_test_var = NULL_TREE;
2015 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2017 /* Check for identities like (var AND (var == 0)) => false.  */
/* TRUE_TEST_VAR / FALSE_TEST_VAR record whether the second operand is a
   truth test of some boolean SSA name, for the identities below.  */
2018 if (TREE_CODE (op2a) == SSA_NAME
2019 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2021 if ((code2 == NE_EXPR && integer_zerop (op2b))
2022 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2024 true_test_var = op2a;
2025 if (var == true_test_var)
2028 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2029 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2031 false_test_var = op2a;
2032 if (var == false_test_var)
2033 return boolean_false_node;
2037 /* If the definition is a comparison, recurse on it.  */
2038 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2040 tree t = and_comparisons_1 (innercode,
2041 gimple_assign_rhs1 (stmt),
2042 gimple_assign_rhs2 (stmt),
2050 /* If the definition is an AND or OR expression, we may be able to
2051 simplify by reassociating.  */
/* BIT_AND/BIT_IOR are only equivalent to logical AND/OR on booleans,
   hence the BOOLEAN_TYPE guard.  */
2052 if (innercode == TRUTH_AND_EXPR
2053 || innercode == TRUTH_OR_EXPR
2054 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2055 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2057 tree inner1 = gimple_assign_rhs1 (stmt);
2058 tree inner2 = gimple_assign_rhs2 (stmt);
2061 tree partial = NULL_TREE;
2062 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
2064 /* Check for boolean identities that don't require recursive examination
2066 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
2067 inner1 AND (inner1 OR inner2) => inner1
2068 !inner1 AND (inner1 AND inner2) => false
2069 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
2070 Likewise for similar cases involving inner2.  */
2071 if (inner1 == true_test_var)
2072 return (is_and ? var : inner1);
2073 else if (inner2 == true_test_var)
2074 return (is_and ? var : inner2);
2075 else if (inner1 == false_test_var)
2077 ? boolean_false_node
2078 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
2079 else if (inner2 == false_test_var)
2081 ? boolean_false_node
2082 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
2084 /* Next, redistribute/reassociate the AND across the inner tests.
2085 Compute the first partial result, (inner1 AND (op2a code op2b))  */
2086 if (TREE_CODE (inner1) == SSA_NAME
2087 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2088 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2089 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2090 gimple_assign_rhs1 (s),
2091 gimple_assign_rhs2 (s),
2092 code2, op2a, op2b)))
2094 /* Handle the AND case, where we are reassociating:
2095 (inner1 AND inner2) AND (op2a code2 op2b)
2097 If the partial result t is a constant, we win. Otherwise
2098 continue on to try reassociating with the other inner test.  */
2101 if (integer_onep (t))
2103 else if (integer_zerop (t))
2104 return boolean_false_node;
2107 /* Handle the OR case, where we are redistributing:
2108 (inner1 OR inner2) AND (op2a code2 op2b)
2109 => (t OR (inner2 AND (op2a code2 op2b)))  */
2112 if (integer_onep (t))
2113 return boolean_true_node;
2115 /* Save partial result for later.  */
2120 /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
2121 if (TREE_CODE (inner2) == SSA_NAME
2122 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2123 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2124 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2125 gimple_assign_rhs1 (s),
2126 gimple_assign_rhs2 (s),
2127 code2, op2a, op2b)))
2129 /* Handle the AND case, where we are reassociating:
2130 (inner1 AND inner2) AND (op2a code2 op2b)
2131 => (inner1 AND t)  */
2134 if (integer_onep (t))
2136 else if (integer_zerop (t))
2137 return boolean_false_node;
2140 /* Handle the OR case. where we are redistributing:
2141 (inner1 OR inner2) AND (op2a code2 op2b)
2142 => (t OR (inner1 AND (op2a code2 op2b)))
2143 => (t OR partial)  */
2146 if (integer_onep (t))
2147 return boolean_true_node;
2150 /* We already got a simplification for the other
2151 operand to the redistributed OR expression. The
2152 interesting case is when at least one is false.
2153 Or, if both are the same, we can apply the identity
2155 if (integer_zerop (partial))
2157 else if (integer_zerop (t))
2159 else if (same_bool_result_p (t, partial))
2168 /* Try to simplify the AND of two comparisons defined by
2169 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2170 If this can be done without constructing an intermediate value,
2171 return the resulting tree; otherwise NULL_TREE is returned.
2172 This function is deliberately asymmetric as it recurses on SSA_DEFs
2173 in the first comparison but not the second. */
/* AND of two comparisons; see the contract comment above.  Asymmetric:
   recurses through op1a's SSA definition only.  NOTE(review):
   non-contiguous extraction -- switch headers, braces and some returns
   are in elided lines.  */
2176 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2177 enum tree_code code2, tree op2a, tree op2b)
2179 /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
2180 if (operand_equal_p (op1a, op2a, 0)
2181 && operand_equal_p (op1b, op2b, 0))
2183 tree t = combine_comparisons (UNKNOWN_LOCATION,
2184 TRUTH_ANDIF_EXPR, code1, code2,
2185 boolean_type_node, op1a, op1b);
2190 /* Likewise the swapped case of the above.  */
2191 if (operand_equal_p (op1a, op2b, 0)
2192 && operand_equal_p (op1b, op2a, 0))
2194 tree t = combine_comparisons (UNKNOWN_LOCATION,
2195 TRUTH_ANDIF_EXPR, code1,
2196 swap_tree_comparison (code2),
2197 boolean_type_node, op1a, op1b);
2202 /* If both comparisons are of the same value against constants, we might
2203 be able to merge them.  */
2204 if (operand_equal_p (op1a, op2a, 0)
2205 && TREE_CODE (op1b) == INTEGER_CST
2206 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants; all case analyses below key off it.  */
2208 int cmp = tree_int_cst_compare (op1b, op2b);
2210 /* If we have (op1a == op1b), we should either be able to
2211 return that or FALSE, depending on whether the constant op1b
2212 also satisfies the other comparison against op2b.  */
2213 if (code1 == EQ_EXPR)
2219 case EQ_EXPR: val = (cmp == 0); break;
2220 case NE_EXPR: val = (cmp != 0); break;
2221 case LT_EXPR: val = (cmp < 0); break;
2222 case GT_EXPR: val = (cmp > 0); break;
2223 case LE_EXPR: val = (cmp <= 0); break;
2224 case GE_EXPR: val = (cmp >= 0); break;
2225 default: done = false;
2230 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2232 return boolean_false_node;
2235 /* Likewise if the second comparison is an == comparison.  */
2236 else if (code2 == EQ_EXPR)
2242 case EQ_EXPR: val = (cmp == 0); break;
2243 case NE_EXPR: val = (cmp != 0); break;
2244 case LT_EXPR: val = (cmp > 0); break;
2245 case GT_EXPR: val = (cmp < 0); break;
2246 case LE_EXPR: val = (cmp >= 0); break;
2247 case GE_EXPR: val = (cmp <= 0); break;
2248 default: done = false;
2253 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2255 return boolean_false_node;
2259 /* Same business with inequality tests.  */
2260 else if (code1 == NE_EXPR)
2265 case EQ_EXPR: val = (cmp != 0); break;
2266 case NE_EXPR: val = (cmp == 0); break;
2267 case LT_EXPR: val = (cmp >= 0); break;
2268 case GT_EXPR: val = (cmp <= 0); break;
2269 case LE_EXPR: val = (cmp > 0); break;
2270 case GE_EXPR: val = (cmp < 0); break;
2275 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2277 else if (code2 == NE_EXPR)
2282 case EQ_EXPR: val = (cmp == 0); break;
2283 case NE_EXPR: val = (cmp != 0); break;
2284 case LT_EXPR: val = (cmp <= 0); break;
2285 case GT_EXPR: val = (cmp >= 0); break;
2286 case LE_EXPR: val = (cmp < 0); break;
2287 case GE_EXPR: val = (cmp > 0); break;
2292 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2295 /* Choose the more restrictive of two < or <= comparisons.  */
2296 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2297 && (code2 == LT_EXPR || code2 == LE_EXPR))
2299 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2300 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2302 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2305 /* Likewise choose the more restrictive of two > or >= comparisons.  */
2306 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2307 && (code2 == GT_EXPR || code2 == GE_EXPR))
2309 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2310 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2312 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2315 /* Check for singleton ranges.  */
2317 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2318 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2319 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2321 /* Check for disjoint ranges.  */
2323 && (code1 == LT_EXPR || code1 == LE_EXPR)
2324 && (code2 == GT_EXPR || code2 == GE_EXPR))
2325 return boolean_false_node;
2327 && (code1 == GT_EXPR || code1 == GE_EXPR)
2328 && (code2 == LT_EXPR || code2 == LE_EXPR))
2329 return boolean_false_node;
2332 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2333 NAME's definition is a truth value. See if there are any simplifications
2334 that can be done against the NAME's definition.  */
2335 if (TREE_CODE (op1a) == SSA_NAME
2336 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2337 && (integer_zerop (op1b) || integer_onep (op1b)))
/* (NAME == 0) and (NAME != 1) test the negation of NAME.  */
2339 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2340 || (code1 == NE_EXPR && integer_onep (op1b)));
2341 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2342 switch (gimple_code (stmt))
2345 /* Try to simplify by copy-propagating the definition.  */
2346 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2349 /* If every argument to the PHI produces the same result when
2350 ANDed with the second comparison, we win.
2351 Do not do this unless the type is bool since we need a bool
2352 result here anyway.  */
2353 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2355 tree result = NULL_TREE;
2357 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2359 tree arg = gimple_phi_arg_def (stmt, i);
2361 /* If this PHI has itself as an argument, ignore it.
2362 If all the other args produce the same result,
2364 if (arg == gimple_phi_result (stmt))
2366 else if (TREE_CODE (arg) == INTEGER_CST)
2368 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2371 result = boolean_false_node;
2372 else if (!integer_zerop (result))
2376 result = fold_build2 (code2, boolean_type_node,
2378 else if (!same_bool_comparison_p (result,
2382 else if (TREE_CODE (arg) == SSA_NAME)
2384 tree temp = and_var_with_comparison (arg, invert,
2390 else if (!same_bool_result_p (result, temp))
2406 /* Try to simplify the AND of two comparisons, specified by
2407 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2408 If this can be simplified to a single expression (without requiring
2409 introducing more SSA variables to hold intermediate values),
2410 return the resulting tree. Otherwise return NULL_TREE.
2411 If the result expression is non-null, it has boolean type. */
/* Public entry: try the asymmetric worker both ways, since
   and_comparisons_1 only follows SSA definitions of its first
   comparison.  */
2414 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2415 enum tree_code code2, tree op2a, tree op2b)
2417 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2421 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2424 /* Helper function for or_comparisons_1: try to simplify the OR of the
2425 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2426 If INVERT is true, invert the value of VAR before doing the OR.
2427 Return NULL_TREE if we can't simplify this to a single expression. */
/* OR the ssa name VAR with (OP2A CODE2 OP2B); INVERT negates VAR first.
   Mirror image of and_var_with_comparison.  */
2430 or_var_with_comparison (tree var, bool invert,
2431 enum tree_code code2, tree op2a, tree op2b)
2434 gimple stmt = SSA_NAME_DEF_STMT (var);
2436 /* We can only deal with variables whose definitions are assignments.  */
2437 if (!is_gimple_assign (stmt))
2440 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2441 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2442 Then we only have to consider the simpler non-inverted cases.  */
2444 t = and_var_with_comparison_1 (stmt,
2445 invert_tree_comparison (code2, false),
2448 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool re-applies the inversion from the DeMorgan step.  */
2449 return canonicalize_bool (t, invert);
2452 /* Try to simplify the OR of the ssa variable defined by the assignment
2453 STMT with the comparison specified by (OP2A CODE2 OP2B).
2454 Return NULL_TREE if we can't simplify this to a single expression. */
2457 or_var_with_comparison_1 (gimple stmt,
2458 enum tree_code code2, tree op2a, tree op2b)
2460 tree var = gimple_assign_lhs (stmt);
2461 tree true_test_var = NULL_TREE;
2462 tree false_test_var = NULL_TREE;
2463 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2465 /* Check for identities like (var OR (var != 0)) => true . */
2466 if (TREE_CODE (op2a) == SSA_NAME
2467 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2469 if ((code2 == NE_EXPR && integer_zerop (op2b))
2470 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2472 true_test_var = op2a;
2473 if (var == true_test_var)
2476 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2477 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2479 false_test_var = op2a;
2480 if (var == false_test_var)
2481 return boolean_true_node;
2485 /* If the definition is a comparison, recurse on it. */
2486 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2488 tree t = or_comparisons_1 (innercode,
2489 gimple_assign_rhs1 (stmt),
2490 gimple_assign_rhs2 (stmt),
2498 /* If the definition is an AND or OR expression, we may be able to
2499 simplify by reassociating. */
2500 if (innercode == TRUTH_AND_EXPR
2501 || innercode == TRUTH_OR_EXPR
2502 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2503 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2505 tree inner1 = gimple_assign_rhs1 (stmt);
2506 tree inner2 = gimple_assign_rhs2 (stmt);
2509 tree partial = NULL_TREE;
2510 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2512 /* Check for boolean identities that don't require recursive examination
2514 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2515 inner1 OR (inner1 AND inner2) => inner1
2516 !inner1 OR (inner1 OR inner2) => true
2517 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2519 if (inner1 == true_test_var)
2520 return (is_or ? var : inner1);
2521 else if (inner2 == true_test_var)
2522 return (is_or ? var : inner2);
2523 else if (inner1 == false_test_var)
2526 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2527 else if (inner2 == false_test_var)
2530 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2532 /* Next, redistribute/reassociate the OR across the inner tests.
2533 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2534 if (TREE_CODE (inner1) == SSA_NAME
2535 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2536 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2537 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2538 gimple_assign_rhs1 (s),
2539 gimple_assign_rhs2 (s),
2540 code2, op2a, op2b)))
2542 /* Handle the OR case, where we are reassociating:
2543 (inner1 OR inner2) OR (op2a code2 op2b)
2545 If the partial result t is a constant, we win. Otherwise
2546 continue on to try reassociating with the other inner test. */
2547 if (innercode == TRUTH_OR_EXPR)
2549 if (integer_onep (t))
2550 return boolean_true_node;
2551 else if (integer_zerop (t))
2555 /* Handle the AND case, where we are redistributing:
2556 (inner1 AND inner2) OR (op2a code2 op2b)
2557 => (t AND (inner2 OR (op2a code op2b))) */
2560 if (integer_zerop (t))
2561 return boolean_false_node;
2563 /* Save partial result for later. */
2568 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2569 if (TREE_CODE (inner2) == SSA_NAME
2570 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2571 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2572 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2573 gimple_assign_rhs1 (s),
2574 gimple_assign_rhs2 (s),
2575 code2, op2a, op2b)))
2577 /* Handle the OR case, where we are reassociating:
2578 (inner1 OR inner2) OR (op2a code2 op2b)
2580 if (innercode == TRUTH_OR_EXPR)
2582 if (integer_zerop (t))
2584 else if (integer_onep (t))
2585 return boolean_true_node;
2588 /* Handle the AND case, where we are redistributing:
2589 (inner1 AND inner2) OR (op2a code2 op2b)
2590 => (t AND (inner1 OR (op2a code2 op2b)))
2591 => (t AND partial) */
2594 if (integer_zerop (t))
2595 return boolean_false_node;
2598 /* We already got a simplification for the other
2599 operand to the redistributed AND expression. The
2600 interesting case is when at least one is true.
2601 Or, if both are the same, we can apply the identity
2602 (x AND x) == true. */
2603 if (integer_onep (partial))
2605 else if (integer_onep (t))
2607 else if (same_bool_result_p (t, partial))
2608 return boolean_true_node;
2616 /* Try to simplify the OR of two comparisons defined by
2617 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2618 If this can be done without constructing an intermediate value,
2619 return the resulting tree; otherwise NULL_TREE is returned.
2620 This function is deliberately asymmetric as it recurses on SSA_DEFs
2621 in the first comparison but not the second. */
2624 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2625 enum tree_code code2, tree op2a, tree op2b)
2627 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2628 if (operand_equal_p (op1a, op2a, 0)
2629 && operand_equal_p (op1b, op2b, 0))
2631 tree t = combine_comparisons (UNKNOWN_LOCATION,
2632 TRUTH_ORIF_EXPR, code1, code2,
2633 boolean_type_node, op1a, op1b);
2638 /* Likewise the swapped case of the above. */
2639 if (operand_equal_p (op1a, op2b, 0)
2640 && operand_equal_p (op1b, op2a, 0))
2642 tree t = combine_comparisons (UNKNOWN_LOCATION,
2643 TRUTH_ORIF_EXPR, code1,
2644 swap_tree_comparison (code2),
2645 boolean_type_node, op1a, op1b);
2650 /* If both comparisons are of the same value against constants, we might
2651 be able to merge them. */
2652 if (operand_equal_p (op1a, op2a, 0)
2653 && TREE_CODE (op1b) == INTEGER_CST
2654 && TREE_CODE (op2b) == INTEGER_CST)
2656 int cmp = tree_int_cst_compare (op1b, op2b);
2658 /* If we have (op1a != op1b), we should either be able to
2659 return that or TRUE, depending on whether the constant op1b
2660 also satisfies the other comparison against op2b. */
2661 if (code1 == NE_EXPR)
2667 case EQ_EXPR: val = (cmp == 0); break;
2668 case NE_EXPR: val = (cmp != 0); break;
2669 case LT_EXPR: val = (cmp < 0); break;
2670 case GT_EXPR: val = (cmp > 0); break;
2671 case LE_EXPR: val = (cmp <= 0); break;
2672 case GE_EXPR: val = (cmp >= 0); break;
2673 default: done = false;
2678 return boolean_true_node;
2680 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2683 /* Likewise if the second comparison is a != comparison. */
2684 else if (code2 == NE_EXPR)
2690 case EQ_EXPR: val = (cmp == 0); break;
2691 case NE_EXPR: val = (cmp != 0); break;
2692 case LT_EXPR: val = (cmp > 0); break;
2693 case GT_EXPR: val = (cmp < 0); break;
2694 case LE_EXPR: val = (cmp >= 0); break;
2695 case GE_EXPR: val = (cmp <= 0); break;
2696 default: done = false;
2701 return boolean_true_node;
2703 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2707 /* See if an equality test is redundant with the other comparison. */
2708 else if (code1 == EQ_EXPR)
2713 case EQ_EXPR: val = (cmp == 0); break;
2714 case NE_EXPR: val = (cmp != 0); break;
2715 case LT_EXPR: val = (cmp < 0); break;
2716 case GT_EXPR: val = (cmp > 0); break;
2717 case LE_EXPR: val = (cmp <= 0); break;
2718 case GE_EXPR: val = (cmp >= 0); break;
2723 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2725 else if (code2 == EQ_EXPR)
2730 case EQ_EXPR: val = (cmp == 0); break;
2731 case NE_EXPR: val = (cmp != 0); break;
2732 case LT_EXPR: val = (cmp > 0); break;
2733 case GT_EXPR: val = (cmp < 0); break;
2734 case LE_EXPR: val = (cmp >= 0); break;
2735 case GE_EXPR: val = (cmp <= 0); break;
2740 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2743 /* Chose the less restrictive of two < or <= comparisons. */
2744 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2745 && (code2 == LT_EXPR || code2 == LE_EXPR))
2747 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2748 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2750 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2753 /* Likewise chose the less restrictive of two > or >= comparisons. */
2754 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2755 && (code2 == GT_EXPR || code2 == GE_EXPR))
2757 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2758 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2760 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2763 /* Check for singleton ranges. */
2765 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2766 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2767 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2769 /* Check for less/greater pairs that don't restrict the range at all. */
2771 && (code1 == LT_EXPR || code1 == LE_EXPR)
2772 && (code2 == GT_EXPR || code2 == GE_EXPR))
2773 return boolean_true_node;
2775 && (code1 == GT_EXPR || code1 == GE_EXPR)
2776 && (code2 == LT_EXPR || code2 == LE_EXPR))
2777 return boolean_true_node;
2780 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2781 NAME's definition is a truth value. See if there are any simplifications
2782 that can be done against the NAME's definition. */
2783 if (TREE_CODE (op1a) == SSA_NAME
2784 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2785 && (integer_zerop (op1b) || integer_onep (op1b)))
2787 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2788 || (code1 == NE_EXPR && integer_onep (op1b)));
2789 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2790 switch (gimple_code (stmt))
2793 /* Try to simplify by copy-propagating the definition. */
2794 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2797 /* If every argument to the PHI produces the same result when
2798 ORed with the second comparison, we win.
2799 Do not do this unless the type is bool since we need a bool
2800 result here anyway. */
2801 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2803 tree result = NULL_TREE;
2805 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2807 tree arg = gimple_phi_arg_def (stmt, i);
2809 /* If this PHI has itself as an argument, ignore it.
2810 If all the other args produce the same result,
2812 if (arg == gimple_phi_result (stmt))
2814 else if (TREE_CODE (arg) == INTEGER_CST)
2816 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2819 result = boolean_true_node;
2820 else if (!integer_onep (result))
2824 result = fold_build2 (code2, boolean_type_node,
2826 else if (!same_bool_comparison_p (result,
2830 else if (TREE_CODE (arg) == SSA_NAME)
2832 tree temp = or_var_with_comparison (arg, invert,
2838 else if (!same_bool_result_p (result, temp))
2854 /* Try to simplify the OR of two comparisons, specified by
2855 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2856 If this can be simplified to a single expression (without requiring
2857 introducing more SSA variables to hold intermediate values),
2858 return the resulting tree. Otherwise return NULL_TREE.
2859 If the result expression is non-null, it has boolean type. */
2862 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2863 enum tree_code code2, tree op2a, tree op2b)
2865 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2869 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);