1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
49 #include "stor-layout.h"
51 #include "tree-iterator.h"
57 #include "diagnostic-core.h"
59 #include "langhooks.h"
66 #include "hard-reg-set.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
73 #include "gimple-expr.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
81 #include "plugin-api.h"
84 #include "generic-match.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
89 int folding_initializer = 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static tree const_binop (enum tree_code, tree, tree);
119 static enum comparison_code comparison_to_compcode (enum tree_code);
120 static enum tree_code compcode_to_comparison (enum comparison_code);
121 static int operand_equal_for_comparison_p (tree, tree, tree);
122 static int twoval_comparison_p (tree, tree *, tree *, int *);
123 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
124 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
125 static tree make_bit_field_ref (location_t, tree, tree,
126 HOST_WIDE_INT, HOST_WIDE_INT, int);
127 static tree optimize_bit_field_compare (location_t, enum tree_code,
129 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
131 machine_mode *, int *, int *,
133 static tree sign_bit_p (tree, const_tree);
134 static int simple_operand_p (const_tree);
135 static bool simple_operand_p_2 (tree);
136 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
137 static tree range_predecessor (tree);
138 static tree range_successor (tree);
139 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
140 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
141 static tree unextend (tree, int, int, tree);
142 static tree optimize_minmax_comparison (location_t, enum tree_code,
144 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
145 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
146 static tree fold_binary_op_with_conditional_arg (location_t,
147 enum tree_code, tree,
150 static tree fold_mathfn_compare (location_t,
151 enum built_in_function, enum tree_code,
153 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
155 static bool reorder_operands_p (const_tree, const_tree);
156 static tree fold_negate_const (tree, tree);
157 static tree fold_not_const (const_tree, tree);
158 static tree fold_relational_const (enum tree_code, tree, tree, tree);
159 static tree fold_convert_const (enum tree_code, tree, tree);
161 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
162 Otherwise, return LOC. */
165 expr_location_or (tree t, location_t loc)
167 location_t tloc = EXPR_LOCATION (t);
168 return tloc == UNKNOWN_LOCATION ? loc : tloc;
171 /* Similar to protected_set_expr_location, but never modify x in place,
172 if location can and needs to be set, unshare it. */
175 protected_set_expr_location_unshare (tree x, location_t loc)
177 if (CAN_HAVE_LOCATION_P (x)
178 && EXPR_LOCATION (x) != loc
179 && !(TREE_CODE (x) == SAVE_EXPR
180 || TREE_CODE (x) == TARGET_EXPR
181 || TREE_CODE (x) == BIND_EXPR))
184 SET_EXPR_LOCATION (x, loc);
189 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
190 division and returns the quotient. Otherwise returns
194 div_if_zero_remainder (const_tree arg1, const_tree arg2)
198 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
200 return wide_int_to_tree (TREE_TYPE (arg1), quo);
205 /* This is nonzero if we should defer warnings about undefined
206 overflow. This facility exists because these warnings are a
207 special case. The code to estimate loop iterations does not want
208 to issue any warnings, since it works with expressions which do not
209 occur in user code. Various bits of cleanup code call fold(), but
210 only use the result if it has certain characteristics (e.g., is a
211 constant); that code only wants to issue a warning if the result is
214 static int fold_deferring_overflow_warnings;
216 /* If a warning about undefined overflow is deferred, this is the
217 warning. Note that this may cause us to turn two warnings into
218 one, but that is fine since it is sufficient to only give one
219 warning per expression. */
221 static const char* fold_deferred_overflow_warning;
223 /* If a warning about undefined overflow is deferred, this is the
224 level at which the warning should be emitted. */
226 static enum warn_strict_overflow_code fold_deferred_overflow_code;
228 /* Start deferring overflow warnings. We could use a stack here to
229 permit nested calls, but at present it is not necessary. */
232 fold_defer_overflow_warnings (void)
234 ++fold_deferring_overflow_warnings;
237 /* Stop deferring overflow warnings. If there is a pending warning,
238 and ISSUE is true, then issue the warning if appropriate. STMT is
239 the statement with which the warning should be associated (used for
240 location information); STMT may be NULL. CODE is the level of the
241 warning--a warn_strict_overflow_code value. This function will use
242 the smaller of CODE and the deferred code when deciding whether to
243 issue the warning. CODE may be zero to mean to always use the
247 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
252 gcc_assert (fold_deferring_overflow_warnings > 0);
253 --fold_deferring_overflow_warnings;
254 if (fold_deferring_overflow_warnings > 0)
256 if (fold_deferred_overflow_warning != NULL
258 && code < (int) fold_deferred_overflow_code)
259 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
263 warnmsg = fold_deferred_overflow_warning;
264 fold_deferred_overflow_warning = NULL;
266 if (!issue || warnmsg == NULL)
269 if (gimple_no_warning_p (stmt))
272 /* Use the smallest code level when deciding to issue the
274 if (code == 0 || code > (int) fold_deferred_overflow_code)
275 code = fold_deferred_overflow_code;
277 if (!issue_strict_overflow_warning (code))
281 locus = input_location;
283 locus = gimple_location (stmt);
284 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
287 /* Stop deferring overflow warnings, ignoring any deferred
291 fold_undefer_and_ignore_overflow_warnings (void)
293 fold_undefer_overflow_warnings (false, NULL, 0);
296 /* Whether we are deferring overflow warnings. */
299 fold_deferring_overflow_warnings_p (void)
301 return fold_deferring_overflow_warnings > 0;
304 /* This is called when we fold something based on the fact that signed
305 overflow is undefined. */
308 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
310 if (fold_deferring_overflow_warnings > 0)
312 if (fold_deferred_overflow_warning == NULL
313 || wc < fold_deferred_overflow_code)
315 fold_deferred_overflow_warning = gmsgid;
316 fold_deferred_overflow_code = wc;
319 else if (issue_strict_overflow_warning (wc))
320 warning (OPT_Wstrict_overflow, gmsgid);
323 /* Return true if the built-in mathematical function specified by CODE
324 is odd, i.e. -f(x) == f(-x). */
327 negate_mathfn_p (enum built_in_function code)
331 CASE_FLT_FN (BUILT_IN_ASIN):
332 CASE_FLT_FN (BUILT_IN_ASINH):
333 CASE_FLT_FN (BUILT_IN_ATAN):
334 CASE_FLT_FN (BUILT_IN_ATANH):
335 CASE_FLT_FN (BUILT_IN_CASIN):
336 CASE_FLT_FN (BUILT_IN_CASINH):
337 CASE_FLT_FN (BUILT_IN_CATAN):
338 CASE_FLT_FN (BUILT_IN_CATANH):
339 CASE_FLT_FN (BUILT_IN_CBRT):
340 CASE_FLT_FN (BUILT_IN_CPROJ):
341 CASE_FLT_FN (BUILT_IN_CSIN):
342 CASE_FLT_FN (BUILT_IN_CSINH):
343 CASE_FLT_FN (BUILT_IN_CTAN):
344 CASE_FLT_FN (BUILT_IN_CTANH):
345 CASE_FLT_FN (BUILT_IN_ERF):
346 CASE_FLT_FN (BUILT_IN_LLROUND):
347 CASE_FLT_FN (BUILT_IN_LROUND):
348 CASE_FLT_FN (BUILT_IN_ROUND):
349 CASE_FLT_FN (BUILT_IN_SIN):
350 CASE_FLT_FN (BUILT_IN_SINH):
351 CASE_FLT_FN (BUILT_IN_TAN):
352 CASE_FLT_FN (BUILT_IN_TANH):
353 CASE_FLT_FN (BUILT_IN_TRUNC):
356 CASE_FLT_FN (BUILT_IN_LLRINT):
357 CASE_FLT_FN (BUILT_IN_LRINT):
358 CASE_FLT_FN (BUILT_IN_NEARBYINT):
359 CASE_FLT_FN (BUILT_IN_RINT):
360 return !flag_rounding_math;
368 /* Check whether we may negate an integer constant T without causing
372 may_negate_without_overflow_p (const_tree t)
376 gcc_assert (TREE_CODE (t) == INTEGER_CST);
378 type = TREE_TYPE (t);
379 if (TYPE_UNSIGNED (type))
382 return !wi::only_sign_bit_p (t);
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
389 negate_expr_p (tree t)
396 type = TREE_TYPE (t);
399 switch (TREE_CODE (t))
402 if (TYPE_OVERFLOW_WRAPS (type))
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t);
408 return (INTEGRAL_TYPE_P (type)
409 && TYPE_OVERFLOW_WRAPS (type));
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
426 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
429 int count = TYPE_VECTOR_SUBPARTS (type), i;
431 for (i = 0; i < count; i++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
443 return negate_expr_p (TREE_OPERAND (t, 0));
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
479 /* In general we can't negate A / B, because if A is INT_MIN and
480 B is 1, we may turn this into INT_MIN / -1 which is undefined
481 and actually traps on some architectures. But if overflow is
482 undefined, we can negate, because - (INT_MIN / 1) is an
484 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
486 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
488 /* If overflow is undefined then we have to be careful because
489 we ask whether it's ok to associate the negate with the
490 division which is not ok for example for
491 -((a - b) / c) where (-(a - b)) / c may invoke undefined
492 overflow because of negating INT_MIN. So do not use
493 negate_expr_p here but open-code the two important cases. */
494 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
495 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
496 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
499 else if (negate_expr_p (TREE_OPERAND (t, 0)))
501 return negate_expr_p (TREE_OPERAND (t, 1));
504 /* Negate -((double)float) as (double)(-float). */
505 if (TREE_CODE (type) == REAL_TYPE)
507 tree tem = strip_float_extensions (t);
509 return negate_expr_p (tem);
514 /* Negate -f(x) as f(-x). */
515 if (negate_mathfn_p (builtin_mathfn_code (t)))
516 return negate_expr_p (CALL_EXPR_ARG (t, 0));
520 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
521 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
523 tree op1 = TREE_OPERAND (t, 1);
524 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
535 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
536 simplification is possible.
537 If negate_expr_p would return true for T, NULL_TREE will never be
541 fold_negate_expr (location_t loc, tree t)
543 tree type = TREE_TYPE (t);
546 switch (TREE_CODE (t))
548 /* Convert - (~A) to A + 1. */
550 if (INTEGRAL_TYPE_P (type))
551 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
552 build_one_cst (type));
556 tem = fold_negate_const (t, type);
557 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
558 || !TYPE_OVERFLOW_TRAPS (type))
563 tem = fold_negate_const (t, type);
564 /* Two's complement FP formats, such as c4x, may overflow. */
565 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
570 tem = fold_negate_const (t, type);
575 tree rpart = negate_expr (TREE_REALPART (t));
576 tree ipart = negate_expr (TREE_IMAGPART (t));
578 if ((TREE_CODE (rpart) == REAL_CST
579 && TREE_CODE (ipart) == REAL_CST)
580 || (TREE_CODE (rpart) == INTEGER_CST
581 && TREE_CODE (ipart) == INTEGER_CST))
582 return build_complex (type, rpart, ipart);
588 int count = TYPE_VECTOR_SUBPARTS (type), i;
589 tree *elts = XALLOCAVEC (tree, count);
591 for (i = 0; i < count; i++)
593 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
594 if (elts[i] == NULL_TREE)
598 return build_vector (type, elts);
602 if (negate_expr_p (t))
603 return fold_build2_loc (loc, COMPLEX_EXPR, type,
604 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
605 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
609 if (negate_expr_p (t))
610 return fold_build1_loc (loc, CONJ_EXPR, type,
611 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
615 return TREE_OPERAND (t, 0);
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
619 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t, 1))
623 && reorder_operands_p (TREE_OPERAND (t, 0),
624 TREE_OPERAND (t, 1)))
626 tem = negate_expr (TREE_OPERAND (t, 1));
627 return fold_build2_loc (loc, MINUS_EXPR, type,
628 tem, TREE_OPERAND (t, 0));
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t, 0)))
634 tem = negate_expr (TREE_OPERAND (t, 0));
635 return fold_build2_loc (loc, MINUS_EXPR, type,
636 tem, TREE_OPERAND (t, 1));
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
644 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
645 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
651 if (TYPE_UNSIGNED (type))
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
659 tem = TREE_OPERAND (t, 1);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 TREE_OPERAND (t, 0), negate_expr (tem));
663 tem = TREE_OPERAND (t, 0);
664 if (negate_expr_p (tem))
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
678 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
680 const char * const warnmsg = G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem = TREE_OPERAND (t, 1);
683 if (negate_expr_p (tem))
685 if (INTEGRAL_TYPE_P (type)
686 && (TREE_CODE (tem) != INTEGER_CST
687 || integer_onep (tem)))
688 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
689 return fold_build2_loc (loc, TREE_CODE (t), type,
690 TREE_OPERAND (t, 0), negate_expr (tem));
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem = TREE_OPERAND (t, 0);
699 if ((INTEGRAL_TYPE_P (type)
700 && (TREE_CODE (tem) == NEGATE_EXPR
701 || (TREE_CODE (tem) == INTEGER_CST
702 && may_negate_without_overflow_p (tem))))
703 || !INTEGRAL_TYPE_P (type))
704 return fold_build2_loc (loc, TREE_CODE (t), type,
705 negate_expr (tem), TREE_OPERAND (t, 1));
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type) == REAL_TYPE)
713 tem = strip_float_extensions (t);
714 if (tem != t && negate_expr_p (tem))
715 return fold_convert_loc (loc, type, negate_expr (tem));
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t))
722 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
726 fndecl = get_callee_fndecl (t);
727 arg = negate_expr (CALL_EXPR_ARG (t, 0));
728 return build_call_expr_loc (loc, fndecl, 1, arg);
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
736 tree op1 = TREE_OPERAND (t, 1);
737 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
739 tree ntype = TYPE_UNSIGNED (type)
740 ? signed_type_for (type)
741 : unsigned_type_for (type);
742 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
743 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
744 return fold_convert_loc (loc, type, temp);
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
757 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
769 loc = EXPR_LOCATION (t);
770 type = TREE_TYPE (t);
773 tem = fold_negate_expr (loc, t);
775 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
776 return fold_convert_loc (loc, type, tem);
779 /* Split a tree IN into a constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
794 If IN is itself a literal or constant, return it as appropriate.
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
800 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
801 tree *minus_litp, int negate_p)
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in);
812 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
813 || TREE_CODE (in) == FIXED_CST)
815 else if (TREE_CODE (in) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
825 tree op0 = TREE_OPERAND (in, 0);
826 tree op1 = TREE_OPERAND (in, 1);
827 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
828 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
832 || TREE_CODE (op0) == FIXED_CST)
833 *litp = op0, op0 = 0;
834 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
835 || TREE_CODE (op1) == FIXED_CST)
836 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
838 if (op0 != 0 && TREE_CONSTANT (op0))
839 *conp = op0, op0 = 0;
840 else if (op1 != 0 && TREE_CONSTANT (op1))
841 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0 != 0 && op1 != 0)
850 var = op1, neg_var_p = neg1_p;
852 /* Now do any needed negations. */
854 *minus_litp = *litp, *litp = 0;
856 *conp = negate_expr (*conp);
858 var = negate_expr (var);
860 else if (TREE_CODE (in) == BIT_NOT_EXPR
861 && code == PLUS_EXPR)
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp = build_one_cst (TREE_TYPE (in));
865 var = negate_expr (TREE_OPERAND (in, 0));
867 else if (TREE_CONSTANT (in))
875 *minus_litp = *litp, *litp = 0;
876 else if (*minus_litp)
877 *litp = *minus_litp, *minus_litp = 0;
878 *conp = negate_expr (*conp);
879 var = negate_expr (var);
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
891 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
902 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
904 if (code == PLUS_EXPR)
906 if (TREE_CODE (t1) == NEGATE_EXPR)
907 return build2_loc (loc, MINUS_EXPR, type,
908 fold_convert_loc (loc, type, t2),
909 fold_convert_loc (loc, type,
910 TREE_OPERAND (t1, 0)));
911 else if (TREE_CODE (t2) == NEGATE_EXPR)
912 return build2_loc (loc, MINUS_EXPR, type,
913 fold_convert_loc (loc, type, t1),
914 fold_convert_loc (loc, type,
915 TREE_OPERAND (t2, 0)));
916 else if (integer_zerop (t2))
917 return fold_convert_loc (loc, type, t1);
919 else if (code == MINUS_EXPR)
921 if (integer_zerop (t2))
922 return fold_convert_loc (loc, type, t1);
925 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
926 fold_convert_loc (loc, type, t2));
929 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
930 fold_convert_loc (loc, type, t2));
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
937 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
939 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
941 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
956 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
957 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
958 && TYPE_MODE (type1) == TYPE_MODE (type2);
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
967 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
972 tree type = TREE_TYPE (arg1);
973 signop sign = TYPE_SIGN (type);
974 bool overflow = false;
976 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
977 TYPE_SIGN (TREE_TYPE (parg2)));
982 res = wi::bit_or (arg1, arg2);
986 res = wi::bit_xor (arg1, arg2);
990 res = wi::bit_and (arg1, arg2);
995 if (wi::neg_p (arg2))
998 if (code == RSHIFT_EXPR)
1004 if (code == RSHIFT_EXPR)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res = wi::rshift (arg1, arg2, sign);
1010 res = wi::lshift (arg1, arg2);
1015 if (wi::neg_p (arg2))
1018 if (code == RROTATE_EXPR)
1019 code = LROTATE_EXPR;
1021 code = RROTATE_EXPR;
1024 if (code == RROTATE_EXPR)
1025 res = wi::rrotate (arg1, arg2);
1027 res = wi::lrotate (arg1, arg2);
1031 res = wi::add (arg1, arg2, sign, &overflow);
1035 res = wi::sub (arg1, arg2, sign, &overflow);
1039 res = wi::mul (arg1, arg2, sign, &overflow);
1042 case MULT_HIGHPART_EXPR:
1043 res = wi::mul_high (arg1, arg2, sign);
1046 case TRUNC_DIV_EXPR:
1047 case EXACT_DIV_EXPR:
1050 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1053 case FLOOR_DIV_EXPR:
1056 res = wi::div_floor (arg1, arg2, sign, &overflow);
1062 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1065 case ROUND_DIV_EXPR:
1068 res = wi::div_round (arg1, arg2, sign, &overflow);
1071 case TRUNC_MOD_EXPR:
1074 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1077 case FLOOR_MOD_EXPR:
1080 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1086 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1089 case ROUND_MOD_EXPR:
1092 res = wi::mod_round (arg1, arg2, sign, &overflow);
1096 res = wi::min (arg1, arg2, sign);
1100 res = wi::max (arg1, arg2, sign);
1107 t = force_fit_type (type, res, overflowable,
1108 (((sign == SIGNED || overflowable == -1)
1110 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1116 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1118 return int_const_binop_1 (code, arg1, arg2, 1);
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1127 const_binop (enum tree_code code, tree arg1, tree arg2)
1129 /* Sanity check for the recursive cases. */
1136 if (TREE_CODE (arg1) == INTEGER_CST)
1137 return int_const_binop (code, arg1, arg2);
1139 if (TREE_CODE (arg1) == REAL_CST)
1144 REAL_VALUE_TYPE value;
1145 REAL_VALUE_TYPE result;
1149 /* The following codes are handled by real_arithmetic. */
1164 d1 = TREE_REAL_CST (arg1);
1165 d2 = TREE_REAL_CST (arg2);
1167 type = TREE_TYPE (arg1);
1168 mode = TYPE_MODE (type);
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode)
1173 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code == RDIV_EXPR
1179 && REAL_VALUES_EQUAL (d2, dconst0)
1180 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1))
1187 else if (REAL_VALUE_ISNAN (d2))
1190 inexact = real_arithmetic (&value, code, &d1, &d2);
1191 real_convert (&result, mode, &value);
1193 /* Don't constant fold this floating point operation if
1194 the result has overflowed and flag_trapping_math. */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode)
1197 && REAL_VALUE_ISINF (result)
1198 && !REAL_VALUE_ISINF (d1)
1199 && !REAL_VALUE_ISINF (d2))
1202 /* Don't constant fold this floating point operation if the
1203 result may dependent upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1208 && (inexact || !real_identical (&result, &value)))
1211 t = build_real (type, result);
1213 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1217 if (TREE_CODE (arg1) == FIXED_CST)
1219 FIXED_VALUE_TYPE f1;
1220 FIXED_VALUE_TYPE f2;
1221 FIXED_VALUE_TYPE result;
1226 /* The following codes are handled by fixed_arithmetic. */
1232 case TRUNC_DIV_EXPR:
1233 f2 = TREE_FIXED_CST (arg2);
1240 f2.data.high = w2.elt (1);
1241 f2.data.low = w2.elt (0);
1250 f1 = TREE_FIXED_CST (arg1);
1251 type = TREE_TYPE (arg1);
1252 sat_p = TYPE_SATURATING (type);
1253 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1254 t = build_fixed (type, result);
1255 /* Propagate overflow flags. */
1256 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1257 TREE_OVERFLOW (t) = 1;
1261 if (TREE_CODE (arg1) == COMPLEX_CST)
1263 tree type = TREE_TYPE (arg1);
1264 tree r1 = TREE_REALPART (arg1);
1265 tree i1 = TREE_IMAGPART (arg1);
1266 tree r2 = TREE_REALPART (arg2);
1267 tree i2 = TREE_IMAGPART (arg2);
1274 real = const_binop (code, r1, r2);
1275 imag = const_binop (code, i1, i2);
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1284 real = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, r1, r2),
1286 const_binop (MULT_EXPR, i1, i2));
1287 imag = const_binop (PLUS_EXPR,
1288 const_binop (MULT_EXPR, r1, i2),
1289 const_binop (MULT_EXPR, i1, r2));
1293 if (COMPLEX_FLOAT_TYPE_P (type))
1294 return do_mpc_arg2 (arg1, arg2, type,
1295 /* do_nonfinite= */ folding_initializer,
1298 case TRUNC_DIV_EXPR:
1300 case FLOOR_DIV_EXPR:
1301 case ROUND_DIV_EXPR:
1302 if (flag_complex_method == 0)
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1312 = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r2, r2),
1314 const_binop (MULT_EXPR, i2, i2));
1316 = const_binop (PLUS_EXPR,
1317 const_binop (MULT_EXPR, r1, r2),
1318 const_binop (MULT_EXPR, i1, i2));
1320 = const_binop (MINUS_EXPR,
1321 const_binop (MULT_EXPR, i1, r2),
1322 const_binop (MULT_EXPR, r1, i2));
1324 real = const_binop (code, t1, magsquared);
1325 imag = const_binop (code, t2, magsquared);
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1335 fold_abs_const (r2, TREE_TYPE (type)),
1336 fold_abs_const (i2, TREE_TYPE (type)));
1338 if (integer_nonzerop (compare))
1340 /* In the TRUE branch, we compute
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1347 tree ratio = const_binop (code, r2, i2);
1348 tree div = const_binop (PLUS_EXPR, i2,
1349 const_binop (MULT_EXPR, r2, ratio));
1350 real = const_binop (MULT_EXPR, r1, ratio);
1351 real = const_binop (PLUS_EXPR, real, i1);
1352 real = const_binop (code, real, div);
1354 imag = const_binop (MULT_EXPR, i1, ratio);
1355 imag = const_binop (MINUS_EXPR, imag, r1);
1356 imag = const_binop (code, imag, div);
1360 /* In the FALSE branch, we compute
1362 divisor = (d * ratio) + c;
1363 tr = (b * ratio) + a;
1364 ti = b - (a * ratio);
1367 tree ratio = const_binop (code, i2, r2);
1368 tree div = const_binop (PLUS_EXPR, r2,
1369 const_binop (MULT_EXPR, i2, ratio));
1371 real = const_binop (MULT_EXPR, i1, ratio);
1372 real = const_binop (PLUS_EXPR, real, r1);
1373 real = const_binop (code, real, div);
1375 imag = const_binop (MULT_EXPR, r1, ratio);
1376 imag = const_binop (MINUS_EXPR, i1, imag);
1377 imag = const_binop (code, imag, div);
1387 return build_complex (type, real, imag);
1390 if (TREE_CODE (arg1) == VECTOR_CST
1391 && TREE_CODE (arg2) == VECTOR_CST)
1393 tree type = TREE_TYPE (arg1);
1394 int count = TYPE_VECTOR_SUBPARTS (type), i;
1395 tree *elts = XALLOCAVEC (tree, count);
1397 for (i = 0; i < count; i++)
1399 tree elem1 = VECTOR_CST_ELT (arg1, i);
1400 tree elem2 = VECTOR_CST_ELT (arg2, i);
1402 elts[i] = const_binop (code, elem1, elem2);
1404 /* It is possible that const_binop cannot handle the given
1405 code and return NULL_TREE */
1406 if (elts[i] == NULL_TREE)
1410 return build_vector (type, elts);
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1) == VECTOR_CST
1415 && TREE_CODE (arg2) == INTEGER_CST)
1417 tree type = TREE_TYPE (arg1);
1418 int count = TYPE_VECTOR_SUBPARTS (type), i;
1419 tree *elts = XALLOCAVEC (tree, count);
1421 if (code == VEC_RSHIFT_EXPR)
1423 if (!tree_fits_uhwi_p (arg2))
1426 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1427 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1428 unsigned HOST_WIDE_INT innerc
1429 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1430 if (shiftc >= outerc || (shiftc % innerc) != 0)
1432 int offset = shiftc / innerc;
1433 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1434 For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
1435 vector element, but last element if BYTES_BIG_ENDIAN. */
1436 if (BYTES_BIG_ENDIAN)
1438 tree zero = build_zero_cst (TREE_TYPE (type));
1439 for (i = 0; i < count; i++)
1441 if (i + offset < 0 || i + offset >= count)
1444 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1448 for (i = 0; i < count; i++)
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1452 elts[i] = const_binop (code, elem1, arg2);
1454 /* It is possible that const_binop cannot handle the given
1455 code and return NULL_TREE */
1456 if (elts[i] == NULL_TREE)
1460 return build_vector (type, elts);
1465 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1466 indicates which particular sizetype to create. */
1469 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1471 return build_int_cst (sizetype_tab[(int) kind], number);
1474 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1475 is a tree code. The type of the result is taken from the operands.
1476 Both must be equivalent integer types, ala int_binop_types_match_p.
1477 If the operands are constant, so is the result. */
1480 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1482 tree type = TREE_TYPE (arg0);
1484 if (arg0 == error_mark_node || arg1 == error_mark_node)
1485 return error_mark_node;
1487 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1490 /* Handle the special case of two integer constants faster. */
1491 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1493 /* And some specific cases even faster than that. */
1494 if (code == PLUS_EXPR)
1496 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1498 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1501 else if (code == MINUS_EXPR)
1503 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1506 else if (code == MULT_EXPR)
1508 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1512 /* Handle general case of two integer constants. For sizetype
1513 constant calculations we always want to know about overflow,
1514 even in the unsigned case. */
1515 return int_const_binop_1 (code, arg0, arg1, -1);
1518 return fold_build2_loc (loc, code, type, arg0, arg1);
1521 /* Given two values, either both of sizetype or both of bitsizetype,
1522 compute the difference between the two values. Return the value
1523 in signed type corresponding to the type of the operands. */
1526 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1528 tree type = TREE_TYPE (arg0);
1531 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1534 /* If the type is already signed, just do the simple thing. */
1535 if (!TYPE_UNSIGNED (type))
1536 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1538 if (type == sizetype)
1540 else if (type == bitsizetype)
1541 ctype = sbitsizetype;
1543 ctype = signed_type_for (type);
1545 /* If either operand is not a constant, do the conversions to the signed
1546 type and subtract. The hardware will do the right thing with any
1547 overflow in the subtraction. */
1548 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1549 return size_binop_loc (loc, MINUS_EXPR,
1550 fold_convert_loc (loc, ctype, arg0),
1551 fold_convert_loc (loc, ctype, arg1));
1553 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1554 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1555 overflow) and negate (which can't either). Special-case a result
1556 of zero while we're here. */
1557 if (tree_int_cst_equal (arg0, arg1))
1558 return build_int_cst (ctype, 0);
1559 else if (tree_int_cst_lt (arg1, arg0))
1560 return fold_convert_loc (loc, ctype,
1561 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1563 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1564 fold_convert_loc (loc, ctype,
1565 size_binop_loc (loc,
1570 /* A subroutine of fold_convert_const handling conversions of an
1571 INTEGER_CST to another integer type. */
1574 fold_convert_const_int_from_int (tree type, const_tree arg1)
1576 /* Given an integer constant, make new constant with new type,
1577 appropriately sign-extended or truncated. Use widest_int
1578 so that any extension is done according ARG1's type. */
1579 return force_fit_type (type, wi::to_widest (arg1),
1580 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1581 TREE_OVERFLOW (arg1));
1584 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1585 to an integer type. */
1588 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1590 bool overflow = false;
1593 /* The following code implements the floating point to integer
1594 conversion rules required by the Java Language Specification,
1595 that IEEE NaNs are mapped to zero and values that overflow
1596 the target precision saturate, i.e. values greater than
1597 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1598 are mapped to INT_MIN. These semantics are allowed by the
1599 C and C++ standards that simply state that the behavior of
1600 FP-to-integer conversion is unspecified upon overflow. */
1604 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1608 case FIX_TRUNC_EXPR:
1609 real_trunc (&r, VOIDmode, &x);
1616 /* If R is NaN, return zero and show we have an overflow. */
1617 if (REAL_VALUE_ISNAN (r))
1620 val = wi::zero (TYPE_PRECISION (type));
1623 /* See if R is less than the lower bound or greater than the
1628 tree lt = TYPE_MIN_VALUE (type);
1629 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1630 if (REAL_VALUES_LESS (r, l))
1639 tree ut = TYPE_MAX_VALUE (type);
1642 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1643 if (REAL_VALUES_LESS (u, r))
1652 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1654 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1658 /* A subroutine of fold_convert_const handling conversions of a
1659 FIXED_CST to an integer type. */
1662 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1665 double_int temp, temp_trunc;
1668 /* Right shift FIXED_CST to temp by fbit. */
1669 temp = TREE_FIXED_CST (arg1).data;
1670 mode = TREE_FIXED_CST (arg1).mode;
1671 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1673 temp = temp.rshift (GET_MODE_FBIT (mode),
1674 HOST_BITS_PER_DOUBLE_INT,
1675 SIGNED_FIXED_POINT_MODE_P (mode));
1677 /* Left shift temp to temp_trunc by fbit. */
1678 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1679 HOST_BITS_PER_DOUBLE_INT,
1680 SIGNED_FIXED_POINT_MODE_P (mode));
1684 temp = double_int_zero;
1685 temp_trunc = double_int_zero;
1688 /* If FIXED_CST is negative, we need to round the value toward 0.
1689 By checking if the fractional bits are not zero to add 1 to temp. */
1690 if (SIGNED_FIXED_POINT_MODE_P (mode)
1691 && temp_trunc.is_negative ()
1692 && TREE_FIXED_CST (arg1).data != temp_trunc)
1693 temp += double_int_one;
1695 /* Given a fixed-point constant, make new constant with new type,
1696 appropriately sign-extended or truncated. */
1697 t = force_fit_type (type, temp, -1,
1698 (temp.is_negative ()
1699 && (TYPE_UNSIGNED (type)
1700 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1701 | TREE_OVERFLOW (arg1));
1706 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1707 to another floating point type. */
1710 fold_convert_const_real_from_real (tree type, const_tree arg1)
1712 REAL_VALUE_TYPE value;
1715 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1716 t = build_real (type, value);
1718 /* If converting an infinity or NAN to a representation that doesn't
1719 have one, set the overflow bit so that we can produce some kind of
1720 error message at the appropriate point if necessary. It's not the
1721 most user-friendly message, but it's better than nothing. */
1722 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1723 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1724 TREE_OVERFLOW (t) = 1;
1725 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1726 && !MODE_HAS_NANS (TYPE_MODE (type)))
1727 TREE_OVERFLOW (t) = 1;
1728 /* Regular overflow, conversion produced an infinity in a mode that
1729 can't represent them. */
1730 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1731 && REAL_VALUE_ISINF (value)
1732 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1733 TREE_OVERFLOW (t) = 1;
1735 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1739 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1740 to a floating point type. */
1743 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1745 REAL_VALUE_TYPE value;
1748 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1749 t = build_real (type, value);
1751 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1755 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1756 to another fixed-point type. */
1759 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1761 FIXED_VALUE_TYPE value;
1765 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1766 TYPE_SATURATING (type));
1767 t = build_fixed (type, value);
1769 /* Propagate overflow flags. */
1770 if (overflow_p | TREE_OVERFLOW (arg1))
1771 TREE_OVERFLOW (t) = 1;
1775 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1776 to a fixed-point type. */
1779 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1781 FIXED_VALUE_TYPE value;
1786 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1788 di.low = TREE_INT_CST_ELT (arg1, 0);
1789 if (TREE_INT_CST_NUNITS (arg1) == 1)
1790 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1792 di.high = TREE_INT_CST_ELT (arg1, 1);
1794 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1795 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1796 TYPE_SATURATING (type));
1797 t = build_fixed (type, value);
1799 /* Propagate overflow flags. */
1800 if (overflow_p | TREE_OVERFLOW (arg1))
1801 TREE_OVERFLOW (t) = 1;
1805 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1806 to a fixed-point type. */
1809 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1811 FIXED_VALUE_TYPE value;
1815 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1816 &TREE_REAL_CST (arg1),
1817 TYPE_SATURATING (type));
1818 t = build_fixed (type, value);
1820 /* Propagate overflow flags. */
1821 if (overflow_p | TREE_OVERFLOW (arg1))
1822 TREE_OVERFLOW (t) = 1;
1826 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1827 type TYPE. If no simplification can be done return NULL_TREE. */
1830 fold_convert_const (enum tree_code code, tree type, tree arg1)
1832 if (TREE_TYPE (arg1) == type)
1835 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1836 || TREE_CODE (type) == OFFSET_TYPE)
1838 if (TREE_CODE (arg1) == INTEGER_CST)
1839 return fold_convert_const_int_from_int (type, arg1);
1840 else if (TREE_CODE (arg1) == REAL_CST)
1841 return fold_convert_const_int_from_real (code, type, arg1);
1842 else if (TREE_CODE (arg1) == FIXED_CST)
1843 return fold_convert_const_int_from_fixed (type, arg1);
1845 else if (TREE_CODE (type) == REAL_TYPE)
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return build_real_from_int_cst (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_real_from_real (type, arg1);
1851 else if (TREE_CODE (arg1) == FIXED_CST)
1852 return fold_convert_const_real_from_fixed (type, arg1);
1854 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1856 if (TREE_CODE (arg1) == FIXED_CST)
1857 return fold_convert_const_fixed_from_fixed (type, arg1);
1858 else if (TREE_CODE (arg1) == INTEGER_CST)
1859 return fold_convert_const_fixed_from_int (type, arg1);
1860 else if (TREE_CODE (arg1) == REAL_CST)
1861 return fold_convert_const_fixed_from_real (type, arg1);
1866 /* Construct a vector of zero elements of vector type TYPE. */
1869 build_zero_vector (tree type)
1873 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1874 return build_vector_from_val (type, t);
1877 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1880 fold_convertible_p (const_tree type, const_tree arg)
1882 tree orig = TREE_TYPE (arg);
1887 if (TREE_CODE (arg) == ERROR_MARK
1888 || TREE_CODE (type) == ERROR_MARK
1889 || TREE_CODE (orig) == ERROR_MARK)
1892 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1895 switch (TREE_CODE (type))
1897 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1898 case POINTER_TYPE: case REFERENCE_TYPE:
1900 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1901 || TREE_CODE (orig) == OFFSET_TYPE)
1903 return (TREE_CODE (orig) == VECTOR_TYPE
1904 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1907 case FIXED_POINT_TYPE:
1911 return TREE_CODE (type) == TREE_CODE (orig);
1918 /* Convert expression ARG to type TYPE. Used by the middle-end for
1919 simple conversions in preference to calling the front-end's convert. */
1922 fold_convert_loc (location_t loc, tree type, tree arg)
1924 tree orig = TREE_TYPE (arg);
1930 if (TREE_CODE (arg) == ERROR_MARK
1931 || TREE_CODE (type) == ERROR_MARK
1932 || TREE_CODE (orig) == ERROR_MARK)
1933 return error_mark_node;
1935 switch (TREE_CODE (type))
1938 case REFERENCE_TYPE:
1939 /* Handle conversions between pointers to different address spaces. */
1940 if (POINTER_TYPE_P (orig)
1941 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1942 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1943 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1948 if (TREE_CODE (arg) == INTEGER_CST)
1950 tem = fold_convert_const (NOP_EXPR, type, arg);
1951 if (tem != NULL_TREE)
1954 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1955 || TREE_CODE (orig) == OFFSET_TYPE)
1956 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1958 return fold_convert_loc (loc, type,
1959 fold_build1_loc (loc, REALPART_EXPR,
1960 TREE_TYPE (orig), arg));
1961 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1962 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1963 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1966 if (TREE_CODE (arg) == INTEGER_CST)
1968 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1969 if (tem != NULL_TREE)
1972 else if (TREE_CODE (arg) == REAL_CST)
1974 tem = fold_convert_const (NOP_EXPR, type, arg);
1975 if (tem != NULL_TREE)
1978 else if (TREE_CODE (arg) == FIXED_CST)
1980 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1981 if (tem != NULL_TREE)
1985 switch (TREE_CODE (orig))
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1990 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1993 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1995 case FIXED_POINT_TYPE:
1996 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1999 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2000 return fold_convert_loc (loc, type, tem);
2006 case FIXED_POINT_TYPE:
2007 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2008 || TREE_CODE (arg) == REAL_CST)
2010 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2011 if (tem != NULL_TREE)
2012 goto fold_convert_exit;
2015 switch (TREE_CODE (orig))
2017 case FIXED_POINT_TYPE:
2022 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2025 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert_loc (loc, type, tem);
2033 switch (TREE_CODE (orig))
2036 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2037 case POINTER_TYPE: case REFERENCE_TYPE:
2039 case FIXED_POINT_TYPE:
2040 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2041 fold_convert_loc (loc, TREE_TYPE (type), arg),
2042 fold_convert_loc (loc, TREE_TYPE (type),
2043 integer_zero_node));
2048 if (TREE_CODE (arg) == COMPLEX_EXPR)
2050 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2051 TREE_OPERAND (arg, 0));
2052 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2053 TREE_OPERAND (arg, 1));
2054 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2057 arg = save_expr (arg);
2058 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2059 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2060 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2061 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2062 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2070 if (integer_zerop (arg))
2071 return build_zero_vector (type);
2072 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2073 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2074 || TREE_CODE (orig) == VECTOR_TYPE);
2075 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2078 tem = fold_ignored_result (arg);
2079 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2082 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2083 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2087 protected_set_expr_location_unshare (tem, loc);
2091 /* Return false if expr can be assumed not to be an lvalue, true
2095 maybe_lvalue_p (const_tree x)
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x))
2111 case ARRAY_RANGE_REF:
2117 case PREINCREMENT_EXPR:
2118 case PREDECREMENT_EXPR:
2120 case TRY_CATCH_EXPR:
2121 case WITH_CLEANUP_EXPR:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2142 non_lvalue_loc (location_t loc, tree x)
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2149 if (! maybe_lvalue_p (x))
2151 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2154 /* When pedantic, return an expr equal to X but certainly not valid as a
2155 pedantic lvalue. Otherwise, return X. */
2158 pedantic_non_lvalue_loc (location_t loc, tree x)
2160 return protected_set_expr_location_unshare (x, loc);
2163 /* Given a tree comparison code, return the code that is the logical inverse.
2164 It is generally not safe to do this for floating-point comparisons, except
2165 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2166 ERROR_MARK in this case. */
2169 invert_tree_comparison (enum tree_code code, bool honor_nans)
2171 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2172 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2182 return honor_nans ? UNLE_EXPR : LE_EXPR;
2184 return honor_nans ? UNLT_EXPR : LT_EXPR;
2186 return honor_nans ? UNGE_EXPR : GE_EXPR;
2188 return honor_nans ? UNGT_EXPR : GT_EXPR;
2202 return UNORDERED_EXPR;
2203 case UNORDERED_EXPR:
2204 return ORDERED_EXPR;
2210 /* Similar, but return the comparison that results if the operands are
2211 swapped. This is safe for floating-point. */
2214 swap_tree_comparison (enum tree_code code)
2221 case UNORDERED_EXPR:
2247 /* Convert a comparison tree code from an enum tree_code representation
2248 into a compcode bit-based encoding. This function is the inverse of
2249 compcode_to_comparison. */
2251 static enum comparison_code
2252 comparison_to_compcode (enum tree_code code)
2269 return COMPCODE_ORD;
2270 case UNORDERED_EXPR:
2271 return COMPCODE_UNORD;
2273 return COMPCODE_UNLT;
2275 return COMPCODE_UNEQ;
2277 return COMPCODE_UNLE;
2279 return COMPCODE_UNGT;
2281 return COMPCODE_LTGT;
2283 return COMPCODE_UNGE;
2289 /* Convert a compcode bit-based encoding of a comparison operator back
2290 to GCC's enum tree_code representation. This function is the
2291 inverse of comparison_to_compcode. */
2293 static enum tree_code
2294 compcode_to_comparison (enum comparison_code code)
2311 return ORDERED_EXPR;
2312 case COMPCODE_UNORD:
2313 return UNORDERED_EXPR;
2331 /* Return a tree for the comparison which is the combination of
2332 doing the AND or OR (depending on CODE) of the two operations LCODE
2333 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2334 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2335 if this makes the transformation invalid. */
2338 combine_comparisons (location_t loc,
2339 enum tree_code code, enum tree_code lcode,
2340 enum tree_code rcode, tree truth_type,
2341 tree ll_arg, tree lr_arg)
2343 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2344 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2345 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2350 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2351 compcode = lcompcode & rcompcode;
2354 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2355 compcode = lcompcode | rcompcode;
2364 /* Eliminate unordered comparisons, as well as LTGT and ORD
2365 which are not used unless the mode has NaNs. */
2366 compcode &= ~COMPCODE_UNORD;
2367 if (compcode == COMPCODE_LTGT)
2368 compcode = COMPCODE_NE;
2369 else if (compcode == COMPCODE_ORD)
2370 compcode = COMPCODE_TRUE;
2372 else if (flag_trapping_math)
2374 /* Check that the original operation and the optimized ones will trap
2375 under the same condition. */
2376 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2377 && (lcompcode != COMPCODE_EQ)
2378 && (lcompcode != COMPCODE_ORD);
2379 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2380 && (rcompcode != COMPCODE_EQ)
2381 && (rcompcode != COMPCODE_ORD);
2382 bool trap = (compcode & COMPCODE_UNORD) == 0
2383 && (compcode != COMPCODE_EQ)
2384 && (compcode != COMPCODE_ORD);
2386 /* In a short-circuited boolean expression the LHS might be
2387 such that the RHS, if evaluated, will never trap. For
2388 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2389 if neither x nor y is NaN. (This is a mixed blessing: for
2390 example, the expression above will never trap, hence
2391 optimizing it to x < y would be invalid). */
2392 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2393 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2396 /* If the comparison was short-circuited, and only the RHS
2397 trapped, we may now generate a spurious trap. */
2399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2402 /* If we changed the conditions that cause a trap, we lose. */
2403 if ((ltrap || rtrap) != trap)
2407 if (compcode == COMPCODE_TRUE)
2408 return constant_boolean_node (true, truth_type);
2409 else if (compcode == COMPCODE_FALSE)
2410 return constant_boolean_node (false, truth_type);
2413 enum tree_code tcode;
2415 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2416 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2420 /* Return nonzero if two operands (typically of the same tree node)
2421 are necessarily equal. If either argument has side-effects this
2422 function returns zero. FLAGS modifies behavior as follows:
2424 If OEP_ONLY_CONST is set, only return nonzero for constants.
2425 This function tests whether the operands are indistinguishable;
2426 it does not test whether they are equal using C's == operation.
2427 The distinction is important for IEEE floating point, because
2428 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2429 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2431 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2432 even though it may hold multiple values during a function.
2433 This is because a GCC tree node guarantees that nothing else is
2434 executed between the evaluation of its "operands" (which may often
2435 be evaluated in arbitrary order). Hence if the operands themselves
2436 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2437 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2438 unset means assuming isochronic (or instantaneous) tree equivalence.
2439 Unless comparing arbitrary expression trees, such as from different
2440 statements, this flag can usually be left unset.
2442 If OEP_PURE_SAME is set, then pure functions with identical arguments
2443 are considered the same. It is used when the caller has other ways
2444 to ensure that global memory is unchanged in between. */
2447 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2449 /* If either is ERROR_MARK, they aren't equal. */
2450 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2451 || TREE_TYPE (arg0) == error_mark_node
2452 || TREE_TYPE (arg1) == error_mark_node)
2455 /* Similar, if either does not have a type (like a released SSA name),
2456 they aren't equal. */
2457 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2460 /* Check equality of integer constants before bailing out due to
2461 precision differences. */
2462 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2463 return tree_int_cst_equal (arg0, arg1);
2465 /* If both types don't have the same signedness, then we can't consider
2466 them equal. We must check this before the STRIP_NOPS calls
2467 because they may change the signedness of the arguments. As pointers
2468 strictly don't have a signedness, require either two pointers or
2469 two non-pointers as well. */
2470 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2471 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2474 /* We cannot consider pointers to different address space equal. */
2475 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2476 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2477 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2480 /* If both types don't have the same precision, then it is not safe
2482 if (element_precision (TREE_TYPE (arg0))
2483 != element_precision (TREE_TYPE (arg1)))
2489 /* In case both args are comparisons but with different comparison
2490 code, try to swap the comparison operands of one arg to produce
2491 a match and compare that variant. */
2492 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2493 && COMPARISON_CLASS_P (arg0)
2494 && COMPARISON_CLASS_P (arg1))
2496 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2498 if (TREE_CODE (arg0) == swap_code)
2499 return operand_equal_p (TREE_OPERAND (arg0, 0),
2500 TREE_OPERAND (arg1, 1), flags)
2501 && operand_equal_p (TREE_OPERAND (arg0, 1),
2502 TREE_OPERAND (arg1, 0), flags);
2505 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2506 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2507 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2510 /* This is needed for conversions and for COMPONENT_REF.
2511 Might as well play it safe and always test this. */
2512 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2513 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2514 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2517 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2518 We don't care about side effects in that case because the SAVE_EXPR
2519 takes care of that for us. In all other cases, two expressions are
2520 equal if they have no side effects. If we have two identical
2521 expressions with side effects that should be treated the same due
2522 to the only side effects being identical SAVE_EXPR's, that will
2523 be detected in the recursive calls below.
2524 If we are taking an invariant address of two identical objects
2525 they are necessarily equal as well. */
2526 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2527 && (TREE_CODE (arg0) == SAVE_EXPR
2528 || (flags & OEP_CONSTANT_ADDRESS_OF)
2529 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2532 /* Next handle constant cases, those for which we can return 1 even
2533 if ONLY_CONST is set. */
2534 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2535 switch (TREE_CODE (arg0))
2538 return tree_int_cst_equal (arg0, arg1);
2541 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2542 TREE_FIXED_CST (arg1));
2545 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2546 TREE_REAL_CST (arg1)))
2550 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2552 /* If we do not distinguish between signed and unsigned zero,
2553 consider them equal. */
2554 if (real_zerop (arg0) && real_zerop (arg1))
2563 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2566 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2568 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2569 VECTOR_CST_ELT (arg1, i), flags))
2576 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2578 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2582 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2583 && ! memcmp (TREE_STRING_POINTER (arg0),
2584 TREE_STRING_POINTER (arg1),
2585 TREE_STRING_LENGTH (arg0)));
2588 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2589 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2590 ? OEP_CONSTANT_ADDRESS_OF : 0);
2595 if (flags & OEP_ONLY_CONST)
2598 /* Define macros to test an operand from arg0 and arg1 for equality and a
2599 variant that allows null and views null as being different from any
2600 non-null value. In the latter case, if either is null, the both
2601 must be; otherwise, do the normal comparison. */
2602 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2603 TREE_OPERAND (arg1, N), flags)
2605 #define OP_SAME_WITH_NULL(N) \
2606 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2607 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2609 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2612 /* Two conversions are equal only if signedness and modes match. */
2613 switch (TREE_CODE (arg0))
2616 case FIX_TRUNC_EXPR:
2617 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2618 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2628 case tcc_comparison:
2630 if (OP_SAME (0) && OP_SAME (1))
2633 /* For commutative ops, allow the other order. */
2634 return (commutative_tree_code (TREE_CODE (arg0))
2635 && operand_equal_p (TREE_OPERAND (arg0, 0),
2636 TREE_OPERAND (arg1, 1), flags)
2637 && operand_equal_p (TREE_OPERAND (arg0, 1),
2638 TREE_OPERAND (arg1, 0), flags));
2641 /* If either of the pointer (or reference) expressions we are
2642 dereferencing contain a side effect, these cannot be equal,
2643 but their addresses can be. */
2644 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2645 && (TREE_SIDE_EFFECTS (arg0)
2646 || TREE_SIDE_EFFECTS (arg1)))
2649 switch (TREE_CODE (arg0))
2652 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2659 case TARGET_MEM_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 /* Require equal extra operands and then fall through to MEM_REF
2662 handling of the two common operands. */
2663 if (!OP_SAME_WITH_NULL (2)
2664 || !OP_SAME_WITH_NULL (3)
2665 || !OP_SAME_WITH_NULL (4))
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 /* Require equal access sizes, and similar pointer types.
2671 We can have incomplete types for array references of
2672 variable-sized arrays from the Fortran frontend
2673 though. Also verify the types are compatible. */
2674 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2675 || (TYPE_SIZE (TREE_TYPE (arg0))
2676 && TYPE_SIZE (TREE_TYPE (arg1))
2677 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2678 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2679 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2680 && alias_ptr_types_compatible_p
2681 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2682 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2683 && OP_SAME (0) && OP_SAME (1));
2686 case ARRAY_RANGE_REF:
2687 /* Operands 2 and 3 may be null.
2688 Compare the array index by value if it is constant first as we
2689 may have different types but same value here. */
2692 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2693 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2694 TREE_OPERAND (arg1, 1))
2696 && OP_SAME_WITH_NULL (2)
2697 && OP_SAME_WITH_NULL (3));
2700 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2701 may be NULL when we're called to compare MEM_EXPRs. */
2702 if (!OP_SAME_WITH_NULL (0)
2705 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2706 return OP_SAME_WITH_NULL (2);
2711 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2712 return OP_SAME (1) && OP_SAME (2);
2718 case tcc_expression:
2719 switch (TREE_CODE (arg0))
2722 case TRUTH_NOT_EXPR:
2725 case TRUTH_ANDIF_EXPR:
2726 case TRUTH_ORIF_EXPR:
2727 return OP_SAME (0) && OP_SAME (1);
2730 case WIDEN_MULT_PLUS_EXPR:
2731 case WIDEN_MULT_MINUS_EXPR:
2734 /* The multiplcation operands are commutative. */
2737 case TRUTH_AND_EXPR:
2739 case TRUTH_XOR_EXPR:
2740 if (OP_SAME (0) && OP_SAME (1))
2743 /* Otherwise take into account this is a commutative operation. */
2744 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2745 TREE_OPERAND (arg1, 1), flags)
2746 && operand_equal_p (TREE_OPERAND (arg0, 1),
2747 TREE_OPERAND (arg1, 0), flags));
2752 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2759 switch (TREE_CODE (arg0))
2762 /* If the CALL_EXPRs call different functions, then they
2763 clearly can not be equal. */
2764 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2769 unsigned int cef = call_expr_flags (arg0);
2770 if (flags & OEP_PURE_SAME)
2771 cef &= ECF_CONST | ECF_PURE;
2778 /* Now see if all the arguments are the same. */
2780 const_call_expr_arg_iterator iter0, iter1;
2782 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2783 a1 = first_const_call_expr_arg (arg1, &iter1);
2785 a0 = next_const_call_expr_arg (&iter0),
2786 a1 = next_const_call_expr_arg (&iter1))
2787 if (! operand_equal_p (a0, a1, flags))
2790 /* If we get here and both argument lists are exhausted
2791 then the CALL_EXPRs are equal. */
2792 return ! (a0 || a1);
2798 case tcc_declaration:
2799 /* Consider __builtin_sqrt equal to sqrt. */
2800 return (TREE_CODE (arg0) == FUNCTION_DECL
2801 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2802 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2803 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2810 #undef OP_SAME_WITH_NULL
2813 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2814 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2816 When in doubt, return 0. */
2819 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2821 int unsignedp1, unsignedpo;
2822 tree primarg0, primarg1, primother;
2823 unsigned int correct_width;
2825 if (operand_equal_p (arg0, arg1, 0))
2828 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2829 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2832 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2833 and see if the inner values are the same. This removes any
2834 signedness comparison, which doesn't matter here. */
2835 primarg0 = arg0, primarg1 = arg1;
2836 STRIP_NOPS (primarg0);
2837 STRIP_NOPS (primarg1);
2838 if (operand_equal_p (primarg0, primarg1, 0))
2841 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2842 actual comparison operand, ARG0.
2844 First throw away any conversions to wider types
2845 already present in the operands. */
2847 primarg1 = get_narrower (arg1, &unsignedp1);
2848 primother = get_narrower (other, &unsignedpo);
2850 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2851 if (unsignedp1 == unsignedpo
2852 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2853 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2855 tree type = TREE_TYPE (arg0);
2857 /* Make sure shorter operand is extended the right way
2858 to match the longer operand. */
2859 primarg1 = fold_convert (signed_or_unsigned_type_for
2860 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2862 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2869 /* See if ARG is an expression that is either a comparison or is performing
2870 arithmetic on comparisons. The comparisons must only be comparing
2871 two different values, which will be stored in *CVAL1 and *CVAL2; if
2872 they are nonzero it means that some operands have already been found.
2873 No variables may be used anywhere else in the expression except in the
2874 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2875 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2877 If this is true, return 1. Otherwise, return zero. */
2880 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2882 enum tree_code code = TREE_CODE (arg);
2883 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2885 /* We can handle some of the tcc_expression cases here. */
2886 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2888 else if (tclass == tcc_expression
2889 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2890 || code == COMPOUND_EXPR))
2891 tclass = tcc_binary;
2893 else if (tclass == tcc_expression && code == SAVE_EXPR
2894 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2896 /* If we've already found a CVAL1 or CVAL2, this expression is
2897 two complex to handle. */
2898 if (*cval1 || *cval2)
2908 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2911 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2912 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2913 cval1, cval2, save_p));
2918 case tcc_expression:
2919 if (code == COND_EXPR)
2920 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2921 cval1, cval2, save_p)
2922 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2923 cval1, cval2, save_p)
2924 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2925 cval1, cval2, save_p));
2928 case tcc_comparison:
2929 /* First see if we can handle the first operand, then the second. For
2930 the second operand, we know *CVAL1 can't be zero. It must be that
2931 one side of the comparison is each of the values; test for the
2932 case where this isn't true by failing if the two operands
2935 if (operand_equal_p (TREE_OPERAND (arg, 0),
2936 TREE_OPERAND (arg, 1), 0))
2940 *cval1 = TREE_OPERAND (arg, 0);
2941 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2943 else if (*cval2 == 0)
2944 *cval2 = TREE_OPERAND (arg, 0);
2945 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2950 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2952 else if (*cval2 == 0)
2953 *cval2 = TREE_OPERAND (arg, 1);
2954 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2966 /* ARG is a tree that is known to contain just arithmetic operations and
2967 comparisons. Evaluate the operations in the tree substituting NEW0 for
2968 any occurrence of OLD0 as an operand of a comparison and likewise for
2972 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2973 tree old1, tree new1)
2975 tree type = TREE_TYPE (arg);
2976 enum tree_code code = TREE_CODE (arg);
2977 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2979 /* We can handle some of the tcc_expression cases here. */
2980 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2982 else if (tclass == tcc_expression
2983 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2984 tclass = tcc_binary;
2989 return fold_build1_loc (loc, code, type,
2990 eval_subst (loc, TREE_OPERAND (arg, 0),
2991 old0, new0, old1, new1));
2994 return fold_build2_loc (loc, code, type,
2995 eval_subst (loc, TREE_OPERAND (arg, 0),
2996 old0, new0, old1, new1),
2997 eval_subst (loc, TREE_OPERAND (arg, 1),
2998 old0, new0, old1, new1));
3000 case tcc_expression:
3004 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3008 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3012 return fold_build3_loc (loc, code, type,
3013 eval_subst (loc, TREE_OPERAND (arg, 0),
3014 old0, new0, old1, new1),
3015 eval_subst (loc, TREE_OPERAND (arg, 1),
3016 old0, new0, old1, new1),
3017 eval_subst (loc, TREE_OPERAND (arg, 2),
3018 old0, new0, old1, new1));
3022 /* Fall through - ??? */
3024 case tcc_comparison:
3026 tree arg0 = TREE_OPERAND (arg, 0);
3027 tree arg1 = TREE_OPERAND (arg, 1);
3029 /* We need to check both for exact equality and tree equality. The
3030 former will be true if the operand has a side-effect. In that
3031 case, we know the operand occurred exactly once. */
3033 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3035 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3038 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3040 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3043 return fold_build2_loc (loc, code, type, arg0, arg1);
3051 /* Return a tree for the case when the result of an expression is RESULT
3052 converted to TYPE and OMITTED was previously an operand of the expression
3053 but is now not needed (e.g., we folded OMITTED * 0).
3055 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3056 the conversion of RESULT to TYPE. */
3059 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3061 tree t = fold_convert_loc (loc, type, result);
3063 /* If the resulting operand is an empty statement, just return the omitted
3064 statement casted to void. */
3065 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3066 return build1_loc (loc, NOP_EXPR, void_type_node,
3067 fold_ignored_result (omitted));
3069 if (TREE_SIDE_EFFECTS (omitted))
3070 return build2_loc (loc, COMPOUND_EXPR, type,
3071 fold_ignored_result (omitted), t);
3073 return non_lvalue_loc (loc, t);
3076 /* Return a tree for the case when the result of an expression is RESULT
3077 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3078 of the expression but are now not needed.
3080 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3081 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3082 evaluated before OMITTED2. Otherwise, if neither has side effects,
3083 just do the conversion of RESULT to TYPE. */
3086 omit_two_operands_loc (location_t loc, tree type, tree result,
3087 tree omitted1, tree omitted2)
3089 tree t = fold_convert_loc (loc, type, result);
3091 if (TREE_SIDE_EFFECTS (omitted2))
3092 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3093 if (TREE_SIDE_EFFECTS (omitted1))
3094 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3096 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3100 /* Return a simplified tree node for the truth-negation of ARG. This
3101 never alters ARG itself. We assume that ARG is an operation that
3102 returns a truth value (0 or 1).
3104 FIXME: one would think we would fold the result, but it causes
3105 problems with the dominator optimizer. */
3108 fold_truth_not_expr (location_t loc, tree arg)
3110 tree type = TREE_TYPE (arg);
3111 enum tree_code code = TREE_CODE (arg);
3112 location_t loc1, loc2;
3114 /* If this is a comparison, we can simply invert it, except for
3115 floating-point non-equality comparisons, in which case we just
3116 enclose a TRUTH_NOT_EXPR around what we have. */
3118 if (TREE_CODE_CLASS (code) == tcc_comparison)
3120 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3121 if (FLOAT_TYPE_P (op_type)
3122 && flag_trapping_math
3123 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3124 && code != NE_EXPR && code != EQ_EXPR)
3127 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3128 if (code == ERROR_MARK)
3131 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3132 TREE_OPERAND (arg, 1));
3138 return constant_boolean_node (integer_zerop (arg), type);
3140 case TRUTH_AND_EXPR:
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3142 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3143 return build2_loc (loc, TRUTH_OR_EXPR, type,
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3145 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3148 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3149 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3150 return build2_loc (loc, TRUTH_AND_EXPR, type,
3151 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3152 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3154 case TRUTH_XOR_EXPR:
3155 /* Here we can invert either operand. We invert the first operand
3156 unless the second operand is a TRUTH_NOT_EXPR in which case our
3157 result is the XOR of the first operand with the inside of the
3158 negation of the second operand. */
3160 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3161 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3162 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3164 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3165 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3166 TREE_OPERAND (arg, 1));
3168 case TRUTH_ANDIF_EXPR:
3169 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3170 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3171 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3173 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3175 case TRUTH_ORIF_EXPR:
3176 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3177 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3178 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3179 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3180 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3182 case TRUTH_NOT_EXPR:
3183 return TREE_OPERAND (arg, 0);
3187 tree arg1 = TREE_OPERAND (arg, 1);
3188 tree arg2 = TREE_OPERAND (arg, 2);
3190 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3191 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3193 /* A COND_EXPR may have a throw as one operand, which
3194 then has void type. Just leave void operands
3196 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3197 VOID_TYPE_P (TREE_TYPE (arg1))
3198 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3199 VOID_TYPE_P (TREE_TYPE (arg2))
3200 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3204 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3205 return build2_loc (loc, COMPOUND_EXPR, type,
3206 TREE_OPERAND (arg, 0),
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3209 case NON_LVALUE_EXPR:
3210 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3211 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3214 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3215 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3217 /* ... fall through ... */
3220 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3221 return build1_loc (loc, TREE_CODE (arg), type,
3222 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3225 if (!integer_onep (TREE_OPERAND (arg, 1)))
3227 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3230 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3232 case CLEANUP_POINT_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3234 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3235 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3242 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3243 assume that ARG is an operation that returns a truth value (0 or 1
3244 for scalars, 0 or -1 for vectors). Return the folded expression if
3245 folding is successful. Otherwise, return NULL_TREE. */
3248 fold_invert_truthvalue (location_t loc, tree arg)
3250 tree type = TREE_TYPE (arg);
3251 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3257 /* Return a simplified tree node for the truth-negation of ARG. This
3258 never alters ARG itself. We assume that ARG is an operation that
3259 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3262 invert_truthvalue_loc (location_t loc, tree arg)
3264 if (TREE_CODE (arg) == ERROR_MARK)
3267 tree type = TREE_TYPE (arg);
3268 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3274 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3275 operands are another bit-wise operation with a common input. If so,
3276 distribute the bit operations to save an operation and possibly two if
3277 constants are involved. For example, convert
3278 (A | B) & (A | C) into A | (B & C)
3279 Further simplification will occur if B and C are constants.
3281 If this optimization cannot be done, 0 will be returned. */
3284 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3285 tree arg0, tree arg1)
3290 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3291 || TREE_CODE (arg0) == code
3292 || (TREE_CODE (arg0) != BIT_AND_EXPR
3293 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3296 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3298 common = TREE_OPERAND (arg0, 0);
3299 left = TREE_OPERAND (arg0, 1);
3300 right = TREE_OPERAND (arg1, 1);
3302 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3304 common = TREE_OPERAND (arg0, 0);
3305 left = TREE_OPERAND (arg0, 1);
3306 right = TREE_OPERAND (arg1, 0);
3308 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3310 common = TREE_OPERAND (arg0, 1);
3311 left = TREE_OPERAND (arg0, 0);
3312 right = TREE_OPERAND (arg1, 1);
3314 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3316 common = TREE_OPERAND (arg0, 1);
3317 left = TREE_OPERAND (arg0, 0);
3318 right = TREE_OPERAND (arg1, 0);
3323 common = fold_convert_loc (loc, type, common);
3324 left = fold_convert_loc (loc, type, left);
3325 right = fold_convert_loc (loc, type, right);
3326 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3327 fold_build2_loc (loc, code, type, left, right));
3330 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3331 with code CODE. This optimization is unsafe. */
3333 distribute_real_division (location_t loc, enum tree_code code, tree type,
3334 tree arg0, tree arg1)
3336 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3337 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3339 /* (A / C) +- (B / C) -> (A +- B) / C. */
3341 && operand_equal_p (TREE_OPERAND (arg0, 1),
3342 TREE_OPERAND (arg1, 1), 0))
3343 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3344 fold_build2_loc (loc, code, type,
3345 TREE_OPERAND (arg0, 0),
3346 TREE_OPERAND (arg1, 0)),
3347 TREE_OPERAND (arg0, 1));
3349 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3350 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3351 TREE_OPERAND (arg1, 0), 0)
3352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3353 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3355 REAL_VALUE_TYPE r0, r1;
3356 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3357 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3359 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3361 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3362 real_arithmetic (&r0, code, &r0, &r1);
3363 return fold_build2_loc (loc, MULT_EXPR, type,
3364 TREE_OPERAND (arg0, 0),
3365 build_real (type, r0));
3371 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3372 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3375 make_bit_field_ref (location_t loc, tree inner, tree type,
3376 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3378 tree result, bftype;
3382 tree size = TYPE_SIZE (TREE_TYPE (inner));
3383 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3384 || POINTER_TYPE_P (TREE_TYPE (inner)))
3385 && tree_fits_shwi_p (size)
3386 && tree_to_shwi (size) == bitsize)
3387 return fold_convert_loc (loc, type, inner);
3391 if (TYPE_PRECISION (bftype) != bitsize
3392 || TYPE_UNSIGNED (bftype) == !unsignedp)
3393 bftype = build_nonstandard_integer_type (bitsize, 0);
3395 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3396 size_int (bitsize), bitsize_int (bitpos));
3399 result = fold_convert_loc (loc, type, result);
3404 /* Optimize a bit-field compare.
3406 There are two cases: First is a compare against a constant and the
3407 second is a comparison of two items where the fields are at the same
3408 bit position relative to the start of a chunk (byte, halfword, word)
3409 large enough to contain it. In these cases we can avoid the shift
3410 implicit in bitfield extractions.
3412 For constants, we emit a compare of the shifted constant with the
3413 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3414 compared. For two fields at the same position, we do the ANDs with the
3415 similar mask and compare the result of the ANDs.
3417 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3418 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3419 are the left and right operands of the comparison, respectively.
3421 If the optimization described above can be done, we return the resulting
3422 tree. Otherwise we return zero. */
3425 optimize_bit_field_compare (location_t loc, enum tree_code code,
3426 tree compare_type, tree lhs, tree rhs)
3428 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3429 tree type = TREE_TYPE (lhs);
3431 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3432 machine_mode lmode, rmode, nmode;
3433 int lunsignedp, runsignedp;
3434 int lvolatilep = 0, rvolatilep = 0;
3435 tree linner, rinner = NULL_TREE;
3439 /* Get all the information about the extractions being done. If the bit size
3440 if the same as the size of the underlying object, we aren't doing an
3441 extraction at all and so can do nothing. We also don't want to
3442 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3443 then will no longer be able to replace it. */
3444 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3445 &lunsignedp, &lvolatilep, false);
3446 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3447 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3452 /* If this is not a constant, we can only do something if bit positions,
3453 sizes, and signedness are the same. */
3454 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3455 &runsignedp, &rvolatilep, false);
3457 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3458 || lunsignedp != runsignedp || offset != 0
3459 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3463 /* See if we can find a mode to refer to this field. We should be able to,
3464 but fail if we can't. */
3465 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3466 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3467 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3468 TYPE_ALIGN (TREE_TYPE (rinner))),
3470 if (nmode == VOIDmode)
3473 /* Set signed and unsigned types of the precision of this mode for the
3475 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3477 /* Compute the bit position and size for the new reference and our offset
3478 within it. If the new reference is the same size as the original, we
3479 won't optimize anything, so return zero. */
3480 nbitsize = GET_MODE_BITSIZE (nmode);
3481 nbitpos = lbitpos & ~ (nbitsize - 1);
3483 if (nbitsize == lbitsize)
3486 if (BYTES_BIG_ENDIAN)
3487 lbitpos = nbitsize - lbitsize - lbitpos;
3489 /* Make the mask to be used against the extracted field. */
3490 mask = build_int_cst_type (unsigned_type, -1);
3491 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3492 mask = const_binop (RSHIFT_EXPR, mask,
3493 size_int (nbitsize - lbitsize - lbitpos));
3496 /* If not comparing with constant, just rework the comparison
3498 return fold_build2_loc (loc, code, compare_type,
3499 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3500 make_bit_field_ref (loc, linner,
3505 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3506 make_bit_field_ref (loc, rinner,
3512 /* Otherwise, we are handling the constant case. See if the constant is too
3513 big for the field. Warn and return a tree of for 0 (false) if so. We do
3514 this not only for its own sake, but to avoid having to test for this
3515 error case below. If we didn't, we might generate wrong code.
3517 For unsigned fields, the constant shifted right by the field length should
3518 be all zero. For signed fields, the high-order bits should agree with
3523 if (wi::lrshift (rhs, lbitsize) != 0)
3525 warning (0, "comparison is always %d due to width of bit-field",
3527 return constant_boolean_node (code == NE_EXPR, compare_type);
3532 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3533 if (tem != 0 && tem != -1)
3535 warning (0, "comparison is always %d due to width of bit-field",
3537 return constant_boolean_node (code == NE_EXPR, compare_type);
3541 /* Single-bit compares should always be against zero. */
3542 if (lbitsize == 1 && ! integer_zerop (rhs))
3544 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3545 rhs = build_int_cst (type, 0);
3548 /* Make a new bitfield reference, shift the constant over the
3549 appropriate number of bits and mask it with the computed mask
3550 (in case this was a signed field). If we changed it, make a new one. */
3551 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3553 rhs = const_binop (BIT_AND_EXPR,
3554 const_binop (LSHIFT_EXPR,
3555 fold_convert_loc (loc, unsigned_type, rhs),
3556 size_int (lbitpos)),
3559 lhs = build2_loc (loc, code, compare_type,
3560 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3564 /* Subroutine for fold_truth_andor_1: decode a field reference.
3566 If EXP is a comparison reference, we return the innermost reference.
3568 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3569 set to the starting bit number.
3571 If the innermost field can be completely contained in a mode-sized
3572 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3574 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3575 otherwise it is not changed.
3577 *PUNSIGNEDP is set to the signedness of the field.
3579 *PMASK is set to the mask used. This is either contained in a
3580 BIT_AND_EXPR or derived from the width of the field.
3582 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3584 Return 0 if this is not a component reference or is one that we can't
3585 do anything with. */
3588 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3589 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3590 int *punsignedp, int *pvolatilep,
3591 tree *pmask, tree *pand_mask)
3593 tree outer_type = 0;
3595 tree mask, inner, offset;
3597 unsigned int precision;
3599 /* All the optimizations using this function assume integer fields.
3600 There are problems with FP fields since the type_for_size call
3601 below can fail for, e.g., XFmode. */
3602 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3605 /* We are interested in the bare arrangement of bits, so strip everything
3606 that doesn't affect the machine mode. However, record the type of the
3607 outermost expression if it may matter below. */
3608 if (CONVERT_EXPR_P (exp)
3609 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3610 outer_type = TREE_TYPE (exp);
3613 if (TREE_CODE (exp) == BIT_AND_EXPR)
3615 and_mask = TREE_OPERAND (exp, 1);
3616 exp = TREE_OPERAND (exp, 0);
3617 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3618 if (TREE_CODE (and_mask) != INTEGER_CST)
3622 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3623 punsignedp, pvolatilep, false);
3624 if ((inner == exp && and_mask == 0)
3625 || *pbitsize < 0 || offset != 0
3626 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3629 /* If the number of bits in the reference is the same as the bitsize of
3630 the outer type, then the outer type gives the signedness. Otherwise
3631 (in case of a small bitfield) the signedness is unchanged. */
3632 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3633 *punsignedp = TYPE_UNSIGNED (outer_type);
3635 /* Compute the mask to access the bitfield. */
3636 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3637 precision = TYPE_PRECISION (unsigned_type);
3639 mask = build_int_cst_type (unsigned_type, -1);
3641 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3642 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3644 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3646 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3647 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3650 *pand_mask = and_mask;
3654 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3655 bit positions and MASK is SIGNED. */
3658 all_ones_mask_p (const_tree mask, unsigned int size)
3660 tree type = TREE_TYPE (mask);
3661 unsigned int precision = TYPE_PRECISION (type);
3663 /* If this function returns true when the type of the mask is
3664 UNSIGNED, then there will be errors. In particular see
3665 gcc.c-torture/execute/990326-1.c. There does not appear to be
3666 any documentation paper trail as to why this is so. But the pre
3667 wide-int worked with that restriction and it has been preserved
3669 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3672 return wi::mask (size, false, precision) == mask;
3675 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3676 represents the sign bit of EXP's type. If EXP represents a sign
3677 or zero extension, also test VAL against the unextended type.
3678 The return value is the (sub)expression whose sign bit is VAL,
3679 or NULL_TREE otherwise. */
3682 sign_bit_p (tree exp, const_tree val)
3687 /* Tree EXP must have an integral type. */
3688 t = TREE_TYPE (exp);
3689 if (! INTEGRAL_TYPE_P (t))
3692 /* Tree VAL must be an integer constant. */
3693 if (TREE_CODE (val) != INTEGER_CST
3694 || TREE_OVERFLOW (val))
3697 width = TYPE_PRECISION (t);
3698 if (wi::only_sign_bit_p (val, width))
3701 /* Handle extension from a narrower type. */
3702 if (TREE_CODE (exp) == NOP_EXPR
3703 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3704 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3709 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3710 to be evaluated unconditionally. */
3713 simple_operand_p (const_tree exp)
3715 /* Strip any conversions that don't change the machine mode. */
3718 return (CONSTANT_CLASS_P (exp)
3719 || TREE_CODE (exp) == SSA_NAME
3721 && ! TREE_ADDRESSABLE (exp)
3722 && ! TREE_THIS_VOLATILE (exp)
3723 && ! DECL_NONLOCAL (exp)
3724 /* Don't regard global variables as simple. They may be
3725 allocated in ways unknown to the compiler (shared memory,
3726 #pragma weak, etc). */
3727 && ! TREE_PUBLIC (exp)
3728 && ! DECL_EXTERNAL (exp)
3729 /* Weakrefs are not safe to be read, since they can be NULL.
3730 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3731 have DECL_WEAK flag set. */
3732 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3733 /* Loading a static variable is unduly expensive, but global
3734 registers aren't expensive. */
3735 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3738 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3739 to be evaluated unconditionally.
3740 I addition to simple_operand_p, we assume that comparisons, conversions,
3741 and logic-not operations are simple, if their operands are simple, too. */
3744 simple_operand_p_2 (tree exp)
3746 enum tree_code code;
3748 if (TREE_SIDE_EFFECTS (exp)
3749 || tree_could_trap_p (exp))
3752 while (CONVERT_EXPR_P (exp))
3753 exp = TREE_OPERAND (exp, 0);
3755 code = TREE_CODE (exp);
3757 if (TREE_CODE_CLASS (code) == tcc_comparison)
3758 return (simple_operand_p (TREE_OPERAND (exp, 0))
3759 && simple_operand_p (TREE_OPERAND (exp, 1)));
3761 if (code == TRUTH_NOT_EXPR)
3762 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3764 return simple_operand_p (exp);
3768 /* The following functions are subroutines to fold_range_test and allow it to
3769 try to change a logical combination of comparisons into a range test.
3772 X == 2 || X == 3 || X == 4 || X == 5
3776 (unsigned) (X - 2) <= 3
3778 We describe each set of comparisons as being either inside or outside
3779 a range, using a variable named like IN_P, and then describe the
3780 range with a lower and upper bound. If one of the bounds is omitted,
3781 it represents either the highest or lowest value of the type.
3783 In the comments below, we represent a range by two numbers in brackets
3784 preceded by a "+" to designate being inside that range, or a "-" to
3785 designate being outside that range, so the condition can be inverted by
3786 flipping the prefix. An omitted bound is represented by a "-". For
3787 example, "- [-, 10]" means being outside the range starting at the lowest
3788 possible value and ending at 10, in other words, being greater than 10.
3789 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3792 We set up things so that the missing bounds are handled in a consistent
3793 manner so neither a missing bound nor "true" and "false" need to be
3794 handled using a special case. */
3796 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3797 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3798 and UPPER1_P are nonzero if the respective argument is an upper bound
3799 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3800 must be specified for a comparison. ARG1 will be converted to ARG0's
3801 type if both are specified. */
3804 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3805 tree arg1, int upper1_p)
3811 /* If neither arg represents infinity, do the normal operation.
3812 Else, if not a comparison, return infinity. Else handle the special
3813 comparison rules. Note that most of the cases below won't occur, but
3814 are handled for consistency. */
3816 if (arg0 != 0 && arg1 != 0)
3818 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3819 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3821 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3824 if (TREE_CODE_CLASS (code) != tcc_comparison)
3827 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3828 for neither. In real maths, we cannot assume open ended ranges are
3829 the same. But, this is computer arithmetic, where numbers are finite.
3830 We can therefore make the transformation of any unbounded range with
3831 the value Z, Z being greater than any representable number. This permits
3832 us to treat unbounded ranges as equal. */
3833 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3834 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3838 result = sgn0 == sgn1;
3841 result = sgn0 != sgn1;
3844 result = sgn0 < sgn1;
3847 result = sgn0 <= sgn1;
3850 result = sgn0 > sgn1;
3853 result = sgn0 >= sgn1;
3859 return constant_boolean_node (result, type);
3862 /* Helper routine for make_range. Perform one step for it, return
3863 new expression if the loop should continue or NULL_TREE if it should
3867 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3868 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3869 bool *strict_overflow_p)
3871 tree arg0_type = TREE_TYPE (arg0);
3872 tree n_low, n_high, low = *p_low, high = *p_high;
3873 int in_p = *p_in_p, n_in_p;
3877 case TRUTH_NOT_EXPR:
3878 /* We can only do something if the range is testing for zero. */
3879 if (low == NULL_TREE || high == NULL_TREE
3880 || ! integer_zerop (low) || ! integer_zerop (high))
3885 case EQ_EXPR: case NE_EXPR:
3886 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3887 /* We can only do something if the range is testing for zero
3888 and if the second operand is an integer constant. Note that
3889 saying something is "in" the range we make is done by
3890 complementing IN_P since it will set in the initial case of
3891 being not equal to zero; "out" is leaving it alone. */
3892 if (low == NULL_TREE || high == NULL_TREE
3893 || ! integer_zerop (low) || ! integer_zerop (high)
3894 || TREE_CODE (arg1) != INTEGER_CST)
3899 case NE_EXPR: /* - [c, c] */
3902 case EQ_EXPR: /* + [c, c] */
3903 in_p = ! in_p, low = high = arg1;
3905 case GT_EXPR: /* - [-, c] */
3906 low = 0, high = arg1;
3908 case GE_EXPR: /* + [c, -] */
3909 in_p = ! in_p, low = arg1, high = 0;
3911 case LT_EXPR: /* - [c, -] */
3912 low = arg1, high = 0;
3914 case LE_EXPR: /* + [-, c] */
3915 in_p = ! in_p, low = 0, high = arg1;
3921 /* If this is an unsigned comparison, we also know that EXP is
3922 greater than or equal to zero. We base the range tests we make
3923 on that fact, so we record it here so we can parse existing
3924 range tests. We test arg0_type since often the return type
3925 of, e.g. EQ_EXPR, is boolean. */
3926 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3928 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3930 build_int_cst (arg0_type, 0),
3934 in_p = n_in_p, low = n_low, high = n_high;
3936 /* If the high bound is missing, but we have a nonzero low
3937 bound, reverse the range so it goes from zero to the low bound
3939 if (high == 0 && low && ! integer_zerop (low))
3942 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3943 build_int_cst (TREE_TYPE (low), 1), 0);
3944 low = build_int_cst (arg0_type, 0);
3954 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3955 low and high are non-NULL, then normalize will DTRT. */
3956 if (!TYPE_UNSIGNED (arg0_type)
3957 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3959 if (low == NULL_TREE)
3960 low = TYPE_MIN_VALUE (arg0_type);
3961 if (high == NULL_TREE)
3962 high = TYPE_MAX_VALUE (arg0_type);
3965 /* (-x) IN [a,b] -> x in [-b, -a] */
3966 n_low = range_binop (MINUS_EXPR, exp_type,
3967 build_int_cst (exp_type, 0),
3969 n_high = range_binop (MINUS_EXPR, exp_type,
3970 build_int_cst (exp_type, 0),
3972 if (n_high != 0 && TREE_OVERFLOW (n_high))
3978 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3979 build_int_cst (exp_type, 1));
3983 if (TREE_CODE (arg1) != INTEGER_CST)
3986 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3987 move a constant to the other side. */
3988 if (!TYPE_UNSIGNED (arg0_type)
3989 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3992 /* If EXP is signed, any overflow in the computation is undefined,
3993 so we don't worry about it so long as our computations on
3994 the bounds don't overflow. For unsigned, overflow is defined
3995 and this is exactly the right thing. */
3996 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3997 arg0_type, low, 0, arg1, 0);
3998 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3999 arg0_type, high, 1, arg1, 0);
4000 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4001 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4004 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4005 *strict_overflow_p = true;
4008 /* Check for an unsigned range which has wrapped around the maximum
4009 value thus making n_high < n_low, and normalize it. */
4010 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4012 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4013 build_int_cst (TREE_TYPE (n_high), 1), 0);
4014 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4015 build_int_cst (TREE_TYPE (n_low), 1), 0);
4017 /* If the range is of the form +/- [ x+1, x ], we won't
4018 be able to normalize it. But then, it represents the
4019 whole range or the empty set, so make it
4021 if (tree_int_cst_equal (n_low, low)
4022 && tree_int_cst_equal (n_high, high))
4028 low = n_low, high = n_high;
4036 case NON_LVALUE_EXPR:
4037 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4040 if (! INTEGRAL_TYPE_P (arg0_type)
4041 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4042 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4045 n_low = low, n_high = high;
4048 n_low = fold_convert_loc (loc, arg0_type, n_low);
4051 n_high = fold_convert_loc (loc, arg0_type, n_high);
4053 /* If we're converting arg0 from an unsigned type, to exp,
4054 a signed type, we will be doing the comparison as unsigned.
4055 The tests above have already verified that LOW and HIGH
4058 So we have to ensure that we will handle large unsigned
4059 values the same way that the current signed bounds treat
4062 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4066 /* For fixed-point modes, we need to pass the saturating flag
4067 as the 2nd parameter. */
4068 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4070 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4071 TYPE_SATURATING (arg0_type));
4074 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4076 /* A range without an upper bound is, naturally, unbounded.
4077 Since convert would have cropped a very large value, use
4078 the max value for the destination type. */
4080 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4081 : TYPE_MAX_VALUE (arg0_type);
4083 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4084 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4085 fold_convert_loc (loc, arg0_type,
4087 build_int_cst (arg0_type, 1));
4089 /* If the low bound is specified, "and" the range with the
4090 range for which the original unsigned value will be
4094 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4095 1, fold_convert_loc (loc, arg0_type,
4100 in_p = (n_in_p == in_p);
4104 /* Otherwise, "or" the range with the range of the input
4105 that will be interpreted as negative. */
4106 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4107 1, fold_convert_loc (loc, arg0_type,
4112 in_p = (in_p != n_in_p);
4126 /* Given EXP, a logical expression, set the range it is testing into
4127 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4128 actually being tested. *PLOW and *PHIGH will be made of the same
4129 type as the returned expression. If EXP is not a comparison, we
4130 will most likely not be returning a useful value and range. Set
4131 *STRICT_OVERFLOW_P to true if the return value is only valid
4132 because signed overflow is undefined; otherwise, do not change
4133 *STRICT_OVERFLOW_P. */
4136 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4137 bool *strict_overflow_p)
4139 enum tree_code code;
4140 tree arg0, arg1 = NULL_TREE;
4141 tree exp_type, nexp;
4144 location_t loc = EXPR_LOCATION (exp);
4146 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4147 and see if we can refine the range. Some of the cases below may not
4148 happen, but it doesn't seem worth worrying about this. We "continue"
4149 the outer loop when we've changed something; otherwise we "break"
4150 the switch, which will "break" the while. */
4153 low = high = build_int_cst (TREE_TYPE (exp), 0);
4157 code = TREE_CODE (exp);
4158 exp_type = TREE_TYPE (exp);
4161 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4163 if (TREE_OPERAND_LENGTH (exp) > 0)
4164 arg0 = TREE_OPERAND (exp, 0);
4165 if (TREE_CODE_CLASS (code) == tcc_binary
4166 || TREE_CODE_CLASS (code) == tcc_comparison
4167 || (TREE_CODE_CLASS (code) == tcc_expression
4168 && TREE_OPERAND_LENGTH (exp) > 1))
4169 arg1 = TREE_OPERAND (exp, 1);
4171 if (arg0 == NULL_TREE)
4174 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4175 &high, &in_p, strict_overflow_p);
4176 if (nexp == NULL_TREE)
4181 /* If EXP is a constant, we can evaluate whether this is true or false. */
4182 if (TREE_CODE (exp) == INTEGER_CST)
4184 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4186 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4192 *pin_p = in_p, *plow = low, *phigh = high;
4196 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4197 type, TYPE, return an expression to test if EXP is in (or out of, depending
4198 on IN_P) the range. Return 0 if the test couldn't be created. */
4201 build_range_check (location_t loc, tree type, tree exp, int in_p,
4202 tree low, tree high)
4204 tree etype = TREE_TYPE (exp), value;
4206 #ifdef HAVE_canonicalize_funcptr_for_compare
4207 /* Disable this optimization for function pointer expressions
4208 on targets that require function pointer canonicalization. */
4209 if (HAVE_canonicalize_funcptr_for_compare
4210 && TREE_CODE (etype) == POINTER_TYPE
4211 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4217 value = build_range_check (loc, type, exp, 1, low, high);
4219 return invert_truthvalue_loc (loc, value);
4224 if (low == 0 && high == 0)
4225 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4228 return fold_build2_loc (loc, LE_EXPR, type, exp,
4229 fold_convert_loc (loc, etype, high));
4232 return fold_build2_loc (loc, GE_EXPR, type, exp,
4233 fold_convert_loc (loc, etype, low));
4235 if (operand_equal_p (low, high, 0))
4236 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4237 fold_convert_loc (loc, etype, low));
4239 if (integer_zerop (low))
4241 if (! TYPE_UNSIGNED (etype))
4243 etype = unsigned_type_for (etype);
4244 high = fold_convert_loc (loc, etype, high);
4245 exp = fold_convert_loc (loc, etype, exp);
4247 return build_range_check (loc, type, exp, 1, 0, high);
4250 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4251 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4253 int prec = TYPE_PRECISION (etype);
4255 if (wi::mask (prec - 1, false, prec) == high)
4257 if (TYPE_UNSIGNED (etype))
4259 tree signed_etype = signed_type_for (etype);
4260 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4262 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4264 etype = signed_etype;
4265 exp = fold_convert_loc (loc, etype, exp);
4267 return fold_build2_loc (loc, GT_EXPR, type, exp,
4268 build_int_cst (etype, 0));
4272 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4273 This requires wrap-around arithmetics for the type of the expression.
4274 First make sure that arithmetics in this type is valid, then make sure
4275 that it wraps around. */
4276 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4277 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4278 TYPE_UNSIGNED (etype));
4280 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4282 tree utype, minv, maxv;
4284 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4285 for the type in question, as we rely on this here. */
4286 utype = unsigned_type_for (etype);
4287 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4288 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4289 build_int_cst (TREE_TYPE (maxv), 1), 1);
4290 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4292 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4299 high = fold_convert_loc (loc, etype, high);
4300 low = fold_convert_loc (loc, etype, low);
4301 exp = fold_convert_loc (loc, etype, exp);
4303 value = const_binop (MINUS_EXPR, high, low);
4306 if (POINTER_TYPE_P (etype))
4308 if (value != 0 && !TREE_OVERFLOW (value))
4310 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4311 return build_range_check (loc, type,
4312 fold_build_pointer_plus_loc (loc, exp, low),
4313 1, build_int_cst (etype, 0), value);
4318 if (value != 0 && !TREE_OVERFLOW (value))
4319 return build_range_check (loc, type,
4320 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4321 1, build_int_cst (etype, 0), value);
4326 /* Return the predecessor of VAL in its type, handling the infinite case. */
4329 range_predecessor (tree val)
4331 tree type = TREE_TYPE (val);
4333 if (INTEGRAL_TYPE_P (type)
4334 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4337 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4338 build_int_cst (TREE_TYPE (val), 1), 0);
4341 /* Return the successor of VAL in its type, handling the infinite case. */
4344 range_successor (tree val)
4346 tree type = TREE_TYPE (val);
4348 if (INTEGRAL_TYPE_P (type)
4349 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4352 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4353 build_int_cst (TREE_TYPE (val), 1), 0);
4356 /* Given two ranges, see if we can merge them into one. Return 1 if we
4357 can, 0 if we can't. Set the output range into the specified parameters. */
4360 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4361 tree high0, int in1_p, tree low1, tree high1)
4369 int lowequal = ((low0 == 0 && low1 == 0)
4370 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4371 low0, 0, low1, 0)));
4372 int highequal = ((high0 == 0 && high1 == 0)
4373 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4374 high0, 1, high1, 1)));
4376 /* Make range 0 be the range that starts first, or ends last if they
4377 start at the same value. Swap them if it isn't. */
4378 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4381 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4382 high1, 1, high0, 1))))
4384 temp = in0_p, in0_p = in1_p, in1_p = temp;
4385 tem = low0, low0 = low1, low1 = tem;
4386 tem = high0, high0 = high1, high1 = tem;
4389 /* Now flag two cases, whether the ranges are disjoint or whether the
4390 second range is totally subsumed in the first. Note that the tests
4391 below are simplified by the ones above. */
4392 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4393 high0, 1, low1, 0));
4394 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4395 high1, 1, high0, 1));
4397 /* We now have four cases, depending on whether we are including or
4398 excluding the two ranges. */
4401 /* If they don't overlap, the result is false. If the second range
4402 is a subset it is the result. Otherwise, the range is from the start
4403 of the second to the end of the first. */
4405 in_p = 0, low = high = 0;
4407 in_p = 1, low = low1, high = high1;
4409 in_p = 1, low = low1, high = high0;
4412 else if (in0_p && ! in1_p)
4414 /* If they don't overlap, the result is the first range. If they are
4415 equal, the result is false. If the second range is a subset of the
4416 first, and the ranges begin at the same place, we go from just after
4417 the end of the second range to the end of the first. If the second
4418 range is not a subset of the first, or if it is a subset and both
4419 ranges end at the same place, the range starts at the start of the
4420 first range and ends just before the second range.
4421 Otherwise, we can't describe this as a single range. */
4423 in_p = 1, low = low0, high = high0;
4424 else if (lowequal && highequal)
4425 in_p = 0, low = high = 0;
4426 else if (subset && lowequal)
4428 low = range_successor (high1);
4433 /* We are in the weird situation where high0 > high1 but
4434 high1 has no successor. Punt. */
4438 else if (! subset || highequal)
4441 high = range_predecessor (low1);
4445 /* low0 < low1 but low1 has no predecessor. Punt. */
4453 else if (! in0_p && in1_p)
4455 /* If they don't overlap, the result is the second range. If the second
4456 is a subset of the first, the result is false. Otherwise,
4457 the range starts just after the first range and ends at the
4458 end of the second. */
4460 in_p = 1, low = low1, high = high1;
4461 else if (subset || highequal)
4462 in_p = 0, low = high = 0;
4465 low = range_successor (high0);
4470 /* high1 > high0 but high0 has no successor. Punt. */
4478 /* The case where we are excluding both ranges. Here the complex case
4479 is if they don't overlap. In that case, the only time we have a
4480 range is if they are adjacent. If the second is a subset of the
4481 first, the result is the first. Otherwise, the range to exclude
4482 starts at the beginning of the first range and ends at the end of the
4486 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4487 range_successor (high0),
4489 in_p = 0, low = low0, high = high1;
4492 /* Canonicalize - [min, x] into - [-, x]. */
4493 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4494 switch (TREE_CODE (TREE_TYPE (low0)))
4497 if (TYPE_PRECISION (TREE_TYPE (low0))
4498 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4502 if (tree_int_cst_equal (low0,
4503 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4507 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4508 && integer_zerop (low0))
4515 /* Canonicalize - [x, max] into - [x, -]. */
4516 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4517 switch (TREE_CODE (TREE_TYPE (high1)))
4520 if (TYPE_PRECISION (TREE_TYPE (high1))
4521 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4525 if (tree_int_cst_equal (high1,
4526 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4530 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4531 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4533 build_int_cst (TREE_TYPE (high1), 1),
4541 /* The ranges might be also adjacent between the maximum and
4542 minimum values of the given type. For
4543 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4544 return + [x + 1, y - 1]. */
4545 if (low0 == 0 && high1 == 0)
4547 low = range_successor (high0);
4548 high = range_predecessor (low1);
4549 if (low == 0 || high == 0)
4559 in_p = 0, low = low0, high = high0;
4561 in_p = 0, low = low0, high = high1;
4564 *pin_p = in_p, *plow = low, *phigh = high;
4569 /* Subroutine of fold, looking inside expressions of the form
4570 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4571 of the COND_EXPR. This function is being used also to optimize
4572 A op B ? C : A, by reversing the comparison first.
4574 Return a folded expression whose code is not a COND_EXPR
4575 anymore, or NULL_TREE if no folding opportunity is found. */
4578 fold_cond_expr_with_comparison (location_t loc, tree type,
4579 tree arg0, tree arg1, tree arg2)
4581 enum tree_code comp_code = TREE_CODE (arg0);
4582 tree arg00 = TREE_OPERAND (arg0, 0);
4583 tree arg01 = TREE_OPERAND (arg0, 1);
4584 tree arg1_type = TREE_TYPE (arg1);
4590 /* If we have A op 0 ? A : -A, consider applying the following
4593 A == 0? A : -A same as -A
4594 A != 0? A : -A same as A
4595 A >= 0? A : -A same as abs (A)
4596 A > 0? A : -A same as abs (A)
4597 A <= 0? A : -A same as -abs (A)
4598 A < 0? A : -A same as -abs (A)
4600 None of these transformations work for modes with signed
4601 zeros. If A is +/-0, the first two transformations will
4602 change the sign of the result (from +0 to -0, or vice
4603 versa). The last four will fix the sign of the result,
4604 even though the original expressions could be positive or
4605 negative, depending on the sign of A.
4607 Note that all these transformations are correct if A is
4608 NaN, since the two alternatives (A and -A) are also NaNs. */
4609 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4610 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4611 ? real_zerop (arg01)
4612 : integer_zerop (arg01))
4613 && ((TREE_CODE (arg2) == NEGATE_EXPR
4614 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4615 /* In the case that A is of the form X-Y, '-A' (arg2) may
4616 have already been folded to Y-X, check for that. */
4617 || (TREE_CODE (arg1) == MINUS_EXPR
4618 && TREE_CODE (arg2) == MINUS_EXPR
4619 && operand_equal_p (TREE_OPERAND (arg1, 0),
4620 TREE_OPERAND (arg2, 1), 0)
4621 && operand_equal_p (TREE_OPERAND (arg1, 1),
4622 TREE_OPERAND (arg2, 0), 0))))
4627 tem = fold_convert_loc (loc, arg1_type, arg1);
4628 return pedantic_non_lvalue_loc (loc,
4629 fold_convert_loc (loc, type,
4630 negate_expr (tem)));
4633 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4636 if (flag_trapping_math)
4641 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4642 arg1 = fold_convert_loc (loc, signed_type_for
4643 (TREE_TYPE (arg1)), arg1);
4644 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4645 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4648 if (flag_trapping_math)
4652 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4653 arg1 = fold_convert_loc (loc, signed_type_for
4654 (TREE_TYPE (arg1)), arg1);
4655 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4656 return negate_expr (fold_convert_loc (loc, type, tem));
4658 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4662 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4663 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4664 both transformations are correct when A is NaN: A != 0
4665 is then true, and A == 0 is false. */
4667 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4668 && integer_zerop (arg01) && integer_zerop (arg2))
4670 if (comp_code == NE_EXPR)
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4672 else if (comp_code == EQ_EXPR)
4673 return build_zero_cst (type);
4676 /* Try some transformations of A op B ? A : B.
4678 A == B? A : B same as B
4679 A != B? A : B same as A
4680 A >= B? A : B same as max (A, B)
4681 A > B? A : B same as max (B, A)
4682 A <= B? A : B same as min (A, B)
4683 A < B? A : B same as min (B, A)
4685 As above, these transformations don't work in the presence
4686 of signed zeros. For example, if A and B are zeros of
4687 opposite sign, the first two transformations will change
4688 the sign of the result. In the last four, the original
4689 expressions give different results for (A=+0, B=-0) and
4690 (A=-0, B=+0), but the transformed expressions do not.
4692 The first two transformations are correct if either A or B
4693 is a NaN. In the first transformation, the condition will
4694 be false, and B will indeed be chosen. In the case of the
4695 second transformation, the condition A != B will be true,
4696 and A will be chosen.
4698 The conversions to max() and min() are not correct if B is
4699 a number and A is not. The conditions in the original
4700 expressions will be false, so all four give B. The min()
4701 and max() versions would give a NaN instead. */
4702 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4703 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4704 /* Avoid these transformations if the COND_EXPR may be used
4705 as an lvalue in the C++ front-end. PR c++/19199. */
4707 || VECTOR_TYPE_P (type)
4708 || (strcmp (lang_hooks.name, "GNU C++") != 0
4709 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4710 || ! maybe_lvalue_p (arg1)
4711 || ! maybe_lvalue_p (arg2)))
4713 tree comp_op0 = arg00;
4714 tree comp_op1 = arg01;
4715 tree comp_type = TREE_TYPE (comp_op0);
4717 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4718 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4728 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4730 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4735 /* In C++ a ?: expression can be an lvalue, so put the
4736 operand which will be used if they are equal first
4737 so that we can convert this back to the
4738 corresponding COND_EXPR. */
4739 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4741 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4742 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4743 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4744 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4745 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4746 comp_op1, comp_op0);
4747 return pedantic_non_lvalue_loc (loc,
4748 fold_convert_loc (loc, type, tem));
4755 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4757 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4758 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4759 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4760 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4761 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4762 comp_op1, comp_op0);
4763 return pedantic_non_lvalue_loc (loc,
4764 fold_convert_loc (loc, type, tem));
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, arg2));
4773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4774 return pedantic_non_lvalue_loc (loc,
4775 fold_convert_loc (loc, type, arg1));
4778 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4783 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4784 we might still be able to simplify this. For example,
4785 if C1 is one less or one more than C2, this might have started
4786 out as a MIN or MAX and been transformed by this function.
4787 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4789 if (INTEGRAL_TYPE_P (type)
4790 && TREE_CODE (arg01) == INTEGER_CST
4791 && TREE_CODE (arg2) == INTEGER_CST)
4795 if (TREE_CODE (arg1) == INTEGER_CST)
4797 /* We can replace A with C1 in this case. */
4798 arg1 = fold_convert_loc (loc, type, arg01);
4799 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4802 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4803 MIN_EXPR, to preserve the signedness of the comparison. */
4804 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4806 && operand_equal_p (arg01,
4807 const_binop (PLUS_EXPR, arg2,
4808 build_int_cst (type, 1)),
4811 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4812 fold_convert_loc (loc, TREE_TYPE (arg00),
4814 return pedantic_non_lvalue_loc (loc,
4815 fold_convert_loc (loc, type, tem));
4820 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4822 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4824 && operand_equal_p (arg01,
4825 const_binop (MINUS_EXPR, arg2,
4826 build_int_cst (type, 1)),
4829 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4830 fold_convert_loc (loc, TREE_TYPE (arg00),
4832 return pedantic_non_lvalue_loc (loc,
4833 fold_convert_loc (loc, type, tem));
4838 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4839 MAX_EXPR, to preserve the signedness of the comparison. */
4840 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4842 && operand_equal_p (arg01,
4843 const_binop (MINUS_EXPR, arg2,
4844 build_int_cst (type, 1)),
4847 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4848 fold_convert_loc (loc, TREE_TYPE (arg00),
4850 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4855 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4856 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4858 && operand_equal_p (arg01,
4859 const_binop (PLUS_EXPR, arg2,
4860 build_int_cst (type, 1)),
4863 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4864 fold_convert_loc (loc, TREE_TYPE (arg00),
4866 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4880 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4881 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4882 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4886 /* EXP is some logical combination of boolean tests. See if we can
4887 merge it into some range test. Return the new tree if so. */
4890 fold_range_test (location_t loc, enum tree_code code, tree type,
4893 int or_op = (code == TRUTH_ORIF_EXPR
4894 || code == TRUTH_OR_EXPR);
4895 int in0_p, in1_p, in_p;
4896 tree low0, low1, low, high0, high1, high;
4897 bool strict_overflow_p = false;
4899 const char * const warnmsg = G_("assuming signed overflow does not occur "
4900 "when simplifying range test");
4902 if (!INTEGRAL_TYPE_P (type))
4905 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4906 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4908 /* If this is an OR operation, invert both sides; we will invert
4909 again at the end. */
4911 in0_p = ! in0_p, in1_p = ! in1_p;
4913 /* If both expressions are the same, if we can merge the ranges, and we
4914 can build the range test, return it or it inverted. If one of the
4915 ranges is always true or always false, consider it to be the same
4916 expression as the other. */
4917 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4918 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4920 && 0 != (tem = (build_range_check (loc, type,
4922 : rhs != 0 ? rhs : integer_zero_node,
4925 if (strict_overflow_p)
4926 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4927 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4930 /* On machines where the branch cost is expensive, if this is a
4931 short-circuited branch and the underlying object on both sides
4932 is the same, make a non-short-circuit operation. */
4933 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4934 && lhs != 0 && rhs != 0
4935 && (code == TRUTH_ANDIF_EXPR
4936 || code == TRUTH_ORIF_EXPR)
4937 && operand_equal_p (lhs, rhs, 0))
4939 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4940 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4941 which cases we can't do this. */
4942 if (simple_operand_p (lhs))
4943 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4944 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4947 else if (!lang_hooks.decls.global_bindings_p ()
4948 && !CONTAINS_PLACEHOLDER_P (lhs))
4950 tree common = save_expr (lhs);
4952 if (0 != (lhs = build_range_check (loc, type, common,
4953 or_op ? ! in0_p : in0_p,
4955 && (0 != (rhs = build_range_check (loc, type, common,
4956 or_op ? ! in1_p : in1_p,
4959 if (strict_overflow_p)
4960 fold_overflow_warning (warnmsg,
4961 WARN_STRICT_OVERFLOW_COMPARISON);
4962 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4963 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4972 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4973 bit value. Arrange things so the extra bits will be set to zero if and
4974 only if C is signed-extended to its full width. If MASK is nonzero,
4975 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4978 unextend (tree c, int p, int unsignedp, tree mask)
4980 tree type = TREE_TYPE (c);
4981 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4984 if (p == modesize || unsignedp)
4987 /* We work by getting just the sign bit into the low-order bit, then
4988 into the high-order bit, then sign-extend. We then XOR that value
4990 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
4992 /* We must use a signed type in order to get an arithmetic right shift.
4993 However, we must also avoid introducing accidental overflows, so that
4994 a subsequent call to integer_zerop will work. Hence we must
4995 do the type conversion here. At this point, the constant is either
4996 zero or one, and the conversion to a signed type can never overflow.
4997 We could get an overflow if this conversion is done anywhere else. */
4998 if (TYPE_UNSIGNED (type))
4999 temp = fold_convert (signed_type_for (type), temp);
5001 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5002 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5004 temp = const_binop (BIT_AND_EXPR, temp,
5005 fold_convert (TREE_TYPE (c), mask));
5006 /* If necessary, convert the type back to match the type of C. */
5007 if (TYPE_UNSIGNED (type))
5008 temp = fold_convert (type, temp);
5010 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5013 /* For an expression that has the form
5017 we can drop one of the inner expressions and simplify to
5021 LOC is the location of the resulting expression. OP is the inner
5022 logical operation; the left-hand side in the examples above, while CMPOP
5023 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5024 removing a condition that guards another, as in
5025 (A != NULL && A->...) || A == NULL
5026 which we must not transform. If RHS_ONLY is true, only eliminate the
5027 right-most operand of the inner logical operation. */
5030 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5033 tree type = TREE_TYPE (cmpop);
5034 enum tree_code code = TREE_CODE (cmpop);
5035 enum tree_code truthop_code = TREE_CODE (op);
5036 tree lhs = TREE_OPERAND (op, 0);
5037 tree rhs = TREE_OPERAND (op, 1);
5038 tree orig_lhs = lhs, orig_rhs = rhs;
5039 enum tree_code rhs_code = TREE_CODE (rhs);
5040 enum tree_code lhs_code = TREE_CODE (lhs);
5041 enum tree_code inv_code;
5043 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5046 if (TREE_CODE_CLASS (code) != tcc_comparison)
5049 if (rhs_code == truthop_code)
5051 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5052 if (newrhs != NULL_TREE)
5055 rhs_code = TREE_CODE (rhs);
5058 if (lhs_code == truthop_code && !rhs_only)
5060 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5061 if (newlhs != NULL_TREE)
5064 lhs_code = TREE_CODE (lhs);
5068 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5069 if (inv_code == rhs_code
5070 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5071 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5073 if (!rhs_only && inv_code == lhs_code
5074 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5075 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5077 if (rhs != orig_rhs || lhs != orig_lhs)
5078 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5083 /* Find ways of folding logical expressions of LHS and RHS:
5084 Try to merge two comparisons to the same innermost item.
5085 Look for range tests like "ch >= '0' && ch <= '9'".
5086 Look for combinations of simple terms on machines with expensive branches
5087 and evaluate the RHS unconditionally.
5089 For example, if we have p->a == 2 && p->b == 4 and we can make an
5090 object large enough to span both A and B, we can do this with a comparison
5091 against the object ANDed with the a mask.
5093 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5094 operations to do this with one comparison.
5096 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5097 function and the one above.
5099 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5100 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5102 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5105 We return the simplified tree or 0 if no optimization is possible. */
5108 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5111 /* If this is the "or" of two comparisons, we can do something if
5112 the comparisons are NE_EXPR. If this is the "and", we can do something
5113 if the comparisons are EQ_EXPR. I.e.,
5114 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5116 WANTED_CODE is this operation code. For single bit fields, we can
5117 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5118 comparison for one-bit fields. */
5120 enum tree_code wanted_code;
5121 enum tree_code lcode, rcode;
5122 tree ll_arg, lr_arg, rl_arg, rr_arg;
5123 tree ll_inner, lr_inner, rl_inner, rr_inner;
5124 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5125 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5126 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5127 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5128 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5129 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5130 machine_mode lnmode, rnmode;
5131 tree ll_mask, lr_mask, rl_mask, rr_mask;
5132 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5133 tree l_const, r_const;
5134 tree lntype, rntype, result;
5135 HOST_WIDE_INT first_bit, end_bit;
5138 /* Start by getting the comparison codes. Fail if anything is volatile.
5139 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5140 it were surrounded with a NE_EXPR. */
5142 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5145 lcode = TREE_CODE (lhs);
5146 rcode = TREE_CODE (rhs);
5148 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5150 lhs = build2 (NE_EXPR, truth_type, lhs,
5151 build_int_cst (TREE_TYPE (lhs), 0));
5155 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5157 rhs = build2 (NE_EXPR, truth_type, rhs,
5158 build_int_cst (TREE_TYPE (rhs), 0));
5162 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5163 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5166 ll_arg = TREE_OPERAND (lhs, 0);
5167 lr_arg = TREE_OPERAND (lhs, 1);
5168 rl_arg = TREE_OPERAND (rhs, 0);
5169 rr_arg = TREE_OPERAND (rhs, 1);
5171 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5172 if (simple_operand_p (ll_arg)
5173 && simple_operand_p (lr_arg))
5175 if (operand_equal_p (ll_arg, rl_arg, 0)
5176 && operand_equal_p (lr_arg, rr_arg, 0))
5178 result = combine_comparisons (loc, code, lcode, rcode,
5179 truth_type, ll_arg, lr_arg);
5183 else if (operand_equal_p (ll_arg, rr_arg, 0)
5184 && operand_equal_p (lr_arg, rl_arg, 0))
5186 result = combine_comparisons (loc, code, lcode,
5187 swap_tree_comparison (rcode),
5188 truth_type, ll_arg, lr_arg);
5194 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5195 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5197 /* If the RHS can be evaluated unconditionally and its operands are
5198 simple, it wins to evaluate the RHS unconditionally on machines
5199 with expensive branches. In this case, this isn't a comparison
5200 that can be merged. */
5202 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5204 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5205 && simple_operand_p (rl_arg)
5206 && simple_operand_p (rr_arg))
5208 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5209 if (code == TRUTH_OR_EXPR
5210 && lcode == NE_EXPR && integer_zerop (lr_arg)
5211 && rcode == NE_EXPR && integer_zerop (rr_arg)
5212 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5213 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5214 return build2_loc (loc, NE_EXPR, truth_type,
5215 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5217 build_int_cst (TREE_TYPE (ll_arg), 0));
5219 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5220 if (code == TRUTH_AND_EXPR
5221 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5222 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5223 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5224 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5225 return build2_loc (loc, EQ_EXPR, truth_type,
5226 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5228 build_int_cst (TREE_TYPE (ll_arg), 0));
5231 /* See if the comparisons can be merged. Then get all the parameters for
5234 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5235 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5239 ll_inner = decode_field_reference (loc, ll_arg,
5240 &ll_bitsize, &ll_bitpos, &ll_mode,
5241 &ll_unsignedp, &volatilep, &ll_mask,
5243 lr_inner = decode_field_reference (loc, lr_arg,
5244 &lr_bitsize, &lr_bitpos, &lr_mode,
5245 &lr_unsignedp, &volatilep, &lr_mask,
5247 rl_inner = decode_field_reference (loc, rl_arg,
5248 &rl_bitsize, &rl_bitpos, &rl_mode,
5249 &rl_unsignedp, &volatilep, &rl_mask,
5251 rr_inner = decode_field_reference (loc, rr_arg,
5252 &rr_bitsize, &rr_bitpos, &rr_mode,
5253 &rr_unsignedp, &volatilep, &rr_mask,
5256 /* It must be true that the inner operation on the lhs of each
5257 comparison must be the same if we are to be able to do anything.
5258 Then see if we have constants. If not, the same must be true for
5260 if (volatilep || ll_inner == 0 || rl_inner == 0
5261 || ! operand_equal_p (ll_inner, rl_inner, 0))
5264 if (TREE_CODE (lr_arg) == INTEGER_CST
5265 && TREE_CODE (rr_arg) == INTEGER_CST)
5266 l_const = lr_arg, r_const = rr_arg;
5267 else if (lr_inner == 0 || rr_inner == 0
5268 || ! operand_equal_p (lr_inner, rr_inner, 0))
5271 l_const = r_const = 0;
5273 /* If either comparison code is not correct for our logical operation,
5274 fail. However, we can convert a one-bit comparison against zero into
5275 the opposite comparison against that bit being set in the field. */
5277 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5278 if (lcode != wanted_code)
5280 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5282 /* Make the left operand unsigned, since we are only interested
5283 in the value of one bit. Otherwise we are doing the wrong
5292 /* This is analogous to the code for l_const above. */
5293 if (rcode != wanted_code)
5295 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5304 /* See if we can find a mode that contains both fields being compared on
5305 the left. If we can't, fail. Otherwise, update all constants and masks
5306 to be relative to a field of that size. */
5307 first_bit = MIN (ll_bitpos, rl_bitpos);
5308 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5309 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5310 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5312 if (lnmode == VOIDmode)
5315 lnbitsize = GET_MODE_BITSIZE (lnmode);
5316 lnbitpos = first_bit & ~ (lnbitsize - 1);
5317 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5318 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5320 if (BYTES_BIG_ENDIAN)
5322 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5323 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5326 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5327 size_int (xll_bitpos));
5328 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5329 size_int (xrl_bitpos));
5333 l_const = fold_convert_loc (loc, lntype, l_const);
5334 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5335 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5336 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5337 fold_build1_loc (loc, BIT_NOT_EXPR,
5340 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5342 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5347 r_const = fold_convert_loc (loc, lntype, r_const);
5348 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5349 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5350 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5351 fold_build1_loc (loc, BIT_NOT_EXPR,
5354 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5356 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5360 /* If the right sides are not constant, do the same for it. Also,
5361 disallow this optimization if a size or signedness mismatch occurs
5362 between the left and right sides. */
5365 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5366 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5367 /* Make sure the two fields on the right
5368 correspond to the left without being swapped. */
5369 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5372 first_bit = MIN (lr_bitpos, rr_bitpos);
5373 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5374 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5375 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5377 if (rnmode == VOIDmode)
5380 rnbitsize = GET_MODE_BITSIZE (rnmode);
5381 rnbitpos = first_bit & ~ (rnbitsize - 1);
5382 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5383 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5385 if (BYTES_BIG_ENDIAN)
5387 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5388 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5391 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5393 size_int (xlr_bitpos));
5394 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5396 size_int (xrr_bitpos));
5398 /* Make a mask that corresponds to both fields being compared.
5399 Do this for both items being compared. If the operands are the
5400 same size and the bits being compared are in the same position
5401 then we can do this by masking both and comparing the masked
5403 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5404 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5405 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5407 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5408 ll_unsignedp || rl_unsignedp);
5409 if (! all_ones_mask_p (ll_mask, lnbitsize))
5410 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5412 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5413 lr_unsignedp || rr_unsignedp);
5414 if (! all_ones_mask_p (lr_mask, rnbitsize))
5415 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5417 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5420 /* There is still another way we can do something: If both pairs of
5421 fields being compared are adjacent, we may be able to make a wider
5422 field containing them both.
5424 Note that we still must mask the lhs/rhs expressions. Furthermore,
5425 the mask must be shifted to account for the shift done by
5426 make_bit_field_ref. */
5427 if ((ll_bitsize + ll_bitpos == rl_bitpos
5428 && lr_bitsize + lr_bitpos == rr_bitpos)
5429 || (ll_bitpos == rl_bitpos + rl_bitsize
5430 && lr_bitpos == rr_bitpos + rr_bitsize))
5434 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5435 ll_bitsize + rl_bitsize,
5436 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5437 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5438 lr_bitsize + rr_bitsize,
5439 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5441 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5442 size_int (MIN (xll_bitpos, xrl_bitpos)));
5443 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5444 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5446 /* Convert to the smaller type before masking out unwanted bits. */
5448 if (lntype != rntype)
5450 if (lnbitsize > rnbitsize)
5452 lhs = fold_convert_loc (loc, rntype, lhs);
5453 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5456 else if (lnbitsize < rnbitsize)
5458 rhs = fold_convert_loc (loc, lntype, rhs);
5459 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5464 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5465 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5467 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5468 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5470 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5476 /* Handle the case of comparisons with constants. If there is something in
5477 common between the masks, those bits of the constants must be the same.
5478 If not, the condition is always false. Test for this to avoid generating
5479 incorrect code below. */
5480 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5481 if (! integer_zerop (result)
5482 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5483 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5485 if (wanted_code == NE_EXPR)
5487 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5488 return constant_boolean_node (true, truth_type);
5492 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5493 return constant_boolean_node (false, truth_type);
5497 /* Construct the expression we will return. First get the component
5498 reference we will make. Unless the mask is all ones the width of
5499 that field, perform the mask operation. Then compare with the
5501 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5502 ll_unsignedp || rl_unsignedp);
5504 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5505 if (! all_ones_mask_p (ll_mask, lnbitsize))
5506 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5508 return build2_loc (loc, wanted_code, truth_type, result,
5509 const_binop (BIT_IOR_EXPR, l_const, r_const));
5512 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5516 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5520 enum tree_code op_code;
5523 int consts_equal, consts_lt;
5526 STRIP_SIGN_NOPS (arg0);
5528 op_code = TREE_CODE (arg0);
5529 minmax_const = TREE_OPERAND (arg0, 1);
5530 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5531 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5532 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5533 inner = TREE_OPERAND (arg0, 0);
5535 /* If something does not permit us to optimize, return the original tree. */
5536 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5537 || TREE_CODE (comp_const) != INTEGER_CST
5538 || TREE_OVERFLOW (comp_const)
5539 || TREE_CODE (minmax_const) != INTEGER_CST
5540 || TREE_OVERFLOW (minmax_const))
5543 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5544 and GT_EXPR, doing the rest with recursive calls using logical
5548 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5551 = optimize_minmax_comparison (loc,
5552 invert_tree_comparison (code, false),
5555 return invert_truthvalue_loc (loc, tem);
5561 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5562 optimize_minmax_comparison
5563 (loc, EQ_EXPR, type, arg0, comp_const),
5564 optimize_minmax_comparison
5565 (loc, GT_EXPR, type, arg0, comp_const));
5568 if (op_code == MAX_EXPR && consts_equal)
5569 /* MAX (X, 0) == 0 -> X <= 0 */
5570 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5572 else if (op_code == MAX_EXPR && consts_lt)
5573 /* MAX (X, 0) == 5 -> X == 5 */
5574 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5576 else if (op_code == MAX_EXPR)
5577 /* MAX (X, 0) == -1 -> false */
5578 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5580 else if (consts_equal)
5581 /* MIN (X, 0) == 0 -> X >= 0 */
5582 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5585 /* MIN (X, 0) == 5 -> false */
5586 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5589 /* MIN (X, 0) == -1 -> X == -1 */
5590 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5593 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5594 /* MAX (X, 0) > 0 -> X > 0
5595 MAX (X, 0) > 5 -> X > 5 */
5596 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) > -1 -> true */
5600 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5602 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5603 /* MIN (X, 0) > 0 -> false
5604 MIN (X, 0) > 5 -> false */
5605 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5608 /* MIN (X, 0) > -1 -> X > -1 */
5609 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5616 /* T is an integer expression that is being multiplied, divided, or taken a
5617 modulus (CODE says which and what kind of divide or modulus) by a
5618 constant C. See if we can eliminate that operation by folding it with
5619 other operations already in T. WIDE_TYPE, if non-null, is a type that
5620 should be used for the computation if wider than our type.
5622 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5623 (X * 2) + (Y * 4). We must, however, be assured that either the original
5624 expression would not overflow or that overflow is undefined for the type
5625 in the language in question.
5627 If we return a non-null expression, it is an equivalent form of the
5628 original computation, but need not be in the original type.
5630 We set *STRICT_OVERFLOW_P to true if the return values depends on
5631 signed overflow being undefined. Otherwise we do not change
5632 *STRICT_OVERFLOW_P. */
5635 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5636 bool *strict_overflow_p)
5638 /* To avoid exponential search depth, refuse to allow recursion past
5639 three levels. Beyond that (1) it's highly unlikely that we'll find
5640 something interesting and (2) we've probably processed it before
5641 when we built the inner expression. */
5650 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5657 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5660 tree type = TREE_TYPE (t);
5661 enum tree_code tcode = TREE_CODE (t);
5662 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5663 > GET_MODE_SIZE (TYPE_MODE (type)))
5664 ? wide_type : type);
5666 int same_p = tcode == code;
5667 tree op0 = NULL_TREE, op1 = NULL_TREE;
5668 bool sub_strict_overflow_p;
5670 /* Don't deal with constants of zero here; they confuse the code below. */
5671 if (integer_zerop (c))
5674 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5675 op0 = TREE_OPERAND (t, 0);
5677 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5678 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5680 /* Note that we need not handle conditional operations here since fold
5681 already handles those cases. So just do arithmetic here. */
5685 /* For a constant, we can always simplify if we are a multiply
5686 or (for divide and modulus) if it is a multiple of our constant. */
5687 if (code == MULT_EXPR
5688 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5689 return const_binop (code, fold_convert (ctype, t),
5690 fold_convert (ctype, c));
5693 CASE_CONVERT: case NON_LVALUE_EXPR:
5694 /* If op0 is an expression ... */
5695 if ((COMPARISON_CLASS_P (op0)
5696 || UNARY_CLASS_P (op0)
5697 || BINARY_CLASS_P (op0)
5698 || VL_EXP_CLASS_P (op0)
5699 || EXPRESSION_CLASS_P (op0))
5700 /* ... and has wrapping overflow, and its type is smaller
5701 than ctype, then we cannot pass through as widening. */
5702 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5703 && (TYPE_PRECISION (ctype)
5704 > TYPE_PRECISION (TREE_TYPE (op0))))
5705 /* ... or this is a truncation (t is narrower than op0),
5706 then we cannot pass through this narrowing. */
5707 || (TYPE_PRECISION (type)
5708 < TYPE_PRECISION (TREE_TYPE (op0)))
5709 /* ... or signedness changes for division or modulus,
5710 then we cannot pass through this conversion. */
5711 || (code != MULT_EXPR
5712 && (TYPE_UNSIGNED (ctype)
5713 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5714 /* ... or has undefined overflow while the converted to
5715 type has not, we cannot do the operation in the inner type
5716 as that would introduce undefined overflow. */
5717 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5718 && !TYPE_OVERFLOW_UNDEFINED (type))))
5721 /* Pass the constant down and see if we can make a simplification. If
5722 we can, replace this expression with the inner simplification for
5723 possible later conversion to our or some other type. */
5724 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5725 && TREE_CODE (t2) == INTEGER_CST
5726 && !TREE_OVERFLOW (t2)
5727 && (0 != (t1 = extract_muldiv (op0, t2, code,
5729 ? ctype : NULL_TREE,
5730 strict_overflow_p))))
5735 /* If widening the type changes it from signed to unsigned, then we
5736 must avoid building ABS_EXPR itself as unsigned. */
5737 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5739 tree cstype = (*signed_type_for) (ctype);
5740 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5743 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5744 return fold_convert (ctype, t1);
5748 /* If the constant is negative, we cannot simplify this. */
5749 if (tree_int_cst_sgn (c) == -1)
5753 /* For division and modulus, type can't be unsigned, as e.g.
5754 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5755 For signed types, even with wrapping overflow, this is fine. */
5756 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5758 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5760 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5763 case MIN_EXPR: case MAX_EXPR:
5764 /* If widening the type changes the signedness, then we can't perform
5765 this optimization as that changes the result. */
5766 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5769 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5770 sub_strict_overflow_p = false;
5771 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5772 &sub_strict_overflow_p)) != 0
5773 && (t2 = extract_muldiv (op1, c, code, wide_type,
5774 &sub_strict_overflow_p)) != 0)
5776 if (tree_int_cst_sgn (c) < 0)
5777 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5778 if (sub_strict_overflow_p)
5779 *strict_overflow_p = true;
5780 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5781 fold_convert (ctype, t2));
5785 case LSHIFT_EXPR: case RSHIFT_EXPR:
5786 /* If the second operand is constant, this is a multiplication
5787 or floor division, by a power of two, so we can treat it that
5788 way unless the multiplier or divisor overflows. Signed
5789 left-shift overflow is implementation-defined rather than
5790 undefined in C90, so do not convert signed left shift into
5792 if (TREE_CODE (op1) == INTEGER_CST
5793 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5794 /* const_binop may not detect overflow correctly,
5795 so check for it explicitly here. */
5796 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5797 && 0 != (t1 = fold_convert (ctype,
5798 const_binop (LSHIFT_EXPR,
5801 && !TREE_OVERFLOW (t1))
5802 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5803 ? MULT_EXPR : FLOOR_DIV_EXPR,
5805 fold_convert (ctype, op0),
5807 c, code, wide_type, strict_overflow_p);
5810 case PLUS_EXPR: case MINUS_EXPR:
5811 /* See if we can eliminate the operation on both sides. If we can, we
5812 can return a new PLUS or MINUS. If we can't, the only remaining
5813 cases where we can do anything are if the second operand is a
5815 sub_strict_overflow_p = false;
5816 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5817 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5818 if (t1 != 0 && t2 != 0
5819 && (code == MULT_EXPR
5820 /* If not multiplication, we can only do this if both operands
5821 are divisible by c. */
5822 || (multiple_of_p (ctype, op0, c)
5823 && multiple_of_p (ctype, op1, c))))
5825 if (sub_strict_overflow_p)
5826 *strict_overflow_p = true;
5827 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5828 fold_convert (ctype, t2));
5831 /* If this was a subtraction, negate OP1 and set it to be an addition.
5832 This simplifies the logic below. */
5833 if (tcode == MINUS_EXPR)
5835 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5836 /* If OP1 was not easily negatable, the constant may be OP0. */
5837 if (TREE_CODE (op0) == INTEGER_CST)
5848 if (TREE_CODE (op1) != INTEGER_CST)
5851 /* If either OP1 or C are negative, this optimization is not safe for
5852 some of the division and remainder types while for others we need
5853 to change the code. */
5854 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5856 if (code == CEIL_DIV_EXPR)
5857 code = FLOOR_DIV_EXPR;
5858 else if (code == FLOOR_DIV_EXPR)
5859 code = CEIL_DIV_EXPR;
5860 else if (code != MULT_EXPR
5861 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5865 /* If it's a multiply or a division/modulus operation of a multiple
5866 of our constant, do the operation and verify it doesn't overflow. */
5867 if (code == MULT_EXPR
5868 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5870 op1 = const_binop (code, fold_convert (ctype, op1),
5871 fold_convert (ctype, c));
5872 /* We allow the constant to overflow with wrapping semantics. */
5874 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5880 /* If we have an unsigned type, we cannot widen the operation since it
5881 will change the result if the original computation overflowed. */
5882 if (TYPE_UNSIGNED (ctype) && ctype != type)
5885 /* If we were able to eliminate our operation from the first side,
5886 apply our operation to the second side and reform the PLUS. */
5887 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5888 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5890 /* The last case is if we are a multiply. In that case, we can
5891 apply the distributive law to commute the multiply and addition
5892 if the multiplication of the constants doesn't overflow
5893 and overflow is defined. With undefined overflow
5894 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5895 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5896 return fold_build2 (tcode, ctype,
5897 fold_build2 (code, ctype,
5898 fold_convert (ctype, op0),
5899 fold_convert (ctype, c)),
5905 /* We have a special case here if we are doing something like
5906 (C * 8) % 4 since we know that's zero. */
5907 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5908 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5909 /* If the multiplication can overflow we cannot optimize this. */
5910 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5911 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5912 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5914 *strict_overflow_p = true;
5915 return omit_one_operand (type, integer_zero_node, op0);
5918 /* ... fall through ... */
5920 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5921 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5922 /* If we can extract our operation from the LHS, do so and return a
5923 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5924 do something only if the second operand is a constant. */
5926 && (t1 = extract_muldiv (op0, c, code, wide_type,
5927 strict_overflow_p)) != 0)
5928 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5929 fold_convert (ctype, op1));
5930 else if (tcode == MULT_EXPR && code == MULT_EXPR
5931 && (t1 = extract_muldiv (op1, c, code, wide_type,
5932 strict_overflow_p)) != 0)
5933 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5934 fold_convert (ctype, t1));
5935 else if (TREE_CODE (op1) != INTEGER_CST)
5938 /* If these are the same operation types, we can associate them
5939 assuming no overflow. */
5942 bool overflow_p = false;
5943 bool overflow_mul_p;
5944 signop sign = TYPE_SIGN (ctype);
5945 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5946 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5948 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5952 wide_int_to_tree (ctype, mul));
5955 /* If these operations "cancel" each other, we have the main
5956 optimizations of this pass, which occur when either constant is a
5957 multiple of the other, in which case we replace this with either an
5958 operation or CODE or TCODE.
5960 If we have an unsigned type, we cannot do this since it will change
5961 the result if the original computation overflowed. */
5962 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5963 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5964 || (tcode == MULT_EXPR
5965 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5966 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5967 && code != MULT_EXPR)))
5969 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5971 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5972 *strict_overflow_p = true;
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5974 fold_convert (ctype,
5975 const_binop (TRUNC_DIV_EXPR,
5978 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5981 *strict_overflow_p = true;
5982 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype,
5984 const_binop (TRUNC_DIV_EXPR,
5997 /* Return a node which has the indicated constant VALUE (either 0 or
5998 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5999 and is of the indicated TYPE. */
6002 constant_boolean_node (bool value, tree type)
6004 if (type == integer_type_node)
6005 return value ? integer_one_node : integer_zero_node;
6006 else if (type == boolean_type_node)
6007 return value ? boolean_true_node : boolean_false_node;
6008 else if (TREE_CODE (type) == VECTOR_TYPE)
6009 return build_vector_from_val (type,
6010 build_int_cst (TREE_TYPE (type),
6013 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6017 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6018 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6019 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6020 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6021 COND is the first argument to CODE; otherwise (as in the example
6022 given here), it is the second argument. TYPE is the type of the
6023 original expression. Return NULL_TREE if no simplification is
6027 fold_binary_op_with_conditional_arg (location_t loc,
6028 enum tree_code code,
6029 tree type, tree op0, tree op1,
6030 tree cond, tree arg, int cond_first_p)
6032 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6033 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6034 tree test, true_value, false_value;
6035 tree lhs = NULL_TREE;
6036 tree rhs = NULL_TREE;
6037 enum tree_code cond_code = COND_EXPR;
6039 if (TREE_CODE (cond) == COND_EXPR
6040 || TREE_CODE (cond) == VEC_COND_EXPR)
6042 test = TREE_OPERAND (cond, 0);
6043 true_value = TREE_OPERAND (cond, 1);
6044 false_value = TREE_OPERAND (cond, 2);
6045 /* If this operand throws an expression, then it does not make
6046 sense to try to perform a logical or arithmetic operation
6048 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6050 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6055 tree testtype = TREE_TYPE (cond);
6057 true_value = constant_boolean_node (true, testtype);
6058 false_value = constant_boolean_node (false, testtype);
6061 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6062 cond_code = VEC_COND_EXPR;
6064 /* This transformation is only worthwhile if we don't have to wrap ARG
6065 in a SAVE_EXPR and the operation can be simplified without recursing
6066 on at least one of the branches once its pushed inside the COND_EXPR. */
6067 if (!TREE_CONSTANT (arg)
6068 && (TREE_SIDE_EFFECTS (arg)
6069 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6070 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6073 arg = fold_convert_loc (loc, arg_type, arg);
6076 true_value = fold_convert_loc (loc, cond_type, true_value);
6078 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6080 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6084 false_value = fold_convert_loc (loc, cond_type, false_value);
6086 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6088 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6091 /* Check that we have simplified at least one of the branches. */
6092 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6095 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6099 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6101 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6102 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6103 ADDEND is the same as X.
6105 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6106 and finite. The problematic cases are when X is zero, and its mode
6107 has signed zeros. In the case of rounding towards -infinity,
6108 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6109 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6112 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6114 if (!real_zerop (addend))
6117 /* Don't allow the fold with -fsignaling-nans. */
6118 if (HONOR_SNANS (TYPE_MODE (type)))
6121 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6125 /* In a vector or complex, we would need to check the sign of all zeros. */
6126 if (TREE_CODE (addend) != REAL_CST)
6129 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6130 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6133 /* The mode has signed zeros, and we have to honor their sign.
6134 In this situation, there is only one case we can return true for.
6135 X - 0 is the same as X unless rounding towards -infinity is
6137 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6140 /* Subroutine of fold() that checks comparisons of built-in math
6141 functions against real constants.
6143 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6144 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6145 is the type of the result and ARG0 and ARG1 are the operands of the
6146 comparison. ARG1 must be a TREE_REAL_CST.
6148 The function returns the constant folded tree if a simplification
6149 can be made, and NULL_TREE otherwise. */
6152 fold_mathfn_compare (location_t loc,
6153 enum built_in_function fcode, enum tree_code code,
6154 tree type, tree arg0, tree arg1)
6158 if (BUILTIN_SQRT_P (fcode))
6160 tree arg = CALL_EXPR_ARG (arg0, 0);
6161 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6163 c = TREE_REAL_CST (arg1);
6164 if (REAL_VALUE_NEGATIVE (c))
6166 /* sqrt(x) < y is always false, if y is negative. */
6167 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6168 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6170 /* sqrt(x) > y is always true, if y is negative and we
6171 don't care about NaNs, i.e. negative values of x. */
6172 if (code == NE_EXPR || !HONOR_NANS (mode))
6173 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6175 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6176 return fold_build2_loc (loc, GE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg), dconst0));
6179 else if (code == GT_EXPR || code == GE_EXPR)
6183 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6184 real_convert (&c2, mode, &c2);
6186 if (REAL_VALUE_ISINF (c2))
6188 /* sqrt(x) > y is x == +Inf, when y is very large. */
6189 if (HONOR_INFINITIES (mode))
6190 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6191 build_real (TREE_TYPE (arg), c2));
6193 /* sqrt(x) > y is always false, when y is very large
6194 and we don't care about infinities. */
6195 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6198 /* sqrt(x) > c is the same as x > c*c. */
6199 return fold_build2_loc (loc, code, type, arg,
6200 build_real (TREE_TYPE (arg), c2));
6202 else if (code == LT_EXPR || code == LE_EXPR)
6206 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6207 real_convert (&c2, mode, &c2);
6209 if (REAL_VALUE_ISINF (c2))
6211 /* sqrt(x) < y is always true, when y is a very large
6212 value and we don't care about NaNs or Infinities. */
6213 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6214 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6216 /* sqrt(x) < y is x != +Inf when y is very large and we
6217 don't care about NaNs. */
6218 if (! HONOR_NANS (mode))
6219 return fold_build2_loc (loc, NE_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6222 /* sqrt(x) < y is x >= 0 when y is very large and we
6223 don't care about Infinities. */
6224 if (! HONOR_INFINITIES (mode))
6225 return fold_build2_loc (loc, GE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), dconst0));
6228 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6229 arg = save_expr (arg);
6230 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6231 fold_build2_loc (loc, GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6234 fold_build2_loc (loc, NE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6239 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, code, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6244 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6245 arg = save_expr (arg);
6246 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6247 fold_build2_loc (loc, GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg),
6250 fold_build2_loc (loc, code, type, arg,
6251 build_real (TREE_TYPE (arg),
6259 /* Subroutine of fold() that optimizes comparisons against Infinities,
6260 either +Inf or -Inf.
6262 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6263 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6264 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6266 The function returns the constant folded tree if a simplification
6267 can be made, and NULL_TREE otherwise. */
6270 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6271 tree arg0, tree arg1)
6274 REAL_VALUE_TYPE max;
6278 mode = TYPE_MODE (TREE_TYPE (arg0));
6280 /* For negative infinity swap the sense of the comparison. */
6281 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6283 code = swap_tree_comparison (code);
6288 /* x > +Inf is always false, if with ignore sNANs. */
6289 if (HONOR_SNANS (mode))
6291 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6294 /* x <= +Inf is always true, if we don't case about NaNs. */
6295 if (! HONOR_NANS (mode))
6296 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6298 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6299 arg0 = save_expr (arg0);
6300 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6304 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6305 real_maxval (&max, neg, mode);
6306 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6307 arg0, build_real (TREE_TYPE (arg0), max));
6310 /* x < +Inf is always equal to x <= DBL_MAX. */
6311 real_maxval (&max, neg, mode);
6312 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6313 arg0, build_real (TREE_TYPE (arg0), max));
6316 /* x != +Inf is always equal to !(x > DBL_MAX). */
6317 real_maxval (&max, neg, mode);
6318 if (! HONOR_NANS (mode))
6319 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6320 arg0, build_real (TREE_TYPE (arg0), max));
6322 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6324 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6333 /* Subroutine of fold() that optimizes comparisons of a division by
6334 a nonzero integer constant against an integer constant, i.e.
6337 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6338 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6339 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6341 The function returns the constant folded tree if a simplification
6342 can be made, and NULL_TREE otherwise. */
6345 fold_div_compare (location_t loc,
6346 enum tree_code code, tree type, tree arg0, tree arg1)
6348 tree prod, tmp, hi, lo;
6349 tree arg00 = TREE_OPERAND (arg0, 0);
6350 tree arg01 = TREE_OPERAND (arg0, 1);
6351 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6352 bool neg_overflow = false;
6355 /* We have to do this the hard way to detect unsigned overflow.
6356 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6357 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6358 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6359 neg_overflow = false;
6361 if (sign == UNSIGNED)
6363 tmp = int_const_binop (MINUS_EXPR, arg01,
6364 build_int_cst (TREE_TYPE (arg01), 1));
6367 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6368 val = wi::add (prod, tmp, sign, &overflow);
6369 hi = force_fit_type (TREE_TYPE (arg00), val,
6370 -1, overflow | TREE_OVERFLOW (prod));
6372 else if (tree_int_cst_sgn (arg01) >= 0)
6374 tmp = int_const_binop (MINUS_EXPR, arg01,
6375 build_int_cst (TREE_TYPE (arg01), 1));
6376 switch (tree_int_cst_sgn (arg1))
6379 neg_overflow = true;
6380 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6385 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6390 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6400 /* A negative divisor reverses the relational operators. */
6401 code = swap_tree_comparison (code);
6403 tmp = int_const_binop (PLUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1));
6405 switch (tree_int_cst_sgn (arg1))
6408 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6413 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6418 neg_overflow = true;
6419 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6431 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6432 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6433 if (TREE_OVERFLOW (hi))
6434 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6435 if (TREE_OVERFLOW (lo))
6436 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6437 return build_range_check (loc, type, arg00, 1, lo, hi);
6440 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6441 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6442 if (TREE_OVERFLOW (hi))
6443 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6444 if (TREE_OVERFLOW (lo))
6445 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6446 return build_range_check (loc, type, arg00, 0, lo, hi);
6449 if (TREE_OVERFLOW (lo))
6451 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6452 return omit_one_operand_loc (loc, type, tmp, arg00);
6454 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6457 if (TREE_OVERFLOW (hi))
6459 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6460 return omit_one_operand_loc (loc, type, tmp, arg00);
6462 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6465 if (TREE_OVERFLOW (hi))
6467 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6468 return omit_one_operand_loc (loc, type, tmp, arg00);
6470 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6473 if (TREE_OVERFLOW (lo))
6475 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6476 return omit_one_operand_loc (loc, type, tmp, arg00);
6478 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6488 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6489 equality/inequality test, then return a simplified form of the test
6490 using a sign testing. Otherwise return NULL. TYPE is the desired
6494 fold_single_bit_test_into_sign_test (location_t loc,
6495 enum tree_code code, tree arg0, tree arg1,
6498 /* If this is testing a single bit, we can optimize the test. */
6499 if ((code == NE_EXPR || code == EQ_EXPR)
6500 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6501 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6503 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6504 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6505 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6507 if (arg00 != NULL_TREE
6508 /* This is only a win if casting to a signed type is cheap,
6509 i.e. when arg00's type is not a partial mode. */
6510 && TYPE_PRECISION (TREE_TYPE (arg00))
6511 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6513 tree stype = signed_type_for (TREE_TYPE (arg00));
6514 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6516 fold_convert_loc (loc, stype, arg00),
6517 build_int_cst (stype, 0));
6524 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6525 equality/inequality test, then return a simplified form of
6526 the test using shifts and logical operations. Otherwise return
6527 NULL. TYPE is the desired result type. */
6530 fold_single_bit_test (location_t loc, enum tree_code code,
6531 tree arg0, tree arg1, tree result_type)
6533 /* If this is testing a single bit, we can optimize the test. */
6534 if ((code == NE_EXPR || code == EQ_EXPR)
6535 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6536 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6538 tree inner = TREE_OPERAND (arg0, 0);
6539 tree type = TREE_TYPE (arg0);
6540 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6541 machine_mode operand_mode = TYPE_MODE (type);
6543 tree signed_type, unsigned_type, intermediate_type;
6546 /* First, see if we can fold the single bit test into a sign-bit
6548 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6553 /* Otherwise we have (A & C) != 0 where C is a single bit,
6554 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6555 Similarly for (A & C) == 0. */
6557 /* If INNER is a right shift of a constant and it plus BITNUM does
6558 not overflow, adjust BITNUM and INNER. */
6559 if (TREE_CODE (inner) == RSHIFT_EXPR
6560 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6561 && bitnum < TYPE_PRECISION (type)
6562 && wi::ltu_p (TREE_OPERAND (inner, 1),
6563 TYPE_PRECISION (type) - bitnum))
6565 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6566 inner = TREE_OPERAND (inner, 0);
6569 /* If we are going to be able to omit the AND below, we must do our
6570 operations as unsigned. If we must use the AND, we have a choice.
6571 Normally unsigned is faster, but for some machines signed is. */
6572 #ifdef LOAD_EXTEND_OP
6573 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6574 && !flag_syntax_only) ? 0 : 1;
6579 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6580 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6581 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6582 inner = fold_convert_loc (loc, intermediate_type, inner);
6585 inner = build2 (RSHIFT_EXPR, intermediate_type,
6586 inner, size_int (bitnum));
6588 one = build_int_cst (intermediate_type, 1);
6590 if (code == EQ_EXPR)
6591 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6593 /* Put the AND last so it can combine with more things. */
6594 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6596 /* Make sure to return the proper type. */
6597 inner = fold_convert_loc (loc, result_type, inner);
6604 /* Check whether we are allowed to reorder operands arg0 and arg1,
6605 such that the evaluation of arg1 occurs before arg0. */
6608 reorder_operands_p (const_tree arg0, const_tree arg1)
6610 if (! flag_evaluation_order)
6612 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6614 return ! TREE_SIDE_EFFECTS (arg0)
6615 && ! TREE_SIDE_EFFECTS (arg1);
6618 /* Test whether it is preferable two swap two operands, ARG0 and
6619 ARG1, for example because ARG0 is an integer constant and ARG1
6620 isn't. If REORDER is true, only recommend swapping if we can
6621 evaluate the operands in reverse order. */
6624 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6626 if (CONSTANT_CLASS_P (arg1))
6628 if (CONSTANT_CLASS_P (arg0))
6631 STRIP_SIGN_NOPS (arg0);
6632 STRIP_SIGN_NOPS (arg1);
6634 if (TREE_CONSTANT (arg1))
6636 if (TREE_CONSTANT (arg0))
6639 if (reorder && flag_evaluation_order
6640 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6643 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6644 for commutative and comparison operators. Ensuring a canonical
6645 form allows the optimizers to find additional redundancies without
6646 having to explicitly check for both orderings. */
6647 if (TREE_CODE (arg0) == SSA_NAME
6648 && TREE_CODE (arg1) == SSA_NAME
6649 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6652 /* Put SSA_NAMEs last. */
6653 if (TREE_CODE (arg1) == SSA_NAME)
6655 if (TREE_CODE (arg0) == SSA_NAME)
6658 /* Put variables last. */
6667 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6668 ARG0 is extended to a wider type. */
6671 fold_widened_comparison (location_t loc, enum tree_code code,
6672 tree type, tree arg0, tree arg1)
6674 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6676 tree shorter_type, outer_type;
6680 if (arg0_unw == arg0)
6682 shorter_type = TREE_TYPE (arg0_unw);
6684 #ifdef HAVE_canonicalize_funcptr_for_compare
6685 /* Disable this optimization if we're casting a function pointer
6686 type on targets that require function pointer canonicalization. */
6687 if (HAVE_canonicalize_funcptr_for_compare
6688 && TREE_CODE (shorter_type) == POINTER_TYPE
6689 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6693 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6696 arg1_unw = get_unwidened (arg1, NULL_TREE);
6698 /* If possible, express the comparison in the shorter mode. */
6699 if ((code == EQ_EXPR || code == NE_EXPR
6700 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6701 && (TREE_TYPE (arg1_unw) == shorter_type
6702 || ((TYPE_PRECISION (shorter_type)
6703 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6704 && (TYPE_UNSIGNED (shorter_type)
6705 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6706 || (TREE_CODE (arg1_unw) == INTEGER_CST
6707 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6708 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6709 && int_fits_type_p (arg1_unw, shorter_type))))
6710 return fold_build2_loc (loc, code, type, arg0_unw,
6711 fold_convert_loc (loc, shorter_type, arg1_unw));
6713 if (TREE_CODE (arg1_unw) != INTEGER_CST
6714 || TREE_CODE (shorter_type) != INTEGER_TYPE
6715 || !int_fits_type_p (arg1_unw, shorter_type))
6718 /* If we are comparing with the integer that does not fit into the range
6719 of the shorter type, the result is known. */
6720 outer_type = TREE_TYPE (arg1_unw);
6721 min = lower_bound_in_type (outer_type, shorter_type);
6722 max = upper_bound_in_type (outer_type, shorter_type);
6724 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6726 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6733 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6738 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6744 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6746 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6751 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6753 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6762 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6763 ARG0 just the signedness is changed. */
6766 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6767 tree arg0, tree arg1)
6770 tree inner_type, outer_type;
6772 if (!CONVERT_EXPR_P (arg0))
6775 outer_type = TREE_TYPE (arg0);
6776 arg0_inner = TREE_OPERAND (arg0, 0);
6777 inner_type = TREE_TYPE (arg0_inner);
6779 #ifdef HAVE_canonicalize_funcptr_for_compare
6780 /* Disable this optimization if we're casting a function pointer
6781 type on targets that require function pointer canonicalization. */
6782 if (HAVE_canonicalize_funcptr_for_compare
6783 && TREE_CODE (inner_type) == POINTER_TYPE
6784 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6788 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6791 if (TREE_CODE (arg1) != INTEGER_CST
6792 && !(CONVERT_EXPR_P (arg1)
6793 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6796 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6801 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6804 if (TREE_CODE (arg1) == INTEGER_CST)
6805 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6806 TREE_OVERFLOW (arg1));
6808 arg1 = fold_convert_loc (loc, inner_type, arg1);
6810 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6814 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6815 means A >= Y && A != MAX, but in this case we know that
6816 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6819 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6821 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6823 if (TREE_CODE (bound) == LT_EXPR)
6824 a = TREE_OPERAND (bound, 0);
6825 else if (TREE_CODE (bound) == GT_EXPR)
6826 a = TREE_OPERAND (bound, 1);
6830 typea = TREE_TYPE (a);
6831 if (!INTEGRAL_TYPE_P (typea)
6832 && !POINTER_TYPE_P (typea))
6835 if (TREE_CODE (ineq) == LT_EXPR)
6837 a1 = TREE_OPERAND (ineq, 1);
6838 y = TREE_OPERAND (ineq, 0);
6840 else if (TREE_CODE (ineq) == GT_EXPR)
6842 a1 = TREE_OPERAND (ineq, 0);
6843 y = TREE_OPERAND (ineq, 1);
6848 if (TREE_TYPE (a1) != typea)
6851 if (POINTER_TYPE_P (typea))
6853 /* Convert the pointer types into integer before taking the difference. */
6854 tree ta = fold_convert_loc (loc, ssizetype, a);
6855 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6856 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6859 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6861 if (!diff || !integer_onep (diff))
6864 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6867 /* Fold a sum or difference of at least one multiplication.
6868 Returns the folded tree or NULL if no simplification could be made. */
6871 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6872 tree arg0, tree arg1)
6874 tree arg00, arg01, arg10, arg11;
6875 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6877 /* (A * C) +- (B * C) -> (A+-B) * C.
6878 (A * C) +- A -> A * (C+-1).
6879 We are most concerned about the case where C is a constant,
6880 but other combinations show up during loop reduction. Since
6881 it is not difficult, try all four possibilities. */
6883 if (TREE_CODE (arg0) == MULT_EXPR)
6885 arg00 = TREE_OPERAND (arg0, 0);
6886 arg01 = TREE_OPERAND (arg0, 1);
6888 else if (TREE_CODE (arg0) == INTEGER_CST)
6890 arg00 = build_one_cst (type);
6895 /* We cannot generate constant 1 for fract. */
6896 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6899 arg01 = build_one_cst (type);
6901 if (TREE_CODE (arg1) == MULT_EXPR)
6903 arg10 = TREE_OPERAND (arg1, 0);
6904 arg11 = TREE_OPERAND (arg1, 1);
6906 else if (TREE_CODE (arg1) == INTEGER_CST)
6908 arg10 = build_one_cst (type);
6909 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6910 the purpose of this canonicalization. */
6911 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6912 && negate_expr_p (arg1)
6913 && code == PLUS_EXPR)
6915 arg11 = negate_expr (arg1);
6923 /* We cannot generate constant 1 for fract. */
6924 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6927 arg11 = build_one_cst (type);
6931 if (operand_equal_p (arg01, arg11, 0))
6932 same = arg01, alt0 = arg00, alt1 = arg10;
6933 else if (operand_equal_p (arg00, arg10, 0))
6934 same = arg00, alt0 = arg01, alt1 = arg11;
6935 else if (operand_equal_p (arg00, arg11, 0))
6936 same = arg00, alt0 = arg01, alt1 = arg10;
6937 else if (operand_equal_p (arg01, arg10, 0))
6938 same = arg01, alt0 = arg00, alt1 = arg11;
6940 /* No identical multiplicands; see if we can find a common
6941 power-of-two factor in non-power-of-two multiplies. This
6942 can help in multi-dimensional array access. */
6943 else if (tree_fits_shwi_p (arg01)
6944 && tree_fits_shwi_p (arg11))
6946 HOST_WIDE_INT int01, int11, tmp;
6949 int01 = tree_to_shwi (arg01);
6950 int11 = tree_to_shwi (arg11);
6952 /* Move min of absolute values to int11. */
6953 if (absu_hwi (int01) < absu_hwi (int11))
6955 tmp = int01, int01 = int11, int11 = tmp;
6956 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6963 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6964 /* The remainder should not be a constant, otherwise we
6965 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6966 increased the number of multiplications necessary. */
6967 && TREE_CODE (arg10) != INTEGER_CST)
6969 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6970 build_int_cst (TREE_TYPE (arg00),
6975 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6980 return fold_build2_loc (loc, MULT_EXPR, type,
6981 fold_build2_loc (loc, code, type,
6982 fold_convert_loc (loc, type, alt0),
6983 fold_convert_loc (loc, type, alt1)),
6984 fold_convert_loc (loc, type, same));
6989 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6990 specified by EXPR into the buffer PTR of length LEN bytes.
6991 Return the number of bytes placed in the buffer, or zero
6995 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6997 tree type = TREE_TYPE (expr);
6998 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6999 int byte, offset, word, words;
7000 unsigned char value;
7002 if ((off == -1 && total_bytes > len)
7003 || off >= total_bytes)
7007 words = total_bytes / UNITS_PER_WORD;
7009 for (byte = 0; byte < total_bytes; byte++)
7011 int bitpos = byte * BITS_PER_UNIT;
7012 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7014 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7016 if (total_bytes > UNITS_PER_WORD)
7018 word = byte / UNITS_PER_WORD;
7019 if (WORDS_BIG_ENDIAN)
7020 word = (words - 1) - word;
7021 offset = word * UNITS_PER_WORD;
7022 if (BYTES_BIG_ENDIAN)
7023 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7025 offset += byte % UNITS_PER_WORD;
7028 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7030 && offset - off < len)
7031 ptr[offset - off] = value;
7033 return MIN (len, total_bytes - off);
7037 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7038 specified by EXPR into the buffer PTR of length LEN bytes.
7039 Return the number of bytes placed in the buffer, or zero
7043 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7045 tree type = TREE_TYPE (expr);
7046 machine_mode mode = TYPE_MODE (type);
7047 int total_bytes = GET_MODE_SIZE (mode);
7048 FIXED_VALUE_TYPE value;
7049 tree i_value, i_type;
7051 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7054 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7056 if (NULL_TREE == i_type
7057 || TYPE_PRECISION (i_type) != total_bytes)
7060 value = TREE_FIXED_CST (expr);
7061 i_value = double_int_to_tree (i_type, value.data);
7063 return native_encode_int (i_value, ptr, len, off);
7067 /* Subroutine of native_encode_expr. Encode the REAL_CST
7068 specified by EXPR into the buffer PTR of length LEN bytes.
7069 Return the number of bytes placed in the buffer, or zero
7073 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7075 tree type = TREE_TYPE (expr);
7076 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7077 int byte, offset, word, words, bitpos;
7078 unsigned char value;
7080 /* There are always 32 bits in each long, no matter the size of
7081 the hosts long. We handle floating point representations with
7085 if ((off == -1 && total_bytes > len)
7086 || off >= total_bytes)
7090 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7092 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7094 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7095 bitpos += BITS_PER_UNIT)
7097 byte = (bitpos / BITS_PER_UNIT) & 3;
7098 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7100 if (UNITS_PER_WORD < 4)
7102 word = byte / UNITS_PER_WORD;
7103 if (WORDS_BIG_ENDIAN)
7104 word = (words - 1) - word;
7105 offset = word * UNITS_PER_WORD;
7106 if (BYTES_BIG_ENDIAN)
7107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7109 offset += byte % UNITS_PER_WORD;
7112 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7113 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7115 && offset - off < len)
7116 ptr[offset - off] = value;
7118 return MIN (len, total_bytes - off);
7121 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7127 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7132 part = TREE_REALPART (expr);
7133 rsize = native_encode_expr (part, ptr, len, off);
7137 part = TREE_IMAGPART (expr);
7139 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7140 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7144 return rsize + isize;
7148 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7154 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7161 count = VECTOR_CST_NELTS (expr);
7162 itype = TREE_TYPE (TREE_TYPE (expr));
7163 size = GET_MODE_SIZE (TYPE_MODE (itype));
7164 for (i = 0; i < count; i++)
7171 elem = VECTOR_CST_ELT (expr, i);
7172 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7173 if ((off == -1 && res != size)
7186 /* Subroutine of native_encode_expr. Encode the STRING_CST
7187 specified by EXPR into the buffer PTR of length LEN bytes.
7188 Return the number of bytes placed in the buffer, or zero
7192 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7194 tree type = TREE_TYPE (expr);
7195 HOST_WIDE_INT total_bytes;
7197 if (TREE_CODE (type) != ARRAY_TYPE
7198 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7199 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7200 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7202 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7203 if ((off == -1 && total_bytes > len)
7204 || off >= total_bytes)
7208 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7211 if (off < TREE_STRING_LENGTH (expr))
7213 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7214 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7216 memset (ptr + written, 0,
7217 MIN (total_bytes - written, len - written));
7220 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7221 return MIN (total_bytes - off, len);
7225 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7226 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7227 buffer PTR of length LEN bytes. If OFF is not -1 then start
7228 the encoding at byte offset OFF and encode at most LEN bytes.
7229 Return the number of bytes placed in the buffer, or zero upon failure. */
7232 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7234 switch (TREE_CODE (expr))
7237 return native_encode_int (expr, ptr, len, off);
7240 return native_encode_real (expr, ptr, len, off);
7243 return native_encode_fixed (expr, ptr, len, off);
7246 return native_encode_complex (expr, ptr, len, off);
7249 return native_encode_vector (expr, ptr, len, off);
7252 return native_encode_string (expr, ptr, len, off);
7260 /* Subroutine of native_interpret_expr. Interpret the contents of
7261 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7262 If the buffer cannot be interpreted, return NULL_TREE. */
7265 native_interpret_int (tree type, const unsigned char *ptr, int len)
7267 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7269 if (total_bytes > len
7270 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7273 wide_int result = wi::from_buffer (ptr, total_bytes);
7275 return wide_int_to_tree (type, result);
7279 /* Subroutine of native_interpret_expr. Interpret the contents of
7280 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7281 If the buffer cannot be interpreted, return NULL_TREE. */
7284 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7286 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7288 FIXED_VALUE_TYPE fixed_value;
7290 if (total_bytes > len
7291 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7294 result = double_int::from_buffer (ptr, total_bytes);
7295 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7297 return build_fixed (type, fixed_value);
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7306 native_interpret_real (tree type, const unsigned char *ptr, int len)
7308 machine_mode mode = TYPE_MODE (type);
7309 int total_bytes = GET_MODE_SIZE (mode);
7310 int byte, offset, word, words, bitpos;
7311 unsigned char value;
7312 /* There are always 32 bits in each long, no matter the size of
7313 the hosts long. We handle floating point representations with
7318 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7319 if (total_bytes > len || total_bytes > 24)
7321 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7323 memset (tmp, 0, sizeof (tmp));
7324 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7325 bitpos += BITS_PER_UNIT)
7327 byte = (bitpos / BITS_PER_UNIT) & 3;
7328 if (UNITS_PER_WORD < 4)
7330 word = byte / UNITS_PER_WORD;
7331 if (WORDS_BIG_ENDIAN)
7332 word = (words - 1) - word;
7333 offset = word * UNITS_PER_WORD;
7334 if (BYTES_BIG_ENDIAN)
7335 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7337 offset += byte % UNITS_PER_WORD;
7340 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7341 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7343 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7346 real_from_target (&r, tmp, mode);
7347 return build_real (type, r);
7351 /* Subroutine of native_interpret_expr. Interpret the contents of
7352 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7353 If the buffer cannot be interpreted, return NULL_TREE. */
7356 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7358 tree etype, rpart, ipart;
7361 etype = TREE_TYPE (type);
7362 size = GET_MODE_SIZE (TYPE_MODE (etype));
7365 rpart = native_interpret_expr (etype, ptr, size);
7368 ipart = native_interpret_expr (etype, ptr+size, size);
7371 return build_complex (type, rpart, ipart);
7375 /* Subroutine of native_interpret_expr. Interpret the contents of
7376 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7377 If the buffer cannot be interpreted, return NULL_TREE. */
7380 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7386 etype = TREE_TYPE (type);
7387 size = GET_MODE_SIZE (TYPE_MODE (etype));
7388 count = TYPE_VECTOR_SUBPARTS (type);
7389 if (size * count > len)
7392 elements = XALLOCAVEC (tree, count);
7393 for (i = count - 1; i >= 0; i--)
7395 elem = native_interpret_expr (etype, ptr+(i*size), size);
7400 return build_vector (type, elements);
7404 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a constant of type TYPE. For
7406 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7407 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7408 return NULL_TREE. */
7411 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7413 switch (TREE_CODE (type))
7419 case REFERENCE_TYPE:
7420 return native_interpret_int (type, ptr, len);
7423 return native_interpret_real (type, ptr, len);
7425 case FIXED_POINT_TYPE:
7426 return native_interpret_fixed (type, ptr, len);
7429 return native_interpret_complex (type, ptr, len);
7432 return native_interpret_vector (type, ptr, len);
7439 /* Returns true if we can interpret the contents of a native encoding
7443 can_native_interpret_type_p (tree type)
7445 switch (TREE_CODE (type))
7451 case REFERENCE_TYPE:
7452 case FIXED_POINT_TYPE:
7462 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7463 TYPE at compile-time. If we're unable to perform the conversion
7464 return NULL_TREE. */
7467 fold_view_convert_expr (tree type, tree expr)
7469 /* We support up to 512-bit values (for V8DFmode). */
7470 unsigned char buffer[64];
7473 /* Check that the host and target are sane. */
7474 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7477 len = native_encode_expr (expr, buffer, sizeof (buffer));
7481 return native_interpret_expr (type, buffer, len);
7484 /* Build an expression for the address of T. Folds away INDIRECT_REF
7485 to avoid confusing the gimplify process. */
7488 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7490 /* The size of the object is not relevant when talking about its address. */
7491 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7492 t = TREE_OPERAND (t, 0);
7494 if (TREE_CODE (t) == INDIRECT_REF)
7496 t = TREE_OPERAND (t, 0);
7498 if (TREE_TYPE (t) != ptrtype)
7499 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7501 else if (TREE_CODE (t) == MEM_REF
7502 && integer_zerop (TREE_OPERAND (t, 1)))
7503 return TREE_OPERAND (t, 0);
7504 else if (TREE_CODE (t) == MEM_REF
7505 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7506 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7507 TREE_OPERAND (t, 0),
7508 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7509 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7511 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7513 if (TREE_TYPE (t) != ptrtype)
7514 t = fold_convert_loc (loc, ptrtype, t);
7517 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7522 /* Build an expression for the address of T. */
7525 build_fold_addr_expr_loc (location_t loc, tree t)
7527 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7529 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7532 static bool vec_cst_ctor_to_array (tree, tree *);
7534 /* Fold a unary expression of code CODE and type TYPE with operand
7535 OP0. Return the folded expression if folding is successful.
7536 Otherwise, return NULL_TREE. */
7539 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7543 enum tree_code_class kind = TREE_CODE_CLASS (code);
7545 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7546 && TREE_CODE_LENGTH (code) == 1);
7548 tem = generic_simplify (loc, code, type, op0);
7555 if (CONVERT_EXPR_CODE_P (code)
7556 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7558 /* Don't use STRIP_NOPS, because signedness of argument type
7560 STRIP_SIGN_NOPS (arg0);
7564 /* Strip any conversions that don't change the mode. This
7565 is safe for every expression, except for a comparison
7566 expression because its signedness is derived from its
7569 Note that this is done as an internal manipulation within
7570 the constant folder, in order to find the simplest
7571 representation of the arguments so that their form can be
7572 studied. In any cases, the appropriate type conversions
7573 should be put back in the tree that will get out of the
7579 if (TREE_CODE_CLASS (code) == tcc_unary)
7581 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7582 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7583 fold_build1_loc (loc, code, type,
7584 fold_convert_loc (loc, TREE_TYPE (op0),
7585 TREE_OPERAND (arg0, 1))));
7586 else if (TREE_CODE (arg0) == COND_EXPR)
7588 tree arg01 = TREE_OPERAND (arg0, 1);
7589 tree arg02 = TREE_OPERAND (arg0, 2);
7590 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7591 arg01 = fold_build1_loc (loc, code, type,
7592 fold_convert_loc (loc,
7593 TREE_TYPE (op0), arg01));
7594 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7595 arg02 = fold_build1_loc (loc, code, type,
7596 fold_convert_loc (loc,
7597 TREE_TYPE (op0), arg02));
7598 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7601 /* If this was a conversion, and all we did was to move into
7602 inside the COND_EXPR, bring it back out. But leave it if
7603 it is a conversion from integer to integer and the
7604 result precision is no wider than a word since such a
7605 conversion is cheap and may be optimized away by combine,
7606 while it couldn't if it were outside the COND_EXPR. Then return
7607 so we don't get into an infinite recursion loop taking the
7608 conversion out and then back in. */
7610 if ((CONVERT_EXPR_CODE_P (code)
7611 || code == NON_LVALUE_EXPR)
7612 && TREE_CODE (tem) == COND_EXPR
7613 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7614 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7615 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7616 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7617 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7618 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7619 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7621 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7622 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7623 || flag_syntax_only))
7624 tem = build1_loc (loc, code, type,
7626 TREE_TYPE (TREE_OPERAND
7627 (TREE_OPERAND (tem, 1), 0)),
7628 TREE_OPERAND (tem, 0),
7629 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7630 TREE_OPERAND (TREE_OPERAND (tem, 2),
7638 case NON_LVALUE_EXPR:
7639 if (!maybe_lvalue_p (op0))
7640 return fold_convert_loc (loc, type, op0);
7645 case FIX_TRUNC_EXPR:
7646 if (COMPARISON_CLASS_P (op0))
7648 /* If we have (type) (a CMP b) and type is an integral type, return
7649 new expression involving the new type. Canonicalize
7650 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7652 Do not fold the result as that would not simplify further, also
7653 folding again results in recursions. */
7654 if (TREE_CODE (type) == BOOLEAN_TYPE)
7655 return build2_loc (loc, TREE_CODE (op0), type,
7656 TREE_OPERAND (op0, 0),
7657 TREE_OPERAND (op0, 1));
7658 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7659 && TREE_CODE (type) != VECTOR_TYPE)
7660 return build3_loc (loc, COND_EXPR, type, op0,
7661 constant_boolean_node (true, type),
7662 constant_boolean_node (false, type));
7665 /* Handle (T *)&A.B.C for A being of type T and B and C
7666 living at offset zero. This occurs frequently in
7667 C++ upcasting and then accessing the base. */
7668 if (TREE_CODE (op0) == ADDR_EXPR
7669 && POINTER_TYPE_P (type)
7670 && handled_component_p (TREE_OPERAND (op0, 0)))
7672 HOST_WIDE_INT bitsize, bitpos;
7675 int unsignedp, volatilep;
7676 tree base = TREE_OPERAND (op0, 0);
7677 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7678 &mode, &unsignedp, &volatilep, false);
7679 /* If the reference was to a (constant) zero offset, we can use
7680 the address of the base if it has the same base type
7681 as the result type and the pointer type is unqualified. */
7682 if (! offset && bitpos == 0
7683 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7684 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7685 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7686 return fold_convert_loc (loc, type,
7687 build_fold_addr_expr_loc (loc, base));
7690 if (TREE_CODE (op0) == MODIFY_EXPR
7691 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7692 /* Detect assigning a bitfield. */
7693 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7695 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7697 /* Don't leave an assignment inside a conversion
7698 unless assigning a bitfield. */
7699 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7700 /* First do the assignment, then return converted constant. */
7701 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7702 TREE_NO_WARNING (tem) = 1;
7703 TREE_USED (tem) = 1;
7707 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7708 constants (if x has signed type, the sign bit cannot be set
7709 in c). This folds extension into the BIT_AND_EXPR.
7710 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7711 very likely don't have maximal range for their precision and this
7712 transformation effectively doesn't preserve non-maximal ranges. */
7713 if (TREE_CODE (type) == INTEGER_TYPE
7714 && TREE_CODE (op0) == BIT_AND_EXPR
7715 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7717 tree and_expr = op0;
7718 tree and0 = TREE_OPERAND (and_expr, 0);
7719 tree and1 = TREE_OPERAND (and_expr, 1);
7722 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7723 || (TYPE_PRECISION (type)
7724 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7726 else if (TYPE_PRECISION (TREE_TYPE (and1))
7727 <= HOST_BITS_PER_WIDE_INT
7728 && tree_fits_uhwi_p (and1))
7730 unsigned HOST_WIDE_INT cst;
7732 cst = tree_to_uhwi (and1);
7733 cst &= HOST_WIDE_INT_M1U
7734 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7735 change = (cst == 0);
7736 #ifdef LOAD_EXTEND_OP
7738 && !flag_syntax_only
7739 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7742 tree uns = unsigned_type_for (TREE_TYPE (and0));
7743 and0 = fold_convert_loc (loc, uns, and0);
7744 and1 = fold_convert_loc (loc, uns, and1);
7750 tem = force_fit_type (type, wi::to_widest (and1), 0,
7751 TREE_OVERFLOW (and1));
7752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7753 fold_convert_loc (loc, type, and0), tem);
7757 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7758 when one of the new casts will fold away. Conservatively we assume
7759 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7760 if (POINTER_TYPE_P (type)
7761 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7762 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7763 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7764 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7765 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7767 tree arg00 = TREE_OPERAND (arg0, 0);
7768 tree arg01 = TREE_OPERAND (arg0, 1);
7770 return fold_build_pointer_plus_loc
7771 (loc, fold_convert_loc (loc, type, arg00), arg01);
7774 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7775 of the same precision, and X is an integer type not narrower than
7776 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7777 if (INTEGRAL_TYPE_P (type)
7778 && TREE_CODE (op0) == BIT_NOT_EXPR
7779 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7780 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7781 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7783 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7784 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7785 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7786 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7787 fold_convert_loc (loc, type, tem));
7790 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7791 type of X and Y (integer types only). */
7792 if (INTEGRAL_TYPE_P (type)
7793 && TREE_CODE (op0) == MULT_EXPR
7794 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7795 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7797 /* Be careful not to introduce new overflows. */
7799 if (TYPE_OVERFLOW_WRAPS (type))
7802 mult_type = unsigned_type_for (type);
7804 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7806 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7807 fold_convert_loc (loc, mult_type,
7808 TREE_OPERAND (op0, 0)),
7809 fold_convert_loc (loc, mult_type,
7810 TREE_OPERAND (op0, 1)));
7811 return fold_convert_loc (loc, type, tem);
7815 tem = fold_convert_const (code, type, arg0);
7816 return tem ? tem : NULL_TREE;
7818 case ADDR_SPACE_CONVERT_EXPR:
7819 if (integer_zerop (arg0))
7820 return fold_convert_const (code, type, arg0);
7823 case FIXED_CONVERT_EXPR:
7824 tem = fold_convert_const (code, type, arg0);
7825 return tem ? tem : NULL_TREE;
7827 case VIEW_CONVERT_EXPR:
7828 if (TREE_CODE (op0) == MEM_REF)
7829 return fold_build2_loc (loc, MEM_REF, type,
7830 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7832 return fold_view_convert_expr (type, op0);
7835 tem = fold_negate_expr (loc, arg0);
7837 return fold_convert_loc (loc, type, tem);
7841 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7842 return fold_abs_const (arg0, type);
7843 /* Convert fabs((double)float) into (double)fabsf(float). */
7844 else if (TREE_CODE (arg0) == NOP_EXPR
7845 && TREE_CODE (type) == REAL_TYPE)
7847 tree targ0 = strip_float_extensions (arg0);
7849 return fold_convert_loc (loc, type,
7850 fold_build1_loc (loc, ABS_EXPR,
7854 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7855 else if (TREE_CODE (arg0) == ABS_EXPR)
7858 /* Strip sign ops from argument. */
7859 if (TREE_CODE (type) == REAL_TYPE)
7861 tem = fold_strip_sign_ops (arg0);
7863 return fold_build1_loc (loc, ABS_EXPR, type,
7864 fold_convert_loc (loc, type, tem));
7869 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7870 return fold_convert_loc (loc, type, arg0);
7871 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7873 tree itype = TREE_TYPE (type);
7874 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7875 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7876 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7877 negate_expr (ipart));
7879 if (TREE_CODE (arg0) == COMPLEX_CST)
7881 tree itype = TREE_TYPE (type);
7882 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7883 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7884 return build_complex (type, rpart, negate_expr (ipart));
7886 if (TREE_CODE (arg0) == CONJ_EXPR)
7887 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7891 if (TREE_CODE (arg0) == INTEGER_CST)
7892 return fold_not_const (arg0, type);
7893 /* Convert ~ (-A) to A - 1. */
7894 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7895 return fold_build2_loc (loc, MINUS_EXPR, type,
7896 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7897 build_int_cst (type, 1));
7898 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7899 else if (INTEGRAL_TYPE_P (type)
7900 && ((TREE_CODE (arg0) == MINUS_EXPR
7901 && integer_onep (TREE_OPERAND (arg0, 1)))
7902 || (TREE_CODE (arg0) == PLUS_EXPR
7903 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7904 return fold_build1_loc (loc, NEGATE_EXPR, type,
7905 fold_convert_loc (loc, type,
7906 TREE_OPERAND (arg0, 0)));
7907 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7908 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7909 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7910 fold_convert_loc (loc, type,
7911 TREE_OPERAND (arg0, 0)))))
7912 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7913 fold_convert_loc (loc, type,
7914 TREE_OPERAND (arg0, 1)));
7915 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7916 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7917 fold_convert_loc (loc, type,
7918 TREE_OPERAND (arg0, 1)))))
7919 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7920 fold_convert_loc (loc, type,
7921 TREE_OPERAND (arg0, 0)), tem);
7922 /* Perform BIT_NOT_EXPR on each element individually. */
7923 else if (TREE_CODE (arg0) == VECTOR_CST)
7927 unsigned count = VECTOR_CST_NELTS (arg0), i;
7929 elements = XALLOCAVEC (tree, count);
7930 for (i = 0; i < count; i++)
7932 elem = VECTOR_CST_ELT (arg0, i);
7933 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7934 if (elem == NULL_TREE)
7939 return build_vector (type, elements);
7944 case TRUTH_NOT_EXPR:
7945 /* Note that the operand of this must be an int
7946 and its values must be 0 or 1.
7947 ("true" is a fixed value perhaps depending on the language,
7948 but we don't handle values other than 1 correctly yet.) */
7949 tem = fold_truth_not_expr (loc, arg0);
7952 return fold_convert_loc (loc, type, tem);
7955 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7956 return fold_convert_loc (loc, type, arg0);
7957 if (TREE_CODE (arg0) == COMPLEX_CST)
7958 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7959 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7961 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7962 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7963 fold_build1_loc (loc, REALPART_EXPR, itype,
7964 TREE_OPERAND (arg0, 0)),
7965 fold_build1_loc (loc, REALPART_EXPR, itype,
7966 TREE_OPERAND (arg0, 1)));
7967 return fold_convert_loc (loc, type, tem);
7969 if (TREE_CODE (arg0) == CONJ_EXPR)
7971 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7972 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7973 TREE_OPERAND (arg0, 0));
7974 return fold_convert_loc (loc, type, tem);
7976 if (TREE_CODE (arg0) == CALL_EXPR)
7978 tree fn = get_callee_fndecl (arg0);
7979 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7980 switch (DECL_FUNCTION_CODE (fn))
7982 CASE_FLT_FN (BUILT_IN_CEXPI):
7983 fn = mathfn_built_in (type, BUILT_IN_COS);
7985 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7995 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7996 return build_zero_cst (type);
7997 if (TREE_CODE (arg0) == COMPLEX_CST)
7998 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
7999 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8001 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8002 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8003 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8004 TREE_OPERAND (arg0, 0)),
8005 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8006 TREE_OPERAND (arg0, 1)));
8007 return fold_convert_loc (loc, type, tem);
8009 if (TREE_CODE (arg0) == CONJ_EXPR)
8011 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8012 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8013 return fold_convert_loc (loc, type, negate_expr (tem));
8015 if (TREE_CODE (arg0) == CALL_EXPR)
8017 tree fn = get_callee_fndecl (arg0);
8018 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8019 switch (DECL_FUNCTION_CODE (fn))
8021 CASE_FLT_FN (BUILT_IN_CEXPI):
8022 fn = mathfn_built_in (type, BUILT_IN_SIN);
8024 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8034 /* Fold *&X to X if X is an lvalue. */
8035 if (TREE_CODE (op0) == ADDR_EXPR)
8037 tree op00 = TREE_OPERAND (op0, 0);
8038 if ((TREE_CODE (op00) == VAR_DECL
8039 || TREE_CODE (op00) == PARM_DECL
8040 || TREE_CODE (op00) == RESULT_DECL)
8041 && !TREE_READONLY (op00))
8046 case VEC_UNPACK_LO_EXPR:
8047 case VEC_UNPACK_HI_EXPR:
8048 case VEC_UNPACK_FLOAT_LO_EXPR:
8049 case VEC_UNPACK_FLOAT_HI_EXPR:
8051 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8053 enum tree_code subcode;
8055 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8056 if (TREE_CODE (arg0) != VECTOR_CST)
8059 elts = XALLOCAVEC (tree, nelts * 2);
8060 if (!vec_cst_ctor_to_array (arg0, elts))
8063 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8064 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8067 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8070 subcode = FLOAT_EXPR;
8072 for (i = 0; i < nelts; i++)
8074 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8075 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8079 return build_vector (type, elts);
8082 case REDUC_MIN_EXPR:
8083 case REDUC_MAX_EXPR:
8084 case REDUC_PLUS_EXPR:
8086 unsigned int nelts, i;
8088 enum tree_code subcode;
8090 if (TREE_CODE (op0) != VECTOR_CST)
8092 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8094 elts = XALLOCAVEC (tree, nelts);
8095 if (!vec_cst_ctor_to_array (op0, elts))
8100 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8101 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8102 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8103 default: gcc_unreachable ();
8106 for (i = 1; i < nelts; i++)
8108 elts[0] = const_binop (subcode, elts[0], elts[i]);
8109 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8118 } /* switch (code) */
8122 /* If the operation was a conversion do _not_ mark a resulting constant
8123 with TREE_OVERFLOW if the original constant was not. These conversions
8124 have implementation defined behavior and retaining the TREE_OVERFLOW
8125 flag here would confuse later passes such as VRP. */
8127 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8128 tree type, tree op0)
8130 tree res = fold_unary_loc (loc, code, type, op0);
8132 && TREE_CODE (res) == INTEGER_CST
8133 && TREE_CODE (op0) == INTEGER_CST
8134 && CONVERT_EXPR_CODE_P (code))
8135 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8140 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8141 operands OP0 and OP1. LOC is the location of the resulting expression.
8142 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8143 Return the folded expression if folding is successful. Otherwise,
8144 return NULL_TREE. */
8146 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8147 tree arg0, tree arg1, tree op0, tree op1)
8151 /* We only do these simplifications if we are optimizing. */
8155 /* Check for things like (A || B) && (A || C). We can convert this
8156 to A || (B && C). Note that either operator can be any of the four
8157 truth and/or operations and the transformation will still be
8158 valid. Also note that we only care about order for the
8159 ANDIF and ORIF operators. If B contains side effects, this
8160 might change the truth-value of A. */
8161 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8162 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8163 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8164 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8165 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8166 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8168 tree a00 = TREE_OPERAND (arg0, 0);
8169 tree a01 = TREE_OPERAND (arg0, 1);
8170 tree a10 = TREE_OPERAND (arg1, 0);
8171 tree a11 = TREE_OPERAND (arg1, 1);
8172 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8173 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8174 && (code == TRUTH_AND_EXPR
8175 || code == TRUTH_OR_EXPR));
8177 if (operand_equal_p (a00, a10, 0))
8178 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8179 fold_build2_loc (loc, code, type, a01, a11));
8180 else if (commutative && operand_equal_p (a00, a11, 0))
8181 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8182 fold_build2_loc (loc, code, type, a01, a10));
8183 else if (commutative && operand_equal_p (a01, a10, 0))
8184 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8185 fold_build2_loc (loc, code, type, a00, a11));
8187 /* This case if tricky because we must either have commutative
8188 operators or else A10 must not have side-effects. */
8190 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8191 && operand_equal_p (a01, a11, 0))
8192 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8193 fold_build2_loc (loc, code, type, a00, a10),
8197 /* See if we can build a range comparison. */
8198 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8201 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8202 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8204 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8206 return fold_build2_loc (loc, code, type, tem, arg1);
8209 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8210 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8212 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8214 return fold_build2_loc (loc, code, type, arg0, tem);
8217 /* Check for the possibility of merging component references. If our
8218 lhs is another similar operation, try to merge its rhs with our
8219 rhs. Then try to merge our lhs and rhs. */
8220 if (TREE_CODE (arg0) == code
8221 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8222 TREE_OPERAND (arg0, 1), arg1)))
8223 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8225 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8228 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8229 && (code == TRUTH_AND_EXPR
8230 || code == TRUTH_ANDIF_EXPR
8231 || code == TRUTH_OR_EXPR
8232 || code == TRUTH_ORIF_EXPR))
8234 enum tree_code ncode, icode;
8236 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8237 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8238 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8240 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8241 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8242 We don't want to pack more than two leafs to a non-IF AND/OR
8244 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8245 equal to IF-CODE, then we don't want to add right-hand operand.
8246 If the inner right-hand side of left-hand operand has
8247 side-effects, or isn't simple, then we can't add to it,
8248 as otherwise we might destroy if-sequence. */
8249 if (TREE_CODE (arg0) == icode
8250 && simple_operand_p_2 (arg1)
8251 /* Needed for sequence points to handle trappings, and
8253 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8255 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8257 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8260 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8261 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8262 else if (TREE_CODE (arg1) == icode
8263 && simple_operand_p_2 (arg0)
8264 /* Needed for sequence points to handle trappings, and
8266 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8268 tem = fold_build2_loc (loc, ncode, type,
8269 arg0, TREE_OPERAND (arg1, 0));
8270 return fold_build2_loc (loc, icode, type, tem,
8271 TREE_OPERAND (arg1, 1));
8273 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8275 For sequence point consistancy, we need to check for trapping,
8276 and side-effects. */
8277 else if (code == icode && simple_operand_p_2 (arg0)
8278 && simple_operand_p_2 (arg1))
8279 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8285 /* Fold a binary expression of code CODE and type TYPE with operands
8286 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8287 Return the folded expression if folding is successful. Otherwise,
8288 return NULL_TREE. */
8291 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8293 enum tree_code compl_code;
8295 if (code == MIN_EXPR)
8296 compl_code = MAX_EXPR;
8297 else if (code == MAX_EXPR)
8298 compl_code = MIN_EXPR;
8302 /* MIN (MAX (a, b), b) == b. */
8303 if (TREE_CODE (op0) == compl_code
8304 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8305 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8307 /* MIN (MAX (b, a), b) == b. */
8308 if (TREE_CODE (op0) == compl_code
8309 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8310 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8311 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8313 /* MIN (a, MAX (a, b)) == a. */
8314 if (TREE_CODE (op1) == compl_code
8315 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8316 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8317 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8319 /* MIN (a, MAX (b, a)) == a. */
8320 if (TREE_CODE (op1) == compl_code
8321 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8322 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8323 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8328 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8329 by changing CODE to reduce the magnitude of constants involved in
8330 ARG0 of the comparison.
8331 Returns a canonicalized comparison tree if a simplification was
8332 possible, otherwise returns NULL_TREE.
8333 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8334 valid if signed overflow is undefined. */
8337 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8338 tree arg0, tree arg1,
8339 bool *strict_overflow_p)
8341 enum tree_code code0 = TREE_CODE (arg0);
8342 tree t, cst0 = NULL_TREE;
8346 /* Match A +- CST code arg1 and CST code arg1. We can change the
8347 first form only if overflow is undefined. */
8348 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8349 /* In principle pointers also have undefined overflow behavior,
8350 but that causes problems elsewhere. */
8351 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8352 && (code0 == MINUS_EXPR
8353 || code0 == PLUS_EXPR)
8354 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8355 || code0 == INTEGER_CST))
8358 /* Identify the constant in arg0 and its sign. */
8359 if (code0 == INTEGER_CST)
8362 cst0 = TREE_OPERAND (arg0, 1);
8363 sgn0 = tree_int_cst_sgn (cst0);
8365 /* Overflowed constants and zero will cause problems. */
8366 if (integer_zerop (cst0)
8367 || TREE_OVERFLOW (cst0))
8370 /* See if we can reduce the magnitude of the constant in
8371 arg0 by changing the comparison code. */
8372 if (code0 == INTEGER_CST)
8374 /* CST <= arg1 -> CST-1 < arg1. */
8375 if (code == LE_EXPR && sgn0 == 1)
8377 /* -CST < arg1 -> -CST-1 <= arg1. */
8378 else if (code == LT_EXPR && sgn0 == -1)
8380 /* CST > arg1 -> CST-1 >= arg1. */
8381 else if (code == GT_EXPR && sgn0 == 1)
8383 /* -CST >= arg1 -> -CST-1 > arg1. */
8384 else if (code == GE_EXPR && sgn0 == -1)
8388 /* arg1 code' CST' might be more canonical. */
8393 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8395 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8397 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8398 else if (code == GT_EXPR
8399 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8401 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8402 else if (code == LE_EXPR
8403 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8405 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8406 else if (code == GE_EXPR
8407 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8411 *strict_overflow_p = true;
8414 /* Now build the constant reduced in magnitude. But not if that
8415 would produce one outside of its types range. */
8416 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8418 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8419 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8421 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8422 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8423 /* We cannot swap the comparison here as that would cause us to
8424 endlessly recurse. */
8427 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8428 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8429 if (code0 != INTEGER_CST)
8430 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8431 t = fold_convert (TREE_TYPE (arg1), t);
8433 /* If swapping might yield to a more canonical form, do so. */
8435 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8437 return fold_build2_loc (loc, code, type, t, arg1);
8440 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8441 overflow further. Try to decrease the magnitude of constants involved
8442 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8443 and put sole constants at the second argument position.
8444 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8447 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8448 tree arg0, tree arg1)
8451 bool strict_overflow_p;
8452 const char * const warnmsg = G_("assuming signed overflow does not occur "
8453 "when reducing constant in comparison");
8455 /* Try canonicalization by simplifying arg0. */
8456 strict_overflow_p = false;
8457 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8458 &strict_overflow_p);
8461 if (strict_overflow_p)
8462 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8466 /* Try canonicalization by simplifying arg1 using the swapped
8468 code = swap_tree_comparison (code);
8469 strict_overflow_p = false;
8470 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8471 &strict_overflow_p);
8472 if (t && strict_overflow_p)
8473 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8477 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8478 space. This is used to avoid issuing overflow warnings for
8479 expressions like &p->x which can not wrap. */
8482 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8484 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8491 int precision = TYPE_PRECISION (TREE_TYPE (base));
8492 if (offset == NULL_TREE)
8493 wi_offset = wi::zero (precision);
8494 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8500 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8501 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8505 if (!wi::fits_uhwi_p (total))
8508 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8512 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8514 if (TREE_CODE (base) == ADDR_EXPR)
8516 HOST_WIDE_INT base_size;
8518 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8519 if (base_size > 0 && size < base_size)
8523 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8526 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8527 kind INTEGER_CST. This makes sure to properly sign-extend the
8530 static HOST_WIDE_INT
8531 size_low_cst (const_tree t)
8533 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8534 int prec = TYPE_PRECISION (TREE_TYPE (t));
8535 if (prec < HOST_BITS_PER_WIDE_INT)
8536 return sext_hwi (w, prec);
8540 /* Subroutine of fold_binary. This routine performs all of the
8541 transformations that are common to the equality/inequality
8542 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8543 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8544 fold_binary should call fold_binary. Fold a comparison with
8545 tree code CODE and type TYPE with operands OP0 and OP1. Return
8546 the folded comparison or NULL_TREE. */
8549 fold_comparison (location_t loc, enum tree_code code, tree type,
8552 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8553 tree arg0, arg1, tem;
8558 STRIP_SIGN_NOPS (arg0);
8559 STRIP_SIGN_NOPS (arg1);
8561 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8562 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8563 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8565 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8566 && TREE_CODE (arg1) == INTEGER_CST
8567 && !TREE_OVERFLOW (arg1))
8569 const enum tree_code
8570 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8571 tree const1 = TREE_OPERAND (arg0, 1);
8572 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8573 tree variable = TREE_OPERAND (arg0, 0);
8574 tree new_const = int_const_binop (reverse_op, const2, const1);
8576 /* If the constant operation overflowed this can be
8577 simplified as a comparison against INT_MAX/INT_MIN. */
8578 if (TREE_OVERFLOW (new_const)
8579 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8581 int const1_sgn = tree_int_cst_sgn (const1);
8582 enum tree_code code2 = code;
8584 /* Get the sign of the constant on the lhs if the
8585 operation were VARIABLE + CONST1. */
8586 if (TREE_CODE (arg0) == MINUS_EXPR)
8587 const1_sgn = -const1_sgn;
8589 /* The sign of the constant determines if we overflowed
8590 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8591 Canonicalize to the INT_MIN overflow by swapping the comparison
8593 if (const1_sgn == -1)
8594 code2 = swap_tree_comparison (code);
8596 /* We now can look at the canonicalized case
8597 VARIABLE + 1 CODE2 INT_MIN
8598 and decide on the result. */
8605 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8611 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8620 fold_overflow_warning ("assuming signed overflow does not occur "
8621 "when changing X +- C1 cmp C2 to "
8623 WARN_STRICT_OVERFLOW_COMPARISON);
8624 return fold_build2_loc (loc, code, type, variable, new_const);
8628 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8629 if (TREE_CODE (arg0) == MINUS_EXPR
8631 && integer_zerop (arg1))
8633 /* ??? The transformation is valid for the other operators if overflow
8634 is undefined for the type, but performing it here badly interacts
8635 with the transformation in fold_cond_expr_with_comparison which
8636 attempts to synthetize ABS_EXPR. */
8638 fold_overflow_warning ("assuming signed overflow does not occur "
8639 "when changing X - Y cmp 0 to X cmp Y",
8640 WARN_STRICT_OVERFLOW_COMPARISON);
8641 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8642 TREE_OPERAND (arg0, 1));
8645 /* For comparisons of pointers we can decompose it to a compile time
8646 comparison of the base objects and the offsets into the object.
8647 This requires at least one operand being an ADDR_EXPR or a
8648 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8649 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8650 && (TREE_CODE (arg0) == ADDR_EXPR
8651 || TREE_CODE (arg1) == ADDR_EXPR
8652 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8653 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8655 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8656 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8658 int volatilep, unsignedp;
8659 bool indirect_base0 = false, indirect_base1 = false;
8661 /* Get base and offset for the access. Strip ADDR_EXPR for
8662 get_inner_reference, but put it back by stripping INDIRECT_REF
8663 off the base object if possible. indirect_baseN will be true
8664 if baseN is not an address but refers to the object itself. */
8666 if (TREE_CODE (arg0) == ADDR_EXPR)
8668 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8669 &bitsize, &bitpos0, &offset0, &mode,
8670 &unsignedp, &volatilep, false);
8671 if (TREE_CODE (base0) == INDIRECT_REF)
8672 base0 = TREE_OPERAND (base0, 0);
8674 indirect_base0 = true;
8676 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8678 base0 = TREE_OPERAND (arg0, 0);
8679 STRIP_SIGN_NOPS (base0);
8680 if (TREE_CODE (base0) == ADDR_EXPR)
8682 base0 = TREE_OPERAND (base0, 0);
8683 indirect_base0 = true;
8685 offset0 = TREE_OPERAND (arg0, 1);
8686 if (tree_fits_shwi_p (offset0))
8688 HOST_WIDE_INT off = size_low_cst (offset0);
8689 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8691 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8693 bitpos0 = off * BITS_PER_UNIT;
8694 offset0 = NULL_TREE;
8700 if (TREE_CODE (arg1) == ADDR_EXPR)
8702 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8703 &bitsize, &bitpos1, &offset1, &mode,
8704 &unsignedp, &volatilep, false);
8705 if (TREE_CODE (base1) == INDIRECT_REF)
8706 base1 = TREE_OPERAND (base1, 0);
8708 indirect_base1 = true;
8710 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8712 base1 = TREE_OPERAND (arg1, 0);
8713 STRIP_SIGN_NOPS (base1);
8714 if (TREE_CODE (base1) == ADDR_EXPR)
8716 base1 = TREE_OPERAND (base1, 0);
8717 indirect_base1 = true;
8719 offset1 = TREE_OPERAND (arg1, 1);
8720 if (tree_fits_shwi_p (offset1))
8722 HOST_WIDE_INT off = size_low_cst (offset1);
8723 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8725 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8727 bitpos1 = off * BITS_PER_UNIT;
8728 offset1 = NULL_TREE;
8733 /* A local variable can never be pointed to by
8734 the default SSA name of an incoming parameter. */
8735 if ((TREE_CODE (arg0) == ADDR_EXPR
8737 && TREE_CODE (base0) == VAR_DECL
8738 && auto_var_in_fn_p (base0, current_function_decl)
8740 && TREE_CODE (base1) == SSA_NAME
8741 && SSA_NAME_IS_DEFAULT_DEF (base1)
8742 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8743 || (TREE_CODE (arg1) == ADDR_EXPR
8745 && TREE_CODE (base1) == VAR_DECL
8746 && auto_var_in_fn_p (base1, current_function_decl)
8748 && TREE_CODE (base0) == SSA_NAME
8749 && SSA_NAME_IS_DEFAULT_DEF (base0)
8750 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8752 if (code == NE_EXPR)
8753 return constant_boolean_node (1, type);
8754 else if (code == EQ_EXPR)
8755 return constant_boolean_node (0, type);
8757 /* If we have equivalent bases we might be able to simplify. */
8758 else if (indirect_base0 == indirect_base1
8759 && operand_equal_p (base0, base1, 0))
8761 /* We can fold this expression to a constant if the non-constant
8762 offset parts are equal. */
8763 if ((offset0 == offset1
8764 || (offset0 && offset1
8765 && operand_equal_p (offset0, offset1, 0)))
8768 || (indirect_base0 && DECL_P (base0))
8769 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8773 && bitpos0 != bitpos1
8774 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8775 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8776 fold_overflow_warning (("assuming pointer wraparound does not "
8777 "occur when comparing P +- C1 with "
8779 WARN_STRICT_OVERFLOW_CONDITIONAL);
8784 return constant_boolean_node (bitpos0 == bitpos1, type);
8786 return constant_boolean_node (bitpos0 != bitpos1, type);
8788 return constant_boolean_node (bitpos0 < bitpos1, type);
8790 return constant_boolean_node (bitpos0 <= bitpos1, type);
8792 return constant_boolean_node (bitpos0 >= bitpos1, type);
8794 return constant_boolean_node (bitpos0 > bitpos1, type);
8798 /* We can simplify the comparison to a comparison of the variable
8799 offset parts if the constant offset parts are equal.
8800 Be careful to use signed sizetype here because otherwise we
8801 mess with array offsets in the wrong way. This is possible
8802 because pointer arithmetic is restricted to retain within an
8803 object and overflow on pointer differences is undefined as of
8804 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8805 else if (bitpos0 == bitpos1
8807 || (indirect_base0 && DECL_P (base0))
8808 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8810 /* By converting to signed sizetype we cover middle-end pointer
8811 arithmetic which operates on unsigned pointer types of size
8812 type size and ARRAY_REF offsets which are properly sign or
8813 zero extended from their type in case it is narrower than
8815 if (offset0 == NULL_TREE)
8816 offset0 = build_int_cst (ssizetype, 0);
8818 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8819 if (offset1 == NULL_TREE)
8820 offset1 = build_int_cst (ssizetype, 0);
8822 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8825 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8826 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8827 fold_overflow_warning (("assuming pointer wraparound does not "
8828 "occur when comparing P +- C1 with "
8830 WARN_STRICT_OVERFLOW_COMPARISON);
8832 return fold_build2_loc (loc, code, type, offset0, offset1);
8835 /* For non-equal bases we can simplify if they are addresses
8836 of local binding decls or constants. */
8837 else if (indirect_base0 && indirect_base1
8838 /* We know that !operand_equal_p (base0, base1, 0)
8839 because the if condition was false. But make
8840 sure two decls are not the same. */
8842 && TREE_CODE (arg0) == ADDR_EXPR
8843 && TREE_CODE (arg1) == ADDR_EXPR
8844 && (((TREE_CODE (base0) == VAR_DECL
8845 || TREE_CODE (base0) == PARM_DECL)
8846 && (targetm.binds_local_p (base0)
8847 || CONSTANT_CLASS_P (base1)))
8848 || CONSTANT_CLASS_P (base0))
8849 && (((TREE_CODE (base1) == VAR_DECL
8850 || TREE_CODE (base1) == PARM_DECL)
8851 && (targetm.binds_local_p (base1)
8852 || CONSTANT_CLASS_P (base0)))
8853 || CONSTANT_CLASS_P (base1)))
8855 if (code == EQ_EXPR)
8856 return omit_two_operands_loc (loc, type, boolean_false_node,
8858 else if (code == NE_EXPR)
8859 return omit_two_operands_loc (loc, type, boolean_true_node,
8862 /* For equal offsets we can simplify to a comparison of the
8864 else if (bitpos0 == bitpos1
8866 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8868 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8869 && ((offset0 == offset1)
8870 || (offset0 && offset1
8871 && operand_equal_p (offset0, offset1, 0))))
8874 base0 = build_fold_addr_expr_loc (loc, base0);
8876 base1 = build_fold_addr_expr_loc (loc, base1);
8877 return fold_build2_loc (loc, code, type, base0, base1);
8881 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8882 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8883 the resulting offset is smaller in absolute value than the
8884 original one and has the same sign. */
8885 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8886 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8887 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8888 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8889 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8890 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8891 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8893 tree const1 = TREE_OPERAND (arg0, 1);
8894 tree const2 = TREE_OPERAND (arg1, 1);
8895 tree variable1 = TREE_OPERAND (arg0, 0);
8896 tree variable2 = TREE_OPERAND (arg1, 0);
8898 const char * const warnmsg = G_("assuming signed overflow does not "
8899 "occur when combining constants around "
8902 /* Put the constant on the side where it doesn't overflow and is
8903 of lower absolute value and of same sign than before. */
8904 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8905 ? MINUS_EXPR : PLUS_EXPR,
8907 if (!TREE_OVERFLOW (cst)
8908 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8909 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8911 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8912 return fold_build2_loc (loc, code, type,
8914 fold_build2_loc (loc, TREE_CODE (arg1),
8919 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8920 ? MINUS_EXPR : PLUS_EXPR,
8922 if (!TREE_OVERFLOW (cst)
8923 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8924 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8926 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8927 return fold_build2_loc (loc, code, type,
8928 fold_build2_loc (loc, TREE_CODE (arg0),
8935 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8936 signed arithmetic case. That form is created by the compiler
8937 often enough for folding it to be of value. One example is in
8938 computing loop trip counts after Operator Strength Reduction. */
8939 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8940 && TREE_CODE (arg0) == MULT_EXPR
8941 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8942 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8943 && integer_zerop (arg1))
8945 tree const1 = TREE_OPERAND (arg0, 1);
8946 tree const2 = arg1; /* zero */
8947 tree variable1 = TREE_OPERAND (arg0, 0);
8948 enum tree_code cmp_code = code;
8950 /* Handle unfolded multiplication by zero. */
8951 if (integer_zerop (const1))
8952 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8954 fold_overflow_warning (("assuming signed overflow does not occur when "
8955 "eliminating multiplication in comparison "
8957 WARN_STRICT_OVERFLOW_COMPARISON);
8959 /* If const1 is negative we swap the sense of the comparison. */
8960 if (tree_int_cst_sgn (const1) < 0)
8961 cmp_code = swap_tree_comparison (cmp_code);
8963 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8966 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8970 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8972 tree targ0 = strip_float_extensions (arg0);
8973 tree targ1 = strip_float_extensions (arg1);
8974 tree newtype = TREE_TYPE (targ0);
8976 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8977 newtype = TREE_TYPE (targ1);
8979 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8980 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8981 return fold_build2_loc (loc, code, type,
8982 fold_convert_loc (loc, newtype, targ0),
8983 fold_convert_loc (loc, newtype, targ1));
8985 /* (-a) CMP (-b) -> b CMP a */
8986 if (TREE_CODE (arg0) == NEGATE_EXPR
8987 && TREE_CODE (arg1) == NEGATE_EXPR)
8988 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8989 TREE_OPERAND (arg0, 0));
8991 if (TREE_CODE (arg1) == REAL_CST)
8993 REAL_VALUE_TYPE cst;
8994 cst = TREE_REAL_CST (arg1);
8996 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8997 if (TREE_CODE (arg0) == NEGATE_EXPR)
8998 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8999 TREE_OPERAND (arg0, 0),
9000 build_real (TREE_TYPE (arg1),
9001 real_value_negate (&cst)));
9003 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9004 /* a CMP (-0) -> a CMP 0 */
9005 if (REAL_VALUE_MINUS_ZERO (cst))
9006 return fold_build2_loc (loc, code, type, arg0,
9007 build_real (TREE_TYPE (arg1), dconst0));
9009 /* x != NaN is always true, other ops are always false. */
9010 if (REAL_VALUE_ISNAN (cst)
9011 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9013 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9014 return omit_one_operand_loc (loc, type, tem, arg0);
9017 /* Fold comparisons against infinity. */
9018 if (REAL_VALUE_ISINF (cst)
9019 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9021 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9022 if (tem != NULL_TREE)
9027 /* If this is a comparison of a real constant with a PLUS_EXPR
9028 or a MINUS_EXPR of a real constant, we can convert it into a
9029 comparison with a revised real constant as long as no overflow
9030 occurs when unsafe_math_optimizations are enabled. */
9031 if (flag_unsafe_math_optimizations
9032 && TREE_CODE (arg1) == REAL_CST
9033 && (TREE_CODE (arg0) == PLUS_EXPR
9034 || TREE_CODE (arg0) == MINUS_EXPR)
9035 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9036 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9037 ? MINUS_EXPR : PLUS_EXPR,
9038 arg1, TREE_OPERAND (arg0, 1)))
9039 && !TREE_OVERFLOW (tem))
9040 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9042 /* Likewise, we can simplify a comparison of a real constant with
9043 a MINUS_EXPR whose first operand is also a real constant, i.e.
9044 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9045 floating-point types only if -fassociative-math is set. */
9046 if (flag_associative_math
9047 && TREE_CODE (arg1) == REAL_CST
9048 && TREE_CODE (arg0) == MINUS_EXPR
9049 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9050 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9052 && !TREE_OVERFLOW (tem))
9053 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9054 TREE_OPERAND (arg0, 1), tem);
9056 /* Fold comparisons against built-in math functions. */
9057 if (TREE_CODE (arg1) == REAL_CST
9058 && flag_unsafe_math_optimizations
9059 && ! flag_errno_math)
9061 enum built_in_function fcode = builtin_mathfn_code (arg0);
9063 if (fcode != END_BUILTINS)
9065 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9066 if (tem != NULL_TREE)
9072 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9073 && CONVERT_EXPR_P (arg0))
9075 /* If we are widening one operand of an integer comparison,
9076 see if the other operand is similarly being widened. Perhaps we
9077 can do the comparison in the narrower type. */
9078 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9082 /* Or if we are changing signedness. */
9083 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9088 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9089 constant, we can simplify it. */
9090 if (TREE_CODE (arg1) == INTEGER_CST
9091 && (TREE_CODE (arg0) == MIN_EXPR
9092 || TREE_CODE (arg0) == MAX_EXPR)
9093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9095 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9100 /* Simplify comparison of something with itself. (For IEEE
9101 floating-point, we can only do some of these simplifications.) */
9102 if (operand_equal_p (arg0, arg1, 0))
9107 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9108 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9109 return constant_boolean_node (1, type);
9114 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9115 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9116 return constant_boolean_node (1, type);
9117 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9120 /* For NE, we can only do this simplification if integer
9121 or we don't honor IEEE floating point NaNs. */
9122 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9123 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9125 /* ... fall through ... */
9128 return constant_boolean_node (0, type);
9134 /* If we are comparing an expression that just has comparisons
9135 of two integer values, arithmetic expressions of those comparisons,
9136 and constants, we can simplify it. There are only three cases
9137 to check: the two values can either be equal, the first can be
9138 greater, or the second can be greater. Fold the expression for
9139 those three values. Since each value must be 0 or 1, we have
9140 eight possibilities, each of which corresponds to the constant 0
9141 or 1 or one of the six possible comparisons.
9143 This handles common cases like (a > b) == 0 but also handles
9144 expressions like ((x > y) - (y > x)) > 0, which supposedly
9145 occur in macroized code. */
9147 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9149 tree cval1 = 0, cval2 = 0;
9152 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9153 /* Don't handle degenerate cases here; they should already
9154 have been handled anyway. */
9155 && cval1 != 0 && cval2 != 0
9156 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9157 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9158 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9159 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9160 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9161 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9162 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9164 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9165 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9167 /* We can't just pass T to eval_subst in case cval1 or cval2
9168 was the same as ARG1. */
9171 = fold_build2_loc (loc, code, type,
9172 eval_subst (loc, arg0, cval1, maxval,
9176 = fold_build2_loc (loc, code, type,
9177 eval_subst (loc, arg0, cval1, maxval,
9181 = fold_build2_loc (loc, code, type,
9182 eval_subst (loc, arg0, cval1, minval,
9186 /* All three of these results should be 0 or 1. Confirm they are.
9187 Then use those values to select the proper code to use. */
9189 if (TREE_CODE (high_result) == INTEGER_CST
9190 && TREE_CODE (equal_result) == INTEGER_CST
9191 && TREE_CODE (low_result) == INTEGER_CST)
9193 /* Make a 3-bit mask with the high-order bit being the
9194 value for `>', the next for '=', and the low for '<'. */
9195 switch ((integer_onep (high_result) * 4)
9196 + (integer_onep (equal_result) * 2)
9197 + integer_onep (low_result))
9201 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9222 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9227 tem = save_expr (build2 (code, type, cval1, cval2));
9228 SET_EXPR_LOCATION (tem, loc);
9231 return fold_build2_loc (loc, code, type, cval1, cval2);
9236 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9237 into a single range test. */
9238 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9239 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9240 && TREE_CODE (arg1) == INTEGER_CST
9241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9242 && !integer_zerop (TREE_OPERAND (arg0, 1))
9243 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9244 && !TREE_OVERFLOW (arg1))
9246 tem = fold_div_compare (loc, code, type, arg0, arg1);
9247 if (tem != NULL_TREE)
9251 /* Fold ~X op ~Y as Y op X. */
9252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9253 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9255 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9256 return fold_build2_loc (loc, code, type,
9257 fold_convert_loc (loc, cmp_type,
9258 TREE_OPERAND (arg1, 0)),
9259 TREE_OPERAND (arg0, 0));
9262 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9263 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9264 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9266 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9267 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9268 TREE_OPERAND (arg0, 0),
9269 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9270 fold_convert_loc (loc, cmp_type, arg1)));
9277 /* Subroutine of fold_binary. Optimize complex multiplications of the
9278 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9279 argument EXPR represents the expression "z" of type TYPE. */
9282 fold_mult_zconjz (location_t loc, tree type, tree expr)
9284 tree itype = TREE_TYPE (type);
9285 tree rpart, ipart, tem;
9287 if (TREE_CODE (expr) == COMPLEX_EXPR)
9289 rpart = TREE_OPERAND (expr, 0);
9290 ipart = TREE_OPERAND (expr, 1);
9292 else if (TREE_CODE (expr) == COMPLEX_CST)
9294 rpart = TREE_REALPART (expr);
9295 ipart = TREE_IMAGPART (expr);
9299 expr = save_expr (expr);
9300 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9301 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9304 rpart = save_expr (rpart);
9305 ipart = save_expr (ipart);
9306 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9307 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9308 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9309 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9310 build_zero_cst (itype));
9314 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9315 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9316 guarantees that P and N have the same least significant log2(M) bits.
9317 N is not otherwise constrained. In particular, N is not normalized to
9318 0 <= N < M as is common. In general, the precise value of P is unknown.
9319 M is chosen as large as possible such that constant N can be determined.
9321 Returns M and sets *RESIDUE to N.
9323 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9324 account. This is not always possible due to PR 35705.
9327 static unsigned HOST_WIDE_INT
9328 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9329 bool allow_func_align)
9331 enum tree_code code;
9335 code = TREE_CODE (expr);
9336 if (code == ADDR_EXPR)
9338 unsigned int bitalign;
9339 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9340 *residue /= BITS_PER_UNIT;
9341 return bitalign / BITS_PER_UNIT;
9343 else if (code == POINTER_PLUS_EXPR)
9346 unsigned HOST_WIDE_INT modulus;
9347 enum tree_code inner_code;
9349 op0 = TREE_OPERAND (expr, 0);
9351 modulus = get_pointer_modulus_and_residue (op0, residue,
9354 op1 = TREE_OPERAND (expr, 1);
9356 inner_code = TREE_CODE (op1);
9357 if (inner_code == INTEGER_CST)
9359 *residue += TREE_INT_CST_LOW (op1);
9362 else if (inner_code == MULT_EXPR)
9364 op1 = TREE_OPERAND (op1, 1);
9365 if (TREE_CODE (op1) == INTEGER_CST)
9367 unsigned HOST_WIDE_INT align;
9369 /* Compute the greatest power-of-2 divisor of op1. */
9370 align = TREE_INT_CST_LOW (op1);
9373 /* If align is non-zero and less than *modulus, replace
9374 *modulus with align., If align is 0, then either op1 is 0
9375 or the greatest power-of-2 divisor of op1 doesn't fit in an
9376 unsigned HOST_WIDE_INT. In either case, no additional
9377 constraint is imposed. */
9379 modulus = MIN (modulus, align);
9386 /* If we get here, we were unable to determine anything useful about the
9391 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9392 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9395 vec_cst_ctor_to_array (tree arg, tree *elts)
9397 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9399 if (TREE_CODE (arg) == VECTOR_CST)
9401 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9402 elts[i] = VECTOR_CST_ELT (arg, i);
9404 else if (TREE_CODE (arg) == CONSTRUCTOR)
9406 constructor_elt *elt;
9408 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9409 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9412 elts[i] = elt->value;
9416 for (; i < nelts; i++)
9418 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9422 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9423 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9424 NULL_TREE otherwise. */
9427 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9429 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9431 bool need_ctor = false;
9433 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9434 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9435 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9436 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9439 elts = XALLOCAVEC (tree, nelts * 3);
9440 if (!vec_cst_ctor_to_array (arg0, elts)
9441 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9444 for (i = 0; i < nelts; i++)
9446 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9448 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9453 vec<constructor_elt, va_gc> *v;
9454 vec_alloc (v, nelts);
9455 for (i = 0; i < nelts; i++)
9456 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9457 return build_constructor (type, v);
9460 return build_vector (type, &elts[2 * nelts]);
9463 /* Try to fold a pointer difference of type TYPE two address expressions of
9464 array references AREF0 and AREF1 using location LOC. Return a
9465 simplified expression for the difference or NULL_TREE. */
9468 fold_addr_of_array_ref_difference (location_t loc, tree type,
9469 tree aref0, tree aref1)
9471 tree base0 = TREE_OPERAND (aref0, 0);
9472 tree base1 = TREE_OPERAND (aref1, 0);
9473 tree base_offset = build_int_cst (type, 0);
9475 /* If the bases are array references as well, recurse. If the bases
9476 are pointer indirections compute the difference of the pointers.
9477 If the bases are equal, we are set. */
9478 if ((TREE_CODE (base0) == ARRAY_REF
9479 && TREE_CODE (base1) == ARRAY_REF
9481 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9482 || (INDIRECT_REF_P (base0)
9483 && INDIRECT_REF_P (base1)
9484 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9485 TREE_OPERAND (base0, 0),
9486 TREE_OPERAND (base1, 0))))
9487 || operand_equal_p (base0, base1, 0))
9489 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9490 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9491 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9492 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9493 return fold_build2_loc (loc, PLUS_EXPR, type,
9495 fold_build2_loc (loc, MULT_EXPR, type,
9501 /* If the real or vector real constant CST of type TYPE has an exact
9502 inverse, return it, else return NULL. */
9505 exact_inverse (tree type, tree cst)
9508 tree unit_type, *elts;
9510 unsigned vec_nelts, i;
9512 switch (TREE_CODE (cst))
9515 r = TREE_REAL_CST (cst);
9517 if (exact_real_inverse (TYPE_MODE (type), &r))
9518 return build_real (type, r);
9523 vec_nelts = VECTOR_CST_NELTS (cst);
9524 elts = XALLOCAVEC (tree, vec_nelts);
9525 unit_type = TREE_TYPE (type);
9526 mode = TYPE_MODE (unit_type);
9528 for (i = 0; i < vec_nelts; i++)
9530 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9531 if (!exact_real_inverse (mode, &r))
9533 elts[i] = build_real (unit_type, r);
9536 return build_vector (type, elts);
9543 /* Mask out the tz least significant bits of X of type TYPE where
9544 tz is the number of trailing zeroes in Y. */
9546 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9548 int tz = wi::ctz (y);
9550 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9554 /* Return true when T is an address and is known to be nonzero.
9555 For floating point we further ensure that T is not denormal.
9556 Similar logic is present in nonzero_address in rtlanal.h.
9558 If the return value is based on the assumption that signed overflow
9559 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9560 change *STRICT_OVERFLOW_P. */
9563 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9565 tree type = TREE_TYPE (t);
9566 enum tree_code code;
9568 /* Doing something useful for floating point would need more work. */
9569 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9572 code = TREE_CODE (t);
9573 switch (TREE_CODE_CLASS (code))
9576 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9579 case tcc_comparison:
9580 return tree_binary_nonzero_warnv_p (code, type,
9581 TREE_OPERAND (t, 0),
9582 TREE_OPERAND (t, 1),
9585 case tcc_declaration:
9587 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9595 case TRUTH_NOT_EXPR:
9596 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9599 case TRUTH_AND_EXPR:
9601 case TRUTH_XOR_EXPR:
9602 return tree_binary_nonzero_warnv_p (code, type,
9603 TREE_OPERAND (t, 0),
9604 TREE_OPERAND (t, 1),
9612 case WITH_SIZE_EXPR:
9614 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9619 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9623 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9628 tree fndecl = get_callee_fndecl (t);
9629 if (!fndecl) return false;
9630 if (flag_delete_null_pointer_checks && !flag_check_new
9631 && DECL_IS_OPERATOR_NEW (fndecl)
9632 && !TREE_NOTHROW (fndecl))
9634 if (flag_delete_null_pointer_checks
9635 && lookup_attribute ("returns_nonnull",
9636 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9638 return alloca_call_p (t);
9647 /* Return true when T is an address and is known to be nonzero.
9648 Handle warnings about undefined signed overflow. */
9651 tree_expr_nonzero_p (tree t)
9653 bool ret, strict_overflow_p;
9655 strict_overflow_p = false;
9656 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9657 if (strict_overflow_p)
9658 fold_overflow_warning (("assuming signed overflow does not occur when "
9659 "determining that expression is always "
9661 WARN_STRICT_OVERFLOW_MISC);
9665 /* Fold a binary expression of code CODE and type TYPE with operands
9666 OP0 and OP1. LOC is the location of the resulting expression.
9667 Return the folded expression if folding is successful. Otherwise,
9668 return NULL_TREE. */
9671 fold_binary_loc (location_t loc,
9672 enum tree_code code, tree type, tree op0, tree op1)
9674 enum tree_code_class kind = TREE_CODE_CLASS (code);
9675 tree arg0, arg1, tem;
9676 tree t1 = NULL_TREE;
9677 bool strict_overflow_p;
9680 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9681 && TREE_CODE_LENGTH (code) == 2
9683 && op1 != NULL_TREE);
9688 /* Strip any conversions that don't change the mode. This is
9689 safe for every expression, except for a comparison expression
9690 because its signedness is derived from its operands. So, in
9691 the latter case, only strip conversions that don't change the
9692 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9695 Note that this is done as an internal manipulation within the
9696 constant folder, in order to find the simplest representation
9697 of the arguments so that their form can be studied. In any
9698 cases, the appropriate type conversions should be put back in
9699 the tree that will get out of the constant folder. */
9701 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9703 STRIP_SIGN_NOPS (arg0);
9704 STRIP_SIGN_NOPS (arg1);
9712 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9713 constant but we can't do arithmetic on them. */
9714 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9715 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9716 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9717 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9718 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9719 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9720 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9722 if (kind == tcc_binary)
9724 /* Make sure type and arg0 have the same saturating flag. */
9725 gcc_assert (TYPE_SATURATING (type)
9726 == TYPE_SATURATING (TREE_TYPE (arg0)));
9727 tem = const_binop (code, arg0, arg1);
9729 else if (kind == tcc_comparison)
9730 tem = fold_relational_const (code, type, arg0, arg1);
9734 if (tem != NULL_TREE)
9736 if (TREE_TYPE (tem) != type)
9737 tem = fold_convert_loc (loc, type, tem);
9742 /* If this is a commutative operation, and ARG0 is a constant, move it
9743 to ARG1 to reduce the number of tests below. */
9744 if (commutative_tree_code (code)
9745 && tree_swap_operands_p (arg0, arg1, true))
9746 return fold_build2_loc (loc, code, type, op1, op0);
9748 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9749 to ARG1 to reduce the number of tests below. */
9750 if (kind == tcc_comparison
9751 && tree_swap_operands_p (arg0, arg1, true))
9752 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9754 tem = generic_simplify (loc, code, type, op0, op1);
9758 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9760 First check for cases where an arithmetic operation is applied to a
9761 compound, conditional, or comparison operation. Push the arithmetic
9762 operation inside the compound or conditional to see if any folding
9763 can then be done. Convert comparison to conditional for this purpose.
9764 The also optimizes non-constant cases that used to be done in
9767 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9768 one of the operands is a comparison and the other is a comparison, a
9769 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9770 code below would make the expression more complex. Change it to a
9771 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9772 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9774 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9775 || code == EQ_EXPR || code == NE_EXPR)
9776 && TREE_CODE (type) != VECTOR_TYPE
9777 && ((truth_value_p (TREE_CODE (arg0))
9778 && (truth_value_p (TREE_CODE (arg1))
9779 || (TREE_CODE (arg1) == BIT_AND_EXPR
9780 && integer_onep (TREE_OPERAND (arg1, 1)))))
9781 || (truth_value_p (TREE_CODE (arg1))
9782 && (truth_value_p (TREE_CODE (arg0))
9783 || (TREE_CODE (arg0) == BIT_AND_EXPR
9784 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9786 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9787 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9790 fold_convert_loc (loc, boolean_type_node, arg0),
9791 fold_convert_loc (loc, boolean_type_node, arg1));
9793 if (code == EQ_EXPR)
9794 tem = invert_truthvalue_loc (loc, tem);
9796 return fold_convert_loc (loc, type, tem);
9799 if (TREE_CODE_CLASS (code) == tcc_binary
9800 || TREE_CODE_CLASS (code) == tcc_comparison)
9802 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9804 tem = fold_build2_loc (loc, code, type,
9805 fold_convert_loc (loc, TREE_TYPE (op0),
9806 TREE_OPERAND (arg0, 1)), op1);
9807 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9810 if (TREE_CODE (arg1) == COMPOUND_EXPR
9811 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9813 tem = fold_build2_loc (loc, code, type, op0,
9814 fold_convert_loc (loc, TREE_TYPE (op1),
9815 TREE_OPERAND (arg1, 1)));
9816 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9820 if (TREE_CODE (arg0) == COND_EXPR
9821 || TREE_CODE (arg0) == VEC_COND_EXPR
9822 || COMPARISON_CLASS_P (arg0))
9824 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9826 /*cond_first_p=*/1);
9827 if (tem != NULL_TREE)
9831 if (TREE_CODE (arg1) == COND_EXPR
9832 || TREE_CODE (arg1) == VEC_COND_EXPR
9833 || COMPARISON_CLASS_P (arg1))
9835 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9837 /*cond_first_p=*/0);
9838 if (tem != NULL_TREE)
9846 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9847 if (TREE_CODE (arg0) == ADDR_EXPR
9848 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9850 tree iref = TREE_OPERAND (arg0, 0);
9851 return fold_build2 (MEM_REF, type,
9852 TREE_OPERAND (iref, 0),
9853 int_const_binop (PLUS_EXPR, arg1,
9854 TREE_OPERAND (iref, 1)));
9857 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9858 if (TREE_CODE (arg0) == ADDR_EXPR
9859 && handled_component_p (TREE_OPERAND (arg0, 0)))
9862 HOST_WIDE_INT coffset;
9863 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9867 return fold_build2 (MEM_REF, type,
9868 build_fold_addr_expr (base),
9869 int_const_binop (PLUS_EXPR, arg1,
9870 size_int (coffset)));
9875 case POINTER_PLUS_EXPR:
9876 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9877 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9878 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9879 return fold_convert_loc (loc, type,
9880 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9881 fold_convert_loc (loc, sizetype,
9883 fold_convert_loc (loc, sizetype,
9886 /* PTR_CST +p CST -> CST1 */
9887 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9888 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9889 fold_convert_loc (loc, type, arg1));
9894 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9896 /* X + (X / CST) * -CST is X % CST. */
9897 if (TREE_CODE (arg1) == MULT_EXPR
9898 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9899 && operand_equal_p (arg0,
9900 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9902 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9903 tree cst1 = TREE_OPERAND (arg1, 1);
9904 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9906 if (sum && integer_zerop (sum))
9907 return fold_convert_loc (loc, type,
9908 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9909 TREE_TYPE (arg0), arg0,
9914 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9915 one. Make sure the type is not saturating and has the signedness of
9916 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9917 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9918 if ((TREE_CODE (arg0) == MULT_EXPR
9919 || TREE_CODE (arg1) == MULT_EXPR)
9920 && !TYPE_SATURATING (type)
9921 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9922 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9923 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9925 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9930 if (! FLOAT_TYPE_P (type))
9932 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9933 with a constant, and the two constants have no bits in common,
9934 we should treat this as a BIT_IOR_EXPR since this may produce more
9936 if (TREE_CODE (arg0) == BIT_AND_EXPR
9937 && TREE_CODE (arg1) == BIT_AND_EXPR
9938 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9939 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9940 && wi::bit_and (TREE_OPERAND (arg0, 1),
9941 TREE_OPERAND (arg1, 1)) == 0)
9943 code = BIT_IOR_EXPR;
9947 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9948 (plus (plus (mult) (mult)) (foo)) so that we can
9949 take advantage of the factoring cases below. */
9950 if (TYPE_OVERFLOW_WRAPS (type)
9951 && (((TREE_CODE (arg0) == PLUS_EXPR
9952 || TREE_CODE (arg0) == MINUS_EXPR)
9953 && TREE_CODE (arg1) == MULT_EXPR)
9954 || ((TREE_CODE (arg1) == PLUS_EXPR
9955 || TREE_CODE (arg1) == MINUS_EXPR)
9956 && TREE_CODE (arg0) == MULT_EXPR)))
9958 tree parg0, parg1, parg, marg;
9959 enum tree_code pcode;
9961 if (TREE_CODE (arg1) == MULT_EXPR)
9962 parg = arg0, marg = arg1;
9964 parg = arg1, marg = arg0;
9965 pcode = TREE_CODE (parg);
9966 parg0 = TREE_OPERAND (parg, 0);
9967 parg1 = TREE_OPERAND (parg, 1);
9971 if (TREE_CODE (parg0) == MULT_EXPR
9972 && TREE_CODE (parg1) != MULT_EXPR)
9973 return fold_build2_loc (loc, pcode, type,
9974 fold_build2_loc (loc, PLUS_EXPR, type,
9975 fold_convert_loc (loc, type,
9977 fold_convert_loc (loc, type,
9979 fold_convert_loc (loc, type, parg1));
9980 if (TREE_CODE (parg0) != MULT_EXPR
9981 && TREE_CODE (parg1) == MULT_EXPR)
9983 fold_build2_loc (loc, PLUS_EXPR, type,
9984 fold_convert_loc (loc, type, parg0),
9985 fold_build2_loc (loc, pcode, type,
9986 fold_convert_loc (loc, type, marg),
9987 fold_convert_loc (loc, type,
9993 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9994 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9995 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9997 /* Likewise if the operands are reversed. */
9998 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10001 /* Convert X + -C into X - C. */
10002 if (TREE_CODE (arg1) == REAL_CST
10003 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10005 tem = fold_negate_const (arg1, type);
10006 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10007 return fold_build2_loc (loc, MINUS_EXPR, type,
10008 fold_convert_loc (loc, type, arg0),
10009 fold_convert_loc (loc, type, tem));
10012 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10013 to __complex__ ( x, y ). This is not the same for SNaNs or
10014 if signed zeros are involved. */
10015 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10016 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10017 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10019 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10020 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10021 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10022 bool arg0rz = false, arg0iz = false;
10023 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10024 || (arg0i && (arg0iz = real_zerop (arg0i))))
10026 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10027 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10028 if (arg0rz && arg1i && real_zerop (arg1i))
10030 tree rp = arg1r ? arg1r
10031 : build1 (REALPART_EXPR, rtype, arg1);
10032 tree ip = arg0i ? arg0i
10033 : build1 (IMAGPART_EXPR, rtype, arg0);
10034 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10036 else if (arg0iz && arg1r && real_zerop (arg1r))
10038 tree rp = arg0r ? arg0r
10039 : build1 (REALPART_EXPR, rtype, arg0);
10040 tree ip = arg1i ? arg1i
10041 : build1 (IMAGPART_EXPR, rtype, arg1);
10042 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10047 if (flag_unsafe_math_optimizations
10048 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10049 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10050 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10053 /* Convert x+x into x*2.0. */
10054 if (operand_equal_p (arg0, arg1, 0)
10055 && SCALAR_FLOAT_TYPE_P (type))
10056 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10057 build_real (type, dconst2));
10059 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10060 We associate floats only if the user has specified
10061 -fassociative-math. */
10062 if (flag_associative_math
10063 && TREE_CODE (arg1) == PLUS_EXPR
10064 && TREE_CODE (arg0) != MULT_EXPR)
10066 tree tree10 = TREE_OPERAND (arg1, 0);
10067 tree tree11 = TREE_OPERAND (arg1, 1);
10068 if (TREE_CODE (tree11) == MULT_EXPR
10069 && TREE_CODE (tree10) == MULT_EXPR)
10072 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10073 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10076 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10077 We associate floats only if the user has specified
10078 -fassociative-math. */
10079 if (flag_associative_math
10080 && TREE_CODE (arg0) == PLUS_EXPR
10081 && TREE_CODE (arg1) != MULT_EXPR)
10083 tree tree00 = TREE_OPERAND (arg0, 0);
10084 tree tree01 = TREE_OPERAND (arg0, 1);
10085 if (TREE_CODE (tree01) == MULT_EXPR
10086 && TREE_CODE (tree00) == MULT_EXPR)
10089 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10090 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10096 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10097 is a rotate of A by C1 bits. */
10098 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10099 is a rotate of A by B bits. */
10101 enum tree_code code0, code1;
10103 code0 = TREE_CODE (arg0);
10104 code1 = TREE_CODE (arg1);
10105 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10106 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10107 && operand_equal_p (TREE_OPERAND (arg0, 0),
10108 TREE_OPERAND (arg1, 0), 0)
10109 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10110 TYPE_UNSIGNED (rtype))
10111 /* Only create rotates in complete modes. Other cases are not
10112 expanded properly. */
10113 && (element_precision (rtype)
10114 == element_precision (TYPE_MODE (rtype))))
10116 tree tree01, tree11;
10117 enum tree_code code01, code11;
10119 tree01 = TREE_OPERAND (arg0, 1);
10120 tree11 = TREE_OPERAND (arg1, 1);
10121 STRIP_NOPS (tree01);
10122 STRIP_NOPS (tree11);
10123 code01 = TREE_CODE (tree01);
10124 code11 = TREE_CODE (tree11);
10125 if (code01 == INTEGER_CST
10126 && code11 == INTEGER_CST
10127 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10128 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10130 tem = build2_loc (loc, LROTATE_EXPR,
10131 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10132 TREE_OPERAND (arg0, 0),
10133 code0 == LSHIFT_EXPR ? tree01 : tree11);
10134 return fold_convert_loc (loc, type, tem);
10136 else if (code11 == MINUS_EXPR)
10138 tree tree110, tree111;
10139 tree110 = TREE_OPERAND (tree11, 0);
10140 tree111 = TREE_OPERAND (tree11, 1);
10141 STRIP_NOPS (tree110);
10142 STRIP_NOPS (tree111);
10143 if (TREE_CODE (tree110) == INTEGER_CST
10144 && 0 == compare_tree_int (tree110,
10146 (TREE_TYPE (TREE_OPERAND
10148 && operand_equal_p (tree01, tree111, 0))
10150 fold_convert_loc (loc, type,
10151 build2 ((code0 == LSHIFT_EXPR
10154 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10155 TREE_OPERAND (arg0, 0), tree01));
10157 else if (code01 == MINUS_EXPR)
10159 tree tree010, tree011;
10160 tree010 = TREE_OPERAND (tree01, 0);
10161 tree011 = TREE_OPERAND (tree01, 1);
10162 STRIP_NOPS (tree010);
10163 STRIP_NOPS (tree011);
10164 if (TREE_CODE (tree010) == INTEGER_CST
10165 && 0 == compare_tree_int (tree010,
10167 (TREE_TYPE (TREE_OPERAND
10169 && operand_equal_p (tree11, tree011, 0))
10170 return fold_convert_loc
10172 build2 ((code0 != LSHIFT_EXPR
10175 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10176 TREE_OPERAND (arg0, 0), tree11));
10182 /* In most languages, can't associate operations on floats through
10183 parentheses. Rather than remember where the parentheses were, we
10184 don't associate floats at all, unless the user has specified
10185 -fassociative-math.
10186 And, we need to make sure type is not saturating. */
10188 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10189 && !TYPE_SATURATING (type))
10191 tree var0, con0, lit0, minus_lit0;
10192 tree var1, con1, lit1, minus_lit1;
10196 /* Split both trees into variables, constants, and literals. Then
10197 associate each group together, the constants with literals,
10198 then the result with variables. This increases the chances of
10199 literals being recombined later and of generating relocatable
10200 expressions for the sum of a constant and literal. */
10201 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10202 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10203 code == MINUS_EXPR);
10205 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10206 if (code == MINUS_EXPR)
10209 /* With undefined overflow prefer doing association in a type
10210 which wraps on overflow, if that is one of the operand types. */
10211 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10212 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10214 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10215 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10216 atype = TREE_TYPE (arg0);
10217 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10218 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10219 atype = TREE_TYPE (arg1);
10220 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10223 /* With undefined overflow we can only associate constants with one
10224 variable, and constants whose association doesn't overflow. */
10225 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10226 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10233 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10234 tmp0 = TREE_OPERAND (tmp0, 0);
10235 if (CONVERT_EXPR_P (tmp0)
10236 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10237 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10238 <= TYPE_PRECISION (atype)))
10239 tmp0 = TREE_OPERAND (tmp0, 0);
10240 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10241 tmp1 = TREE_OPERAND (tmp1, 0);
10242 if (CONVERT_EXPR_P (tmp1)
10243 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10244 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10245 <= TYPE_PRECISION (atype)))
10246 tmp1 = TREE_OPERAND (tmp1, 0);
10247 /* The only case we can still associate with two variables
10248 is if they are the same, modulo negation and bit-pattern
10249 preserving conversions. */
10250 if (!operand_equal_p (tmp0, tmp1, 0))
10255 /* Only do something if we found more than two objects. Otherwise,
10256 nothing has changed and we risk infinite recursion. */
10258 && (2 < ((var0 != 0) + (var1 != 0)
10259 + (con0 != 0) + (con1 != 0)
10260 + (lit0 != 0) + (lit1 != 0)
10261 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10263 bool any_overflows = false;
10264 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10265 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10266 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10267 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10268 var0 = associate_trees (loc, var0, var1, code, atype);
10269 con0 = associate_trees (loc, con0, con1, code, atype);
10270 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10271 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10274 /* Preserve the MINUS_EXPR if the negative part of the literal is
10275 greater than the positive part. Otherwise, the multiplicative
10276 folding code (i.e extract_muldiv) may be fooled in case
10277 unsigned constants are subtracted, like in the following
10278 example: ((X*2 + 4) - 8U)/2. */
10279 if (minus_lit0 && lit0)
10281 if (TREE_CODE (lit0) == INTEGER_CST
10282 && TREE_CODE (minus_lit0) == INTEGER_CST
10283 && tree_int_cst_lt (lit0, minus_lit0))
10285 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10286 MINUS_EXPR, atype);
10291 lit0 = associate_trees (loc, lit0, minus_lit0,
10292 MINUS_EXPR, atype);
10297 /* Don't introduce overflows through reassociation. */
10299 && ((lit0 && TREE_OVERFLOW (lit0))
10300 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10307 fold_convert_loc (loc, type,
10308 associate_trees (loc, var0, minus_lit0,
10309 MINUS_EXPR, atype));
10312 con0 = associate_trees (loc, con0, minus_lit0,
10313 MINUS_EXPR, atype);
10315 fold_convert_loc (loc, type,
10316 associate_trees (loc, var0, con0,
10317 PLUS_EXPR, atype));
10321 con0 = associate_trees (loc, con0, lit0, code, atype);
10323 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10331 /* Pointer simplifications for subtraction, simple reassociations. */
10332 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10334 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10335 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10336 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10338 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10339 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10340 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10341 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10342 return fold_build2_loc (loc, PLUS_EXPR, type,
10343 fold_build2_loc (loc, MINUS_EXPR, type,
10345 fold_build2_loc (loc, MINUS_EXPR, type,
10348 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10349 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10351 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10352 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10353 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10354 fold_convert_loc (loc, type, arg1));
10356 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10358 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10360 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10362 tree arg10 = fold_convert_loc (loc, type,
10363 TREE_OPERAND (arg1, 0));
10364 tree arg11 = fold_convert_loc (loc, type,
10365 TREE_OPERAND (arg1, 1));
10366 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10367 fold_convert_loc (loc, type, arg0),
10370 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10373 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10374 if (TREE_CODE (arg0) == NEGATE_EXPR
10375 && negate_expr_p (arg1)
10376 && reorder_operands_p (arg0, arg1))
10377 return fold_build2_loc (loc, MINUS_EXPR, type,
10378 fold_convert_loc (loc, type,
10379 negate_expr (arg1)),
10380 fold_convert_loc (loc, type,
10381 TREE_OPERAND (arg0, 0)));
10382 /* Convert -A - 1 to ~A. */
10383 if (TREE_CODE (arg0) == NEGATE_EXPR
10384 && integer_each_onep (arg1)
10385 && !TYPE_OVERFLOW_TRAPS (type))
10386 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10387 fold_convert_loc (loc, type,
10388 TREE_OPERAND (arg0, 0)));
10390 /* Convert -1 - A to ~A. */
10391 if (TREE_CODE (type) != COMPLEX_TYPE
10392 && integer_all_onesp (arg0))
10393 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10396 /* X - (X / Y) * Y is X % Y. */
10397 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10398 && TREE_CODE (arg1) == MULT_EXPR
10399 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10400 && operand_equal_p (arg0,
10401 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10402 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10403 TREE_OPERAND (arg1, 1), 0))
10405 fold_convert_loc (loc, type,
10406 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10407 arg0, TREE_OPERAND (arg1, 1)));
10409 if (! FLOAT_TYPE_P (type))
10411 if (integer_zerop (arg0))
10412 return negate_expr (fold_convert_loc (loc, type, arg1));
10414 /* Fold A - (A & B) into ~B & A. */
10415 if (!TREE_SIDE_EFFECTS (arg0)
10416 && TREE_CODE (arg1) == BIT_AND_EXPR)
10418 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10420 tree arg10 = fold_convert_loc (loc, type,
10421 TREE_OPERAND (arg1, 0));
10422 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10423 fold_build1_loc (loc, BIT_NOT_EXPR,
10425 fold_convert_loc (loc, type, arg0));
10427 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10429 tree arg11 = fold_convert_loc (loc,
10430 type, TREE_OPERAND (arg1, 1));
10431 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10432 fold_build1_loc (loc, BIT_NOT_EXPR,
10434 fold_convert_loc (loc, type, arg0));
10438 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10439 any power of 2 minus 1. */
10440 if (TREE_CODE (arg0) == BIT_AND_EXPR
10441 && TREE_CODE (arg1) == BIT_AND_EXPR
10442 && operand_equal_p (TREE_OPERAND (arg0, 0),
10443 TREE_OPERAND (arg1, 0), 0))
10445 tree mask0 = TREE_OPERAND (arg0, 1);
10446 tree mask1 = TREE_OPERAND (arg1, 1);
10447 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10449 if (operand_equal_p (tem, mask1, 0))
10451 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10452 TREE_OPERAND (arg0, 0), mask1);
10453 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10458 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10459 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10460 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10462 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10463 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10464 (-ARG1 + ARG0) reduces to -ARG1. */
10465 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10466 return negate_expr (fold_convert_loc (loc, type, arg1));
10468 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10469 __complex__ ( x, -y ). This is not the same for SNaNs or if
10470 signed zeros are involved. */
10471 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10472 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10473 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10475 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10476 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10477 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10478 bool arg0rz = false, arg0iz = false;
10479 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10480 || (arg0i && (arg0iz = real_zerop (arg0i))))
10482 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10483 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10484 if (arg0rz && arg1i && real_zerop (arg1i))
10486 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10488 : build1 (REALPART_EXPR, rtype, arg1));
10489 tree ip = arg0i ? arg0i
10490 : build1 (IMAGPART_EXPR, rtype, arg0);
10491 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10493 else if (arg0iz && arg1r && real_zerop (arg1r))
10495 tree rp = arg0r ? arg0r
10496 : build1 (REALPART_EXPR, rtype, arg0);
10497 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10499 : build1 (IMAGPART_EXPR, rtype, arg1));
10500 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10505 /* A - B -> A + (-B) if B is easily negatable. */
10506 if (negate_expr_p (arg1)
10507 && !TYPE_OVERFLOW_SANITIZED (type)
10508 && ((FLOAT_TYPE_P (type)
10509 /* Avoid this transformation if B is a positive REAL_CST. */
10510 && (TREE_CODE (arg1) != REAL_CST
10511 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10512 || INTEGRAL_TYPE_P (type)))
10513 return fold_build2_loc (loc, PLUS_EXPR, type,
10514 fold_convert_loc (loc, type, arg0),
10515 fold_convert_loc (loc, type,
10516 negate_expr (arg1)));
10518 /* Try folding difference of addresses. */
10520 HOST_WIDE_INT diff;
10522 if ((TREE_CODE (arg0) == ADDR_EXPR
10523 || TREE_CODE (arg1) == ADDR_EXPR)
10524 && ptr_difference_const (arg0, arg1, &diff))
10525 return build_int_cst_type (type, diff);
10528 /* Fold &a[i] - &a[j] to i-j. */
10529 if (TREE_CODE (arg0) == ADDR_EXPR
10530 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10531 && TREE_CODE (arg1) == ADDR_EXPR
10532 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10534 tree tem = fold_addr_of_array_ref_difference (loc, type,
10535 TREE_OPERAND (arg0, 0),
10536 TREE_OPERAND (arg1, 0));
10541 if (FLOAT_TYPE_P (type)
10542 && flag_unsafe_math_optimizations
10543 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10544 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10545 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10548 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10549 one. Make sure the type is not saturating and has the signedness of
10550 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10551 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10552 if ((TREE_CODE (arg0) == MULT_EXPR
10553 || TREE_CODE (arg1) == MULT_EXPR)
10554 && !TYPE_SATURATING (type)
10555 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10556 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10557 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10559 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10567 /* (-A) * (-B) -> A * B */
10568 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10569 return fold_build2_loc (loc, MULT_EXPR, type,
10570 fold_convert_loc (loc, type,
10571 TREE_OPERAND (arg0, 0)),
10572 fold_convert_loc (loc, type,
10573 negate_expr (arg1)));
10574 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10575 return fold_build2_loc (loc, MULT_EXPR, type,
10576 fold_convert_loc (loc, type,
10577 negate_expr (arg0)),
10578 fold_convert_loc (loc, type,
10579 TREE_OPERAND (arg1, 0)));
10581 if (! FLOAT_TYPE_P (type))
10583 /* Transform x * -1 into -x. Make sure to do the negation
10584 on the original operand with conversions not stripped
10585 because we can only strip non-sign-changing conversions. */
10586 if (integer_minus_onep (arg1))
10587 return fold_convert_loc (loc, type, negate_expr (op0));
10588 /* Transform x * -C into -x * C if x is easily negatable. */
10589 if (TREE_CODE (arg1) == INTEGER_CST
10590 && tree_int_cst_sgn (arg1) == -1
10591 && negate_expr_p (arg0)
10592 && (tem = negate_expr (arg1)) != arg1
10593 && !TREE_OVERFLOW (tem))
10594 return fold_build2_loc (loc, MULT_EXPR, type,
10595 fold_convert_loc (loc, type,
10596 negate_expr (arg0)),
10599 /* (a * (1 << b)) is (a << b) */
10600 if (TREE_CODE (arg1) == LSHIFT_EXPR
10601 && integer_onep (TREE_OPERAND (arg1, 0)))
10602 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10603 TREE_OPERAND (arg1, 1));
10604 if (TREE_CODE (arg0) == LSHIFT_EXPR
10605 && integer_onep (TREE_OPERAND (arg0, 0)))
10606 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10607 TREE_OPERAND (arg0, 1));
10609 /* (A + A) * C -> A * 2 * C */
10610 if (TREE_CODE (arg0) == PLUS_EXPR
10611 && TREE_CODE (arg1) == INTEGER_CST
10612 && operand_equal_p (TREE_OPERAND (arg0, 0),
10613 TREE_OPERAND (arg0, 1), 0))
10614 return fold_build2_loc (loc, MULT_EXPR, type,
10615 omit_one_operand_loc (loc, type,
10616 TREE_OPERAND (arg0, 0),
10617 TREE_OPERAND (arg0, 1)),
10618 fold_build2_loc (loc, MULT_EXPR, type,
10619 build_int_cst (type, 2) , arg1));
10621 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10622 sign-changing only. */
10623 if (TREE_CODE (arg1) == INTEGER_CST
10624 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10625 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10626 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10628 strict_overflow_p = false;
10629 if (TREE_CODE (arg1) == INTEGER_CST
10630 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10631 &strict_overflow_p)))
10633 if (strict_overflow_p)
10634 fold_overflow_warning (("assuming signed overflow does not "
10635 "occur when simplifying "
10637 WARN_STRICT_OVERFLOW_MISC);
10638 return fold_convert_loc (loc, type, tem);
10641 /* Optimize z * conj(z) for integer complex numbers. */
10642 if (TREE_CODE (arg0) == CONJ_EXPR
10643 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10644 return fold_mult_zconjz (loc, type, arg1);
10645 if (TREE_CODE (arg1) == CONJ_EXPR
10646 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10647 return fold_mult_zconjz (loc, type, arg0);
10651 /* Maybe fold x * 0 to 0. The expressions aren't the same
10652 when x is NaN, since x * 0 is also NaN. Nor are they the
10653 same in modes with signed zeros, since multiplying a
10654 negative value by 0 gives -0, not +0. */
10655 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10656 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10657 && real_zerop (arg1))
10658 return omit_one_operand_loc (loc, type, arg1, arg0);
10659 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10660 Likewise for complex arithmetic with signed zeros. */
10661 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10662 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10663 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10664 && real_onep (arg1))
10665 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10667 /* Transform x * -1.0 into -x. */
10668 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10669 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10670 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10671 && real_minus_onep (arg1))
10672 return fold_convert_loc (loc, type, negate_expr (arg0));
10674 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10675 the result for floating point types due to rounding so it is applied
10676 only if -fassociative-math was specify. */
10677 if (flag_associative_math
10678 && TREE_CODE (arg0) == RDIV_EXPR
10679 && TREE_CODE (arg1) == REAL_CST
10680 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10682 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10685 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10686 TREE_OPERAND (arg0, 1));
10689 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10690 if (operand_equal_p (arg0, arg1, 0))
10692 tree tem = fold_strip_sign_ops (arg0);
10693 if (tem != NULL_TREE)
10695 tem = fold_convert_loc (loc, type, tem);
10696 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10700 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10701 This is not the same for NaNs or if signed zeros are
10703 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10704 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10705 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10706 && TREE_CODE (arg1) == COMPLEX_CST
10707 && real_zerop (TREE_REALPART (arg1)))
10709 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10710 if (real_onep (TREE_IMAGPART (arg1)))
10712 fold_build2_loc (loc, COMPLEX_EXPR, type,
10713 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10715 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10716 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10718 fold_build2_loc (loc, COMPLEX_EXPR, type,
10719 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10720 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10724 /* Optimize z * conj(z) for floating point complex numbers.
10725 Guarded by flag_unsafe_math_optimizations as non-finite
10726 imaginary components don't produce scalar results. */
10727 if (flag_unsafe_math_optimizations
10728 && TREE_CODE (arg0) == CONJ_EXPR
10729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10730 return fold_mult_zconjz (loc, type, arg1);
10731 if (flag_unsafe_math_optimizations
10732 && TREE_CODE (arg1) == CONJ_EXPR
10733 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10734 return fold_mult_zconjz (loc, type, arg0);
10736 if (flag_unsafe_math_optimizations)
10738 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10739 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10741 /* Optimizations of root(...)*root(...). */
10742 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10745 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10746 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10748 /* Optimize sqrt(x)*sqrt(x) as x. */
10749 if (BUILTIN_SQRT_P (fcode0)
10750 && operand_equal_p (arg00, arg10, 0)
10751 && ! HONOR_SNANS (TYPE_MODE (type)))
10754 /* Optimize root(x)*root(y) as root(x*y). */
10755 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10756 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10757 return build_call_expr_loc (loc, rootfn, 1, arg);
10760 /* Optimize expN(x)*expN(y) as expN(x+y). */
10761 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10763 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10764 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10765 CALL_EXPR_ARG (arg0, 0),
10766 CALL_EXPR_ARG (arg1, 0));
10767 return build_call_expr_loc (loc, expfn, 1, arg);
10770 /* Optimizations of pow(...)*pow(...). */
10771 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10772 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10773 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10775 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10776 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10777 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10778 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10780 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10781 if (operand_equal_p (arg01, arg11, 0))
10783 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10784 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10786 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10789 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10790 if (operand_equal_p (arg00, arg10, 0))
10792 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10793 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10795 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10799 /* Optimize tan(x)*cos(x) as sin(x). */
10800 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10801 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10802 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10803 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10804 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10805 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10806 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10807 CALL_EXPR_ARG (arg1, 0), 0))
10809 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10811 if (sinfn != NULL_TREE)
10812 return build_call_expr_loc (loc, sinfn, 1,
10813 CALL_EXPR_ARG (arg0, 0));
10816 /* Optimize x*pow(x,c) as pow(x,c+1). */
10817 if (fcode1 == BUILT_IN_POW
10818 || fcode1 == BUILT_IN_POWF
10819 || fcode1 == BUILT_IN_POWL)
10821 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10822 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10823 if (TREE_CODE (arg11) == REAL_CST
10824 && !TREE_OVERFLOW (arg11)
10825 && operand_equal_p (arg0, arg10, 0))
10827 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10831 c = TREE_REAL_CST (arg11);
10832 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10833 arg = build_real (type, c);
10834 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10838 /* Optimize pow(x,c)*x as pow(x,c+1). */
10839 if (fcode0 == BUILT_IN_POW
10840 || fcode0 == BUILT_IN_POWF
10841 || fcode0 == BUILT_IN_POWL)
10843 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10844 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10845 if (TREE_CODE (arg01) == REAL_CST
10846 && !TREE_OVERFLOW (arg01)
10847 && operand_equal_p (arg1, arg00, 0))
10849 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10853 c = TREE_REAL_CST (arg01);
10854 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10855 arg = build_real (type, c);
10856 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10860 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10861 if (!in_gimple_form
10863 && operand_equal_p (arg0, arg1, 0))
10865 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10869 tree arg = build_real (type, dconst2);
10870 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10879 /* ~X | X is -1. */
10880 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10881 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10883 t1 = build_zero_cst (type);
10884 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10885 return omit_one_operand_loc (loc, type, t1, arg1);
10888 /* X | ~X is -1. */
10889 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10890 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10892 t1 = build_zero_cst (type);
10893 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10894 return omit_one_operand_loc (loc, type, t1, arg0);
10897 /* Canonicalize (X & C1) | C2. */
10898 if (TREE_CODE (arg0) == BIT_AND_EXPR
10899 && TREE_CODE (arg1) == INTEGER_CST
10900 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10902 int width = TYPE_PRECISION (type), w;
10903 wide_int c1 = TREE_OPERAND (arg0, 1);
10904 wide_int c2 = arg1;
10906 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10907 if ((c1 & c2) == c1)
10908 return omit_one_operand_loc (loc, type, arg1,
10909 TREE_OPERAND (arg0, 0));
10911 wide_int msk = wi::mask (width, false,
10912 TYPE_PRECISION (TREE_TYPE (arg1)));
10914 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10915 if (msk.and_not (c1 | c2) == 0)
10916 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10917 TREE_OPERAND (arg0, 0), arg1);
10919 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10920 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10921 mode which allows further optimizations. */
10924 wide_int c3 = c1.and_not (c2);
10925 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10927 wide_int mask = wi::mask (w, false,
10928 TYPE_PRECISION (type));
10929 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10937 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10938 fold_build2_loc (loc, BIT_AND_EXPR, type,
10939 TREE_OPERAND (arg0, 0),
10940 wide_int_to_tree (type,
10945 /* (X & ~Y) | (~X & Y) is X ^ Y */
10946 if (TREE_CODE (arg0) == BIT_AND_EXPR
10947 && TREE_CODE (arg1) == BIT_AND_EXPR)
10949 tree a0, a1, l0, l1, n0, n1;
10951 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10952 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10954 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10955 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10957 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10958 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10960 if ((operand_equal_p (n0, a0, 0)
10961 && operand_equal_p (n1, a1, 0))
10962 || (operand_equal_p (n0, a1, 0)
10963 && operand_equal_p (n1, a0, 0)))
10964 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10967 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10968 if (t1 != NULL_TREE)
10971 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10973 This results in more efficient code for machines without a NAND
10974 instruction. Combine will canonicalize to the first form
10975 which will allow use of NAND instructions provided by the
10976 backend if they exist. */
10977 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10978 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10981 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10982 build2 (BIT_AND_EXPR, type,
10983 fold_convert_loc (loc, type,
10984 TREE_OPERAND (arg0, 0)),
10985 fold_convert_loc (loc, type,
10986 TREE_OPERAND (arg1, 0))));
10989 /* See if this can be simplified into a rotate first. If that
10990 is unsuccessful continue in the association code. */
10994 /* ~X ^ X is -1. */
10995 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10998 t1 = build_zero_cst (type);
10999 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11000 return omit_one_operand_loc (loc, type, t1, arg1);
11003 /* X ^ ~X is -1. */
11004 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11005 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11007 t1 = build_zero_cst (type);
11008 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11009 return omit_one_operand_loc (loc, type, t1, arg0);
11012 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11013 with a constant, and the two constants have no bits in common,
11014 we should treat this as a BIT_IOR_EXPR since this may produce more
11015 simplifications. */
11016 if (TREE_CODE (arg0) == BIT_AND_EXPR
11017 && TREE_CODE (arg1) == BIT_AND_EXPR
11018 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11019 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11020 && wi::bit_and (TREE_OPERAND (arg0, 1),
11021 TREE_OPERAND (arg1, 1)) == 0)
11023 code = BIT_IOR_EXPR;
11027 /* (X | Y) ^ X -> Y & ~ X*/
11028 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11029 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11031 tree t2 = TREE_OPERAND (arg0, 1);
11032 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11034 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11035 fold_convert_loc (loc, type, t2),
11036 fold_convert_loc (loc, type, t1));
11040 /* (Y | X) ^ X -> Y & ~ X*/
11041 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11044 tree t2 = TREE_OPERAND (arg0, 0);
11045 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11047 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11048 fold_convert_loc (loc, type, t2),
11049 fold_convert_loc (loc, type, t1));
11053 /* X ^ (X | Y) -> Y & ~ X*/
11054 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11055 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11057 tree t2 = TREE_OPERAND (arg1, 1);
11058 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11060 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11061 fold_convert_loc (loc, type, t2),
11062 fold_convert_loc (loc, type, t1));
11066 /* X ^ (Y | X) -> Y & ~ X*/
11067 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11068 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11070 tree t2 = TREE_OPERAND (arg1, 0);
11071 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11073 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11074 fold_convert_loc (loc, type, t2),
11075 fold_convert_loc (loc, type, t1));
11079 /* Convert ~X ^ ~Y to X ^ Y. */
11080 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11081 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11082 return fold_build2_loc (loc, code, type,
11083 fold_convert_loc (loc, type,
11084 TREE_OPERAND (arg0, 0)),
11085 fold_convert_loc (loc, type,
11086 TREE_OPERAND (arg1, 0)));
11088 /* Convert ~X ^ C to X ^ ~C. */
11089 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11090 && TREE_CODE (arg1) == INTEGER_CST)
11091 return fold_build2_loc (loc, code, type,
11092 fold_convert_loc (loc, type,
11093 TREE_OPERAND (arg0, 0)),
11094 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11096 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11097 if (TREE_CODE (arg0) == BIT_AND_EXPR
11098 && INTEGRAL_TYPE_P (type)
11099 && integer_onep (TREE_OPERAND (arg0, 1))
11100 && integer_onep (arg1))
11101 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11102 build_zero_cst (TREE_TYPE (arg0)));
11104 /* Fold (X & Y) ^ Y as ~X & Y. */
11105 if (TREE_CODE (arg0) == BIT_AND_EXPR
11106 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11108 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11109 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11110 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11111 fold_convert_loc (loc, type, arg1));
11113 /* Fold (X & Y) ^ X as ~Y & X. */
11114 if (TREE_CODE (arg0) == BIT_AND_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11116 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11118 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11119 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11120 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11121 fold_convert_loc (loc, type, arg1));
11123 /* Fold X ^ (X & Y) as X & ~Y. */
11124 if (TREE_CODE (arg1) == BIT_AND_EXPR
11125 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11127 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11128 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11129 fold_convert_loc (loc, type, arg0),
11130 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11132 /* Fold X ^ (Y & X) as ~Y & X. */
11133 if (TREE_CODE (arg1) == BIT_AND_EXPR
11134 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11135 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11137 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11138 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11139 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11140 fold_convert_loc (loc, type, arg0));
11143 /* See if this can be simplified into a rotate first. If that
11144 is unsuccessful continue in the association code. */
11148 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11149 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11150 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11151 || (TREE_CODE (arg0) == EQ_EXPR
11152 && integer_zerop (TREE_OPERAND (arg0, 1))))
11153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11154 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11156 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11157 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11158 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11159 || (TREE_CODE (arg1) == EQ_EXPR
11160 && integer_zerop (TREE_OPERAND (arg1, 1))))
11161 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11162 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11164 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11165 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11166 && INTEGRAL_TYPE_P (type)
11167 && integer_onep (TREE_OPERAND (arg0, 1))
11168 && integer_onep (arg1))
11171 tem = TREE_OPERAND (arg0, 0);
11172 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11173 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11175 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11176 build_zero_cst (TREE_TYPE (tem)));
11178 /* Fold ~X & 1 as (X & 1) == 0. */
11179 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11180 && INTEGRAL_TYPE_P (type)
11181 && integer_onep (arg1))
11184 tem = TREE_OPERAND (arg0, 0);
11185 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11186 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11188 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11189 build_zero_cst (TREE_TYPE (tem)));
11191 /* Fold !X & 1 as X == 0. */
11192 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11193 && integer_onep (arg1))
11195 tem = TREE_OPERAND (arg0, 0);
11196 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11197 build_zero_cst (TREE_TYPE (tem)));
11200 /* Fold (X ^ Y) & Y as ~X & Y. */
11201 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11202 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11204 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11205 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11206 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11207 fold_convert_loc (loc, type, arg1));
11209 /* Fold (X ^ Y) & X as ~Y & X. */
11210 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11211 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11212 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11214 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11215 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11216 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11217 fold_convert_loc (loc, type, arg1));
11219 /* Fold X & (X ^ Y) as X & ~Y. */
11220 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11221 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11223 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11224 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11225 fold_convert_loc (loc, type, arg0),
11226 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11228 /* Fold X & (Y ^ X) as ~Y & X. */
11229 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11230 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11231 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11233 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11234 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11235 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11236 fold_convert_loc (loc, type, arg0));
11239 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11240 multiple of 1 << CST. */
11241 if (TREE_CODE (arg1) == INTEGER_CST)
11243 wide_int cst1 = arg1;
11244 wide_int ncst1 = -cst1;
11245 if ((cst1 & ncst1) == ncst1
11246 && multiple_of_p (type, arg0,
11247 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11248 return fold_convert_loc (loc, type, arg0);
11251 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11253 if (TREE_CODE (arg1) == INTEGER_CST
11254 && TREE_CODE (arg0) == MULT_EXPR
11255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11257 wide_int warg1 = arg1;
11258 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11261 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11263 else if (masked != warg1)
11265 /* Avoid the transform if arg1 is a mask of some
11266 mode which allows further optimizations. */
11267 int pop = wi::popcount (warg1);
11268 if (!(pop >= BITS_PER_UNIT
11269 && exact_log2 (pop) != -1
11270 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11271 return fold_build2_loc (loc, code, type, op0,
11272 wide_int_to_tree (type, masked));
11276 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11277 ((A & N) + B) & M -> (A + B) & M
11278 Similarly if (N & M) == 0,
11279 ((A | N) + B) & M -> (A + B) & M
11280 and for - instead of + (or unary - instead of +)
11281 and/or ^ instead of |.
11282 If B is constant and (B & M) == 0, fold into A & M. */
11283 if (TREE_CODE (arg1) == INTEGER_CST)
11285 wide_int cst1 = arg1;
11286 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11287 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11288 && (TREE_CODE (arg0) == PLUS_EXPR
11289 || TREE_CODE (arg0) == MINUS_EXPR
11290 || TREE_CODE (arg0) == NEGATE_EXPR)
11291 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11292 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11298 /* Now we know that arg0 is (C + D) or (C - D) or
11299 -C and arg1 (M) is == (1LL << cst) - 1.
11300 Store C into PMOP[0] and D into PMOP[1]. */
11301 pmop[0] = TREE_OPERAND (arg0, 0);
11303 if (TREE_CODE (arg0) != NEGATE_EXPR)
11305 pmop[1] = TREE_OPERAND (arg0, 1);
11309 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11312 for (; which >= 0; which--)
11313 switch (TREE_CODE (pmop[which]))
11318 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11321 cst0 = TREE_OPERAND (pmop[which], 1);
11323 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11328 else if (cst0 != 0)
11330 /* If C or D is of the form (A & N) where
11331 (N & M) == M, or of the form (A | N) or
11332 (A ^ N) where (N & M) == 0, replace it with A. */
11333 pmop[which] = TREE_OPERAND (pmop[which], 0);
11336 /* If C or D is a N where (N & M) == 0, it can be
11337 omitted (assumed 0). */
11338 if ((TREE_CODE (arg0) == PLUS_EXPR
11339 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11340 && (cst1 & pmop[which]) == 0)
11341 pmop[which] = NULL;
11347 /* Only build anything new if we optimized one or both arguments
11349 if (pmop[0] != TREE_OPERAND (arg0, 0)
11350 || (TREE_CODE (arg0) != NEGATE_EXPR
11351 && pmop[1] != TREE_OPERAND (arg0, 1)))
11353 tree utype = TREE_TYPE (arg0);
11354 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11356 /* Perform the operations in a type that has defined
11357 overflow behavior. */
11358 utype = unsigned_type_for (TREE_TYPE (arg0));
11359 if (pmop[0] != NULL)
11360 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11361 if (pmop[1] != NULL)
11362 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11365 if (TREE_CODE (arg0) == NEGATE_EXPR)
11366 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11367 else if (TREE_CODE (arg0) == PLUS_EXPR)
11369 if (pmop[0] != NULL && pmop[1] != NULL)
11370 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11372 else if (pmop[0] != NULL)
11374 else if (pmop[1] != NULL)
11377 return build_int_cst (type, 0);
11379 else if (pmop[0] == NULL)
11380 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11382 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11384 /* TEM is now the new binary +, - or unary - replacement. */
11385 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11386 fold_convert_loc (loc, utype, arg1));
11387 return fold_convert_loc (loc, type, tem);
11392 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11393 if (t1 != NULL_TREE)
11395 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11396 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11397 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11399 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11401 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11404 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11407 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11409 This results in more efficient code for machines without a NOR
11410 instruction. Combine will canonicalize to the first form
11411 which will allow use of NOR instructions provided by the
11412 backend if they exist. */
11413 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11414 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11416 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11417 build2 (BIT_IOR_EXPR, type,
11418 fold_convert_loc (loc, type,
11419 TREE_OPERAND (arg0, 0)),
11420 fold_convert_loc (loc, type,
11421 TREE_OPERAND (arg1, 0))));
11424 /* If arg0 is derived from the address of an object or function, we may
11425 be able to fold this expression using the object or function's
11427 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11429 unsigned HOST_WIDE_INT modulus, residue;
11430 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11432 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11433 integer_onep (arg1));
11435 /* This works because modulus is a power of 2. If this weren't the
11436 case, we'd have to replace it by its greatest power-of-2
11437 divisor: modulus & -modulus. */
11439 return build_int_cst (type, residue & low);
11442 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11443 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11444 if the new mask might be further optimized. */
11445 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11446 || TREE_CODE (arg0) == RSHIFT_EXPR)
11447 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11448 && TREE_CODE (arg1) == INTEGER_CST
11449 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11450 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11451 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11452 < TYPE_PRECISION (TREE_TYPE (arg0))))
11454 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11455 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11456 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11457 tree shift_type = TREE_TYPE (arg0);
11459 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11460 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11461 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11462 && TYPE_PRECISION (TREE_TYPE (arg0))
11463 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11465 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11466 tree arg00 = TREE_OPERAND (arg0, 0);
11467 /* See if more bits can be proven as zero because of
11469 if (TREE_CODE (arg00) == NOP_EXPR
11470 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11472 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11473 if (TYPE_PRECISION (inner_type)
11474 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11475 && TYPE_PRECISION (inner_type) < prec)
11477 prec = TYPE_PRECISION (inner_type);
11478 /* See if we can shorten the right shift. */
11480 shift_type = inner_type;
11481 /* Otherwise X >> C1 is all zeros, so we'll optimize
11482 it into (X, 0) later on by making sure zerobits
11486 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11489 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11490 zerobits <<= prec - shiftc;
11492 /* For arithmetic shift if sign bit could be set, zerobits
11493 can contain actually sign bits, so no transformation is
11494 possible, unless MASK masks them all away. In that
11495 case the shift needs to be converted into logical shift. */
11496 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11497 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11499 if ((mask & zerobits) == 0)
11500 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11506 /* ((X << 16) & 0xff00) is (X, 0). */
11507 if ((mask & zerobits) == mask)
11508 return omit_one_operand_loc (loc, type,
11509 build_int_cst (type, 0), arg0);
11511 newmask = mask | zerobits;
11512 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11514 /* Only do the transformation if NEWMASK is some integer
11516 for (prec = BITS_PER_UNIT;
11517 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11518 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11520 if (prec < HOST_BITS_PER_WIDE_INT
11521 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11525 if (shift_type != TREE_TYPE (arg0))
11527 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11528 fold_convert_loc (loc, shift_type,
11529 TREE_OPERAND (arg0, 0)),
11530 TREE_OPERAND (arg0, 1));
11531 tem = fold_convert_loc (loc, type, tem);
11535 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11536 if (!tree_int_cst_equal (newmaskt, arg1))
11537 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11545 /* Don't touch a floating-point divide by zero unless the mode
11546 of the constant can represent infinity. */
11547 if (TREE_CODE (arg1) == REAL_CST
11548 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11549 && real_zerop (arg1))
11552 /* Optimize A / A to 1.0 if we don't care about
11553 NaNs or Infinities. Skip the transformation
11554 for non-real operands. */
11555 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11556 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11557 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11558 && operand_equal_p (arg0, arg1, 0))
11560 tree r = build_real (TREE_TYPE (arg0), dconst1);
11562 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11565 /* The complex version of the above A / A optimization. */
11566 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11567 && operand_equal_p (arg0, arg1, 0))
11569 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11570 if (! HONOR_NANS (TYPE_MODE (elem_type))
11571 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11573 tree r = build_real (elem_type, dconst1);
11574 /* omit_two_operands will call fold_convert for us. */
11575 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11579 /* (-A) / (-B) -> A / B */
11580 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11581 return fold_build2_loc (loc, RDIV_EXPR, type,
11582 TREE_OPERAND (arg0, 0),
11583 negate_expr (arg1));
11584 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11585 return fold_build2_loc (loc, RDIV_EXPR, type,
11586 negate_expr (arg0),
11587 TREE_OPERAND (arg1, 0));
11589 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11590 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11591 && real_onep (arg1))
11592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11594 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11595 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11596 && real_minus_onep (arg1))
11597 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11598 negate_expr (arg0)));
11600 /* If ARG1 is a constant, we can convert this to a multiply by the
11601 reciprocal. This does not have the same rounding properties,
11602 so only do this if -freciprocal-math. We can actually
11603 always safely do it if ARG1 is a power of two, but it's hard to
11604 tell if it is or not in a portable manner. */
11606 && (TREE_CODE (arg1) == REAL_CST
11607 || (TREE_CODE (arg1) == COMPLEX_CST
11608 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11609 || (TREE_CODE (arg1) == VECTOR_CST
11610 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11612 if (flag_reciprocal_math
11613 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11614 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11615 /* Find the reciprocal if optimizing and the result is exact.
11616 TODO: Complex reciprocal not implemented. */
11617 if (TREE_CODE (arg1) != COMPLEX_CST)
11619 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11622 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11625 /* Convert A/B/C to A/(B*C). */
11626 if (flag_reciprocal_math
11627 && TREE_CODE (arg0) == RDIV_EXPR)
11628 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11629 fold_build2_loc (loc, MULT_EXPR, type,
11630 TREE_OPERAND (arg0, 1), arg1));
11632 /* Convert A/(B/C) to (A/B)*C. */
11633 if (flag_reciprocal_math
11634 && TREE_CODE (arg1) == RDIV_EXPR)
11635 return fold_build2_loc (loc, MULT_EXPR, type,
11636 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11637 TREE_OPERAND (arg1, 0)),
11638 TREE_OPERAND (arg1, 1));
11640 /* Convert C1/(X*C2) into (C1/C2)/X. */
11641 if (flag_reciprocal_math
11642 && TREE_CODE (arg1) == MULT_EXPR
11643 && TREE_CODE (arg0) == REAL_CST
11644 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11646 tree tem = const_binop (RDIV_EXPR, arg0,
11647 TREE_OPERAND (arg1, 1));
11649 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11650 TREE_OPERAND (arg1, 0));
11653 if (flag_unsafe_math_optimizations)
11655 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11656 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11658 /* Optimize sin(x)/cos(x) as tan(x). */
11659 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11660 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11661 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11662 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11663 CALL_EXPR_ARG (arg1, 0), 0))
11665 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11667 if (tanfn != NULL_TREE)
11668 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11671 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11672 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11673 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11674 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11675 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11676 CALL_EXPR_ARG (arg1, 0), 0))
11678 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11680 if (tanfn != NULL_TREE)
11682 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11683 CALL_EXPR_ARG (arg0, 0));
11684 return fold_build2_loc (loc, RDIV_EXPR, type,
11685 build_real (type, dconst1), tmp);
11689 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11690 NaNs or Infinities. */
11691 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11692 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11693 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11695 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11696 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11698 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11699 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11700 && operand_equal_p (arg00, arg01, 0))
11702 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11704 if (cosfn != NULL_TREE)
11705 return build_call_expr_loc (loc, cosfn, 1, arg00);
11709 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11710 NaNs or Infinities. */
11711 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11712 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11713 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11715 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11716 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11718 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11719 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11720 && operand_equal_p (arg00, arg01, 0))
11722 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11724 if (cosfn != NULL_TREE)
11726 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11727 return fold_build2_loc (loc, RDIV_EXPR, type,
11728 build_real (type, dconst1),
11734 /* Optimize pow(x,c)/x as pow(x,c-1). */
11735 if (fcode0 == BUILT_IN_POW
11736 || fcode0 == BUILT_IN_POWF
11737 || fcode0 == BUILT_IN_POWL)
11739 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11740 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11741 if (TREE_CODE (arg01) == REAL_CST
11742 && !TREE_OVERFLOW (arg01)
11743 && operand_equal_p (arg1, arg00, 0))
11745 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11749 c = TREE_REAL_CST (arg01);
11750 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11751 arg = build_real (type, c);
11752 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11756 /* Optimize a/root(b/c) into a*root(c/b). */
11757 if (BUILTIN_ROOT_P (fcode1))
11759 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11761 if (TREE_CODE (rootarg) == RDIV_EXPR)
11763 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11764 tree b = TREE_OPERAND (rootarg, 0);
11765 tree c = TREE_OPERAND (rootarg, 1);
11767 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11769 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11770 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11774 /* Optimize x/expN(y) into x*expN(-y). */
11775 if (BUILTIN_EXPONENT_P (fcode1))
11777 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11778 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11779 arg1 = build_call_expr_loc (loc,
11781 fold_convert_loc (loc, type, arg));
11782 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11785 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11786 if (fcode1 == BUILT_IN_POW
11787 || fcode1 == BUILT_IN_POWF
11788 || fcode1 == BUILT_IN_POWL)
11790 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11791 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11792 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11793 tree neg11 = fold_convert_loc (loc, type,
11794 negate_expr (arg11));
11795 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11796 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11801 case TRUNC_DIV_EXPR:
11802 /* Optimize (X & (-A)) / A where A is a power of 2,
11804 if (TREE_CODE (arg0) == BIT_AND_EXPR
11805 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11806 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11808 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11809 arg1, TREE_OPERAND (arg0, 1));
11810 if (sum && integer_zerop (sum)) {
11811 tree pow2 = build_int_cst (integer_type_node,
11812 wi::exact_log2 (arg1));
11813 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11814 TREE_OPERAND (arg0, 0), pow2);
11820 case FLOOR_DIV_EXPR:
11821 /* Simplify A / (B << N) where A and B are positive and B is
11822 a power of 2, to A >> (N + log2(B)). */
11823 strict_overflow_p = false;
11824 if (TREE_CODE (arg1) == LSHIFT_EXPR
11825 && (TYPE_UNSIGNED (type)
11826 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11828 tree sval = TREE_OPERAND (arg1, 0);
11829 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11831 tree sh_cnt = TREE_OPERAND (arg1, 1);
11832 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11833 wi::exact_log2 (sval));
11835 if (strict_overflow_p)
11836 fold_overflow_warning (("assuming signed overflow does not "
11837 "occur when simplifying A / (B << N)"),
11838 WARN_STRICT_OVERFLOW_MISC);
11840 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11842 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11843 fold_convert_loc (loc, type, arg0), sh_cnt);
11847 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11848 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11849 if (INTEGRAL_TYPE_P (type)
11850 && TYPE_UNSIGNED (type)
11851 && code == FLOOR_DIV_EXPR)
11852 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11856 case ROUND_DIV_EXPR:
11857 case CEIL_DIV_EXPR:
11858 case EXACT_DIV_EXPR:
11859 if (integer_zerop (arg1))
11861 /* X / -1 is -X. */
11862 if (!TYPE_UNSIGNED (type)
11863 && TREE_CODE (arg1) == INTEGER_CST
11864 && wi::eq_p (arg1, -1))
11865 return fold_convert_loc (loc, type, negate_expr (arg0));
11867 /* Convert -A / -B to A / B when the type is signed and overflow is
11869 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11870 && TREE_CODE (arg0) == NEGATE_EXPR
11871 && negate_expr_p (arg1))
11873 if (INTEGRAL_TYPE_P (type))
11874 fold_overflow_warning (("assuming signed overflow does not occur "
11875 "when distributing negation across "
11877 WARN_STRICT_OVERFLOW_MISC);
11878 return fold_build2_loc (loc, code, type,
11879 fold_convert_loc (loc, type,
11880 TREE_OPERAND (arg0, 0)),
11881 fold_convert_loc (loc, type,
11882 negate_expr (arg1)));
11884 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11885 && TREE_CODE (arg1) == NEGATE_EXPR
11886 && negate_expr_p (arg0))
11888 if (INTEGRAL_TYPE_P (type))
11889 fold_overflow_warning (("assuming signed overflow does not occur "
11890 "when distributing negation across "
11892 WARN_STRICT_OVERFLOW_MISC);
11893 return fold_build2_loc (loc, code, type,
11894 fold_convert_loc (loc, type,
11895 negate_expr (arg0)),
11896 fold_convert_loc (loc, type,
11897 TREE_OPERAND (arg1, 0)));
11900 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11901 operation, EXACT_DIV_EXPR.
11903 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11904 At one time others generated faster code, it's not clear if they do
11905 after the last round to changes to the DIV code in expmed.c. */
11906 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11907 && multiple_of_p (type, arg0, arg1))
11908 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11910 strict_overflow_p = false;
11911 if (TREE_CODE (arg1) == INTEGER_CST
11912 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11913 &strict_overflow_p)))
11915 if (strict_overflow_p)
11916 fold_overflow_warning (("assuming signed overflow does not occur "
11917 "when simplifying division"),
11918 WARN_STRICT_OVERFLOW_MISC);
11919 return fold_convert_loc (loc, type, tem);
11924 case CEIL_MOD_EXPR:
11925 case FLOOR_MOD_EXPR:
11926 case ROUND_MOD_EXPR:
11927 case TRUNC_MOD_EXPR:
11928 /* X % -1 is zero. */
11929 if (!TYPE_UNSIGNED (type)
11930 && TREE_CODE (arg1) == INTEGER_CST
11931 && wi::eq_p (arg1, -1))
11932 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11934 /* X % -C is the same as X % C. */
11935 if (code == TRUNC_MOD_EXPR
11936 && TYPE_SIGN (type) == SIGNED
11937 && TREE_CODE (arg1) == INTEGER_CST
11938 && !TREE_OVERFLOW (arg1)
11939 && wi::neg_p (arg1)
11940 && !TYPE_OVERFLOW_TRAPS (type)
11941 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11942 && !sign_bit_p (arg1, arg1))
11943 return fold_build2_loc (loc, code, type,
11944 fold_convert_loc (loc, type, arg0),
11945 fold_convert_loc (loc, type,
11946 negate_expr (arg1)));
11948 /* X % -Y is the same as X % Y. */
11949 if (code == TRUNC_MOD_EXPR
11950 && !TYPE_UNSIGNED (type)
11951 && TREE_CODE (arg1) == NEGATE_EXPR
11952 && !TYPE_OVERFLOW_TRAPS (type))
11953 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11954 fold_convert_loc (loc, type,
11955 TREE_OPERAND (arg1, 0)));
11957 strict_overflow_p = false;
11958 if (TREE_CODE (arg1) == INTEGER_CST
11959 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11960 &strict_overflow_p)))
11962 if (strict_overflow_p)
11963 fold_overflow_warning (("assuming signed overflow does not occur "
11964 "when simplifying modulus"),
11965 WARN_STRICT_OVERFLOW_MISC);
11966 return fold_convert_loc (loc, type, tem);
11969 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11970 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11971 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11972 && (TYPE_UNSIGNED (type)
11973 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11976 /* Also optimize A % (C << N) where C is a power of 2,
11977 to A & ((C << N) - 1). */
11978 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11979 c = TREE_OPERAND (arg1, 0);
11981 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11984 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11985 build_int_cst (TREE_TYPE (arg1), 1));
11986 if (strict_overflow_p)
11987 fold_overflow_warning (("assuming signed overflow does not "
11988 "occur when simplifying "
11989 "X % (power of two)"),
11990 WARN_STRICT_OVERFLOW_MISC);
11991 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11992 fold_convert_loc (loc, type, arg0),
11993 fold_convert_loc (loc, type, mask));
12001 if (integer_all_onesp (arg0))
12002 return omit_one_operand_loc (loc, type, arg0, arg1);
12006 /* Optimize -1 >> x for arithmetic right shifts. */
12007 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12008 && tree_expr_nonnegative_p (arg1))
12009 return omit_one_operand_loc (loc, type, arg0, arg1);
12010 /* ... fall through ... */
12014 if (integer_zerop (arg1))
12015 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12016 if (integer_zerop (arg0))
12017 return omit_one_operand_loc (loc, type, arg0, arg1);
12019 /* Prefer vector1 << scalar to vector1 << vector2
12020 if vector2 is uniform. */
12021 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12022 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12023 return fold_build2_loc (loc, code, type, op0, tem);
12025 /* Since negative shift count is not well-defined,
12026 don't try to compute it in the compiler. */
12027 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12030 prec = element_precision (type);
12032 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12033 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12034 && tree_to_uhwi (arg1) < prec
12035 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12036 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12038 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12039 + tree_to_uhwi (arg1));
12041 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12042 being well defined. */
12045 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12047 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12048 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12049 TREE_OPERAND (arg0, 0));
12054 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12055 build_int_cst (TREE_TYPE (arg1), low));
12058 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12059 into x & ((unsigned)-1 >> c) for unsigned types. */
12060 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12061 || (TYPE_UNSIGNED (type)
12062 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12063 && tree_fits_uhwi_p (arg1)
12064 && tree_to_uhwi (arg1) < prec
12065 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12066 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12068 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12069 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12075 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12077 lshift = build_minus_one_cst (type);
12078 lshift = const_binop (code, lshift, arg1);
12080 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12084 /* Rewrite an LROTATE_EXPR by a constant into an
12085 RROTATE_EXPR by a new constant. */
12086 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12088 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12089 tem = const_binop (MINUS_EXPR, tem, arg1);
12090 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12093 /* If we have a rotate of a bit operation with the rotate count and
12094 the second operand of the bit operation both constant,
12095 permute the two operations. */
12096 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12097 && (TREE_CODE (arg0) == BIT_AND_EXPR
12098 || TREE_CODE (arg0) == BIT_IOR_EXPR
12099 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12100 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12101 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12102 fold_build2_loc (loc, code, type,
12103 TREE_OPERAND (arg0, 0), arg1),
12104 fold_build2_loc (loc, code, type,
12105 TREE_OPERAND (arg0, 1), arg1));
12107 /* Two consecutive rotates adding up to the some integer
12108 multiple of the precision of the type can be ignored. */
12109 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12110 && TREE_CODE (arg0) == RROTATE_EXPR
12111 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12112 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12114 return TREE_OPERAND (arg0, 0);
12116 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12117 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12118 if the latter can be further optimized. */
12119 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12120 && TREE_CODE (arg0) == BIT_AND_EXPR
12121 && TREE_CODE (arg1) == INTEGER_CST
12122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12124 tree mask = fold_build2_loc (loc, code, type,
12125 fold_convert_loc (loc, type,
12126 TREE_OPERAND (arg0, 1)),
12128 tree shift = fold_build2_loc (loc, code, type,
12129 fold_convert_loc (loc, type,
12130 TREE_OPERAND (arg0, 0)),
12132 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12140 if (operand_equal_p (arg0, arg1, 0))
12141 return omit_one_operand_loc (loc, type, arg0, arg1);
12142 if (INTEGRAL_TYPE_P (type)
12143 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12144 return omit_one_operand_loc (loc, type, arg1, arg0);
12145 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12151 if (operand_equal_p (arg0, arg1, 0))
12152 return omit_one_operand_loc (loc, type, arg0, arg1);
12153 if (INTEGRAL_TYPE_P (type)
12154 && TYPE_MAX_VALUE (type)
12155 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12156 return omit_one_operand_loc (loc, type, arg1, arg0);
12157 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12162 case TRUTH_ANDIF_EXPR:
12163 /* Note that the operands of this must be ints
12164 and their values must be 0 or 1.
12165 ("true" is a fixed value perhaps depending on the language.) */
12166 /* If first arg is constant zero, return it. */
12167 if (integer_zerop (arg0))
12168 return fold_convert_loc (loc, type, arg0);
12169 case TRUTH_AND_EXPR:
12170 /* If either arg is constant true, drop it. */
12171 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12172 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12173 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12174 /* Preserve sequence points. */
12175 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12176 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12177 /* If second arg is constant zero, result is zero, but first arg
12178 must be evaluated. */
12179 if (integer_zerop (arg1))
12180 return omit_one_operand_loc (loc, type, arg1, arg0);
12181 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12182 case will be handled here. */
12183 if (integer_zerop (arg0))
12184 return omit_one_operand_loc (loc, type, arg0, arg1);
12186 /* !X && X is always false. */
12187 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12189 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12190 /* X && !X is always false. */
12191 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12192 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12193 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12195 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12196 means A >= Y && A != MAX, but in this case we know that
12199 if (!TREE_SIDE_EFFECTS (arg0)
12200 && !TREE_SIDE_EFFECTS (arg1))
12202 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12203 if (tem && !operand_equal_p (tem, arg0, 0))
12204 return fold_build2_loc (loc, code, type, tem, arg1);
12206 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12207 if (tem && !operand_equal_p (tem, arg1, 0))
12208 return fold_build2_loc (loc, code, type, arg0, tem);
12211 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12217 case TRUTH_ORIF_EXPR:
12218 /* Note that the operands of this must be ints
12219 and their values must be 0 or true.
12220 ("true" is a fixed value perhaps depending on the language.) */
12221 /* If first arg is constant true, return it. */
12222 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12223 return fold_convert_loc (loc, type, arg0);
12224 case TRUTH_OR_EXPR:
12225 /* If either arg is constant zero, drop it. */
12226 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12228 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12229 /* Preserve sequence points. */
12230 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12231 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12232 /* If second arg is constant true, result is true, but we must
12233 evaluate first arg. */
12234 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12235 return omit_one_operand_loc (loc, type, arg1, arg0);
12236 /* Likewise for first arg, but note this only occurs here for
12238 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12239 return omit_one_operand_loc (loc, type, arg0, arg1);
12241 /* !X || X is always true. */
12242 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12243 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12244 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12245 /* X || !X is always true. */
12246 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12247 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12248 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12250 /* (X && !Y) || (!X && Y) is X ^ Y */
12251 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12252 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12254 tree a0, a1, l0, l1, n0, n1;
12256 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12257 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12259 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12260 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12262 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12263 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12265 if ((operand_equal_p (n0, a0, 0)
12266 && operand_equal_p (n1, a1, 0))
12267 || (operand_equal_p (n0, a1, 0)
12268 && operand_equal_p (n1, a0, 0)))
12269 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12272 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12278 case TRUTH_XOR_EXPR:
12279 /* If the second arg is constant zero, drop it. */
12280 if (integer_zerop (arg1))
12281 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12282 /* If the second arg is constant true, this is a logical inversion. */
12283 if (integer_onep (arg1))
12285 tem = invert_truthvalue_loc (loc, arg0);
12286 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12288 /* Identical arguments cancel to zero. */
12289 if (operand_equal_p (arg0, arg1, 0))
12290 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12292 /* !X ^ X is always true. */
12293 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12294 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12295 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12297 /* X ^ !X is always true. */
12298 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12299 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12300 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12309 tem = fold_comparison (loc, code, type, op0, op1);
12310 if (tem != NULL_TREE)
12313 /* bool_var != 0 becomes bool_var. */
12314 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12315 && code == NE_EXPR)
12316 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12318 /* bool_var == 1 becomes bool_var. */
12319 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12320 && code == EQ_EXPR)
12321 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12323 /* bool_var != 1 becomes !bool_var. */
12324 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12325 && code == NE_EXPR)
12326 return fold_convert_loc (loc, type,
12327 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12328 TREE_TYPE (arg0), arg0));
12330 /* bool_var == 0 becomes !bool_var. */
12331 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12332 && code == EQ_EXPR)
12333 return fold_convert_loc (loc, type,
12334 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12335 TREE_TYPE (arg0), arg0));
12337 /* !exp != 0 becomes !exp */
12338 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12339 && code == NE_EXPR)
12340 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12342 /* If this is an equality comparison of the address of two non-weak,
12343 unaliased symbols neither of which are extern (since we do not
12344 have access to attributes for externs), then we know the result. */
12345 if (TREE_CODE (arg0) == ADDR_EXPR
12346 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12347 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12348 && ! lookup_attribute ("alias",
12349 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12350 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12351 && TREE_CODE (arg1) == ADDR_EXPR
12352 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12353 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12354 && ! lookup_attribute ("alias",
12355 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12356 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12358 /* We know that we're looking at the address of two
12359 non-weak, unaliased, static _DECL nodes.
12361 It is both wasteful and incorrect to call operand_equal_p
12362 to compare the two ADDR_EXPR nodes. It is wasteful in that
12363 all we need to do is test pointer equality for the arguments
12364 to the two ADDR_EXPR nodes. It is incorrect to use
12365 operand_equal_p as that function is NOT equivalent to a
12366 C equality test. It can in fact return false for two
12367 objects which would test as equal using the C equality
12369 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12370 return constant_boolean_node (equal
12371 ? code == EQ_EXPR : code != EQ_EXPR,
12375 /* Similarly for a NEGATE_EXPR. */
12376 if (TREE_CODE (arg0) == NEGATE_EXPR
12377 && TREE_CODE (arg1) == INTEGER_CST
12378 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12380 && TREE_CODE (tem) == INTEGER_CST
12381 && !TREE_OVERFLOW (tem))
12382 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12384 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12385 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12386 && TREE_CODE (arg1) == INTEGER_CST
12387 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12388 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12389 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12390 fold_convert_loc (loc,
12393 TREE_OPERAND (arg0, 1)));
12395 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12396 if ((TREE_CODE (arg0) == PLUS_EXPR
12397 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12398 || TREE_CODE (arg0) == MINUS_EXPR)
12399 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12402 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12403 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12405 tree val = TREE_OPERAND (arg0, 1);
12406 return omit_two_operands_loc (loc, type,
12407 fold_build2_loc (loc, code, type,
12409 build_int_cst (TREE_TYPE (val),
12411 TREE_OPERAND (arg0, 0), arg1);
12414 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12415 if (TREE_CODE (arg0) == MINUS_EXPR
12416 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12417 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12420 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12422 return omit_two_operands_loc (loc, type,
12424 ? boolean_true_node : boolean_false_node,
12425 TREE_OPERAND (arg0, 1), arg1);
12428 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12429 if (TREE_CODE (arg0) == ABS_EXPR
12430 && (integer_zerop (arg1) || real_zerop (arg1)))
12431 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12433 /* If this is an EQ or NE comparison with zero and ARG0 is
12434 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12435 two operations, but the latter can be done in one less insn
12436 on machines that have only two-operand insns or on which a
12437 constant cannot be the first operand. */
12438 if (TREE_CODE (arg0) == BIT_AND_EXPR
12439 && integer_zerop (arg1))
12441 tree arg00 = TREE_OPERAND (arg0, 0);
12442 tree arg01 = TREE_OPERAND (arg0, 1);
12443 if (TREE_CODE (arg00) == LSHIFT_EXPR
12444 && integer_onep (TREE_OPERAND (arg00, 0)))
12446 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12447 arg01, TREE_OPERAND (arg00, 1));
12448 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12449 build_int_cst (TREE_TYPE (arg0), 1));
12450 return fold_build2_loc (loc, code, type,
12451 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12454 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12455 && integer_onep (TREE_OPERAND (arg01, 0)))
12457 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12458 arg00, TREE_OPERAND (arg01, 1));
12459 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12460 build_int_cst (TREE_TYPE (arg0), 1));
12461 return fold_build2_loc (loc, code, type,
12462 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12467 /* If this is an NE or EQ comparison of zero against the result of a
12468 signed MOD operation whose second operand is a power of 2, make
12469 the MOD operation unsigned since it is simpler and equivalent. */
12470 if (integer_zerop (arg1)
12471 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12472 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12473 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12474 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12475 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12476 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12478 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12479 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12480 fold_convert_loc (loc, newtype,
12481 TREE_OPERAND (arg0, 0)),
12482 fold_convert_loc (loc, newtype,
12483 TREE_OPERAND (arg0, 1)));
12485 return fold_build2_loc (loc, code, type, newmod,
12486 fold_convert_loc (loc, newtype, arg1));
12489 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12490 C1 is a valid shift constant, and C2 is a power of two, i.e.
12492 if (TREE_CODE (arg0) == BIT_AND_EXPR
12493 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12494 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12496 && integer_pow2p (TREE_OPERAND (arg0, 1))
12497 && integer_zerop (arg1))
12499 tree itype = TREE_TYPE (arg0);
12500 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12501 prec = TYPE_PRECISION (itype);
12503 /* Check for a valid shift count. */
12504 if (wi::ltu_p (arg001, prec))
12506 tree arg01 = TREE_OPERAND (arg0, 1);
12507 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12508 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12509 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12510 can be rewritten as (X & (C2 << C1)) != 0. */
12511 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12513 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12514 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12515 return fold_build2_loc (loc, code, type, tem,
12516 fold_convert_loc (loc, itype, arg1));
12518 /* Otherwise, for signed (arithmetic) shifts,
12519 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12520 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12521 else if (!TYPE_UNSIGNED (itype))
12522 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12523 arg000, build_int_cst (itype, 0));
12524 /* Otherwise, of unsigned (logical) shifts,
12525 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12526 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12528 return omit_one_operand_loc (loc, type,
12529 code == EQ_EXPR ? integer_one_node
12530 : integer_zero_node,
12535 /* If we have (A & C) == C where C is a power of 2, convert this into
12536 (A & C) != 0. Similarly for NE_EXPR. */
12537 if (TREE_CODE (arg0) == BIT_AND_EXPR
12538 && integer_pow2p (TREE_OPERAND (arg0, 1))
12539 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12540 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12541 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12542 integer_zero_node));
12544 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12545 bit, then fold the expression into A < 0 or A >= 0. */
12546 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12550 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12551 Similarly for NE_EXPR. */
12552 if (TREE_CODE (arg0) == BIT_AND_EXPR
12553 && TREE_CODE (arg1) == INTEGER_CST
12554 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12556 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12557 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12558 TREE_OPERAND (arg0, 1));
12560 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12561 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12563 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12564 if (integer_nonzerop (dandnotc))
12565 return omit_one_operand_loc (loc, type, rslt, arg0);
12568 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12569 Similarly for NE_EXPR. */
12570 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12571 && TREE_CODE (arg1) == INTEGER_CST
12572 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12574 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12576 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12577 TREE_OPERAND (arg0, 1),
12578 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12579 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12580 if (integer_nonzerop (candnotd))
12581 return omit_one_operand_loc (loc, type, rslt, arg0);
12584 /* If this is a comparison of a field, we may be able to simplify it. */
12585 if ((TREE_CODE (arg0) == COMPONENT_REF
12586 || TREE_CODE (arg0) == BIT_FIELD_REF)
12587 /* Handle the constant case even without -O
12588 to make sure the warnings are given. */
12589 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12591 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12596 /* Optimize comparisons of strlen vs zero to a compare of the
12597 first character of the string vs zero. To wit,
12598 strlen(ptr) == 0 => *ptr == 0
12599 strlen(ptr) != 0 => *ptr != 0
12600 Other cases should reduce to one of these two (or a constant)
12601 due to the return value of strlen being unsigned. */
12602 if (TREE_CODE (arg0) == CALL_EXPR
12603 && integer_zerop (arg1))
12605 tree fndecl = get_callee_fndecl (arg0);
12608 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12609 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12610 && call_expr_nargs (arg0) == 1
12611 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12613 tree iref = build_fold_indirect_ref_loc (loc,
12614 CALL_EXPR_ARG (arg0, 0));
12615 return fold_build2_loc (loc, code, type, iref,
12616 build_int_cst (TREE_TYPE (iref), 0));
12620 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12621 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12622 if (TREE_CODE (arg0) == RSHIFT_EXPR
12623 && integer_zerop (arg1)
12624 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12626 tree arg00 = TREE_OPERAND (arg0, 0);
12627 tree arg01 = TREE_OPERAND (arg0, 1);
12628 tree itype = TREE_TYPE (arg00);
12629 if (wi::eq_p (arg01, element_precision (itype) - 1))
12631 if (TYPE_UNSIGNED (itype))
12633 itype = signed_type_for (itype);
12634 arg00 = fold_convert_loc (loc, itype, arg00);
12636 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12637 type, arg00, build_zero_cst (itype));
12641 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12642 if (integer_zerop (arg1)
12643 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12644 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12645 TREE_OPERAND (arg0, 1));
12647 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12648 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12649 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12650 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12651 build_zero_cst (TREE_TYPE (arg0)));
12652 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12653 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12654 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12655 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12656 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12657 build_zero_cst (TREE_TYPE (arg0)));
12659 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12660 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12661 && TREE_CODE (arg1) == INTEGER_CST
12662 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12663 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12664 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12665 TREE_OPERAND (arg0, 1), arg1));
12667 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12668 (X & C) == 0 when C is a single bit. */
12669 if (TREE_CODE (arg0) == BIT_AND_EXPR
12670 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12671 && integer_zerop (arg1)
12672 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12674 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12675 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12676 TREE_OPERAND (arg0, 1));
12677 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12679 fold_convert_loc (loc, TREE_TYPE (arg0),
12683 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12684 constant C is a power of two, i.e. a single bit. */
12685 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12686 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12687 && integer_zerop (arg1)
12688 && integer_pow2p (TREE_OPERAND (arg0, 1))
12689 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12690 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12692 tree arg00 = TREE_OPERAND (arg0, 0);
12693 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12694 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12697 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12698 when is C is a power of two, i.e. a single bit. */
12699 if (TREE_CODE (arg0) == BIT_AND_EXPR
12700 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12701 && integer_zerop (arg1)
12702 && integer_pow2p (TREE_OPERAND (arg0, 1))
12703 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12704 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12706 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12707 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12708 arg000, TREE_OPERAND (arg0, 1));
12709 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12710 tem, build_int_cst (TREE_TYPE (tem), 0));
12713 if (integer_zerop (arg1)
12714 && tree_expr_nonzero_p (arg0))
12716 tree res = constant_boolean_node (code==NE_EXPR, type);
12717 return omit_one_operand_loc (loc, type, res, arg0);
12720 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12721 if (TREE_CODE (arg0) == NEGATE_EXPR
12722 && TREE_CODE (arg1) == NEGATE_EXPR)
12723 return fold_build2_loc (loc, code, type,
12724 TREE_OPERAND (arg0, 0),
12725 fold_convert_loc (loc, TREE_TYPE (arg0),
12726 TREE_OPERAND (arg1, 0)));
12728 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12729 if (TREE_CODE (arg0) == BIT_AND_EXPR
12730 && TREE_CODE (arg1) == BIT_AND_EXPR)
12732 tree arg00 = TREE_OPERAND (arg0, 0);
12733 tree arg01 = TREE_OPERAND (arg0, 1);
12734 tree arg10 = TREE_OPERAND (arg1, 0);
12735 tree arg11 = TREE_OPERAND (arg1, 1);
12736 tree itype = TREE_TYPE (arg0);
12738 if (operand_equal_p (arg01, arg11, 0))
12739 return fold_build2_loc (loc, code, type,
12740 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12741 fold_build2_loc (loc,
12742 BIT_XOR_EXPR, itype,
12745 build_zero_cst (itype));
12747 if (operand_equal_p (arg01, arg10, 0))
12748 return fold_build2_loc (loc, code, type,
12749 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12750 fold_build2_loc (loc,
12751 BIT_XOR_EXPR, itype,
12754 build_zero_cst (itype));
12756 if (operand_equal_p (arg00, arg11, 0))
12757 return fold_build2_loc (loc, code, type,
12758 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12759 fold_build2_loc (loc,
12760 BIT_XOR_EXPR, itype,
12763 build_zero_cst (itype));
12765 if (operand_equal_p (arg00, arg10, 0))
12766 return fold_build2_loc (loc, code, type,
12767 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12768 fold_build2_loc (loc,
12769 BIT_XOR_EXPR, itype,
12772 build_zero_cst (itype));
12775 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12776 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12778 tree arg00 = TREE_OPERAND (arg0, 0);
12779 tree arg01 = TREE_OPERAND (arg0, 1);
12780 tree arg10 = TREE_OPERAND (arg1, 0);
12781 tree arg11 = TREE_OPERAND (arg1, 1);
12782 tree itype = TREE_TYPE (arg0);
12784 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12785 operand_equal_p guarantees no side-effects so we don't need
12786 to use omit_one_operand on Z. */
12787 if (operand_equal_p (arg01, arg11, 0))
12788 return fold_build2_loc (loc, code, type, arg00,
12789 fold_convert_loc (loc, TREE_TYPE (arg00),
12791 if (operand_equal_p (arg01, arg10, 0))
12792 return fold_build2_loc (loc, code, type, arg00,
12793 fold_convert_loc (loc, TREE_TYPE (arg00),
12795 if (operand_equal_p (arg00, arg11, 0))
12796 return fold_build2_loc (loc, code, type, arg01,
12797 fold_convert_loc (loc, TREE_TYPE (arg01),
12799 if (operand_equal_p (arg00, arg10, 0))
12800 return fold_build2_loc (loc, code, type, arg01,
12801 fold_convert_loc (loc, TREE_TYPE (arg01),
12804 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12805 if (TREE_CODE (arg01) == INTEGER_CST
12806 && TREE_CODE (arg11) == INTEGER_CST)
12808 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12809 fold_convert_loc (loc, itype, arg11));
12810 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12811 return fold_build2_loc (loc, code, type, tem,
12812 fold_convert_loc (loc, itype, arg10));
12816 /* Attempt to simplify equality/inequality comparisons of complex
12817 values. Only lower the comparison if the result is known or
12818 can be simplified to a single scalar comparison. */
12819 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12820 || TREE_CODE (arg0) == COMPLEX_CST)
12821 && (TREE_CODE (arg1) == COMPLEX_EXPR
12822 || TREE_CODE (arg1) == COMPLEX_CST))
12824 tree real0, imag0, real1, imag1;
12827 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12829 real0 = TREE_OPERAND (arg0, 0);
12830 imag0 = TREE_OPERAND (arg0, 1);
12834 real0 = TREE_REALPART (arg0);
12835 imag0 = TREE_IMAGPART (arg0);
12838 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12840 real1 = TREE_OPERAND (arg1, 0);
12841 imag1 = TREE_OPERAND (arg1, 1);
12845 real1 = TREE_REALPART (arg1);
12846 imag1 = TREE_IMAGPART (arg1);
12849 rcond = fold_binary_loc (loc, code, type, real0, real1);
12850 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12852 if (integer_zerop (rcond))
12854 if (code == EQ_EXPR)
12855 return omit_two_operands_loc (loc, type, boolean_false_node,
12857 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12861 if (code == NE_EXPR)
12862 return omit_two_operands_loc (loc, type, boolean_true_node,
12864 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12868 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12869 if (icond && TREE_CODE (icond) == INTEGER_CST)
12871 if (integer_zerop (icond))
12873 if (code == EQ_EXPR)
12874 return omit_two_operands_loc (loc, type, boolean_false_node,
12876 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12880 if (code == NE_EXPR)
12881 return omit_two_operands_loc (loc, type, boolean_true_node,
12883 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12894 tem = fold_comparison (loc, code, type, op0, op1);
12895 if (tem != NULL_TREE)
12898 /* Transform comparisons of the form X +- C CMP X. */
12899 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12900 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12901 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12902 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12903 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12904 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12906 tree arg01 = TREE_OPERAND (arg0, 1);
12907 enum tree_code code0 = TREE_CODE (arg0);
12910 if (TREE_CODE (arg01) == REAL_CST)
12911 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12913 is_positive = tree_int_cst_sgn (arg01);
12915 /* (X - c) > X becomes false. */
12916 if (code == GT_EXPR
12917 && ((code0 == MINUS_EXPR && is_positive >= 0)
12918 || (code0 == PLUS_EXPR && is_positive <= 0)))
12920 if (TREE_CODE (arg01) == INTEGER_CST
12921 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12922 fold_overflow_warning (("assuming signed overflow does not "
12923 "occur when assuming that (X - c) > X "
12924 "is always false"),
12925 WARN_STRICT_OVERFLOW_ALL);
12926 return constant_boolean_node (0, type);
12929 /* Likewise (X + c) < X becomes false. */
12930 if (code == LT_EXPR
12931 && ((code0 == PLUS_EXPR && is_positive >= 0)
12932 || (code0 == MINUS_EXPR && is_positive <= 0)))
12934 if (TREE_CODE (arg01) == INTEGER_CST
12935 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12936 fold_overflow_warning (("assuming signed overflow does not "
12937 "occur when assuming that "
12938 "(X + c) < X is always false"),
12939 WARN_STRICT_OVERFLOW_ALL);
12940 return constant_boolean_node (0, type);
12943 /* Convert (X - c) <= X to true. */
12944 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12946 && ((code0 == MINUS_EXPR && is_positive >= 0)
12947 || (code0 == PLUS_EXPR && is_positive <= 0)))
12949 if (TREE_CODE (arg01) == INTEGER_CST
12950 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12951 fold_overflow_warning (("assuming signed overflow does not "
12952 "occur when assuming that "
12953 "(X - c) <= X is always true"),
12954 WARN_STRICT_OVERFLOW_ALL);
12955 return constant_boolean_node (1, type);
12958 /* Convert (X + c) >= X to true. */
12959 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12961 && ((code0 == PLUS_EXPR && is_positive >= 0)
12962 || (code0 == MINUS_EXPR && is_positive <= 0)))
12964 if (TREE_CODE (arg01) == INTEGER_CST
12965 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12966 fold_overflow_warning (("assuming signed overflow does not "
12967 "occur when assuming that "
12968 "(X + c) >= X is always true"),
12969 WARN_STRICT_OVERFLOW_ALL);
12970 return constant_boolean_node (1, type);
12973 if (TREE_CODE (arg01) == INTEGER_CST)
12975 /* Convert X + c > X and X - c < X to true for integers. */
12976 if (code == GT_EXPR
12977 && ((code0 == PLUS_EXPR && is_positive > 0)
12978 || (code0 == MINUS_EXPR && is_positive < 0)))
12980 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12981 fold_overflow_warning (("assuming signed overflow does "
12982 "not occur when assuming that "
12983 "(X + c) > X is always true"),
12984 WARN_STRICT_OVERFLOW_ALL);
12985 return constant_boolean_node (1, type);
12988 if (code == LT_EXPR
12989 && ((code0 == MINUS_EXPR && is_positive > 0)
12990 || (code0 == PLUS_EXPR && is_positive < 0)))
12992 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12993 fold_overflow_warning (("assuming signed overflow does "
12994 "not occur when assuming that "
12995 "(X - c) < X is always true"),
12996 WARN_STRICT_OVERFLOW_ALL);
12997 return constant_boolean_node (1, type);
13000 /* Convert X + c <= X and X - c >= X to false for integers. */
13001 if (code == LE_EXPR
13002 && ((code0 == PLUS_EXPR && is_positive > 0)
13003 || (code0 == MINUS_EXPR && is_positive < 0)))
13005 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13006 fold_overflow_warning (("assuming signed overflow does "
13007 "not occur when assuming that "
13008 "(X + c) <= X is always false"),
13009 WARN_STRICT_OVERFLOW_ALL);
13010 return constant_boolean_node (0, type);
13013 if (code == GE_EXPR
13014 && ((code0 == MINUS_EXPR && is_positive > 0)
13015 || (code0 == PLUS_EXPR && is_positive < 0)))
13017 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13018 fold_overflow_warning (("assuming signed overflow does "
13019 "not occur when assuming that "
13020 "(X - c) >= X is always false"),
13021 WARN_STRICT_OVERFLOW_ALL);
13022 return constant_boolean_node (0, type);
13027 /* Comparisons with the highest or lowest possible integer of
13028 the specified precision will have known values. */
13030 tree arg1_type = TREE_TYPE (arg1);
13031 unsigned int prec = TYPE_PRECISION (arg1_type);
13033 if (TREE_CODE (arg1) == INTEGER_CST
13034 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13036 wide_int max = wi::max_value (arg1_type);
13037 wide_int signed_max = wi::max_value (prec, SIGNED);
13038 wide_int min = wi::min_value (arg1_type);
13040 if (wi::eq_p (arg1, max))
13044 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13047 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13050 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13053 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13055 /* The GE_EXPR and LT_EXPR cases above are not normally
13056 reached because of previous transformations. */
13061 else if (wi::eq_p (arg1, max - 1))
13065 arg1 = const_binop (PLUS_EXPR, arg1,
13066 build_int_cst (TREE_TYPE (arg1), 1));
13067 return fold_build2_loc (loc, EQ_EXPR, type,
13068 fold_convert_loc (loc,
13069 TREE_TYPE (arg1), arg0),
13072 arg1 = const_binop (PLUS_EXPR, arg1,
13073 build_int_cst (TREE_TYPE (arg1), 1));
13074 return fold_build2_loc (loc, NE_EXPR, type,
13075 fold_convert_loc (loc, TREE_TYPE (arg1),
13081 else if (wi::eq_p (arg1, min))
13085 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13088 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13091 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13094 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13099 else if (wi::eq_p (arg1, min + 1))
13103 arg1 = const_binop (MINUS_EXPR, arg1,
13104 build_int_cst (TREE_TYPE (arg1), 1));
13105 return fold_build2_loc (loc, NE_EXPR, type,
13106 fold_convert_loc (loc,
13107 TREE_TYPE (arg1), arg0),
13110 arg1 = const_binop (MINUS_EXPR, arg1,
13111 build_int_cst (TREE_TYPE (arg1), 1));
13112 return fold_build2_loc (loc, EQ_EXPR, type,
13113 fold_convert_loc (loc, TREE_TYPE (arg1),
13120 else if (wi::eq_p (arg1, signed_max)
13121 && TYPE_UNSIGNED (arg1_type)
13122 /* We will flip the signedness of the comparison operator
13123 associated with the mode of arg1, so the sign bit is
13124 specified by this mode. Check that arg1 is the signed
13125 max associated with this sign bit. */
13126 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13127 /* signed_type does not work on pointer types. */
13128 && INTEGRAL_TYPE_P (arg1_type))
13130 /* The following case also applies to X < signed_max+1
13131 and X >= signed_max+1 because previous transformations. */
13132 if (code == LE_EXPR || code == GT_EXPR)
13134 tree st = signed_type_for (arg1_type);
13135 return fold_build2_loc (loc,
13136 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13137 type, fold_convert_loc (loc, st, arg0),
13138 build_int_cst (st, 0));
13144 /* If we are comparing an ABS_EXPR with a constant, we can
13145 convert all the cases into explicit comparisons, but they may
13146 well not be faster than doing the ABS and one comparison.
13147 But ABS (X) <= C is a range comparison, which becomes a subtraction
13148 and a comparison, and is probably faster. */
13149 if (code == LE_EXPR
13150 && TREE_CODE (arg1) == INTEGER_CST
13151 && TREE_CODE (arg0) == ABS_EXPR
13152 && ! TREE_SIDE_EFFECTS (arg0)
13153 && (0 != (tem = negate_expr (arg1)))
13154 && TREE_CODE (tem) == INTEGER_CST
13155 && !TREE_OVERFLOW (tem))
13156 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13157 build2 (GE_EXPR, type,
13158 TREE_OPERAND (arg0, 0), tem),
13159 build2 (LE_EXPR, type,
13160 TREE_OPERAND (arg0, 0), arg1));
13162 /* Convert ABS_EXPR<x> >= 0 to true. */
13163 strict_overflow_p = false;
13164 if (code == GE_EXPR
13165 && (integer_zerop (arg1)
13166 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13167 && real_zerop (arg1)))
13168 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13170 if (strict_overflow_p)
13171 fold_overflow_warning (("assuming signed overflow does not occur "
13172 "when simplifying comparison of "
13173 "absolute value and zero"),
13174 WARN_STRICT_OVERFLOW_CONDITIONAL);
13175 return omit_one_operand_loc (loc, type,
13176 constant_boolean_node (true, type),
13180 /* Convert ABS_EXPR<x> < 0 to false. */
13181 strict_overflow_p = false;
13182 if (code == LT_EXPR
13183 && (integer_zerop (arg1) || real_zerop (arg1))
13184 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13186 if (strict_overflow_p)
13187 fold_overflow_warning (("assuming signed overflow does not occur "
13188 "when simplifying comparison of "
13189 "absolute value and zero"),
13190 WARN_STRICT_OVERFLOW_CONDITIONAL);
13191 return omit_one_operand_loc (loc, type,
13192 constant_boolean_node (false, type),
13196 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13197 and similarly for >= into !=. */
13198 if ((code == LT_EXPR || code == GE_EXPR)
13199 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13200 && TREE_CODE (arg1) == LSHIFT_EXPR
13201 && integer_onep (TREE_OPERAND (arg1, 0)))
13202 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13203 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13204 TREE_OPERAND (arg1, 1)),
13205 build_zero_cst (TREE_TYPE (arg0)));
13207 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13208 otherwise Y might be >= # of bits in X's type and thus e.g.
13209 (unsigned char) (1 << Y) for Y 15 might be 0.
13210 If the cast is widening, then 1 << Y should have unsigned type,
13211 otherwise if Y is number of bits in the signed shift type minus 1,
13212 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13213 31 might be 0xffffffff80000000. */
13214 if ((code == LT_EXPR || code == GE_EXPR)
13215 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13216 && CONVERT_EXPR_P (arg1)
13217 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13218 && (TYPE_PRECISION (TREE_TYPE (arg1))
13219 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13220 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13221 || (TYPE_PRECISION (TREE_TYPE (arg1))
13222 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13223 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13225 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13226 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13227 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13228 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13229 build_zero_cst (TREE_TYPE (arg0)));
13234 case UNORDERED_EXPR:
13242 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13244 t1 = fold_relational_const (code, type, arg0, arg1);
13245 if (t1 != NULL_TREE)
13249 /* If the first operand is NaN, the result is constant. */
13250 if (TREE_CODE (arg0) == REAL_CST
13251 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13252 && (code != LTGT_EXPR || ! flag_trapping_math))
13254 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13255 ? integer_zero_node
13256 : integer_one_node;
13257 return omit_one_operand_loc (loc, type, t1, arg1);
13260 /* If the second operand is NaN, the result is constant. */
13261 if (TREE_CODE (arg1) == REAL_CST
13262 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13263 && (code != LTGT_EXPR || ! flag_trapping_math))
13265 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13266 ? integer_zero_node
13267 : integer_one_node;
13268 return omit_one_operand_loc (loc, type, t1, arg0);
13271 /* Simplify unordered comparison of something with itself. */
13272 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13273 && operand_equal_p (arg0, arg1, 0))
13274 return constant_boolean_node (1, type);
13276 if (code == LTGT_EXPR
13277 && !flag_trapping_math
13278 && operand_equal_p (arg0, arg1, 0))
13279 return constant_boolean_node (0, type);
13281 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13283 tree targ0 = strip_float_extensions (arg0);
13284 tree targ1 = strip_float_extensions (arg1);
13285 tree newtype = TREE_TYPE (targ0);
13287 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13288 newtype = TREE_TYPE (targ1);
13290 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13291 return fold_build2_loc (loc, code, type,
13292 fold_convert_loc (loc, newtype, targ0),
13293 fold_convert_loc (loc, newtype, targ1));
13298 case COMPOUND_EXPR:
13299 /* When pedantic, a compound expression can be neither an lvalue
13300 nor an integer constant expression. */
13301 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13303 /* Don't let (0, 0) be null pointer constant. */
13304 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13305 : fold_convert_loc (loc, type, arg1);
13306 return pedantic_non_lvalue_loc (loc, tem);
13309 if ((TREE_CODE (arg0) == REAL_CST
13310 && TREE_CODE (arg1) == REAL_CST)
13311 || (TREE_CODE (arg0) == INTEGER_CST
13312 && TREE_CODE (arg1) == INTEGER_CST))
13313 return build_complex (type, arg0, arg1);
13317 /* An ASSERT_EXPR should never be passed to fold_binary. */
13318 gcc_unreachable ();
13320 case VEC_PACK_TRUNC_EXPR:
13321 case VEC_PACK_FIX_TRUNC_EXPR:
13323 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13326 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13327 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13328 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13331 elts = XALLOCAVEC (tree, nelts);
13332 if (!vec_cst_ctor_to_array (arg0, elts)
13333 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13336 for (i = 0; i < nelts; i++)
13338 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13339 ? NOP_EXPR : FIX_TRUNC_EXPR,
13340 TREE_TYPE (type), elts[i]);
13341 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13345 return build_vector (type, elts);
13348 case VEC_WIDEN_MULT_LO_EXPR:
13349 case VEC_WIDEN_MULT_HI_EXPR:
13350 case VEC_WIDEN_MULT_EVEN_EXPR:
13351 case VEC_WIDEN_MULT_ODD_EXPR:
13353 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13354 unsigned int out, ofs, scale;
13357 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13358 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13359 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13362 elts = XALLOCAVEC (tree, nelts * 4);
13363 if (!vec_cst_ctor_to_array (arg0, elts)
13364 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13367 if (code == VEC_WIDEN_MULT_LO_EXPR)
13368 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13369 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13370 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13371 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13372 scale = 1, ofs = 0;
13373 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13374 scale = 1, ofs = 1;
13376 for (out = 0; out < nelts; out++)
13378 unsigned int in1 = (out << scale) + ofs;
13379 unsigned int in2 = in1 + nelts * 2;
13382 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13383 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13385 if (t1 == NULL_TREE || t2 == NULL_TREE)
13387 elts[out] = const_binop (MULT_EXPR, t1, t2);
13388 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13392 return build_vector (type, elts);
13397 } /* switch (code) */
13400 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13401 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13405 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13407 switch (TREE_CODE (*tp))
13413 *walk_subtrees = 0;
13415 /* ... fall through ... */
13422 /* Return whether the sub-tree ST contains a label which is accessible from
13423 outside the sub-tree. */
13426 contains_label_p (tree st)
13429 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13432 /* Fold a ternary expression of code CODE and type TYPE with operands
13433 OP0, OP1, and OP2. Return the folded expression if folding is
13434 successful. Otherwise, return NULL_TREE. */
13437 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13438 tree op0, tree op1, tree op2)
13441 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13442 enum tree_code_class kind = TREE_CODE_CLASS (code);
13444 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13445 && TREE_CODE_LENGTH (code) == 3);
13447 /* If this is a commutative operation, and OP0 is a constant, move it
13448 to OP1 to reduce the number of tests below. */
13449 if (commutative_ternary_tree_code (code)
13450 && tree_swap_operands_p (op0, op1, true))
13451 return fold_build3_loc (loc, code, type, op1, op0, op2);
13453 tem = generic_simplify (loc, code, type, op0, op1, op2);
13457 /* Strip any conversions that don't change the mode. This is safe
13458 for every expression, except for a comparison expression because
13459 its signedness is derived from its operands. So, in the latter
13460 case, only strip conversions that don't change the signedness.
13462 Note that this is done as an internal manipulation within the
13463 constant folder, in order to find the simplest representation of
13464 the arguments so that their form can be studied. In any cases,
13465 the appropriate type conversions should be put back in the tree
13466 that will get out of the constant folder. */
13487 case COMPONENT_REF:
13488 if (TREE_CODE (arg0) == CONSTRUCTOR
13489 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13491 unsigned HOST_WIDE_INT idx;
13493 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13500 case VEC_COND_EXPR:
13501 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13502 so all simple results must be passed through pedantic_non_lvalue. */
13503 if (TREE_CODE (arg0) == INTEGER_CST)
13505 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13506 tem = integer_zerop (arg0) ? op2 : op1;
13507 /* Only optimize constant conditions when the selected branch
13508 has the same type as the COND_EXPR. This avoids optimizing
13509 away "c ? x : throw", where the throw has a void type.
13510 Avoid throwing away that operand which contains label. */
13511 if ((!TREE_SIDE_EFFECTS (unused_op)
13512 || !contains_label_p (unused_op))
13513 && (! VOID_TYPE_P (TREE_TYPE (tem))
13514 || VOID_TYPE_P (type)))
13515 return pedantic_non_lvalue_loc (loc, tem);
13518 else if (TREE_CODE (arg0) == VECTOR_CST)
13520 if ((TREE_CODE (arg1) == VECTOR_CST
13521 || TREE_CODE (arg1) == CONSTRUCTOR)
13522 && (TREE_CODE (arg2) == VECTOR_CST
13523 || TREE_CODE (arg2) == CONSTRUCTOR))
13525 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13526 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13527 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13528 for (i = 0; i < nelts; i++)
13530 tree val = VECTOR_CST_ELT (arg0, i);
13531 if (integer_all_onesp (val))
13533 else if (integer_zerop (val))
13534 sel[i] = nelts + i;
13535 else /* Currently unreachable. */
13538 tree t = fold_vec_perm (type, arg1, arg2, sel);
13539 if (t != NULL_TREE)
13544 /* If we have A op B ? A : C, we may be able to convert this to a
13545 simpler expression, depending on the operation and the values
13546 of B and C. Signed zeros prevent all of these transformations,
13547 for reasons given above each one.
13549 Also try swapping the arguments and inverting the conditional. */
13550 if (COMPARISON_CLASS_P (arg0)
13551 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13552 arg1, TREE_OPERAND (arg0, 1))
13553 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13555 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13560 if (COMPARISON_CLASS_P (arg0)
13561 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13563 TREE_OPERAND (arg0, 1))
13564 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13566 location_t loc0 = expr_location_or (arg0, loc);
13567 tem = fold_invert_truthvalue (loc0, arg0);
13568 if (tem && COMPARISON_CLASS_P (tem))
13570 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13576 /* If the second operand is simpler than the third, swap them
13577 since that produces better jump optimization results. */
13578 if (truth_value_p (TREE_CODE (arg0))
13579 && tree_swap_operands_p (op1, op2, false))
13581 location_t loc0 = expr_location_or (arg0, loc);
13582 /* See if this can be inverted. If it can't, possibly because
13583 it was a floating-point inequality comparison, don't do
13585 tem = fold_invert_truthvalue (loc0, arg0);
13587 return fold_build3_loc (loc, code, type, tem, op2, op1);
13590 /* Convert A ? 1 : 0 to simply A. */
13591 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13592 : (integer_onep (op1)
13593 && !VECTOR_TYPE_P (type)))
13594 && integer_zerop (op2)
13595 /* If we try to convert OP0 to our type, the
13596 call to fold will try to move the conversion inside
13597 a COND, which will recurse. In that case, the COND_EXPR
13598 is probably the best choice, so leave it alone. */
13599 && type == TREE_TYPE (arg0))
13600 return pedantic_non_lvalue_loc (loc, arg0);
13602 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13603 over COND_EXPR in cases such as floating point comparisons. */
13604 if (integer_zerop (op1)
13605 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13606 : (integer_onep (op2)
13607 && !VECTOR_TYPE_P (type)))
13608 && truth_value_p (TREE_CODE (arg0)))
13609 return pedantic_non_lvalue_loc (loc,
13610 fold_convert_loc (loc, type,
13611 invert_truthvalue_loc (loc,
13614 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13615 if (TREE_CODE (arg0) == LT_EXPR
13616 && integer_zerop (TREE_OPERAND (arg0, 1))
13617 && integer_zerop (op2)
13618 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13620 /* sign_bit_p looks through both zero and sign extensions,
13621 but for this optimization only sign extensions are
13623 tree tem2 = TREE_OPERAND (arg0, 0);
13624 while (tem != tem2)
13626 if (TREE_CODE (tem2) != NOP_EXPR
13627 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13632 tem2 = TREE_OPERAND (tem2, 0);
13634 /* sign_bit_p only checks ARG1 bits within A's precision.
13635 If <sign bit of A> has wider type than A, bits outside
13636 of A's precision in <sign bit of A> need to be checked.
13637 If they are all 0, this optimization needs to be done
13638 in unsigned A's type, if they are all 1 in signed A's type,
13639 otherwise this can't be done. */
13641 && TYPE_PRECISION (TREE_TYPE (tem))
13642 < TYPE_PRECISION (TREE_TYPE (arg1))
13643 && TYPE_PRECISION (TREE_TYPE (tem))
13644 < TYPE_PRECISION (type))
13646 int inner_width, outer_width;
13649 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13650 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13651 if (outer_width > TYPE_PRECISION (type))
13652 outer_width = TYPE_PRECISION (type);
13654 wide_int mask = wi::shifted_mask
13655 (inner_width, outer_width - inner_width, false,
13656 TYPE_PRECISION (TREE_TYPE (arg1)));
13658 wide_int common = mask & arg1;
13659 if (common == mask)
13661 tem_type = signed_type_for (TREE_TYPE (tem));
13662 tem = fold_convert_loc (loc, tem_type, tem);
13664 else if (common == 0)
13666 tem_type = unsigned_type_for (TREE_TYPE (tem));
13667 tem = fold_convert_loc (loc, tem_type, tem);
13675 fold_convert_loc (loc, type,
13676 fold_build2_loc (loc, BIT_AND_EXPR,
13677 TREE_TYPE (tem), tem,
13678 fold_convert_loc (loc,
13683 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13684 already handled above. */
13685 if (TREE_CODE (arg0) == BIT_AND_EXPR
13686 && integer_onep (TREE_OPERAND (arg0, 1))
13687 && integer_zerop (op2)
13688 && integer_pow2p (arg1))
13690 tree tem = TREE_OPERAND (arg0, 0);
13692 if (TREE_CODE (tem) == RSHIFT_EXPR
13693 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13694 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13695 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13696 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13697 TREE_OPERAND (tem, 0), arg1);
13700 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13701 is probably obsolete because the first operand should be a
13702 truth value (that's why we have the two cases above), but let's
13703 leave it in until we can confirm this for all front-ends. */
13704 if (integer_zerop (op2)
13705 && TREE_CODE (arg0) == NE_EXPR
13706 && integer_zerop (TREE_OPERAND (arg0, 1))
13707 && integer_pow2p (arg1)
13708 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13709 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13710 arg1, OEP_ONLY_CONST))
13711 return pedantic_non_lvalue_loc (loc,
13712 fold_convert_loc (loc, type,
13713 TREE_OPERAND (arg0, 0)));
13715 /* Disable the transformations below for vectors, since
13716 fold_binary_op_with_conditional_arg may undo them immediately,
13717 yielding an infinite loop. */
13718 if (code == VEC_COND_EXPR)
13721 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13722 if (integer_zerop (op2)
13723 && truth_value_p (TREE_CODE (arg0))
13724 && truth_value_p (TREE_CODE (arg1))
13725 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13726 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13727 : TRUTH_ANDIF_EXPR,
13728 type, fold_convert_loc (loc, type, arg0), arg1);
13730 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13731 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13732 && truth_value_p (TREE_CODE (arg0))
13733 && truth_value_p (TREE_CODE (arg1))
13734 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13736 location_t loc0 = expr_location_or (arg0, loc);
13737 /* Only perform transformation if ARG0 is easily inverted. */
13738 tem = fold_invert_truthvalue (loc0, arg0);
13740 return fold_build2_loc (loc, code == VEC_COND_EXPR
13743 type, fold_convert_loc (loc, type, tem),
13747 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13748 if (integer_zerop (arg1)
13749 && truth_value_p (TREE_CODE (arg0))
13750 && truth_value_p (TREE_CODE (op2))
13751 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13753 location_t loc0 = expr_location_or (arg0, loc);
13754 /* Only perform transformation if ARG0 is easily inverted. */
13755 tem = fold_invert_truthvalue (loc0, arg0);
13757 return fold_build2_loc (loc, code == VEC_COND_EXPR
13758 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13759 type, fold_convert_loc (loc, type, tem),
13763 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13764 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13765 && truth_value_p (TREE_CODE (arg0))
13766 && truth_value_p (TREE_CODE (op2))
13767 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13768 return fold_build2_loc (loc, code == VEC_COND_EXPR
13769 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13770 type, fold_convert_loc (loc, type, arg0), op2);
13775 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13776 of fold_ternary on them. */
13777 gcc_unreachable ();
13779 case BIT_FIELD_REF:
13780 if ((TREE_CODE (arg0) == VECTOR_CST
13781 || (TREE_CODE (arg0) == CONSTRUCTOR
13782 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13783 && (type == TREE_TYPE (TREE_TYPE (arg0))
13784 || (TREE_CODE (type) == VECTOR_TYPE
13785 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13787 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13788 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13789 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13790 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13793 && (idx % width) == 0
13794 && (n % width) == 0
13795 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13800 if (TREE_CODE (arg0) == VECTOR_CST)
13803 return VECTOR_CST_ELT (arg0, idx);
13805 tree *vals = XALLOCAVEC (tree, n);
13806 for (unsigned i = 0; i < n; ++i)
13807 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13808 return build_vector (type, vals);
13811 /* Constructor elements can be subvectors. */
13812 unsigned HOST_WIDE_INT k = 1;
13813 if (CONSTRUCTOR_NELTS (arg0) != 0)
13815 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13816 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13817 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13820 /* We keep an exact subset of the constructor elements. */
13821 if ((idx % k) == 0 && (n % k) == 0)
13823 if (CONSTRUCTOR_NELTS (arg0) == 0)
13824 return build_constructor (type, NULL);
13829 if (idx < CONSTRUCTOR_NELTS (arg0))
13830 return CONSTRUCTOR_ELT (arg0, idx)->value;
13831 return build_zero_cst (type);
13834 vec<constructor_elt, va_gc> *vals;
13835 vec_alloc (vals, n);
13836 for (unsigned i = 0;
13837 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13839 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13841 (arg0, idx + i)->value);
13842 return build_constructor (type, vals);
13844 /* The bitfield references a single constructor element. */
13845 else if (idx + n <= (idx / k + 1) * k)
13847 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13848 return build_zero_cst (type);
13850 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13852 return fold_build3_loc (loc, code, type,
13853 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13854 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13859 /* A bit-field-ref that referenced the full argument can be stripped. */
13860 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13861 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13862 && integer_zerop (op2))
13863 return fold_convert_loc (loc, type, arg0);
13865 /* On constants we can use native encode/interpret to constant
13866 fold (nearly) all BIT_FIELD_REFs. */
13867 if (CONSTANT_CLASS_P (arg0)
13868 && can_native_interpret_type_p (type)
13869 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13870 /* This limitation should not be necessary, we just need to
13871 round this up to mode size. */
13872 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13873 /* Need bit-shifting of the buffer to relax the following. */
13874 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13876 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13877 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13878 unsigned HOST_WIDE_INT clen;
13879 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13880 /* ??? We cannot tell native_encode_expr to start at
13881 some random byte only. So limit us to a reasonable amount
13885 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13886 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13888 && len * BITS_PER_UNIT >= bitpos + bitsize)
13890 tree v = native_interpret_expr (type,
13891 b + bitpos / BITS_PER_UNIT,
13892 bitsize / BITS_PER_UNIT);
13902 /* For integers we can decompose the FMA if possible. */
13903 if (TREE_CODE (arg0) == INTEGER_CST
13904 && TREE_CODE (arg1) == INTEGER_CST)
13905 return fold_build2_loc (loc, PLUS_EXPR, type,
13906 const_binop (MULT_EXPR, arg0, arg1), arg2);
13907 if (integer_zerop (arg2))
13908 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13910 return fold_fma (loc, type, arg0, arg1, arg2);
13912 case VEC_PERM_EXPR:
13913 if (TREE_CODE (arg2) == VECTOR_CST)
13915 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13916 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13917 unsigned char *sel2 = sel + nelts;
13918 bool need_mask_canon = false;
13919 bool need_mask_canon2 = false;
13920 bool all_in_vec0 = true;
13921 bool all_in_vec1 = true;
13922 bool maybe_identity = true;
13923 bool single_arg = (op0 == op1);
13924 bool changed = false;
13926 mask2 = 2 * nelts - 1;
13927 mask = single_arg ? (nelts - 1) : mask2;
13928 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13929 for (i = 0; i < nelts; i++)
13931 tree val = VECTOR_CST_ELT (arg2, i);
13932 if (TREE_CODE (val) != INTEGER_CST)
13935 /* Make sure that the perm value is in an acceptable
13938 need_mask_canon |= wi::gtu_p (t, mask);
13939 need_mask_canon2 |= wi::gtu_p (t, mask2);
13940 sel[i] = t.to_uhwi () & mask;
13941 sel2[i] = t.to_uhwi () & mask2;
13943 if (sel[i] < nelts)
13944 all_in_vec1 = false;
13946 all_in_vec0 = false;
13948 if ((sel[i] & (nelts-1)) != i)
13949 maybe_identity = false;
13952 if (maybe_identity)
13962 else if (all_in_vec1)
13965 for (i = 0; i < nelts; i++)
13967 need_mask_canon = true;
13970 if ((TREE_CODE (op0) == VECTOR_CST
13971 || TREE_CODE (op0) == CONSTRUCTOR)
13972 && (TREE_CODE (op1) == VECTOR_CST
13973 || TREE_CODE (op1) == CONSTRUCTOR))
13975 tree t = fold_vec_perm (type, op0, op1, sel);
13976 if (t != NULL_TREE)
13980 if (op0 == op1 && !single_arg)
13983 /* Some targets are deficient and fail to expand a single
13984 argument permutation while still allowing an equivalent
13985 2-argument version. */
13986 if (need_mask_canon && arg2 == op2
13987 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13988 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13990 need_mask_canon = need_mask_canon2;
13994 if (need_mask_canon && arg2 == op2)
13996 tree *tsel = XALLOCAVEC (tree, nelts);
13997 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13998 for (i = 0; i < nelts; i++)
13999 tsel[i] = build_int_cst (eltype, sel[i]);
14000 op2 = build_vector (TREE_TYPE (arg2), tsel);
14005 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14011 } /* switch (code) */
14014 /* Perform constant folding and related simplification of EXPR.
14015 The related simplifications include x*1 => x, x*0 => 0, etc.,
14016 and application of the associative law.
14017 NOP_EXPR conversions may be removed freely (as long as we
14018 are careful not to change the type of the overall expression).
14019 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14020 but we can constant-fold them if they have constant operands. */
14022 #ifdef ENABLE_FOLD_CHECKING
14023 # define fold(x) fold_1 (x)
14024 static tree fold_1 (tree);
14030 const tree t = expr;
14031 enum tree_code code = TREE_CODE (t);
14032 enum tree_code_class kind = TREE_CODE_CLASS (code);
14034 location_t loc = EXPR_LOCATION (expr);
14036 /* Return right away if a constant. */
14037 if (kind == tcc_constant)
14040 /* CALL_EXPR-like objects with variable numbers of operands are
14041 treated specially. */
14042 if (kind == tcc_vl_exp)
14044 if (code == CALL_EXPR)
14046 tem = fold_call_expr (loc, expr, false);
14047 return tem ? tem : expr;
14052 if (IS_EXPR_CODE_CLASS (kind))
14054 tree type = TREE_TYPE (t);
14055 tree op0, op1, op2;
14057 switch (TREE_CODE_LENGTH (code))
14060 op0 = TREE_OPERAND (t, 0);
14061 tem = fold_unary_loc (loc, code, type, op0);
14062 return tem ? tem : expr;
14064 op0 = TREE_OPERAND (t, 0);
14065 op1 = TREE_OPERAND (t, 1);
14066 tem = fold_binary_loc (loc, code, type, op0, op1);
14067 return tem ? tem : expr;
14069 op0 = TREE_OPERAND (t, 0);
14070 op1 = TREE_OPERAND (t, 1);
14071 op2 = TREE_OPERAND (t, 2);
14072 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14073 return tem ? tem : expr;
14083 tree op0 = TREE_OPERAND (t, 0);
14084 tree op1 = TREE_OPERAND (t, 1);
14086 if (TREE_CODE (op1) == INTEGER_CST
14087 && TREE_CODE (op0) == CONSTRUCTOR
14088 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14090 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14091 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14092 unsigned HOST_WIDE_INT begin = 0;
14094 /* Find a matching index by means of a binary search. */
14095 while (begin != end)
14097 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14098 tree index = (*elts)[middle].index;
14100 if (TREE_CODE (index) == INTEGER_CST
14101 && tree_int_cst_lt (index, op1))
14102 begin = middle + 1;
14103 else if (TREE_CODE (index) == INTEGER_CST
14104 && tree_int_cst_lt (op1, index))
14106 else if (TREE_CODE (index) == RANGE_EXPR
14107 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14108 begin = middle + 1;
14109 else if (TREE_CODE (index) == RANGE_EXPR
14110 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14113 return (*elts)[middle].value;
14120 /* Return a VECTOR_CST if possible. */
14123 tree type = TREE_TYPE (t);
14124 if (TREE_CODE (type) != VECTOR_TYPE)
14127 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14128 unsigned HOST_WIDE_INT idx, pos = 0;
14131 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14133 if (!CONSTANT_CLASS_P (value))
14135 if (TREE_CODE (value) == VECTOR_CST)
14137 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14138 vec[pos++] = VECTOR_CST_ELT (value, i);
14141 vec[pos++] = value;
14143 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14144 vec[pos] = build_zero_cst (TREE_TYPE (type));
14146 return build_vector (type, vec);
14150 return fold (DECL_INITIAL (t));
14154 } /* switch (code) */
14157 #ifdef ENABLE_FOLD_CHECKING
14160 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14161 hash_table<pointer_hash<const tree_node> > *);
14162 static void fold_check_failed (const_tree, const_tree);
14163 void print_fold_checksum (const_tree);
14165 /* When --enable-checking=fold, compute a digest of expr before
14166 and after actual fold call to see if fold did not accidentally
14167 change original expr. */
14173 struct md5_ctx ctx;
14174 unsigned char checksum_before[16], checksum_after[16];
14175 hash_table<pointer_hash<const tree_node> > ht (32);
14177 md5_init_ctx (&ctx);
14178 fold_checksum_tree (expr, &ctx, &ht);
14179 md5_finish_ctx (&ctx, checksum_before);
14182 ret = fold_1 (expr);
14184 md5_init_ctx (&ctx);
14185 fold_checksum_tree (expr, &ctx, &ht);
14186 md5_finish_ctx (&ctx, checksum_after);
14188 if (memcmp (checksum_before, checksum_after, 16))
14189 fold_check_failed (expr, ret);
14195 print_fold_checksum (const_tree expr)
14197 struct md5_ctx ctx;
14198 unsigned char checksum[16], cnt;
14199 hash_table<pointer_hash<const tree_node> > ht (32);
14201 md5_init_ctx (&ctx);
14202 fold_checksum_tree (expr, &ctx, &ht);
14203 md5_finish_ctx (&ctx, checksum);
14204 for (cnt = 0; cnt < 16; ++cnt)
14205 fprintf (stderr, "%02x", checksum[cnt]);
14206 putc ('\n', stderr);
14210 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14212 internal_error ("fold check: original tree changed by fold");
14216 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14217 hash_table<pointer_hash <const tree_node> > *ht)
14219 const tree_node **slot;
14220 enum tree_code code;
14221 union tree_node buf;
14227 slot = ht->find_slot (expr, INSERT);
14231 code = TREE_CODE (expr);
14232 if (TREE_CODE_CLASS (code) == tcc_declaration
14233 && DECL_ASSEMBLER_NAME_SET_P (expr))
14235 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14236 memcpy ((char *) &buf, expr, tree_size (expr));
14237 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14238 expr = (tree) &buf;
14240 else if (TREE_CODE_CLASS (code) == tcc_type
14241 && (TYPE_POINTER_TO (expr)
14242 || TYPE_REFERENCE_TO (expr)
14243 || TYPE_CACHED_VALUES_P (expr)
14244 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14245 || TYPE_NEXT_VARIANT (expr)))
14247 /* Allow these fields to be modified. */
14249 memcpy ((char *) &buf, expr, tree_size (expr));
14250 expr = tmp = (tree) &buf;
14251 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14252 TYPE_POINTER_TO (tmp) = NULL;
14253 TYPE_REFERENCE_TO (tmp) = NULL;
14254 TYPE_NEXT_VARIANT (tmp) = NULL;
14255 if (TYPE_CACHED_VALUES_P (tmp))
14257 TYPE_CACHED_VALUES_P (tmp) = 0;
14258 TYPE_CACHED_VALUES (tmp) = NULL;
14261 md5_process_bytes (expr, tree_size (expr), ctx);
14262 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14263 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14264 if (TREE_CODE_CLASS (code) != tcc_type
14265 && TREE_CODE_CLASS (code) != tcc_declaration
14266 && code != TREE_LIST
14267 && code != SSA_NAME
14268 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14269 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14270 switch (TREE_CODE_CLASS (code))
14276 md5_process_bytes (TREE_STRING_POINTER (expr),
14277 TREE_STRING_LENGTH (expr), ctx);
14280 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14281 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14284 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14285 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14291 case tcc_exceptional:
14295 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14296 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14297 expr = TREE_CHAIN (expr);
14298 goto recursive_label;
14301 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14302 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14308 case tcc_expression:
14309 case tcc_reference:
14310 case tcc_comparison:
14313 case tcc_statement:
14315 len = TREE_OPERAND_LENGTH (expr);
14316 for (i = 0; i < len; ++i)
14317 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14319 case tcc_declaration:
14320 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14321 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14322 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14324 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14325 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14326 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14327 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14328 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14331 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14333 if (TREE_CODE (expr) == FUNCTION_DECL)
14335 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14336 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14338 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14342 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14343 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14344 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14345 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14346 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14347 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14348 if (INTEGRAL_TYPE_P (expr)
14349 || SCALAR_FLOAT_TYPE_P (expr))
14351 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14352 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14354 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14355 if (TREE_CODE (expr) == RECORD_TYPE
14356 || TREE_CODE (expr) == UNION_TYPE
14357 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14358 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14359 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14366 /* Helper function for outputting the checksum of a tree T. When
14367 debugging with gdb, you can "define mynext" to be "next" followed
14368 by "call debug_fold_checksum (op0)", then just trace down till the
14371 DEBUG_FUNCTION void
14372 debug_fold_checksum (const_tree t)
14375 unsigned char checksum[16];
14376 struct md5_ctx ctx;
14377 hash_table<pointer_hash<const tree_node> > ht (32);
14379 md5_init_ctx (&ctx);
14380 fold_checksum_tree (t, &ctx, &ht);
14381 md5_finish_ctx (&ctx, checksum);
14384 for (i = 0; i < 16; i++)
14385 fprintf (stderr, "%d ", checksum[i]);
14387 fprintf (stderr, "\n");
14392 /* Fold a unary tree expression with code CODE of type TYPE with an
14393 operand OP0. LOC is the location of the resulting expression.
14394 Return a folded expression if successful. Otherwise, return a tree
14395 expression with code CODE of type TYPE with an operand OP0. */
14398 fold_build1_stat_loc (location_t loc,
14399 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14402 #ifdef ENABLE_FOLD_CHECKING
14403 unsigned char checksum_before[16], checksum_after[16];
14404 struct md5_ctx ctx;
14405 hash_table<pointer_hash<const tree_node> > ht (32);
14407 md5_init_ctx (&ctx);
14408 fold_checksum_tree (op0, &ctx, &ht);
14409 md5_finish_ctx (&ctx, checksum_before);
14413 tem = fold_unary_loc (loc, code, type, op0);
14415 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14417 #ifdef ENABLE_FOLD_CHECKING
14418 md5_init_ctx (&ctx);
14419 fold_checksum_tree (op0, &ctx, &ht);
14420 md5_finish_ctx (&ctx, checksum_after);
14422 if (memcmp (checksum_before, checksum_after, 16))
14423 fold_check_failed (op0, tem);
14428 /* Fold a binary tree expression with code CODE of type TYPE with
14429 operands OP0 and OP1. LOC is the location of the resulting
14430 expression. Return a folded expression if successful. Otherwise,
14431 return a tree expression with code CODE of type TYPE with operands
14435 fold_build2_stat_loc (location_t loc,
14436 enum tree_code code, tree type, tree op0, tree op1
14440 #ifdef ENABLE_FOLD_CHECKING
14441 unsigned char checksum_before_op0[16],
14442 checksum_before_op1[16],
14443 checksum_after_op0[16],
14444 checksum_after_op1[16];
14445 struct md5_ctx ctx;
14446 hash_table<pointer_hash<const tree_node> > ht (32);
14448 md5_init_ctx (&ctx);
14449 fold_checksum_tree (op0, &ctx, &ht);
14450 md5_finish_ctx (&ctx, checksum_before_op0);
14453 md5_init_ctx (&ctx);
14454 fold_checksum_tree (op1, &ctx, &ht);
14455 md5_finish_ctx (&ctx, checksum_before_op1);
14459 tem = fold_binary_loc (loc, code, type, op0, op1);
14461 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14463 #ifdef ENABLE_FOLD_CHECKING
14464 md5_init_ctx (&ctx);
14465 fold_checksum_tree (op0, &ctx, &ht);
14466 md5_finish_ctx (&ctx, checksum_after_op0);
14469 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14470 fold_check_failed (op0, tem);
14472 md5_init_ctx (&ctx);
14473 fold_checksum_tree (op1, &ctx, &ht);
14474 md5_finish_ctx (&ctx, checksum_after_op1);
14476 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14477 fold_check_failed (op1, tem);
14482 /* Fold a ternary tree expression with code CODE of type TYPE with
14483 operands OP0, OP1, and OP2. Return a folded expression if
14484 successful. Otherwise, return a tree expression with code CODE of
14485 type TYPE with operands OP0, OP1, and OP2. */
14488 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14489 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14492 #ifdef ENABLE_FOLD_CHECKING
14493 unsigned char checksum_before_op0[16],
14494 checksum_before_op1[16],
14495 checksum_before_op2[16],
14496 checksum_after_op0[16],
14497 checksum_after_op1[16],
14498 checksum_after_op2[16];
14499 struct md5_ctx ctx;
14500 hash_table<pointer_hash<const tree_node> > ht (32);
14502 md5_init_ctx (&ctx);
14503 fold_checksum_tree (op0, &ctx, &ht);
14504 md5_finish_ctx (&ctx, checksum_before_op0);
14507 md5_init_ctx (&ctx);
14508 fold_checksum_tree (op1, &ctx, &ht);
14509 md5_finish_ctx (&ctx, checksum_before_op1);
14512 md5_init_ctx (&ctx);
14513 fold_checksum_tree (op2, &ctx, &ht);
14514 md5_finish_ctx (&ctx, checksum_before_op2);
14518 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14519 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14521 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14523 #ifdef ENABLE_FOLD_CHECKING
14524 md5_init_ctx (&ctx);
14525 fold_checksum_tree (op0, &ctx, &ht);
14526 md5_finish_ctx (&ctx, checksum_after_op0);
14529 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14530 fold_check_failed (op0, tem);
14532 md5_init_ctx (&ctx);
14533 fold_checksum_tree (op1, &ctx, &ht);
14534 md5_finish_ctx (&ctx, checksum_after_op1);
14537 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14538 fold_check_failed (op1, tem);
14540 md5_init_ctx (&ctx);
14541 fold_checksum_tree (op2, &ctx, &ht);
14542 md5_finish_ctx (&ctx, checksum_after_op2);
14544 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14545 fold_check_failed (op2, tem);
14550 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14551 arguments in ARGARRAY, and a null static chain.
14552 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14553 of type TYPE from the given operands as constructed by build_call_array. */
14556 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14557 int nargs, tree *argarray)
14560 #ifdef ENABLE_FOLD_CHECKING
14561 unsigned char checksum_before_fn[16],
14562 checksum_before_arglist[16],
14563 checksum_after_fn[16],
14564 checksum_after_arglist[16];
14565 struct md5_ctx ctx;
14566 hash_table<pointer_hash<const tree_node> > ht (32);
14569 md5_init_ctx (&ctx);
14570 fold_checksum_tree (fn, &ctx, &ht);
14571 md5_finish_ctx (&ctx, checksum_before_fn);
14574 md5_init_ctx (&ctx);
14575 for (i = 0; i < nargs; i++)
14576 fold_checksum_tree (argarray[i], &ctx, &ht);
14577 md5_finish_ctx (&ctx, checksum_before_arglist);
14581 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14583 #ifdef ENABLE_FOLD_CHECKING
14584 md5_init_ctx (&ctx);
14585 fold_checksum_tree (fn, &ctx, &ht);
14586 md5_finish_ctx (&ctx, checksum_after_fn);
14589 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14590 fold_check_failed (fn, tem);
14592 md5_init_ctx (&ctx);
14593 for (i = 0; i < nargs; i++)
14594 fold_checksum_tree (argarray[i], &ctx, &ht);
14595 md5_finish_ctx (&ctx, checksum_after_arglist);
14597 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14598 fold_check_failed (NULL_TREE, tem);
14603 /* Perform constant folding and related simplification of initializer
14604 expression EXPR. These behave identically to "fold_buildN" but ignore
14605 potential run-time traps and exceptions that fold must preserve. */
14607 #define START_FOLD_INIT \
14608 int saved_signaling_nans = flag_signaling_nans;\
14609 int saved_trapping_math = flag_trapping_math;\
14610 int saved_rounding_math = flag_rounding_math;\
14611 int saved_trapv = flag_trapv;\
14612 int saved_folding_initializer = folding_initializer;\
14613 flag_signaling_nans = 0;\
14614 flag_trapping_math = 0;\
14615 flag_rounding_math = 0;\
14617 folding_initializer = 1;
14619 #define END_FOLD_INIT \
14620 flag_signaling_nans = saved_signaling_nans;\
14621 flag_trapping_math = saved_trapping_math;\
14622 flag_rounding_math = saved_rounding_math;\
14623 flag_trapv = saved_trapv;\
14624 folding_initializer = saved_folding_initializer;
14627 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14628 tree type, tree op)
14633 result = fold_build1_loc (loc, code, type, op);
14640 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14641 tree type, tree op0, tree op1)
14646 result = fold_build2_loc (loc, code, type, op0, op1);
14653 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14654 int nargs, tree *argarray)
14659 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14665 #undef START_FOLD_INIT
14666 #undef END_FOLD_INIT
14668 /* Determine if first argument is a multiple of second argument. Return 0 if
14669 it is not, or we cannot easily determined it to be.
14671 An example of the sort of thing we care about (at this point; this routine
14672 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14673 fold cases do now) is discovering that
14675 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14681 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14683 This code also handles discovering that
14685 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14687 is a multiple of 8 so we don't have to worry about dealing with a
14688 possible remainder.
14690 Note that we *look* inside a SAVE_EXPR only to determine how it was
14691 calculated; it is not safe for fold to do much of anything else with the
14692 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14693 at run time. For example, the latter example above *cannot* be implemented
14694 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14695 evaluation time of the original SAVE_EXPR is not necessarily the same at
14696 the time the new expression is evaluated. The only optimization of this
14697 sort that would be valid is changing
14699 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14703 SAVE_EXPR (I) * SAVE_EXPR (J)
14705 (where the same SAVE_EXPR (J) is used in the original and the
14706 transformed version). */
14709 multiple_of_p (tree type, const_tree top, const_tree bottom)
14711 if (operand_equal_p (top, bottom, 0))
14714 if (TREE_CODE (type) != INTEGER_TYPE)
14717 switch (TREE_CODE (top))
14720 /* Bitwise and provides a power of two multiple. If the mask is
14721 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14722 if (!integer_pow2p (bottom))
14727 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14728 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14732 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14733 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14736 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14740 op1 = TREE_OPERAND (top, 1);
14741 /* const_binop may not detect overflow correctly,
14742 so check for it explicitly here. */
14743 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14744 && 0 != (t1 = fold_convert (type,
14745 const_binop (LSHIFT_EXPR,
14748 && !TREE_OVERFLOW (t1))
14749 return multiple_of_p (type, t1, bottom);
14754 /* Can't handle conversions from non-integral or wider integral type. */
14755 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14756 || (TYPE_PRECISION (type)
14757 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14760 /* .. fall through ... */
14763 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14766 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14767 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14770 if (TREE_CODE (bottom) != INTEGER_CST
14771 || integer_zerop (bottom)
14772 || (TYPE_UNSIGNED (type)
14773 && (tree_int_cst_sgn (top) < 0
14774 || tree_int_cst_sgn (bottom) < 0)))
14776 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14784 /* Return true if CODE or TYPE is known to be non-negative. */
14787 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14789 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14790 && truth_value_p (code))
14791 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14792 have a signed:1 type (where the value is -1 and 0). */
14797 /* Return true if (CODE OP0) is known to be non-negative. If the return
14798 value is based on the assumption that signed overflow is undefined,
14799 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14800 *STRICT_OVERFLOW_P. */
14803 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14804 bool *strict_overflow_p)
14806 if (TYPE_UNSIGNED (type))
14812 /* We can't return 1 if flag_wrapv is set because
14813 ABS_EXPR<INT_MIN> = INT_MIN. */
14814 if (!INTEGRAL_TYPE_P (type))
14816 if (TYPE_OVERFLOW_UNDEFINED (type))
14818 *strict_overflow_p = true;
14823 case NON_LVALUE_EXPR:
14825 case FIX_TRUNC_EXPR:
14826 return tree_expr_nonnegative_warnv_p (op0,
14827 strict_overflow_p);
14831 tree inner_type = TREE_TYPE (op0);
14832 tree outer_type = type;
14834 if (TREE_CODE (outer_type) == REAL_TYPE)
14836 if (TREE_CODE (inner_type) == REAL_TYPE)
14837 return tree_expr_nonnegative_warnv_p (op0,
14838 strict_overflow_p);
14839 if (INTEGRAL_TYPE_P (inner_type))
14841 if (TYPE_UNSIGNED (inner_type))
14843 return tree_expr_nonnegative_warnv_p (op0,
14844 strict_overflow_p);
14847 else if (INTEGRAL_TYPE_P (outer_type))
14849 if (TREE_CODE (inner_type) == REAL_TYPE)
14850 return tree_expr_nonnegative_warnv_p (op0,
14851 strict_overflow_p);
14852 if (INTEGRAL_TYPE_P (inner_type))
14853 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14854 && TYPE_UNSIGNED (inner_type);
14860 return tree_simple_nonnegative_warnv_p (code, type);
14863 /* We don't know sign of `t', so be conservative and return false. */
14867 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14868 value is based on the assumption that signed overflow is undefined,
14869 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14870 *STRICT_OVERFLOW_P. */
14873 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14874 tree op1, bool *strict_overflow_p)
14876 if (TYPE_UNSIGNED (type))
14881 case POINTER_PLUS_EXPR:
14883 if (FLOAT_TYPE_P (type))
14884 return (tree_expr_nonnegative_warnv_p (op0,
14886 && tree_expr_nonnegative_warnv_p (op1,
14887 strict_overflow_p));
14889 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14890 both unsigned and at least 2 bits shorter than the result. */
14891 if (TREE_CODE (type) == INTEGER_TYPE
14892 && TREE_CODE (op0) == NOP_EXPR
14893 && TREE_CODE (op1) == NOP_EXPR)
14895 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14896 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14897 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14898 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14900 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14901 TYPE_PRECISION (inner2)) + 1;
14902 return prec < TYPE_PRECISION (type);
14908 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14910 /* x * x is always non-negative for floating point x
14911 or without overflow. */
14912 if (operand_equal_p (op0, op1, 0)
14913 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14914 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14916 if (TYPE_OVERFLOW_UNDEFINED (type))
14917 *strict_overflow_p = true;
14922 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14923 both unsigned and their total bits is shorter than the result. */
14924 if (TREE_CODE (type) == INTEGER_TYPE
14925 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14926 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14928 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14929 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14931 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14932 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14935 bool unsigned0 = TYPE_UNSIGNED (inner0);
14936 bool unsigned1 = TYPE_UNSIGNED (inner1);
14938 if (TREE_CODE (op0) == INTEGER_CST)
14939 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14941 if (TREE_CODE (op1) == INTEGER_CST)
14942 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14944 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14945 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14947 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14948 ? tree_int_cst_min_precision (op0, UNSIGNED)
14949 : TYPE_PRECISION (inner0);
14951 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14952 ? tree_int_cst_min_precision (op1, UNSIGNED)
14953 : TYPE_PRECISION (inner1);
14955 return precision0 + precision1 < TYPE_PRECISION (type);
14962 return (tree_expr_nonnegative_warnv_p (op0,
14964 || tree_expr_nonnegative_warnv_p (op1,
14965 strict_overflow_p));
14971 case TRUNC_DIV_EXPR:
14972 case CEIL_DIV_EXPR:
14973 case FLOOR_DIV_EXPR:
14974 case ROUND_DIV_EXPR:
14975 return (tree_expr_nonnegative_warnv_p (op0,
14977 && tree_expr_nonnegative_warnv_p (op1,
14978 strict_overflow_p));
14980 case TRUNC_MOD_EXPR:
14981 case CEIL_MOD_EXPR:
14982 case FLOOR_MOD_EXPR:
14983 case ROUND_MOD_EXPR:
14984 return tree_expr_nonnegative_warnv_p (op0,
14985 strict_overflow_p);
14987 return tree_simple_nonnegative_warnv_p (code, type);
14990 /* We don't know sign of `t', so be conservative and return false. */
14994 /* Return true if T is known to be non-negative. If the return
14995 value is based on the assumption that signed overflow is undefined,
14996 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14997 *STRICT_OVERFLOW_P. */
15000 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15002 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15005 switch (TREE_CODE (t))
15008 return tree_int_cst_sgn (t) >= 0;
15011 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15014 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15017 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15019 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15020 strict_overflow_p));
15022 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15025 /* We don't know sign of `t', so be conservative and return false. */
15029 /* Return true if T is known to be non-negative. If the return
15030 value is based on the assumption that signed overflow is undefined,
15031 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15032 *STRICT_OVERFLOW_P. */
15035 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15036 tree arg0, tree arg1, bool *strict_overflow_p)
15038 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15039 switch (DECL_FUNCTION_CODE (fndecl))
15041 CASE_FLT_FN (BUILT_IN_ACOS):
15042 CASE_FLT_FN (BUILT_IN_ACOSH):
15043 CASE_FLT_FN (BUILT_IN_CABS):
15044 CASE_FLT_FN (BUILT_IN_COSH):
15045 CASE_FLT_FN (BUILT_IN_ERFC):
15046 CASE_FLT_FN (BUILT_IN_EXP):
15047 CASE_FLT_FN (BUILT_IN_EXP10):
15048 CASE_FLT_FN (BUILT_IN_EXP2):
15049 CASE_FLT_FN (BUILT_IN_FABS):
15050 CASE_FLT_FN (BUILT_IN_FDIM):
15051 CASE_FLT_FN (BUILT_IN_HYPOT):
15052 CASE_FLT_FN (BUILT_IN_POW10):
15053 CASE_INT_FN (BUILT_IN_FFS):
15054 CASE_INT_FN (BUILT_IN_PARITY):
15055 CASE_INT_FN (BUILT_IN_POPCOUNT):
15056 CASE_INT_FN (BUILT_IN_CLZ):
15057 CASE_INT_FN (BUILT_IN_CLRSB):
15058 case BUILT_IN_BSWAP32:
15059 case BUILT_IN_BSWAP64:
15063 CASE_FLT_FN (BUILT_IN_SQRT):
15064 /* sqrt(-0.0) is -0.0. */
15065 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15067 return tree_expr_nonnegative_warnv_p (arg0,
15068 strict_overflow_p);
15070 CASE_FLT_FN (BUILT_IN_ASINH):
15071 CASE_FLT_FN (BUILT_IN_ATAN):
15072 CASE_FLT_FN (BUILT_IN_ATANH):
15073 CASE_FLT_FN (BUILT_IN_CBRT):
15074 CASE_FLT_FN (BUILT_IN_CEIL):
15075 CASE_FLT_FN (BUILT_IN_ERF):
15076 CASE_FLT_FN (BUILT_IN_EXPM1):
15077 CASE_FLT_FN (BUILT_IN_FLOOR):
15078 CASE_FLT_FN (BUILT_IN_FMOD):
15079 CASE_FLT_FN (BUILT_IN_FREXP):
15080 CASE_FLT_FN (BUILT_IN_ICEIL):
15081 CASE_FLT_FN (BUILT_IN_IFLOOR):
15082 CASE_FLT_FN (BUILT_IN_IRINT):
15083 CASE_FLT_FN (BUILT_IN_IROUND):
15084 CASE_FLT_FN (BUILT_IN_LCEIL):
15085 CASE_FLT_FN (BUILT_IN_LDEXP):
15086 CASE_FLT_FN (BUILT_IN_LFLOOR):
15087 CASE_FLT_FN (BUILT_IN_LLCEIL):
15088 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15089 CASE_FLT_FN (BUILT_IN_LLRINT):
15090 CASE_FLT_FN (BUILT_IN_LLROUND):
15091 CASE_FLT_FN (BUILT_IN_LRINT):
15092 CASE_FLT_FN (BUILT_IN_LROUND):
15093 CASE_FLT_FN (BUILT_IN_MODF):
15094 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15095 CASE_FLT_FN (BUILT_IN_RINT):
15096 CASE_FLT_FN (BUILT_IN_ROUND):
15097 CASE_FLT_FN (BUILT_IN_SCALB):
15098 CASE_FLT_FN (BUILT_IN_SCALBLN):
15099 CASE_FLT_FN (BUILT_IN_SCALBN):
15100 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15101 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15102 CASE_FLT_FN (BUILT_IN_SINH):
15103 CASE_FLT_FN (BUILT_IN_TANH):
15104 CASE_FLT_FN (BUILT_IN_TRUNC):
15105 /* True if the 1st argument is nonnegative. */
15106 return tree_expr_nonnegative_warnv_p (arg0,
15107 strict_overflow_p);
15109 CASE_FLT_FN (BUILT_IN_FMAX):
15110 /* True if the 1st OR 2nd arguments are nonnegative. */
15111 return (tree_expr_nonnegative_warnv_p (arg0,
15113 || (tree_expr_nonnegative_warnv_p (arg1,
15114 strict_overflow_p)));
15116 CASE_FLT_FN (BUILT_IN_FMIN):
15117 /* True if the 1st AND 2nd arguments are nonnegative. */
15118 return (tree_expr_nonnegative_warnv_p (arg0,
15120 && (tree_expr_nonnegative_warnv_p (arg1,
15121 strict_overflow_p)));
15123 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15124 /* True if the 2nd argument is nonnegative. */
15125 return tree_expr_nonnegative_warnv_p (arg1,
15126 strict_overflow_p);
15128 CASE_FLT_FN (BUILT_IN_POWI):
15129 /* True if the 1st argument is nonnegative or the second
15130 argument is an even integer. */
15131 if (TREE_CODE (arg1) == INTEGER_CST
15132 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15134 return tree_expr_nonnegative_warnv_p (arg0,
15135 strict_overflow_p);
15137 CASE_FLT_FN (BUILT_IN_POW):
15138 /* True if the 1st argument is nonnegative or the second
15139 argument is an even integer valued real. */
15140 if (TREE_CODE (arg1) == REAL_CST)
15145 c = TREE_REAL_CST (arg1);
15146 n = real_to_integer (&c);
15149 REAL_VALUE_TYPE cint;
15150 real_from_integer (&cint, VOIDmode, n, SIGNED);
15151 if (real_identical (&c, &cint))
15155 return tree_expr_nonnegative_warnv_p (arg0,
15156 strict_overflow_p);
15161 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15165 /* Return true if T is known to be non-negative. If the return
15166 value is based on the assumption that signed overflow is undefined,
15167 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15168 *STRICT_OVERFLOW_P. */
15171 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15173 enum tree_code code = TREE_CODE (t);
15174 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15181 tree temp = TARGET_EXPR_SLOT (t);
15182 t = TARGET_EXPR_INITIAL (t);
15184 /* If the initializer is non-void, then it's a normal expression
15185 that will be assigned to the slot. */
15186 if (!VOID_TYPE_P (t))
15187 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15189 /* Otherwise, the initializer sets the slot in some way. One common
15190 way is an assignment statement at the end of the initializer. */
15193 if (TREE_CODE (t) == BIND_EXPR)
15194 t = expr_last (BIND_EXPR_BODY (t));
15195 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15196 || TREE_CODE (t) == TRY_CATCH_EXPR)
15197 t = expr_last (TREE_OPERAND (t, 0));
15198 else if (TREE_CODE (t) == STATEMENT_LIST)
15203 if (TREE_CODE (t) == MODIFY_EXPR
15204 && TREE_OPERAND (t, 0) == temp)
15205 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15206 strict_overflow_p);
15213 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15214 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15216 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15217 get_callee_fndecl (t),
15220 strict_overflow_p);
15222 case COMPOUND_EXPR:
15224 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15225 strict_overflow_p);
15227 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15228 strict_overflow_p);
15230 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15231 strict_overflow_p);
15234 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15238 /* We don't know sign of `t', so be conservative and return false. */
15242 /* Return true if T is known to be non-negative. If the return
15243 value is based on the assumption that signed overflow is undefined,
15244 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15245 *STRICT_OVERFLOW_P. */
15248 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15250 enum tree_code code;
15251 if (t == error_mark_node)
15254 code = TREE_CODE (t);
15255 switch (TREE_CODE_CLASS (code))
15258 case tcc_comparison:
15259 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15261 TREE_OPERAND (t, 0),
15262 TREE_OPERAND (t, 1),
15263 strict_overflow_p);
15266 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15268 TREE_OPERAND (t, 0),
15269 strict_overflow_p);
15272 case tcc_declaration:
15273 case tcc_reference:
15274 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15282 case TRUTH_AND_EXPR:
15283 case TRUTH_OR_EXPR:
15284 case TRUTH_XOR_EXPR:
15285 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15287 TREE_OPERAND (t, 0),
15288 TREE_OPERAND (t, 1),
15289 strict_overflow_p);
15290 case TRUTH_NOT_EXPR:
15291 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15293 TREE_OPERAND (t, 0),
15294 strict_overflow_p);
15301 case WITH_SIZE_EXPR:
15303 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15306 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15310 /* Return true if `t' is known to be non-negative. Handle warnings
15311 about undefined signed overflow. */
15314 tree_expr_nonnegative_p (tree t)
15316 bool ret, strict_overflow_p;
15318 strict_overflow_p = false;
15319 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15320 if (strict_overflow_p)
15321 fold_overflow_warning (("assuming signed overflow does not occur when "
15322 "determining that expression is always "
15324 WARN_STRICT_OVERFLOW_MISC);
15329 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15330 For floating point we further ensure that T is not denormal.
15331 Similar logic is present in nonzero_address in rtlanal.h.
15333 If the return value is based on the assumption that signed overflow
15334 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15335 change *STRICT_OVERFLOW_P. */
15338 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15339 bool *strict_overflow_p)
15344 return tree_expr_nonzero_warnv_p (op0,
15345 strict_overflow_p);
15349 tree inner_type = TREE_TYPE (op0);
15350 tree outer_type = type;
15352 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15353 && tree_expr_nonzero_warnv_p (op0,
15354 strict_overflow_p));
15358 case NON_LVALUE_EXPR:
15359 return tree_expr_nonzero_warnv_p (op0,
15360 strict_overflow_p);
15369 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15370 For floating point we further ensure that T is not denormal.
15371 Similar logic is present in nonzero_address in rtlanal.h.
15373 If the return value is based on the assumption that signed overflow
15374 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15375 change *STRICT_OVERFLOW_P. */
15378 tree_binary_nonzero_warnv_p (enum tree_code code,
15381 tree op1, bool *strict_overflow_p)
15383 bool sub_strict_overflow_p;
15386 case POINTER_PLUS_EXPR:
15388 if (TYPE_OVERFLOW_UNDEFINED (type))
15390 /* With the presence of negative values it is hard
15391 to say something. */
15392 sub_strict_overflow_p = false;
15393 if (!tree_expr_nonnegative_warnv_p (op0,
15394 &sub_strict_overflow_p)
15395 || !tree_expr_nonnegative_warnv_p (op1,
15396 &sub_strict_overflow_p))
15398 /* One of operands must be positive and the other non-negative. */
15399 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15400 overflows, on a twos-complement machine the sum of two
15401 nonnegative numbers can never be zero. */
15402 return (tree_expr_nonzero_warnv_p (op0,
15404 || tree_expr_nonzero_warnv_p (op1,
15405 strict_overflow_p));
15410 if (TYPE_OVERFLOW_UNDEFINED (type))
15412 if (tree_expr_nonzero_warnv_p (op0,
15414 && tree_expr_nonzero_warnv_p (op1,
15415 strict_overflow_p))
15417 *strict_overflow_p = true;
15424 sub_strict_overflow_p = false;
15425 if (tree_expr_nonzero_warnv_p (op0,
15426 &sub_strict_overflow_p)
15427 && tree_expr_nonzero_warnv_p (op1,
15428 &sub_strict_overflow_p))
15430 if (sub_strict_overflow_p)
15431 *strict_overflow_p = true;
15436 sub_strict_overflow_p = false;
15437 if (tree_expr_nonzero_warnv_p (op0,
15438 &sub_strict_overflow_p))
15440 if (sub_strict_overflow_p)
15441 *strict_overflow_p = true;
15443 /* When both operands are nonzero, then MAX must be too. */
15444 if (tree_expr_nonzero_warnv_p (op1,
15445 strict_overflow_p))
15448 /* MAX where operand 0 is positive is positive. */
15449 return tree_expr_nonnegative_warnv_p (op0,
15450 strict_overflow_p);
15452 /* MAX where operand 1 is positive is positive. */
15453 else if (tree_expr_nonzero_warnv_p (op1,
15454 &sub_strict_overflow_p)
15455 && tree_expr_nonnegative_warnv_p (op1,
15456 &sub_strict_overflow_p))
15458 if (sub_strict_overflow_p)
15459 *strict_overflow_p = true;
15465 return (tree_expr_nonzero_warnv_p (op1,
15467 || tree_expr_nonzero_warnv_p (op0,
15468 strict_overflow_p));
15477 /* Return true when T is an address and is known to be nonzero.
15478 For floating point we further ensure that T is not denormal.
15479 Similar logic is present in nonzero_address in rtlanal.h.
15481 If the return value is based on the assumption that signed overflow
15482 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15483 change *STRICT_OVERFLOW_P. */
15486 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15488 bool sub_strict_overflow_p;
15489 switch (TREE_CODE (t))
15492 return !integer_zerop (t);
15496 tree base = TREE_OPERAND (t, 0);
15498 if (!DECL_P (base))
15499 base = get_base_address (base);
15504 /* For objects in symbol table check if we know they are non-zero.
15505 Don't do anything for variables and functions before symtab is built;
15506 it is quite possible that they will be declared weak later. */
15507 if (DECL_P (base) && decl_in_symtab_p (base))
15509 struct symtab_node *symbol;
15511 symbol = symtab_node::get_create (base);
15513 return symbol->nonzero_address ();
15518 /* Function local objects are never NULL. */
15520 && (DECL_CONTEXT (base)
15521 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15522 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15525 /* Constants are never weak. */
15526 if (CONSTANT_CLASS_P (base))
15533 sub_strict_overflow_p = false;
15534 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15535 &sub_strict_overflow_p)
15536 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15537 &sub_strict_overflow_p))
15539 if (sub_strict_overflow_p)
15540 *strict_overflow_p = true;
15551 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15552 attempt to fold the expression to a constant without modifying TYPE,
15555 If the expression could be simplified to a constant, then return
15556 the constant. If the expression would not be simplified to a
15557 constant, then return NULL_TREE. */
15560 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15562 tree tem = fold_binary (code, type, op0, op1);
15563 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15566 /* Given the components of a unary expression CODE, TYPE and OP0,
15567 attempt to fold the expression to a constant without modifying
15570 If the expression could be simplified to a constant, then return
15571 the constant. If the expression would not be simplified to a
15572 constant, then return NULL_TREE. */
15575 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15577 tree tem = fold_unary (code, type, op0);
15578 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15581 /* If EXP represents referencing an element in a constant string
15582 (either via pointer arithmetic or array indexing), return the
15583 tree representing the value accessed, otherwise return NULL. */
15586 fold_read_from_constant_string (tree exp)
15588 if ((TREE_CODE (exp) == INDIRECT_REF
15589 || TREE_CODE (exp) == ARRAY_REF)
15590 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15592 tree exp1 = TREE_OPERAND (exp, 0);
15595 location_t loc = EXPR_LOCATION (exp);
15597 if (TREE_CODE (exp) == INDIRECT_REF)
15598 string = string_constant (exp1, &index);
15601 tree low_bound = array_ref_low_bound (exp);
15602 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15604 /* Optimize the special-case of a zero lower bound.
15606 We convert the low_bound to sizetype to avoid some problems
15607 with constant folding. (E.g. suppose the lower bound is 1,
15608 and its mode is QI. Without the conversion,l (ARRAY
15609 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15610 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15611 if (! integer_zerop (low_bound))
15612 index = size_diffop_loc (loc, index,
15613 fold_convert_loc (loc, sizetype, low_bound));
15619 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15620 && TREE_CODE (string) == STRING_CST
15621 && TREE_CODE (index) == INTEGER_CST
15622 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15623 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15625 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15626 return build_int_cst_type (TREE_TYPE (exp),
15627 (TREE_STRING_POINTER (string)
15628 [TREE_INT_CST_LOW (index)]));
15633 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15634 an integer constant, real, or fixed-point constant.
15636 TYPE is the type of the result. */
15639 fold_negate_const (tree arg0, tree type)
15641 tree t = NULL_TREE;
15643 switch (TREE_CODE (arg0))
15648 wide_int val = wi::neg (arg0, &overflow);
15649 t = force_fit_type (type, val, 1,
15650 (overflow | TREE_OVERFLOW (arg0))
15651 && !TYPE_UNSIGNED (type));
15656 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15661 FIXED_VALUE_TYPE f;
15662 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15663 &(TREE_FIXED_CST (arg0)), NULL,
15664 TYPE_SATURATING (type));
15665 t = build_fixed (type, f);
15666 /* Propagate overflow flags. */
15667 if (overflow_p | TREE_OVERFLOW (arg0))
15668 TREE_OVERFLOW (t) = 1;
15673 gcc_unreachable ();
15679 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15680 an integer constant or real constant.
15682 TYPE is the type of the result. */
15685 fold_abs_const (tree arg0, tree type)
15687 tree t = NULL_TREE;
15689 switch (TREE_CODE (arg0))
15693 /* If the value is unsigned or non-negative, then the absolute value
15694 is the same as the ordinary value. */
15695 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15698 /* If the value is negative, then the absolute value is
15703 wide_int val = wi::neg (arg0, &overflow);
15704 t = force_fit_type (type, val, -1,
15705 overflow | TREE_OVERFLOW (arg0));
15711 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15712 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15718 gcc_unreachable ();
15724 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15725 constant. TYPE is the type of the result. */
15728 fold_not_const (const_tree arg0, tree type)
15730 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15732 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15735 /* Given CODE, a relational operator, the target type, TYPE and two
15736 constant operands OP0 and OP1, return the result of the
15737 relational operation. If the result is not a compile time
15738 constant, then return NULL_TREE. */
15741 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15743 int result, invert;
15745 /* From here on, the only cases we handle are when the result is
15746 known to be a constant. */
15748 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15750 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15751 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15753 /* Handle the cases where either operand is a NaN. */
15754 if (real_isnan (c0) || real_isnan (c1))
15764 case UNORDERED_EXPR:
15778 if (flag_trapping_math)
15784 gcc_unreachable ();
15787 return constant_boolean_node (result, type);
15790 return constant_boolean_node (real_compare (code, c0, c1), type);
15793 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15795 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15796 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15797 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15800 /* Handle equality/inequality of complex constants. */
15801 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15803 tree rcond = fold_relational_const (code, type,
15804 TREE_REALPART (op0),
15805 TREE_REALPART (op1));
15806 tree icond = fold_relational_const (code, type,
15807 TREE_IMAGPART (op0),
15808 TREE_IMAGPART (op1));
15809 if (code == EQ_EXPR)
15810 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15811 else if (code == NE_EXPR)
15812 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15817 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15819 unsigned count = VECTOR_CST_NELTS (op0);
15820 tree *elts = XALLOCAVEC (tree, count);
15821 gcc_assert (VECTOR_CST_NELTS (op1) == count
15822 && TYPE_VECTOR_SUBPARTS (type) == count);
15824 for (unsigned i = 0; i < count; i++)
15826 tree elem_type = TREE_TYPE (type);
15827 tree elem0 = VECTOR_CST_ELT (op0, i);
15828 tree elem1 = VECTOR_CST_ELT (op1, i);
15830 tree tem = fold_relational_const (code, elem_type,
15833 if (tem == NULL_TREE)
15836 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15839 return build_vector (type, elts);
15842 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15844 To compute GT, swap the arguments and do LT.
15845 To compute GE, do LT and invert the result.
15846 To compute LE, swap the arguments, do LT and invert the result.
15847 To compute NE, do EQ and invert the result.
15849 Therefore, the code below must handle only EQ and LT. */
15851 if (code == LE_EXPR || code == GT_EXPR)
15856 code = swap_tree_comparison (code);
15859 /* Note that it is safe to invert for real values here because we
15860 have already handled the one case that it matters. */
15863 if (code == NE_EXPR || code == GE_EXPR)
15866 code = invert_tree_comparison (code, false);
15869 /* Compute a result for LT or EQ if args permit;
15870 Otherwise return T. */
15871 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15873 if (code == EQ_EXPR)
15874 result = tree_int_cst_equal (op0, op1);
15876 result = tree_int_cst_lt (op0, op1);
15883 return constant_boolean_node (result, type);
15886 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15887 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15891 fold_build_cleanup_point_expr (tree type, tree expr)
15893 /* If the expression does not have side effects then we don't have to wrap
15894 it with a cleanup point expression. */
15895 if (!TREE_SIDE_EFFECTS (expr))
15898 /* If the expression is a return, check to see if the expression inside the
15899 return has no side effects or the right hand side of the modify expression
15900 inside the return. If either don't have side effects set we don't need to
15901 wrap the expression in a cleanup point expression. Note we don't check the
15902 left hand side of the modify because it should always be a return decl. */
15903 if (TREE_CODE (expr) == RETURN_EXPR)
15905 tree op = TREE_OPERAND (expr, 0);
15906 if (!op || !TREE_SIDE_EFFECTS (op))
15908 op = TREE_OPERAND (op, 1);
15909 if (!TREE_SIDE_EFFECTS (op))
15913 return build1 (CLEANUP_POINT_EXPR, type, expr);
15916 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15917 of an indirection through OP0, or NULL_TREE if no simplification is
15921 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15927 subtype = TREE_TYPE (sub);
15928 if (!POINTER_TYPE_P (subtype))
15931 if (TREE_CODE (sub) == ADDR_EXPR)
15933 tree op = TREE_OPERAND (sub, 0);
15934 tree optype = TREE_TYPE (op);
15935 /* *&CONST_DECL -> to the value of the const decl. */
15936 if (TREE_CODE (op) == CONST_DECL)
15937 return DECL_INITIAL (op);
15938 /* *&p => p; make sure to handle *&"str"[cst] here. */
15939 if (type == optype)
15941 tree fop = fold_read_from_constant_string (op);
15947 /* *(foo *)&fooarray => fooarray[0] */
15948 else if (TREE_CODE (optype) == ARRAY_TYPE
15949 && type == TREE_TYPE (optype)
15950 && (!in_gimple_form
15951 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15953 tree type_domain = TYPE_DOMAIN (optype);
15954 tree min_val = size_zero_node;
15955 if (type_domain && TYPE_MIN_VALUE (type_domain))
15956 min_val = TYPE_MIN_VALUE (type_domain);
15958 && TREE_CODE (min_val) != INTEGER_CST)
15960 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15961 NULL_TREE, NULL_TREE);
15963 /* *(foo *)&complexfoo => __real__ complexfoo */
15964 else if (TREE_CODE (optype) == COMPLEX_TYPE
15965 && type == TREE_TYPE (optype))
15966 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15967 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15968 else if (TREE_CODE (optype) == VECTOR_TYPE
15969 && type == TREE_TYPE (optype))
15971 tree part_width = TYPE_SIZE (type);
15972 tree index = bitsize_int (0);
15973 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15977 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15978 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15980 tree op00 = TREE_OPERAND (sub, 0);
15981 tree op01 = TREE_OPERAND (sub, 1);
15984 if (TREE_CODE (op00) == ADDR_EXPR)
15987 op00 = TREE_OPERAND (op00, 0);
15988 op00type = TREE_TYPE (op00);
15990 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15991 if (TREE_CODE (op00type) == VECTOR_TYPE
15992 && type == TREE_TYPE (op00type))
15994 HOST_WIDE_INT offset = tree_to_shwi (op01);
15995 tree part_width = TYPE_SIZE (type);
15996 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15997 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15998 tree index = bitsize_int (indexi);
16000 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16001 return fold_build3_loc (loc,
16002 BIT_FIELD_REF, type, op00,
16003 part_width, index);
16006 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16007 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16008 && type == TREE_TYPE (op00type))
16010 tree size = TYPE_SIZE_UNIT (type);
16011 if (tree_int_cst_equal (size, op01))
16012 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16014 /* ((foo *)&fooarray)[1] => fooarray[1] */
16015 else if (TREE_CODE (op00type) == ARRAY_TYPE
16016 && type == TREE_TYPE (op00type))
16018 tree type_domain = TYPE_DOMAIN (op00type);
16019 tree min_val = size_zero_node;
16020 if (type_domain && TYPE_MIN_VALUE (type_domain))
16021 min_val = TYPE_MIN_VALUE (type_domain);
16022 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16023 TYPE_SIZE_UNIT (type));
16024 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16025 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16026 NULL_TREE, NULL_TREE);
16031 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16032 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16033 && type == TREE_TYPE (TREE_TYPE (subtype))
16034 && (!in_gimple_form
16035 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16038 tree min_val = size_zero_node;
16039 sub = build_fold_indirect_ref_loc (loc, sub);
16040 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16041 if (type_domain && TYPE_MIN_VALUE (type_domain))
16042 min_val = TYPE_MIN_VALUE (type_domain);
16044 && TREE_CODE (min_val) != INTEGER_CST)
16046 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16053 /* Builds an expression for an indirection through T, simplifying some
16057 build_fold_indirect_ref_loc (location_t loc, tree t)
16059 tree type = TREE_TYPE (TREE_TYPE (t));
16060 tree sub = fold_indirect_ref_1 (loc, type, t);
16065 return build1_loc (loc, INDIRECT_REF, type, t);
16068 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16071 fold_indirect_ref_loc (location_t loc, tree t)
16073 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16081 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16082 whose result is ignored. The type of the returned tree need not be
16083 the same as the original expression. */
16086 fold_ignored_result (tree t)
16088 if (!TREE_SIDE_EFFECTS (t))
16089 return integer_zero_node;
16092 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16095 t = TREE_OPERAND (t, 0);
16099 case tcc_comparison:
16100 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16101 t = TREE_OPERAND (t, 0);
16102 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16103 t = TREE_OPERAND (t, 1);
16108 case tcc_expression:
16109 switch (TREE_CODE (t))
16111 case COMPOUND_EXPR:
16112 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16114 t = TREE_OPERAND (t, 0);
16118 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16119 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16121 t = TREE_OPERAND (t, 0);
16134 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16137 round_up_loc (location_t loc, tree value, unsigned int divisor)
16139 tree div = NULL_TREE;
16144 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16145 have to do anything. Only do this when we are not given a const,
16146 because in that case, this check is more expensive than just
16148 if (TREE_CODE (value) != INTEGER_CST)
16150 div = build_int_cst (TREE_TYPE (value), divisor);
16152 if (multiple_of_p (TREE_TYPE (value), value, div))
16156 /* If divisor is a power of two, simplify this to bit manipulation. */
16157 if (divisor == (divisor & -divisor))
16159 if (TREE_CODE (value) == INTEGER_CST)
16161 wide_int val = value;
16164 if ((val & (divisor - 1)) == 0)
16167 overflow_p = TREE_OVERFLOW (value);
16168 val &= ~(divisor - 1);
16173 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16179 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16180 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16181 t = build_int_cst (TREE_TYPE (value), -divisor);
16182 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16188 div = build_int_cst (TREE_TYPE (value), divisor);
16189 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16190 value = size_binop_loc (loc, MULT_EXPR, value, div);
16196 /* Likewise, but round down. */
16199 round_down_loc (location_t loc, tree value, int divisor)
16201 tree div = NULL_TREE;
16203 gcc_assert (divisor > 0);
16207 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16208 have to do anything. Only do this when we are not given a const,
16209 because in that case, this check is more expensive than just
16211 if (TREE_CODE (value) != INTEGER_CST)
16213 div = build_int_cst (TREE_TYPE (value), divisor);
16215 if (multiple_of_p (TREE_TYPE (value), value, div))
16219 /* If divisor is a power of two, simplify this to bit manipulation. */
16220 if (divisor == (divisor & -divisor))
16224 t = build_int_cst (TREE_TYPE (value), -divisor);
16225 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16230 div = build_int_cst (TREE_TYPE (value), divisor);
16231 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16232 value = size_binop_loc (loc, MULT_EXPR, value, div);
16238 /* Returns the pointer to the base of the object addressed by EXP and
16239 extracts the information about the offset of the access, storing it
16240 to PBITPOS and POFFSET. */
16243 split_address_to_core_and_offset (tree exp,
16244 HOST_WIDE_INT *pbitpos, tree *poffset)
16248 int unsignedp, volatilep;
16249 HOST_WIDE_INT bitsize;
16250 location_t loc = EXPR_LOCATION (exp);
16252 if (TREE_CODE (exp) == ADDR_EXPR)
16254 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16255 poffset, &mode, &unsignedp, &volatilep,
16257 core = build_fold_addr_expr_loc (loc, core);
16263 *poffset = NULL_TREE;
16269 /* Returns true if addresses of E1 and E2 differ by a constant, false
16270 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16273 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16276 HOST_WIDE_INT bitpos1, bitpos2;
16277 tree toffset1, toffset2, tdiff, type;
16279 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16280 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16282 if (bitpos1 % BITS_PER_UNIT != 0
16283 || bitpos2 % BITS_PER_UNIT != 0
16284 || !operand_equal_p (core1, core2, 0))
16287 if (toffset1 && toffset2)
16289 type = TREE_TYPE (toffset1);
16290 if (type != TREE_TYPE (toffset2))
16291 toffset2 = fold_convert (type, toffset2);
16293 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16294 if (!cst_and_fits_in_hwi (tdiff))
16297 *diff = int_cst_value (tdiff);
16299 else if (toffset1 || toffset2)
16301 /* If only one of the offsets is non-constant, the difference cannot
16308 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16312 /* Simplify the floating point expression EXP when the sign of the
16313 result is not significant. Return NULL_TREE if no simplification
16317 fold_strip_sign_ops (tree exp)
16320 location_t loc = EXPR_LOCATION (exp);
16322 switch (TREE_CODE (exp))
16326 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16327 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16331 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16333 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16334 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16335 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16336 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16337 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16338 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16341 case COMPOUND_EXPR:
16342 arg0 = TREE_OPERAND (exp, 0);
16343 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16345 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16349 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16350 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16352 return fold_build3_loc (loc,
16353 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16354 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16355 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16360 const enum built_in_function fcode = builtin_mathfn_code (exp);
16363 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16364 /* Strip copysign function call, return the 1st argument. */
16365 arg0 = CALL_EXPR_ARG (exp, 0);
16366 arg1 = CALL_EXPR_ARG (exp, 1);
16367 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16370 /* Strip sign ops from the argument of "odd" math functions. */
16371 if (negate_mathfn_p (fcode))
16373 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16375 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);