1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
49 #include "stor-layout.h"
51 #include "tree-iterator.h"
57 #include "diagnostic-core.h"
59 #include "langhooks.h"
66 #include "hard-reg-set.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
73 #include "gimple-expr.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
81 #include "plugin-api.h"
84 #include "generic-match.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
89 int folding_initializer = 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static tree const_binop (enum tree_code, tree, tree);
119 static enum comparison_code comparison_to_compcode (enum tree_code);
120 static enum tree_code compcode_to_comparison (enum comparison_code);
121 static int operand_equal_for_comparison_p (tree, tree, tree);
122 static int twoval_comparison_p (tree, tree *, tree *, int *);
123 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
124 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
125 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
126 static tree make_bit_field_ref (location_t, tree, tree,
127 HOST_WIDE_INT, HOST_WIDE_INT, int);
128 static tree optimize_bit_field_compare (location_t, enum tree_code,
130 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
132 machine_mode *, int *, int *,
134 static tree sign_bit_p (tree, const_tree);
135 static int simple_operand_p (const_tree);
136 static bool simple_operand_p_2 (tree);
137 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
138 static tree range_predecessor (tree);
139 static tree range_successor (tree);
140 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
142 static tree unextend (tree, int, int, tree);
143 static tree optimize_minmax_comparison (location_t, enum tree_code,
145 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
146 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
147 static tree fold_binary_op_with_conditional_arg (location_t,
148 enum tree_code, tree,
151 static tree fold_mathfn_compare (location_t,
152 enum built_in_function, enum tree_code,
154 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
155 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
156 static bool reorder_operands_p (const_tree, const_tree);
157 static tree fold_negate_const (tree, tree);
158 static tree fold_not_const (const_tree, tree);
159 static tree fold_relational_const (enum tree_code, tree, tree, tree);
160 static tree fold_convert_const (enum tree_code, tree, tree);
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
/* NOTE(review): the declaration line ("static location_t") and the
   function braces are elided in this extract -- restore from upstream
   fold-const.c before compiling.  */
166 expr_location_or (tree t, location_t loc)
/* Prefer the expression's own location; fall back to the caller's.  */
168 location_t tloc = EXPR_LOCATION (t);
169 return tloc == UNKNOWN_LOCATION ? loc : tloc;
172 /* Similar to protected_set_expr_location, but never modify x in place,
173 if location can and needs to be set, unshare it. */
/* NOTE(review): the return type, braces and the unsharing call (the
   copy_node of X, original lines 183-184) are elided here -- the visible
   SET_EXPR_LOCATION presumably operates on the fresh copy; verify
   against upstream before relying on this extract.  */
176 protected_set_expr_location_unshare (tree x, location_t loc)
/* Only act when X can carry a location, the location actually differs,
   and X is not one of the tree codes that must not be duplicated.  */
178 if (CAN_HAVE_LOCATION_P (x)
179 && EXPR_LOCATION (x) != loc
180 && !(TREE_CODE (x) == SAVE_EXPR
181 || TREE_CODE (x) == TARGET_EXPR
182 || TREE_CODE (x) == BIND_EXPR))
185 SET_EXPR_LOCATION (x, loc);
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
NULL_TREE (tail of the comment and the declaration of QUO are elided
in this extract -- NOTE(review): confirm against upstream).  */
195 div_if_zero_remainder (const_tree arg1, const_tree arg2)
/* wi::multiple_of_p computes the quotient into QUO as a side effect
   when it returns true; the extra arguments are elided here.  */
199 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
201 return wide_int_to_tree (TREE_TYPE (arg1), quo);
206 /* This is nonzero if we should defer warnings about undefined
207 overflow. This facility exists because these warnings are a
208 special case. The code to estimate loop iterations does not want
209 to issue any warnings, since it works with expressions which do not
210 occur in user code. Various bits of cleanup code call fold(), but
211 only use the result if it has certain characteristics (e.g., is a
212 constant); that code only wants to issue a warning if the result is
215 static int fold_deferring_overflow_warnings;
217 /* If a warning about undefined overflow is deferred, this is the
218 warning. Note that this may cause us to turn two warnings into
219 one, but that is fine since it is sufficient to only give one
220 warning per expression. */
222 static const char* fold_deferred_overflow_warning;
224 /* If a warning about undefined overflow is deferred, this is the
225 level at which the warning should be emitted. */
227 static enum warn_strict_overflow_code fold_deferred_overflow_code;
229 /* Start deferring overflow warnings. We could use a stack here to
230 permit nested calls, but at present it is not necessary. */
/* NOTE(review): return type ("void") and braces elided in this extract.  */
233 fold_defer_overflow_warnings (void)
/* A counter rather than a bool so nested defer/undefer pairs balance.  */
235 ++fold_deferring_overflow_warnings;
238 /* Stop deferring overflow warnings. If there is a pending warning,
239 and ISSUE is true, then issue the warning if appropriate. STMT is
240 the statement with which the warning should be associated (used for
241 location information); STMT may be NULL. CODE is the level of the
242 warning--a warn_strict_overflow_code value. This function will use
243 the smaller of CODE and the deferred code when deciding whether to
244 issue the warning. CODE may be zero to mean to always use the
deferred code.
NOTE(review): the return type, braces, local declarations (warnmsg,
locus) and several early "return" statements are elided in this
extract -- restore from upstream fold-const.c before compiling.  */
248 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
253 gcc_assert (fold_deferring_overflow_warnings > 0);
254 --fold_deferring_overflow_warnings;
/* Still inside an outer defer: keep the message pending, but record
   the lowest (most severe) deferred code seen so far.  */
255 if (fold_deferring_overflow_warnings > 0)
257 if (fold_deferred_overflow_warning != NULL
259 && code < (int) fold_deferred_overflow_code)
260 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
/* Outermost undefer: take ownership of the pending message.  */
264 warnmsg = fold_deferred_overflow_warning;
265 fold_deferred_overflow_warning = NULL;
267 if (!issue || warnmsg == NULL)
/* Respect a per-statement suppression flag.  */
270 if (gimple_no_warning_p (stmt))
273 /* Use the smallest code level when deciding to issue the
warning.  */
275 if (code == 0 || code > (int) fold_deferred_overflow_code)
276 code = fold_deferred_overflow_code;
278 if (!issue_strict_overflow_warning (code))
/* Attach the warning to STMT's location when available, else the
   current input location.  */
282 locus = input_location;
284 locus = gimple_location (stmt);
285 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
288 /* Stop deferring overflow warnings, ignoring any deferred
warnings.  (Tail of comment, return type and braces elided.)  */
292 fold_undefer_and_ignore_overflow_warnings (void)
/* Convenience wrapper: pop the defer level and drop any pending text.  */
294 fold_undefer_overflow_warnings (false, NULL, 0);
297 /* Whether we are deferring overflow warnings. */
/* NOTE(review): return type ("bool") and braces elided in this extract.  */
300 fold_deferring_overflow_warnings_p (void)
302 return fold_deferring_overflow_warnings > 0;
305 /* This is called when we fold something based on the fact that signed
306 overflow is undefined.  GMSGID is the warning text; WC is its
severity level.  (Return type and braces elided in this extract.)  */
309 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
/* While deferring, remember only the most severe (lowest-code) message;
   one warning per expression is sufficient.  */
311 if (fold_deferring_overflow_warnings > 0)
313 if (fold_deferred_overflow_warning == NULL
314 || wc < fold_deferred_overflow_code)
316 fold_deferred_overflow_warning = gmsgid;
317 fold_deferred_overflow_code = wc;
/* Not deferring: emit immediately if -Wstrict-overflow level allows.  */
320 else if (issue_strict_overflow_warning (wc))
321 warning (OPT_Wstrict_overflow, gmsgid);
324 /* Return true if the built-in mathematical function specified by CODE
325 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): the switch header, the "return true;" for the first
   case group, and the default/false path are elided in this extract;
   only the case labels and the rounding-sensitive group's return are
   visible.  Verify against upstream fold-const.c.  */
328 negate_mathfn_p (enum built_in_function code)
/* Odd functions: safe to push a negation through unconditionally.  */
332 CASE_FLT_FN (BUILT_IN_ASIN):
333 CASE_FLT_FN (BUILT_IN_ASINH):
334 CASE_FLT_FN (BUILT_IN_ATAN):
335 CASE_FLT_FN (BUILT_IN_ATANH):
336 CASE_FLT_FN (BUILT_IN_CASIN):
337 CASE_FLT_FN (BUILT_IN_CASINH):
338 CASE_FLT_FN (BUILT_IN_CATAN):
339 CASE_FLT_FN (BUILT_IN_CATANH):
340 CASE_FLT_FN (BUILT_IN_CBRT):
341 CASE_FLT_FN (BUILT_IN_CPROJ):
342 CASE_FLT_FN (BUILT_IN_CSIN):
343 CASE_FLT_FN (BUILT_IN_CSINH):
344 CASE_FLT_FN (BUILT_IN_CTAN):
345 CASE_FLT_FN (BUILT_IN_CTANH):
346 CASE_FLT_FN (BUILT_IN_ERF):
347 CASE_FLT_FN (BUILT_IN_LLROUND):
348 CASE_FLT_FN (BUILT_IN_LROUND):
349 CASE_FLT_FN (BUILT_IN_ROUND):
350 CASE_FLT_FN (BUILT_IN_SIN):
351 CASE_FLT_FN (BUILT_IN_SINH):
352 CASE_FLT_FN (BUILT_IN_TAN):
353 CASE_FLT_FN (BUILT_IN_TANH):
354 CASE_FLT_FN (BUILT_IN_TRUNC):
/* Rounding functions are odd only when the rounding mode is fixed;
   under -frounding-math the result may depend on the dynamic mode.  */
357 CASE_FLT_FN (BUILT_IN_LLRINT):
358 CASE_FLT_FN (BUILT_IN_LRINT):
359 CASE_FLT_FN (BUILT_IN_NEARBYINT):
360 CASE_FLT_FN (BUILT_IN_RINT):
361 return !flag_rounding_math;
369 /* Check whether we may negate an integer constant T without causing
overflow.  (Tail of comment, return type and braces elided.)  */
373 may_negate_without_overflow_p (const_tree t)
377 gcc_assert (TREE_CODE (t) == INTEGER_CST);
379 type = TREE_TYPE (t);
/* Unsigned negation wraps by definition, so it never overflows; the
   elided branch presumably returns true here -- TODO confirm.  */
380 if (TYPE_UNSIGNED (type))
/* For signed types, only the minimum value (just the sign bit set)
   has no representable negation.  */
383 return !wi::only_sign_bit_p (t);
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow.
NOTE(review): this extract elides the case labels (INTEGER_CST,
REAL_CST, COMPLEX_CST, VECTOR_CST, PLUS_EXPR, MINUS_EXPR, MULT_EXPR,
the division/mod codes, NOP_EXPR, CALL_EXPR, RSHIFT_EXPR -- inferred
from the parallel structure of fold_negate_expr below), the braces,
and several return statements.  Verify against upstream before
compiling.  */
390 negate_expr_p (tree t)
397 type = TREE_TYPE (t);
400 switch (TREE_CODE (t))
/* Integer constants: wrapping types can always be negated ...  */
403 if (TYPE_OVERFLOW_WRAPS (type))
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t);
409 return (INTEGRAL_TYPE_P (type)
410 && TYPE_OVERFLOW_WRAPS (type));
417 /* We want to canonicalize to positive real constants. Pretend
418 that only negative ones can be easily negated. */
419 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Complex constants: both parts must be negatable.  */
422 return negate_expr_p (TREE_REALPART (t))
423 && negate_expr_p (TREE_IMAGPART (t));
/* Vector constants: every element must be negatable.  */
427 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
430 int count = TYPE_VECTOR_SUBPARTS (type), i;
432 for (i = 0; i < count; i++)
433 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
440 return negate_expr_p (TREE_OPERAND (t, 0))
441 && negate_expr_p (TREE_OPERAND (t, 1));
444 return negate_expr_p (TREE_OPERAND (t, 0));
/* PLUS_EXPR: not negatable when sign-dependent rounding or signed
   zeros must be honored.  */
447 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
448 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
450 /* -(A + B) -> (-B) - A. */
451 if (negate_expr_p (TREE_OPERAND (t, 1))
452 && reorder_operands_p (TREE_OPERAND (t, 0),
453 TREE_OPERAND (t, 1)))
455 /* -(A + B) -> (-A) - B. */
456 return negate_expr_p (TREE_OPERAND (t, 0));
459 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
460 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
461 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
462 && reorder_operands_p (TREE_OPERAND (t, 0),
463 TREE_OPERAND (t, 1));
/* MULT_EXPR: unsigned multiplication is left alone ...  */
466 if (TYPE_UNSIGNED (TREE_TYPE (t)))
472 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
473 return negate_expr_p (TREE_OPERAND (t, 1))
474 || negate_expr_p (TREE_OPERAND (t, 0));
480 /* In general we can't negate A / B, because if A is INT_MIN and
481 B is 1, we may turn this into INT_MIN / -1 which is undefined
482 and actually traps on some architectures. But if overflow is
483 undefined, we can negate, because - (INT_MIN / 1) is an
overflow.  */
485 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
487 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
489 /* If overflow is undefined then we have to be careful because
490 we ask whether it's ok to associate the negate with the
491 division which is not ok for example for
492 -((a - b) / c) where (-(a - b)) / c may invoke undefined
493 overflow because of negating INT_MIN. So do not use
494 negate_expr_p here but open-code the two important cases. */
495 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
496 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
497 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
500 else if (negate_expr_p (TREE_OPERAND (t, 0)))
502 return negate_expr_p (TREE_OPERAND (t, 1));
505 /* Negate -((double)float) as (double)(-float). */
506 if (TREE_CODE (type) == REAL_TYPE)
508 tree tem = strip_float_extensions (t);
510 return negate_expr_p (tem);
515 /* Negate -f(x) as f(-x). */
516 if (negate_mathfn_p (builtin_mathfn_code (t)))
517 return negate_expr_p (CALL_EXPR_ARG (t, 0));
521 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
522 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
524 tree op1 = TREE_OPERAND (t, 1);
525 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
536 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
537 simplification is possible.
538 If negate_expr_p would return true for T, NULL_TREE will never be
returned.
NOTE(review): this extract elides the return type, braces, the switch
case labels (BIT_NOT_EXPR, INTEGER_CST, REAL_CST, FIXED_CST,
COMPLEX_CST, VECTOR_CST, COMPLEX_EXPR, CONJ_EXPR, NEGATE_EXPR,
PLUS_EXPR, MINUS_EXPR, MULT_EXPR, division codes, NOP_EXPR, CALL_EXPR,
RSHIFT_EXPR -- inferred from the per-case comments below), the "break"
statements and the final "return NULL_TREE".  Verify against upstream
fold-const.c before compiling.  */
542 fold_negate_expr (location_t loc, tree t)
544 tree type = TREE_TYPE (t);
547 switch (TREE_CODE (t))
549 /* Convert - (~A) to A + 1. */
551 if (INTEGRAL_TYPE_P (type))
552 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
553 build_one_cst (type));
/* INTEGER_CST: fold the constant, but keep the result only when it
   does not introduce a new trap-on-overflow.  */
557 tem = fold_negate_const (t, type);
558 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
559 || !TYPE_OVERFLOW_TRAPS (type))
564 tem = fold_negate_const (t, type);
565 /* Two's complement FP formats, such as c4x, may overflow. */
566 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
571 tem = fold_negate_const (t, type);
/* COMPLEX_CST: negate both parts; only build a new constant when both
   negations folded to constants.  */
576 tree rpart = negate_expr (TREE_REALPART (t));
577 tree ipart = negate_expr (TREE_IMAGPART (t));
579 if ((TREE_CODE (rpart) == REAL_CST
580 && TREE_CODE (ipart) == REAL_CST)
581 || (TREE_CODE (rpart) == INTEGER_CST
582 && TREE_CODE (ipart) == INTEGER_CST))
583 return build_complex (type, rpart, ipart);
/* VECTOR_CST: negate element-wise; bail out if any element fails.  */
589 int count = TYPE_VECTOR_SUBPARTS (type), i;
590 tree *elts = XALLOCAVEC (tree, count);
592 for (i = 0; i < count; i++)
594 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
595 if (elts[i] == NULL_TREE)
599 return build_vector (type, elts);
/* COMPLEX_EXPR: distribute the negation over both operands.  */
603 if (negate_expr_p (t))
604 return fold_build2_loc (loc, COMPLEX_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
606 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
610 if (negate_expr_p (t))
611 return fold_build1_loc (loc, CONJ_EXPR, type,
612 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
/* NEGATE_EXPR: --A is just A.  */
616 return TREE_OPERAND (t, 0);
619 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
622 /* -(A + B) -> (-B) - A. */
623 if (negate_expr_p (TREE_OPERAND (t, 1))
624 && reorder_operands_p (TREE_OPERAND (t, 0),
625 TREE_OPERAND (t, 1)))
627 tem = negate_expr (TREE_OPERAND (t, 1));
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 tem, TREE_OPERAND (t, 0));
632 /* -(A + B) -> (-A) - B. */
633 if (negate_expr_p (TREE_OPERAND (t, 0)))
635 tem = negate_expr (TREE_OPERAND (t, 0));
636 return fold_build2_loc (loc, MINUS_EXPR, type,
637 tem, TREE_OPERAND (t, 1));
643 /* - (A - B) -> B - A */
644 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
645 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
646 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
647 return fold_build2_loc (loc, MINUS_EXPR, type,
648 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* MULT_EXPR: unsigned products are left alone; otherwise push the
   negation into whichever operand accepts it.  */
652 if (TYPE_UNSIGNED (type))
658 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
660 tem = TREE_OPERAND (t, 1);
661 if (negate_expr_p (tem))
662 return fold_build2_loc (loc, TREE_CODE (t), type,
663 TREE_OPERAND (t, 0), negate_expr (tem));
664 tem = TREE_OPERAND (t, 0);
665 if (negate_expr_p (tem))
666 return fold_build2_loc (loc, TREE_CODE (t), type,
667 negate_expr (tem), TREE_OPERAND (t, 1));
674 /* In general we can't negate A / B, because if A is INT_MIN and
675 B is 1, we may turn this into INT_MIN / -1 which is undefined
676 and actually traps on some architectures. But if overflow is
677 undefined, we can negate, because - (INT_MIN / 1) is an
overflow.  */
679 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
681 const char * const warnmsg = G_("assuming signed overflow does not "
682 "occur when negating a division");
683 tem = TREE_OPERAND (t, 1);
684 if (negate_expr_p (tem))
/* Warn only when the divisor is not a constant != 1, i.e. when the
   transformation really relies on undefined overflow.  */
686 if (INTEGRAL_TYPE_P (type)
687 && (TREE_CODE (tem) != INTEGER_CST
688 || integer_onep (tem)))
689 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
690 return fold_build2_loc (loc, TREE_CODE (t), type,
691 TREE_OPERAND (t, 0), negate_expr (tem));
693 /* If overflow is undefined then we have to be careful because
694 we ask whether it's ok to associate the negate with the
695 division which is not ok for example for
696 -((a - b) / c) where (-(a - b)) / c may invoke undefined
697 overflow because of negating INT_MIN. So do not use
698 negate_expr_p here but open-code the two important cases. */
699 tem = TREE_OPERAND (t, 0);
700 if ((INTEGRAL_TYPE_P (type)
701 && (TREE_CODE (tem) == NEGATE_EXPR
702 || (TREE_CODE (tem) == INTEGER_CST
703 && may_negate_without_overflow_p (tem))))
704 || !INTEGRAL_TYPE_P (type))
705 return fold_build2_loc (loc, TREE_CODE (t), type,
706 negate_expr (tem), TREE_OPERAND (t, 1));
711 /* Convert -((double)float) into (double)(-float). */
712 if (TREE_CODE (type) == REAL_TYPE)
714 tem = strip_float_extensions (t);
715 if (tem != t && negate_expr_p (tem))
716 return fold_convert_loc (loc, type, negate_expr (tem));
721 /* Negate -f(x) as f(-x). */
722 if (negate_mathfn_p (builtin_mathfn_code (t))
723 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
727 fndecl = get_callee_fndecl (t);
728 arg = negate_expr (CALL_EXPR_ARG (t, 0));
729 return build_call_expr_loc (loc, fndecl, 1, arg);
734 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
735 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
737 tree op1 = TREE_OPERAND (t, 1);
/* Only a shift by precision-1 (the sign-extraction idiom) is safe to
   rewrite this way.  */
738 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
740 tree ntype = TYPE_UNSIGNED (type)
741 ? signed_type_for (type)
742 : unsigned_type_for (type);
743 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
744 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
745 return fold_convert_loc (loc, type, temp);
757 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
758 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
NULL_TREE is returned.  (Function signature, braces, the NULL check
and STRIP_SIGN_NOPS are elided in this extract -- NOTE(review):
restore from upstream.)  */
770 loc = EXPR_LOCATION (t);
771 type = TREE_TYPE (t);
/* Try the real folder first; fall back to an explicit NEGATE_EXPR so
   callers always get a usable tree.  */
774 tem = fold_negate_expr (loc, t);
776 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
777 return fold_convert_loc (loc, type, tem);
780 /* Split a tree IN into a constant, literal and variable parts that could be
781 combined with CODE to make IN. "constant" means an expression with
782 TREE_CONSTANT but that isn't an actual constant. CODE must be a
783 commutative arithmetic operation. Store the constant part into *CONP,
784 the literal in *LITP and return the variable part. If a part isn't
785 present, set it to null. If the tree does not decompose in this way,
786 return the entire tree as the variable part and the other parts as null.
788 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
789 case, we negate an operand that was subtracted. Except if it is a
790 literal for which we use *MINUS_LITP instead.
792 If NEGATE_P is true, we are negating all of IN, again except a literal
793 for which we use *MINUS_LITP instead.
795 If IN is itself a literal or constant, return it as appropriate.
797 Note that we do not guarantee that any of the three values will be the
798 same type as IN, but they will have the same signedness and mode.
NOTE(review): the return type, braces, the initialization of *CONP /
*LITP / *MINUS_LITP / VAR, and several assignment/negation guards are
elided in this extract -- restore from upstream fold-const.c.  */
801 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
802 tree *minus_litp, int negate_p)
810 /* Strip any conversions that don't change the machine mode or signedness. */
811 STRIP_SIGN_NOPS (in);
/* Case 1: IN is itself a literal.  */
813 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
814 || TREE_CODE (in) == FIXED_CST)
/* Case 2: IN is a binary expression we can decompose.  */
816 else if (TREE_CODE (in) == code
817 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
818 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
819 /* We can associate addition and subtraction together (even
820 though the C standard doesn't say so) for integers because
821 the value is not affected. For reals, the value might be
822 affected, so we can't. */
823 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
824 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
826 tree op0 = TREE_OPERAND (in, 0);
827 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: the second operand is effectively negated (MINUS).  */
828 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
829 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
831 /* First see if either of the operands is a literal, then a constant. */
832 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
833 || TREE_CODE (op0) == FIXED_CST)
834 *litp = op0, op0 = 0;
835 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
836 || TREE_CODE (op1) == FIXED_CST)
837 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
839 if (op0 != 0 && TREE_CONSTANT (op0))
840 *conp = op0, op0 = 0;
841 else if (op1 != 0 && TREE_CONSTANT (op1))
842 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
844 /* If we haven't dealt with either operand, this is not a case we can
845 decompose. Otherwise, VAR is either of the ones remaining, if any. */
846 if (op0 != 0 && op1 != 0)
851 var = op1, neg_var_p = neg1_p;
853 /* Now do any needed negations.  A subtracted literal moves to
*MINUS_LITP rather than being negated in place.  */
855 *minus_litp = *litp, *litp = 0;
857 *conp = negate_expr (*conp);
859 var = negate_expr (var);
/* Case 3: ~X with PLUS_EXPR decomposes as (-X) - 1.  */
861 else if (TREE_CODE (in) == BIT_NOT_EXPR
862 && code == PLUS_EXPR)
864 /* -X - 1 is folded to ~X, undo that here. */
865 *minus_litp = build_one_cst (TREE_TYPE (in));
866 var = negate_expr (TREE_OPERAND (in, 0));
/* Case 4: a TREE_CONSTANT (but non-literal) expression.  */
868 else if (TREE_CONSTANT (in))
/* Finally, honor NEGATE_P by flipping every collected part;
   literals swap between *LITP and *MINUS_LITP instead of negating.  */
876 *minus_litp = *litp, *litp = 0;
877 else if (*minus_litp)
878 *litp = *minus_litp, *minus_litp = 0;
879 *conp = negate_expr (*conp);
880 var = negate_expr (var);
886 /* Re-associate trees split by the above function. T1 and T2 are
887 either expressions to associate or null. Return the new
888 expression, if any. LOC is the location of the new expression. If
889 we build an operation, do it in TYPE and with CODE.
NOTE(review): the return type, braces and the early-outs for a null
T1 or T2 are elided in this extract -- restore from upstream.  */
892 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
899 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
900 try to fold this since we will have infinite recursion. But do
901 deal with any NEGATE_EXPRs. */
902 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
903 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
905 if (code == PLUS_EXPR)
/* A + (-B) and (-A) + B canonicalize to subtractions.  */
907 if (TREE_CODE (t1) == NEGATE_EXPR)
908 return build2_loc (loc, MINUS_EXPR, type,
909 fold_convert_loc (loc, type, t2),
910 fold_convert_loc (loc, type,
911 TREE_OPERAND (t1, 0)));
912 else if (TREE_CODE (t2) == NEGATE_EXPR)
913 return build2_loc (loc, MINUS_EXPR, type,
914 fold_convert_loc (loc, type, t1),
915 fold_convert_loc (loc, type,
916 TREE_OPERAND (t2, 0)));
917 else if (integer_zerop (t2))
918 return fold_convert_loc (loc, type, t1);
920 else if (code == MINUS_EXPR)
922 if (integer_zerop (t2))
923 return fold_convert_loc (loc, type, t1);
/* build2_loc (not fold_build2_loc) to avoid re-entering fold and
   recursing forever on the shape we were given.  */
926 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type, t2));
/* Safe to fold in the general case.  */
930 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type, t2));
934 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
935 for use in int_const_binop, size_binop and size_diffop.
(Return type, braces and the "return false" bodies of the guard
conditions are elided in this extract.)  */
938 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
/* Both types must be integral or pointer types.  */
940 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
942 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
/* Equivalent means same signedness, precision and machine mode.  */
957 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
958 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
959 && TYPE_MODE (type1) == TYPE_MODE (type2);
963 /* Combine two integer constants ARG1 and ARG2 under operation CODE
964 to produce a new constant. Return NULL_TREE if we don't know how
965 to evaluate CODE at compile-time.
NOTE(review): the return type, the OVERFLOWABLE parameter, braces,
several case labels (BIT_IOR_EXPR, BIT_XOR_EXPR, BIT_AND_EXPR, the
shift/rotate codes, PLUS/MINUS/MULT, MIN/MAX -- inferred from the
wi:: calls below), "break"s, divide-by-zero guards and the default
arm are elided in this extract.  Restore from upstream.  */
968 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
973 tree type = TREE_TYPE (arg1);
974 signop sign = TYPE_SIGN (type);
975 bool overflow = false;
/* Extend ARG2 to ARG1's precision so mixed-precision operands work.  */
977 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
978 TYPE_SIGN (TREE_TYPE (parg2)));
983 res = wi::bit_or (arg1, arg2);
987 res = wi::bit_xor (arg1, arg2);
991 res = wi::bit_and (arg1, arg2);
/* Shifts: a negative count means shift the other direction.  */
996 if (wi::neg_p (arg2))
999 if (code == RSHIFT_EXPR)
1005 if (code == RSHIFT_EXPR)
1006 /* It's unclear from the C standard whether shifts can overflow.
1007 The following code ignores overflow; perhaps a C standard
1008 interpretation ruling is needed. */
1009 res = wi::rshift (arg1, arg2, sign);
1011 res = wi::lshift (arg1, arg2);
/* Rotates: likewise, negate the count and flip the direction.  */
1016 if (wi::neg_p (arg2))
1019 if (code == RROTATE_EXPR)
1020 code = LROTATE_EXPR;
1022 code = RROTATE_EXPR;
1025 if (code == RROTATE_EXPR)
1026 res = wi::rrotate (arg1, arg2);
1028 res = wi::lrotate (arg1, arg2);
1032 res = wi::add (arg1, arg2, sign, &overflow);
1036 res = wi::sub (arg1, arg2, sign, &overflow);
1040 res = wi::mul (arg1, arg2, sign, &overflow);
1043 case MULT_HIGHPART_EXPR:
1044 res = wi::mul_high (arg1, arg2, sign);
1047 case TRUNC_DIV_EXPR:
1048 case EXACT_DIV_EXPR:
1051 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1054 case FLOOR_DIV_EXPR:
1057 res = wi::div_floor (arg1, arg2, sign, &overflow);
1063 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1066 case ROUND_DIV_EXPR:
1069 res = wi::div_round (arg1, arg2, sign, &overflow);
1072 case TRUNC_MOD_EXPR:
1075 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1078 case FLOOR_MOD_EXPR:
1081 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1087 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1090 case ROUND_MOD_EXPR:
1093 res = wi::mod_round (arg1, arg2, sign, &overflow);
1097 res = wi::min (arg1, arg2, sign);
1101 res = wi::max (arg1, arg2, sign);
/* Build the result tree, propagating overflow flags from the inputs
   and from the operation itself.  */
1108 t = force_fit_type (type, res, overflowable,
1109 (((sign == SIGNED || overflowable == -1)
1111 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
/* Public wrapper: combine two INTEGER_CSTs under CODE with the default
   overflowable setting (1).  (Return type and braces elided.)  */
1117 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1119 return int_const_binop_1 (code, arg1, arg2, 1);
1122 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1123 constant. We assume ARG1 and ARG2 have the same data type, or at least
1124 are the same kind of constant and the same machine mode. Return zero if
1125 combining the constants is not allowed in the current operating mode. */
1128 const_binop (enum tree_code code, tree arg1, tree arg2)
1130 /* Sanity check for the recursive cases. */
1137 if (TREE_CODE (arg1) == INTEGER_CST)
1138 return int_const_binop (code, arg1, arg2);
1140 if (TREE_CODE (arg1) == REAL_CST)
1145 REAL_VALUE_TYPE value;
1146 REAL_VALUE_TYPE result;
1150 /* The following codes are handled by real_arithmetic. */
1165 d1 = TREE_REAL_CST (arg1);
1166 d2 = TREE_REAL_CST (arg2);
1168 type = TREE_TYPE (arg1);
1169 mode = TYPE_MODE (type);
1171 /* Don't perform operation if we honor signaling NaNs and
1172 either operand is a NaN. */
1173 if (HONOR_SNANS (mode)
1174 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1177 /* Don't perform operation if it would raise a division
1178 by zero exception. */
1179 if (code == RDIV_EXPR
1180 && REAL_VALUES_EQUAL (d2, dconst0)
1181 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1184 /* If either operand is a NaN, just return it. Otherwise, set up
1185 for floating-point trap; we return an overflow. */
1186 if (REAL_VALUE_ISNAN (d1))
1188 else if (REAL_VALUE_ISNAN (d2))
1191 inexact = real_arithmetic (&value, code, &d1, &d2);
1192 real_convert (&result, mode, &value);
1194 /* Don't constant fold this floating point operation if
1195 the result has overflowed and flag_trapping_math. */
1196 if (flag_trapping_math
1197 && MODE_HAS_INFINITIES (mode)
1198 && REAL_VALUE_ISINF (result)
1199 && !REAL_VALUE_ISINF (d1)
1200 && !REAL_VALUE_ISINF (d2))
1203 /* Don't constant fold this floating point operation if the
1204 result may dependent upon the run-time rounding mode and
1205 flag_rounding_math is set, or if GCC's software emulation
1206 is unable to accurately represent the result. */
1207 if ((flag_rounding_math
1208 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1209 && (inexact || !real_identical (&result, &value)))
1212 t = build_real (type, result);
1214 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1218 if (TREE_CODE (arg1) == FIXED_CST)
1220 FIXED_VALUE_TYPE f1;
1221 FIXED_VALUE_TYPE f2;
1222 FIXED_VALUE_TYPE result;
1227 /* The following codes are handled by fixed_arithmetic. */
1233 case TRUNC_DIV_EXPR:
1234 f2 = TREE_FIXED_CST (arg2);
1241 f2.data.high = w2.elt (1);
1242 f2.data.low = w2.elt (0);
1251 f1 = TREE_FIXED_CST (arg1);
1252 type = TREE_TYPE (arg1);
1253 sat_p = TYPE_SATURATING (type);
1254 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1255 t = build_fixed (type, result);
1256 /* Propagate overflow flags. */
1257 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1258 TREE_OVERFLOW (t) = 1;
1262 if (TREE_CODE (arg1) == COMPLEX_CST)
1264 tree type = TREE_TYPE (arg1);
1265 tree r1 = TREE_REALPART (arg1);
1266 tree i1 = TREE_IMAGPART (arg1);
1267 tree r2 = TREE_REALPART (arg2);
1268 tree i2 = TREE_IMAGPART (arg2);
1275 real = const_binop (code, r1, r2);
1276 imag = const_binop (code, i1, i2);
1280 if (COMPLEX_FLOAT_TYPE_P (type))
1281 return do_mpc_arg2 (arg1, arg2, type,
1282 /* do_nonfinite= */ folding_initializer,
1285 real = const_binop (MINUS_EXPR,
1286 const_binop (MULT_EXPR, r1, r2),
1287 const_binop (MULT_EXPR, i1, i2));
1288 imag = const_binop (PLUS_EXPR,
1289 const_binop (MULT_EXPR, r1, i2),
1290 const_binop (MULT_EXPR, i1, r2));
1294 if (COMPLEX_FLOAT_TYPE_P (type))
1295 return do_mpc_arg2 (arg1, arg2, type,
1296 /* do_nonfinite= */ folding_initializer,
1299 case TRUNC_DIV_EXPR:
1301 case FLOOR_DIV_EXPR:
1302 case ROUND_DIV_EXPR:
1303 if (flag_complex_method == 0)
1305 /* Keep this algorithm in sync with
1306 tree-complex.c:expand_complex_div_straight().
1308 Expand complex division to scalars, straightforward algorithm.
1309 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1313 = const_binop (PLUS_EXPR,
1314 const_binop (MULT_EXPR, r2, r2),
1315 const_binop (MULT_EXPR, i2, i2));
1317 = const_binop (PLUS_EXPR,
1318 const_binop (MULT_EXPR, r1, r2),
1319 const_binop (MULT_EXPR, i1, i2));
1321 = const_binop (MINUS_EXPR,
1322 const_binop (MULT_EXPR, i1, r2),
1323 const_binop (MULT_EXPR, r1, i2));
1325 real = const_binop (code, t1, magsquared);
1326 imag = const_binop (code, t2, magsquared);
1330 /* Keep this algorithm in sync with
1331 tree-complex.c:expand_complex_div_wide().
1333 Expand complex division to scalars, modified algorithm to minimize
1334 overflow with wide input ranges. */
1335 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1336 fold_abs_const (r2, TREE_TYPE (type)),
1337 fold_abs_const (i2, TREE_TYPE (type)));
1339 if (integer_nonzerop (compare))
1341 /* In the TRUE branch, we compute
1343 div = (br * ratio) + bi;
1344 tr = (ar * ratio) + ai;
1345 ti = (ai * ratio) - ar;
1348 tree ratio = const_binop (code, r2, i2);
1349 tree div = const_binop (PLUS_EXPR, i2,
1350 const_binop (MULT_EXPR, r2, ratio));
1351 real = const_binop (MULT_EXPR, r1, ratio);
1352 real = const_binop (PLUS_EXPR, real, i1);
1353 real = const_binop (code, real, div);
1355 imag = const_binop (MULT_EXPR, i1, ratio);
1356 imag = const_binop (MINUS_EXPR, imag, r1);
1357 imag = const_binop (code, imag, div);
1361 /* In the FALSE branch, we compute
1363 divisor = (d * ratio) + c;
1364 tr = (b * ratio) + a;
1365 ti = b - (a * ratio);
1368 tree ratio = const_binop (code, i2, r2);
1369 tree div = const_binop (PLUS_EXPR, r2,
1370 const_binop (MULT_EXPR, i2, ratio));
1372 real = const_binop (MULT_EXPR, i1, ratio);
1373 real = const_binop (PLUS_EXPR, real, r1);
1374 real = const_binop (code, real, div);
1376 imag = const_binop (MULT_EXPR, r1, ratio);
1377 imag = const_binop (MINUS_EXPR, i1, imag);
1378 imag = const_binop (code, imag, div);
1388 return build_complex (type, real, imag);
1391 if (TREE_CODE (arg1) == VECTOR_CST
1392 && TREE_CODE (arg2) == VECTOR_CST)
1394 tree type = TREE_TYPE (arg1);
1395 int count = TYPE_VECTOR_SUBPARTS (type), i;
1396 tree *elts = XALLOCAVEC (tree, count);
1398 for (i = 0; i < count; i++)
1400 tree elem1 = VECTOR_CST_ELT (arg1, i);
1401 tree elem2 = VECTOR_CST_ELT (arg2, i);
1403 elts[i] = const_binop (code, elem1, elem2);
1405 /* It is possible that const_binop cannot handle the given
1406 code and return NULL_TREE */
1407 if (elts[i] == NULL_TREE)
1411 return build_vector (type, elts);
1414 /* Shifts allow a scalar offset for a vector. */
1415 if (TREE_CODE (arg1) == VECTOR_CST
1416 && TREE_CODE (arg2) == INTEGER_CST)
1418 tree type = TREE_TYPE (arg1);
1419 int count = TYPE_VECTOR_SUBPARTS (type), i;
1420 tree *elts = XALLOCAVEC (tree, count);
1422 if (code == VEC_RSHIFT_EXPR)
1424 if (!tree_fits_uhwi_p (arg2))
1427 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1428 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1429 unsigned HOST_WIDE_INT innerc
1430 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1431 if (shiftc >= outerc || (shiftc % innerc) != 0)
1433 int offset = shiftc / innerc;
1434 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1435 For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
1436 vector element, but last element if BYTES_BIG_ENDIAN. */
1437 if (BYTES_BIG_ENDIAN)
1439 tree zero = build_zero_cst (TREE_TYPE (type));
1440 for (i = 0; i < count; i++)
1442 if (i + offset < 0 || i + offset >= count)
1445 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1449 for (i = 0; i < count; i++)
1451 tree elem1 = VECTOR_CST_ELT (arg1, i);
1453 elts[i] = const_binop (code, elem1, arg2);
1455 /* It is possible that const_binop cannot handle the given
1456 code and return NULL_TREE */
1457 if (elts[i] == NULL_TREE)
1461 return build_vector (type, elts);
1466 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1467 indicates which particular sizetype to create. */
1470 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* Select the particular sizetype variant via sizetype_tab[KIND] and build
   an INTEGER_CST of that type holding NUMBER.  */
1472 return build_int_cst (sizetype_tab[(int) kind], number);
1475 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1476 is a tree code. The type of the result is taken from the operands.
1477 Both must be equivalent integer types, ala int_binop_types_match_p.
1478 If the operands are constant, so is the result. */
1481 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1483 tree type = TREE_TYPE (arg0);
/* Error operands propagate unchanged.  */
1485 if (arg0 == error_mark_node || arg1 == error_mark_node)
1486 return error_mark_node;
/* Both operand types must be equivalent integer types for CODE
   (see int_binop_types_match_p).  */
1488 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1491 /* Handle the special case of two integer constants faster. */
1492 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1494 /* And some specific cases even faster than that. */
/* Identity shortcuts: 0 + x, x + 0, x - 0 and 1 * x need no arithmetic.
   The overflow flag is checked so a flagged constant is not returned
   as-is.  NOTE(review): the actual return statements for these
   shortcuts are on lines elided from this view.  */
1495 if (code == PLUS_EXPR)
1497 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1499 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1502 else if (code == MINUS_EXPR)
1504 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1507 else if (code == MULT_EXPR)
1509 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1513 /* Handle general case of two integer constants. For sizetype
1514 constant calculations we always want to know about overflow,
1515 even in the unsigned case. */
/* overflowable == -1 requests overflow detection even for unsigned
   types.  */
1516 return int_const_binop_1 (code, arg0, arg1, -1);
/* Non-constant operands: fall back to the generic folder.  */
1519 return fold_build2_loc (loc, code, type, arg0, arg1);
1522 /* Given two values, either both of sizetype or both of bitsizetype,
1523 compute the difference between the two values. Return the value
1524 in signed type corresponding to the type of the operands. */
1527 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1529 tree type = TREE_TYPE (arg0);
1532 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1535 /* If the type is already signed, just do the simple thing. */
1536 if (!TYPE_UNSIGNED (type))
1537 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart CTYPE of the unsigned operand type;
   sizetype/bitsizetype have dedicated signed twins.  */
1539 if (type == sizetype)
1541 else if (type == bitsizetype)
1542 ctype = sbitsizetype;
1544 ctype = signed_type_for (type);
1546 /* If either operand is not a constant, do the conversions to the signed
1547 type and subtract. The hardware will do the right thing with any
1548 overflow in the subtraction. */
1549 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1550 return size_binop_loc (loc, MINUS_EXPR,
1551 fold_convert_loc (loc, ctype, arg0),
1552 fold_convert_loc (loc, ctype, arg1));
1554 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1555 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1556 overflow) and negate (which can't either). Special-case a result
1557 of zero while we're here. */
1558 if (tree_int_cst_equal (arg0, arg1))
1559 return build_int_cst (ctype, 0);
1560 else if (tree_int_cst_lt (arg1, arg0))
1561 return fold_convert_loc (loc, ctype,
1562 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) as 0 - convert (arg1 - arg0).  */
1564 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1565 fold_convert_loc (loc, ctype,
1566 size_binop_loc (loc,
1571 /* A subroutine of fold_convert_const handling conversions of an
1572 INTEGER_CST to another integer type. */
1575 fold_convert_const_int_from_int (tree type, const_tree arg1)
1577 /* Given an integer constant, make new constant with new type,
1578 appropriately sign-extended or truncated. Use widest_int
1579 so that any extension is done according ARG1's type. */
/* Pointers are treated as unsigned (hence the !POINTER_TYPE_P
   overflowable argument); any pre-existing overflow flag on ARG1 is
   propagated.  */
1580 return force_fit_type (type, wi::to_widest (arg1),
1581 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1582 TREE_OVERFLOW (arg1));
1585 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1586 to an integer type. */
1589 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1591 bool overflow = false;
1594 /* The following code implements the floating point to integer
1595 conversion rules required by the Java Language Specification,
1596 that IEEE NaNs are mapped to zero and values that overflow
1597 the target precision saturate, i.e. values greater than
1598 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1599 are mapped to INT_MIN. These semantics are allowed by the
1600 C and C++ standards that simply state that the behavior of
1601 FP-to-integer conversion is unspecified upon overflow. */
1605 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding implied by CODE before converting; only the
   FIX_TRUNC_EXPR arm is visible here.  */
1609 case FIX_TRUNC_EXPR:
1610 real_trunc (&r, VOIDmode, &x);
1617 /* If R is NaN, return zero and show we have an overflow. */
1618 if (REAL_VALUE_ISNAN (r))
1621 val = wi::zero (TYPE_PRECISION (type));
1624 /* See if R is less than the lower bound or greater than the
/* Saturate: below TYPE_MIN_VALUE clamps to the minimum, above
   TYPE_MAX_VALUE clamps to the maximum (clamping code elided).  */
1629 tree lt = TYPE_MIN_VALUE (type);
1630 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1631 if (REAL_VALUES_LESS (r, l))
1640 tree ut = TYPE_MAX_VALUE (type);
1643 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1644 if (REAL_VALUES_LESS (u, r))
1653 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
/* Propagate overflow both from the conversion and from ARG1.  */
1655 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1659 /* A subroutine of fold_convert_const handling conversions of a
1660 FIXED_CST to an integer type. */
1663 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1666 double_int temp, temp_trunc;
1669 /* Right shift FIXED_CST to temp by fbit. */
1670 temp = TREE_FIXED_CST (arg1).data;
1671 mode = TREE_FIXED_CST (arg1).mode;
1672 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
/* Arithmetic shift for signed fixed-point modes, logical otherwise.  */
1674 temp = temp.rshift (GET_MODE_FBIT (mode),
1675 HOST_BITS_PER_DOUBLE_INT,
1676 SIGNED_FIXED_POINT_MODE_P (mode));
1678 /* Left shift temp to temp_trunc by fbit. */
/* temp_trunc holds the value with fractional bits cleared, used below
   to detect a nonzero fraction.  */
1679 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1680 HOST_BITS_PER_DOUBLE_INT,
1681 SIGNED_FIXED_POINT_MODE_P (mode));
/* fbit >= HOST_BITS_PER_DOUBLE_INT: the shifted value is all zeros.  */
1685 temp = double_int_zero;
1686 temp_trunc = double_int_zero;
1689 /* If FIXED_CST is negative, we need to round the value toward 0.
1690 By checking if the fractional bits are not zero to add 1 to temp. */
1691 if (SIGNED_FIXED_POINT_MODE_P (mode)
1692 && temp_trunc.is_negative ()
1693 && TREE_FIXED_CST (arg1).data != temp_trunc)
1694 temp += double_int_one;
1696 /* Given a fixed-point constant, make new constant with new type,
1697 appropriately sign-extended or truncated. */
/* Overflow if a negative value is forced into a type that is "more
   unsigned" than ARG1's, or if ARG1 already overflowed.  */
1698 t = force_fit_type (type, temp, -1,
1699 (temp.is_negative ()
1700 && (TYPE_UNSIGNED (type)
1701 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1702 | TREE_OVERFLOW (arg1));
1707 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1708 to another floating point type. */
1711 fold_convert_const_real_from_real (tree type, const_tree arg1)
1713 REAL_VALUE_TYPE value;
/* Round/convert ARG1's value into TYPE's machine mode.  */
1716 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1717 t = build_real (type, value);
1719 /* If converting an infinity or NAN to a representation that doesn't
1720 have one, set the overflow bit so that we can produce some kind of
1721 error message at the appropriate point if necessary. It's not the
1722 most user-friendly message, but it's better than nothing. */
1723 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1724 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1725 TREE_OVERFLOW (t) = 1;
1726 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1727 && !MODE_HAS_NANS (TYPE_MODE (type)))
1728 TREE_OVERFLOW (t) = 1;
1729 /* Regular overflow, conversion produced an infinity in a mode that
1730 can't represent them. */
1731 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1732 && REAL_VALUE_ISINF (value)
1733 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1734 TREE_OVERFLOW (t) = 1;
/* Otherwise just carry over ARG1's overflow flag.  */
1736 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1740 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1741 to a floating point type. */
1744 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1746 REAL_VALUE_TYPE value;
/* Convert the fixed-point value into TYPE's floating mode, then wrap
   it in a REAL_CST, carrying over ARG1's overflow flag.  */
1749 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1750 t = build_real (type, value);
1752 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1756 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1757 to another fixed-point type. */
1760 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1762 FIXED_VALUE_TYPE value;
/* fixed_convert honours TYPE's saturation setting and reports whether
   the value overflowed the target mode.  */
1766 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1767 TYPE_SATURATING (type));
1768 t = build_fixed (type, value);
1770 /* Propagate overflow flags. */
1771 if (overflow_p | TREE_OVERFLOW (arg1))
1772 TREE_OVERFLOW (t) = 1;
1776 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1777 to a fixed-point type. */
1780 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1782 FIXED_VALUE_TYPE value;
/* The fixed-point interface still takes a double_int, so the INTEGER_CST
   must fit in at most two HOST_WIDE_INT elements.  */
1787 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1789 di.low = TREE_INT_CST_ELT (arg1, 0);
/* With a single element, sign-extend the low word into the high word.  */
1790 if (TREE_INT_CST_NUNITS (arg1) == 1)
1791 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1793 di.high = TREE_INT_CST_ELT (arg1, 1);
1795 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1796 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1797 TYPE_SATURATING (type));
1798 t = build_fixed (type, value);
1800 /* Propagate overflow flags. */
1801 if (overflow_p | TREE_OVERFLOW (arg1))
1802 TREE_OVERFLOW (t) = 1;
1806 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1807 to a fixed-point type. */
1810 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1812 FIXED_VALUE_TYPE value;
/* Convert the real constant into TYPE's fixed-point mode, honouring
   TYPE's saturation setting.  */
1816 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1817 &TREE_REAL_CST (arg1),
1818 TYPE_SATURATING (type));
1819 t = build_fixed (type, value);
1821 /* Propagate overflow flags. */
1822 if (overflow_p | TREE_OVERFLOW (arg1))
1823 TREE_OVERFLOW (t) = 1;
1827 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1828 type TYPE. If no simplification can be done return NULL_TREE. */
1831 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Conversion to the same type is a no-op (return on elided line).  */
1833 if (TREE_TYPE (arg1) == type)
/* Dispatch on the target type class, then on ARG1's constant kind;
   unsupported combinations fall through to a NULL_TREE return.  */
1836 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1837 || TREE_CODE (type) == OFFSET_TYPE)
1839 if (TREE_CODE (arg1) == INTEGER_CST)
1840 return fold_convert_const_int_from_int (type, arg1);
1841 else if (TREE_CODE (arg1) == REAL_CST)
1842 return fold_convert_const_int_from_real (code, type, arg1);
1843 else if (TREE_CODE (arg1) == FIXED_CST)
1844 return fold_convert_const_int_from_fixed (type, arg1);
1846 else if (TREE_CODE (type) == REAL_TYPE)
1848 if (TREE_CODE (arg1) == INTEGER_CST)
1849 return build_real_from_int_cst (type, arg1);
1850 else if (TREE_CODE (arg1) == REAL_CST)
1851 return fold_convert_const_real_from_real (type, arg1);
1852 else if (TREE_CODE (arg1) == FIXED_CST)
1853 return fold_convert_const_real_from_fixed (type, arg1);
1855 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1857 if (TREE_CODE (arg1) == FIXED_CST)
1858 return fold_convert_const_fixed_from_fixed (type, arg1);
1859 else if (TREE_CODE (arg1) == INTEGER_CST)
1860 return fold_convert_const_fixed_from_int (type, arg1);
1861 else if (TREE_CODE (arg1) == REAL_CST)
1862 return fold_convert_const_fixed_from_real (type, arg1);
1867 /* Construct a vector of zero elements of vector type TYPE. */
1870 build_zero_vector (tree type)
/* Fold zero into TYPE's element type, then replicate it across all
   lanes of the vector.  */
1874 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1875 return build_vector_from_val (type, t);
1878 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1881 fold_convertible_p (const_tree type, const_tree arg)
1883 tree orig = TREE_TYPE (arg);
/* Error marks anywhere make the conversion invalid.  */
1888 if (TREE_CODE (arg) == ERROR_MARK
1889 || TREE_CODE (type) == ERROR_MARK
1890 || TREE_CODE (orig) == ERROR_MARK)
/* Same main variant is trivially convertible.  */
1893 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1896 switch (TREE_CODE (type))
1898 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1899 case POINTER_TYPE: case REFERENCE_TYPE:
1901 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1902 || TREE_CODE (orig) == OFFSET_TYPE)
/* A vector source additionally needs identical size in bits.  */
1904 return (TREE_CODE (orig) == VECTOR_TYPE
1905 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1908 case FIXED_POINT_TYPE:
/* Remaining classes: convertible only between the same type class.  */
1912 return TREE_CODE (type) == TREE_CODE (orig);
1919 /* Convert expression ARG to type TYPE. Used by the middle-end for
1920 simple conversions in preference to calling the front-end's convert. */
1923 fold_convert_loc (location_t loc, tree type, tree arg)
1925 tree orig = TREE_TYPE (arg);
/* Error marks anywhere yield error_mark_node.  */
1931 if (TREE_CODE (arg) == ERROR_MARK
1932 || TREE_CODE (type) == ERROR_MARK
1933 || TREE_CODE (orig) == ERROR_MARK)
1934 return error_mark_node;
/* Dispatch on the target type class.  */
1936 switch (TREE_CODE (type))
1939 case REFERENCE_TYPE:
1940 /* Handle conversions between pointers to different address spaces. */
1941 if (POINTER_TYPE_P (orig)
1942 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1943 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1944 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1947 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
/* Integer target: fold constant operands directly when possible.  */
1949 if (TREE_CODE (arg) == INTEGER_CST)
1951 tem = fold_convert_const (NOP_EXPR, type, arg);
1952 if (tem != NULL_TREE)
1955 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1956 || TREE_CODE (orig) == OFFSET_TYPE)
1957 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* Complex source: convert its real part.  */
1958 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 return fold_convert_loc (loc, type,
1960 fold_build1_loc (loc, REALPART_EXPR,
1961 TREE_TYPE (orig), arg));
1962 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1963 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1964 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* Real target (the REAL_TYPE case label is on an elided line):
   constants fold via fold_convert_const with the appropriate code.  */
1967 if (TREE_CODE (arg) == INTEGER_CST)
1969 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1973 else if (TREE_CODE (arg) == REAL_CST)
1975 tem = fold_convert_const (NOP_EXPR, type, arg);
1976 if (tem != NULL_TREE)
1979 else if (TREE_CODE (arg) == FIXED_CST)
1981 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1982 if (tem != NULL_TREE)
/* Non-constant source: pick the conversion code by source class.  */
1986 switch (TREE_CODE (orig))
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1991 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1994 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1996 case FIXED_POINT_TYPE:
1997 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
/* Complex source: drop the imaginary part and convert the real part.  */
2000 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2001 return fold_convert_loc (loc, type, tem);
2007 case FIXED_POINT_TYPE:
2008 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2009 || TREE_CODE (arg) == REAL_CST)
2011 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2012 if (tem != NULL_TREE)
2013 goto fold_convert_exit;
2016 switch (TREE_CODE (orig))
2018 case FIXED_POINT_TYPE:
2023 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2026 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2027 return fold_convert_loc (loc, type, tem);
/* Complex target (case label elided): scalar sources become
   COMPLEX_EXPR <converted arg, 0>.  */
2034 switch (TREE_CODE (orig))
2037 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2038 case POINTER_TYPE: case REFERENCE_TYPE:
2040 case FIXED_POINT_TYPE:
2041 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2042 fold_convert_loc (loc, TREE_TYPE (type), arg),
2043 fold_convert_loc (loc, TREE_TYPE (type),
2044 integer_zero_node));
/* Complex-to-complex: convert both parts.  If ARG is already a
   COMPLEX_EXPR we can convert its operands without a save_expr.  */
2049 if (TREE_CODE (arg) == COMPLEX_EXPR)
2051 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2052 TREE_OPERAND (arg, 0));
2053 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2054 TREE_OPERAND (arg, 1));
2055 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
/* Otherwise evaluate ARG once and split it.  */
2058 arg = save_expr (arg);
2059 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2060 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2061 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2062 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2063 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
/* Vector target (case label elided): zero becomes a zero vector,
   otherwise a same-size VIEW_CONVERT_EXPR.  */
2071 if (integer_zerop (arg))
2072 return build_zero_vector (type);
2073 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2074 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2075 || TREE_CODE (orig) == VECTOR_TYPE);
2076 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
/* void target: discard the value but keep side effects.  */
2079 tem = fold_ignored_result (arg);
2080 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2084 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* fold_convert_exit: attach LOC to the folded constant.  */
2088 protected_set_expr_location_unshare (tem, loc);
2092 /* Return false if expr can be assumed not to be an lvalue, true
2096 maybe_lvalue_p (const_tree x)
2098 /* We only need to wrap lvalue tree codes. */
/* Codes listed in this switch may be lvalues (most labels elided);
   everything else falls through to the non-lvalue default.  */
2099 switch (TREE_CODE (x))
2112 case ARRAY_RANGE_REF:
2118 case PREINCREMENT_EXPR:
2119 case PREDECREMENT_EXPR:
2121 case TRY_CATCH_EXPR:
2122 case WITH_CLEANUP_EXPR:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2143 non_lvalue_loc (location_t loc, tree x)
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X cannot be an lvalue anyway, no wrapper is needed (the early
   return is on an elided line).  */
2150 if (! maybe_lvalue_p (x))
2152 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2155 /* When pedantic, return an expr equal to X but certainly not valid as a
2156 pedantic lvalue. Otherwise, return X. */
2159 pedantic_non_lvalue_loc (location_t loc, tree x)
/* Only the location is updated here; no NON_LVALUE_EXPR is wrapped.  */
2161 return protected_set_expr_location_unshare (x, loc);
2164 /* Given a tree comparison code, return the code that is the logical inverse.
2165 It is generally not safe to do this for floating-point comparisons, except
2166 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2167 ERROR_MARK in this case. */
2170 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With NaNs and trapping math, inverting an ordered comparison is not
   safe (the inverse would mask traps); ERROR_MARK is returned (elided).  */
2172 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2173 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
/* The inverse of an ordered comparison is unordered when NaNs are
   honored: !(a > b) is a UNLE b, etc.  (case labels elided).  */
2183 return honor_nans ? UNLE_EXPR : LE_EXPR;
2185 return honor_nans ? UNLT_EXPR : LT_EXPR;
2187 return honor_nans ? UNGE_EXPR : GE_EXPR;
2189 return honor_nans ? UNGT_EXPR : GT_EXPR;
2203 return UNORDERED_EXPR;
2204 case UNORDERED_EXPR:
2205 return ORDERED_EXPR;
2211 /* Similar, but return the comparison that results if the operands are
2212 swapped. This is safe for floating-point. */
2215 swap_tree_comparison (enum tree_code code)
/* Mirror the comparison: LT<->GT, LE<->GE; symmetric codes such as
   EQ/NE/ORDERED/UNORDERED map to themselves (the switch body is on
   elided lines).  */
2222 case UNORDERED_EXPR:
2248 /* Convert a comparison tree code from an enum tree_code representation
2249 into a compcode bit-based encoding. This function is the inverse of
2250 compcode_to_comparison. */
2252 static enum comparison_code
2253 comparison_to_compcode (enum tree_code code)
/* Map each tree comparison code onto its bit-encoded COMPCODE_* value
   so comparisons can be combined with bitwise AND/OR (many case labels
   are on elided lines).  */
2270 return COMPCODE_ORD;
2271 case UNORDERED_EXPR:
2272 return COMPCODE_UNORD;
2274 return COMPCODE_UNLT;
2276 return COMPCODE_UNEQ;
2278 return COMPCODE_UNLE;
2280 return COMPCODE_UNGT;
2282 return COMPCODE_LTGT;
2284 return COMPCODE_UNGE;
2290 /* Convert a compcode bit-based encoding of a comparison operator back
2291 to GCC's enum tree_code representation. This function is the
2292 inverse of comparison_to_compcode. */
2294 static enum tree_code
2295 compcode_to_comparison (enum comparison_code code)
/* Inverse mapping of comparison_to_compcode: decode a bit-encoded
   comparison back to a tree code (most case labels elided).  */
2312 return ORDERED_EXPR;
2313 case COMPCODE_UNORD:
2314 return UNORDERED_EXPR;
2332 /* Return a tree for the comparison which is the combination of
2333 doing the AND or OR (depending on CODE) of the two operations LCODE
2334 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2335 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2336 if this makes the transformation invalid. */
2339 combine_comparisons (location_t loc,
2340 enum tree_code code, enum tree_code lcode,
2341 enum tree_code rcode, tree truth_type,
2342 tree ll_arg, tree lr_arg)
/* Work in the compcode bit representation so AND/OR of comparisons
   become bitwise AND/OR of their encodings.  */
2344 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2345 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2346 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2351 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2352 compcode = lcompcode & rcompcode;
2355 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2356 compcode = lcompcode | rcompcode;
/* When the mode has no NaNs, the unordered bit and NaN-only codes
   are meaningless and can be simplified away.  */
2365 /* Eliminate unordered comparisons, as well as LTGT and ORD
2366 which are not used unless the mode has NaNs. */
2367 compcode &= ~COMPCODE_UNORD;
2368 if (compcode == COMPCODE_LTGT)
2369 compcode = COMPCODE_NE;
2370 else if (compcode == COMPCODE_ORD)
2371 compcode = COMPCODE_TRUE;
2373 else if (flag_trapping_math)
2375 /* Check that the original operation and the optimized ones will trap
2376 under the same condition. */
/* An ordered comparison other than EQ/ORD traps on NaN operands.  */
2377 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2378 && (lcompcode != COMPCODE_EQ)
2379 && (lcompcode != COMPCODE_ORD);
2380 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2381 && (rcompcode != COMPCODE_EQ)
2382 && (rcompcode != COMPCODE_ORD);
2383 bool trap = (compcode & COMPCODE_UNORD) == 0
2384 && (compcode != COMPCODE_EQ)
2385 && (compcode != COMPCODE_ORD);
2387 /* In a short-circuited boolean expression the LHS might be
2388 such that the RHS, if evaluated, will never trap. For
2389 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2390 if neither x nor y is NaN. (This is a mixed blessing: for
2391 example, the expression above will never trap, hence
2392 optimizing it to x < y would be invalid). */
2393 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2394 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2397 /* If the comparison was short-circuited, and only the RHS
2398 trapped, we may now generate a spurious trap. */
2400 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2403 /* If we changed the conditions that cause a trap, we lose. */
2404 if ((ltrap || rtrap) != trap)
/* Degenerate combined codes fold to a boolean constant.  */
2408 if (compcode == COMPCODE_TRUE)
2409 return constant_boolean_node (true, truth_type);
2410 else if (compcode == COMPCODE_FALSE)
2411 return constant_boolean_node (false, truth_type);
/* Otherwise rebuild a single comparison from the combined encoding.  */
2414 enum tree_code tcode;
2416 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2417 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2421 /* Return nonzero if two operands (typically of the same tree node)
2422 are necessarily equal. If either argument has side-effects this
2423 function returns zero. FLAGS modifies behavior as follows:
2425 If OEP_ONLY_CONST is set, only return nonzero for constants.
2426 This function tests whether the operands are indistinguishable;
2427 it does not test whether they are equal using C's == operation.
2428 The distinction is important for IEEE floating point, because
2429 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2430 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2432 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2433 even though it may hold multiple values during a function.
2434 This is because a GCC tree node guarantees that nothing else is
2435 executed between the evaluation of its "operands" (which may often
2436 be evaluated in arbitrary order). Hence if the operands themselves
2437 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2438 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2439 unset means assuming isochronic (or instantaneous) tree equivalence.
2440 Unless comparing arbitrary expression trees, such as from different
2441 statements, this flag can usually be left unset.
2443 If OEP_PURE_SAME is set, then pure functions with identical arguments
2444 are considered the same. It is used when the caller has other ways
2445 to ensure that global memory is unchanged in between. */
2448 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2450 /* If either is ERROR_MARK, they aren't equal. */
2451 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2452 || TREE_TYPE (arg0) == error_mark_node
2453 || TREE_TYPE (arg1) == error_mark_node)
2456 /* Similar, if either does not have a type (like a released SSA name),
2457 they aren't equal. */
2458 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2461 /* Check equality of integer constants before bailing out due to
2462 precision differences. */
2463 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2464 return tree_int_cst_equal (arg0, arg1);
2466 /* If both types don't have the same signedness, then we can't consider
2467 them equal. We must check this before the STRIP_NOPS calls
2468 because they may change the signedness of the arguments. As pointers
2469 strictly don't have a signedness, require either two pointers or
2470 two non-pointers as well. */
2471 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2472 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2475 /* We cannot consider pointers to different address space equal. */
2476 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2477 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2478 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2481 /* If both types don't have the same precision, then it is not safe
2483 if (element_precision (TREE_TYPE (arg0))
2484 != element_precision (TREE_TYPE (arg1)))
2490 /* In case both args are comparisons but with different comparison
2491 code, try to swap the comparison operands of one arg to produce
2492 a match and compare that variant. */
2493 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2494 && COMPARISON_CLASS_P (arg0)
2495 && COMPARISON_CLASS_P (arg1))
2497 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2499 if (TREE_CODE (arg0) == swap_code)
2500 return operand_equal_p (TREE_OPERAND (arg0, 0),
2501 TREE_OPERAND (arg1, 1), flags)
2502 && operand_equal_p (TREE_OPERAND (arg0, 1),
2503 TREE_OPERAND (arg1, 0), flags);
2506 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2507 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2508 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2511 /* This is needed for conversions and for COMPONENT_REF.
2512 Might as well play it safe and always test this. */
2513 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2514 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2515 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2518 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2519 We don't care about side effects in that case because the SAVE_EXPR
2520 takes care of that for us. In all other cases, two expressions are
2521 equal if they have no side effects. If we have two identical
2522 expressions with side effects that should be treated the same due
2523 to the only side effects being identical SAVE_EXPR's, that will
2524 be detected in the recursive calls below.
2525 If we are taking an invariant address of two identical objects
2526 they are necessarily equal as well. */
2527 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2528 && (TREE_CODE (arg0) == SAVE_EXPR
2529 || (flags & OEP_CONSTANT_ADDRESS_OF)
2530 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2533 /* Next handle constant cases, those for which we can return 1 even
2534 if ONLY_CONST is set. */
2535 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2536 switch (TREE_CODE (arg0))
2539 return tree_int_cst_equal (arg0, arg1);
2542 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2543 TREE_FIXED_CST (arg1));
2546 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2547 TREE_REAL_CST (arg1)))
2551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2553 /* If we do not distinguish between signed and unsigned zero,
2554 consider them equal. */
2555 if (real_zerop (arg0) && real_zerop (arg1))
2564 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2567 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2569 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2570 VECTOR_CST_ELT (arg1, i), flags))
2577 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2579 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2583 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2584 && ! memcmp (TREE_STRING_POINTER (arg0),
2585 TREE_STRING_POINTER (arg1),
2586 TREE_STRING_LENGTH (arg0)));
2589 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2590 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2591 ? OEP_CONSTANT_ADDRESS_OF : 0);
2596 if (flags & OEP_ONLY_CONST)
2599 /* Define macros to test an operand from arg0 and arg1 for equality and a
2600 variant that allows null and views null as being different from any
2601 non-null value. In the latter case, if either is null, the both
2602 must be; otherwise, do the normal comparison. */
2603 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2604 TREE_OPERAND (arg1, N), flags)
2606 #define OP_SAME_WITH_NULL(N) \
2607 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2608 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2610 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2613 /* Two conversions are equal only if signedness and modes match. */
2614 switch (TREE_CODE (arg0))
2617 case FIX_TRUNC_EXPR:
2618 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2619 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2629 case tcc_comparison:
2631 if (OP_SAME (0) && OP_SAME (1))
2634 /* For commutative ops, allow the other order. */
2635 return (commutative_tree_code (TREE_CODE (arg0))
2636 && operand_equal_p (TREE_OPERAND (arg0, 0),
2637 TREE_OPERAND (arg1, 1), flags)
2638 && operand_equal_p (TREE_OPERAND (arg0, 1),
2639 TREE_OPERAND (arg1, 0), flags));
2642 /* If either of the pointer (or reference) expressions we are
2643 dereferencing contain a side effect, these cannot be equal,
2644 but their addresses can be. */
2645 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2646 && (TREE_SIDE_EFFECTS (arg0)
2647 || TREE_SIDE_EFFECTS (arg1)))
2650 switch (TREE_CODE (arg0))
2653 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2660 case TARGET_MEM_REF:
2661 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2662 /* Require equal extra operands and then fall through to MEM_REF
2663 handling of the two common operands. */
2664 if (!OP_SAME_WITH_NULL (2)
2665 || !OP_SAME_WITH_NULL (3)
2666 || !OP_SAME_WITH_NULL (4))
2670 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2671 /* Require equal access sizes, and similar pointer types.
2672 We can have incomplete types for array references of
2673 variable-sized arrays from the Fortran frontend
2674 though. Also verify the types are compatible. */
2675 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2676 || (TYPE_SIZE (TREE_TYPE (arg0))
2677 && TYPE_SIZE (TREE_TYPE (arg1))
2678 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2679 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2680 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2681 && alias_ptr_types_compatible_p
2682 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2683 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2684 && OP_SAME (0) && OP_SAME (1));
2687 case ARRAY_RANGE_REF:
2688 /* Operands 2 and 3 may be null.
2689 Compare the array index by value if it is constant first as we
2690 may have different types but same value here. */
2693 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2694 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2695 TREE_OPERAND (arg1, 1))
2697 && OP_SAME_WITH_NULL (2)
2698 && OP_SAME_WITH_NULL (3));
2701 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2702 may be NULL when we're called to compare MEM_EXPRs. */
2703 if (!OP_SAME_WITH_NULL (0)
2706 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2707 return OP_SAME_WITH_NULL (2);
2712 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2713 return OP_SAME (1) && OP_SAME (2);
2719 case tcc_expression:
2720 switch (TREE_CODE (arg0))
2723 case TRUTH_NOT_EXPR:
2726 case TRUTH_ANDIF_EXPR:
2727 case TRUTH_ORIF_EXPR:
2728 return OP_SAME (0) && OP_SAME (1);
2731 case WIDEN_MULT_PLUS_EXPR:
2732 case WIDEN_MULT_MINUS_EXPR:
2735 /* The multiplication operands are commutative. */
2738 case TRUTH_AND_EXPR:
2740 case TRUTH_XOR_EXPR:
2741 if (OP_SAME (0) && OP_SAME (1))
2744 /* Otherwise take into account this is a commutative operation. */
2745 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2746 TREE_OPERAND (arg1, 1), flags)
2747 && operand_equal_p (TREE_OPERAND (arg0, 1),
2748 TREE_OPERAND (arg1, 0), flags));
2753 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2760 switch (TREE_CODE (arg0))
2763 /* If the CALL_EXPRs call different functions, then they
2764 clearly can not be equal. */
2765 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2770 unsigned int cef = call_expr_flags (arg0);
2771 if (flags & OEP_PURE_SAME)
2772 cef &= ECF_CONST | ECF_PURE;
2779 /* Now see if all the arguments are the same. */
2781 const_call_expr_arg_iterator iter0, iter1;
2783 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2784 a1 = first_const_call_expr_arg (arg1, &iter1);
2786 a0 = next_const_call_expr_arg (&iter0),
2787 a1 = next_const_call_expr_arg (&iter1))
2788 if (! operand_equal_p (a0, a1, flags))
2791 /* If we get here and both argument lists are exhausted
2792 then the CALL_EXPRs are equal. */
2793 return ! (a0 || a1);
2799 case tcc_declaration:
2800 /* Consider __builtin_sqrt equal to sqrt. */
2801 return (TREE_CODE (arg0) == FUNCTION_DECL
2802 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2803 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2804 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2811 #undef OP_SAME_WITH_NULL
2814 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2815 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2817 When in doubt, return 0. */
/* NOTE(review): this excerpt appears to have lines elided (return type,
   some braces and returns are not visible); verify the exact control flow
   against the full fold-const.c before editing. */
2820 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2822 int unsignedp1, unsignedpo;
2823 tree primarg0, primarg1, primother;
2824 unsigned int correct_width;
/* Trivially equal operands need no further analysis. */
2826 if (operand_equal_p (arg0, arg1, 0))
/* The precision/signedness reasoning below only applies to integral types. */
2829 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2830 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2833 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2834 and see if the inner values are the same. This removes any
2835 signedness comparison, which doesn't matter here. */
2836 primarg0 = arg0, primarg1 = arg1;
2837 STRIP_NOPS (primarg0);
2838 STRIP_NOPS (primarg1);
2839 if (operand_equal_p (primarg0, primarg1, 0))
2842 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2843 actual comparison operand, ARG0.
2845 First throw away any conversions to wider types
2846 already present in the operands. */
2848 primarg1 = get_narrower (arg1, &unsignedp1)
2849 primother = get_narrower (other, &unsignedpo);
2851 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2852 if (unsignedp1 == unsignedpo
2853 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2854 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2856 tree type = TREE_TYPE (arg0);
2858 /* Make sure shorter operand is extended the right way
2859 to match the longer operand. */
2860 primarg1 = fold_convert (signed_or_unsigned_type_for
2861 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2863 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2870 /* See if ARG is an expression that is either a comparison or is performing
2871 arithmetic on comparisons. The comparisons must only be comparing
2872 two different values, which will be stored in *CVAL1 and *CVAL2; if
2873 they are nonzero it means that some operands have already been found.
2874 No variables may be used anywhere else in the expression except in the
2875 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2876 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2878 If this is true, return 1. Otherwise, return zero. */
2881 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2883 enum tree_code code = TREE_CODE (arg);
2884 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2886 /* We can handle some of the tcc_expression cases here. */
2887 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
/* Treat short-circuit logical ops and COMPOUND_EXPR like plain binaries:
   both operands are recursed into below. */
2889 else if (tclass == tcc_expression
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2891 || code == COMPOUND_EXPR))
2892 tclass = tcc_binary;
2894 else if (tclass == tcc_expression && code == SAVE_EXPR
2895 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2897 /* If we've already found a CVAL1 or CVAL2, this expression is
2898 too complex to handle. */
2899 if (*cval1 || *cval2)
2909 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2912 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2913 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2914 cval1, cval2, save_p));
2919 case tcc_expression:
2920 if (code == COND_EXPR)
2921 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2922 cval1, cval2, save_p)
2923 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2924 cval1, cval2, save_p)
2925 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2926 cval1, cval2, save_p));
2929 case tcc_comparison:
2930 /* First see if we can handle the first operand, then the second. For
2931 the second operand, we know *CVAL1 can't be zero. It must be that
2932 one side of the comparison is each of the values; test for the
2933 case where this isn't true by failing if the two operands
2936 if (operand_equal_p (TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as CVAL1 or CVAL2 (or confirm it matches one). */
2941 *cval1 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2944 else if (*cval2 == 0)
2945 *cval2 = TREE_OPERAND (arg, 0);
2946 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Same bookkeeping for operand 1. */
2951 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2953 else if (*cval2 == 0)
2954 *cval2 = TREE_OPERAND (arg, 1);
2955 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2967 /* ARG is a tree that is known to contain just arithmetic operations and
2968 comparisons. Evaluate the operations in the tree substituting NEW0 for
2969 any occurrence of OLD0 as an operand of a comparison and likewise for
/* (continuation elided in this excerpt: the same substitution is done for
   OLD1/NEW1.) Returns the rebuilt, folded tree. */
2973 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2974 tree old1, tree new1)
2976 tree type = TREE_TYPE (arg);
2977 enum tree_code code = TREE_CODE (arg);
2978 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2980 /* We can handle some of the tcc_expression cases here. */
2981 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2983 else if (tclass == tcc_expression
2984 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2985 tclass = tcc_binary;
/* Unary: rebuild with the substituted operand. */
2990 return fold_build1_loc (loc, code, type,
2991 eval_subst (loc, TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1));
/* Binary: recurse into both operands. */
2995 return fold_build2_loc (loc, code, type,
2996 eval_subst (loc, TREE_OPERAND (arg, 0),
2997 old0, new0, old1, new1),
2998 eval_subst (loc, TREE_OPERAND (arg, 1),
2999 old0, new0, old1, new1));
3001 case tcc_expression:
3005 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3009 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3013 return fold_build3_loc (loc, code, type,
3014 eval_subst (loc, TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1),
3016 eval_subst (loc, TREE_OPERAND (arg, 1),
3017 old0, new0, old1, new1),
3018 eval_subst (loc, TREE_OPERAND (arg, 2),
3019 old0, new0, old1, new1));
3023 /* Fall through - ??? */
3025 case tcc_comparison:
3027 tree arg0 = TREE_OPERAND (arg, 0);
3028 tree arg1 = TREE_OPERAND (arg, 1);
3030 /* We need to check both for exact equality and tree equality. The
3031 former will be true if the operand has a side-effect. In that
3032 case, we know the operand occurred exactly once. */
3034 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3036 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3039 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3041 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3044 return fold_build2_loc (loc, code, type, arg0, arg1);
3052 /* Return a tree for the case when the result of an expression is RESULT
3053 converted to TYPE and OMITTED was previously an operand of the expression
3054 but is now not needed (e.g., we folded OMITTED * 0).
3056 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3057 the conversion of RESULT to TYPE. */
3060 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3062 tree t = fold_convert_loc (loc, type, result);
3064 /* If the resulting operand is an empty statement, just return the omitted
3065 statement casted to void. */
3066 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3067 return build1_loc (loc, NOP_EXPR, void_type_node,
3068 fold_ignored_result (omitted));
/* Preserve OMITTED's side effects by sequencing it before T. */
3070 if (TREE_SIDE_EFFECTS (omitted))
3071 return build2_loc (loc, COMPOUND_EXPR, type,
3072 fold_ignored_result (omitted), t);
3074 return non_lvalue_loc (loc, t);
3077 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3080 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3083 tree t = fold_convert_loc (loc, type, result);
3085 /* If the resulting operand is an empty statement, just return the omitted
3086 statement casted to void. */
3087 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3088 return build1_loc (loc, NOP_EXPR, void_type_node,
3089 fold_ignored_result (omitted));
/* As in omit_one_operand_loc: keep OMITTED's side effects via COMPOUND_EXPR. */
3091 if (TREE_SIDE_EFFECTS (omitted))
3092 return build2_loc (loc, COMPOUND_EXPR, type,
3093 fold_ignored_result (omitted), t);
3095 return pedantic_non_lvalue_loc (loc, t);
3098 /* Return a tree for the case when the result of an expression is RESULT
3099 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3100 of the expression but are now not needed.
3102 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3103 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3104 evaluated before OMITTED2. Otherwise, if neither has side effects,
3105 just do the conversion of RESULT to TYPE. */
3108 omit_two_operands_loc (location_t loc, tree type, tree result,
3109 tree omitted1, tree omitted2)
3111 tree t = fold_convert_loc (loc, type, result);
/* Wrap OMITTED2 innermost, then OMITTED1, so OMITTED1 evaluates first. */
3113 if (TREE_SIDE_EFFECTS (omitted2))
3114 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3115 if (TREE_SIDE_EFFECTS (omitted1))
3116 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
/* Only strip lvalue-ness when no COMPOUND_EXPR wrapper was added. */
3118 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3122 /* Return a simplified tree node for the truth-negation of ARG. This
3123 never alters ARG itself. We assume that ARG is an operation that
3124 returns a truth value (0 or 1).
3126 FIXME: one would think we would fold the result, but it causes
3127 problems with the dominator optimizer. */
/* NOTE(review): several case labels and returns are elided in this excerpt;
   consult the full fold-const.c for the complete switch. */
3130 fold_truth_not_expr (location_t loc, tree arg)
3132 tree type = TREE_TYPE (arg);
3133 enum tree_code code = TREE_CODE (arg);
3134 location_t loc1, loc2;
3136 /* If this is a comparison, we can simply invert it, except for
3137 floating-point non-equality comparisons, in which case we just
3138 enclose a TRUTH_NOT_EXPR around what we have. */
3140 if (TREE_CODE_CLASS (code) == tcc_comparison)
3142 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3143 if (FLOAT_TYPE_P (op_type)
3144 && flag_trapping_math
3145 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3146 && code != NE_EXPR && code != EQ_EXPR)
3149 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3150 if (code == ERROR_MARK)
3153 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3154 TREE_OPERAND (arg, 1));
3160 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(A & B) => !A | !B. */
3162 case TRUTH_AND_EXPR:
3163 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3164 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3165 return build2_loc (loc, TRUTH_OR_EXPR, type,
3166 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3167 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
/* De Morgan: !(A | B) => !A & !B. */
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_AND_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3176 case TRUTH_XOR_EXPR:
3177 /* Here we can invert either operand. We invert the first operand
3178 unless the second operand is a TRUTH_NOT_EXPR in which case our
3179 result is the XOR of the first operand with the inside of the
3180 negation of the second operand. */
3182 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3183 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3184 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3186 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3187 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3188 TREE_OPERAND (arg, 1));
/* Short-circuit De Morgan: !(A && B) => !A || !B. */
3190 case TRUTH_ANDIF_EXPR:
3191 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3192 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3193 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3194 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3195 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
/* Short-circuit De Morgan: !(A || B) => !A && !B. */
3197 case TRUTH_ORIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
/* Double negation cancels. */
3204 case TRUTH_NOT_EXPR:
3205 return TREE_OPERAND (arg, 0);
3209 tree arg1 = TREE_OPERAND (arg, 1);
3210 tree arg2 = TREE_OPERAND (arg, 2);
3212 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3213 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3215 /* A COND_EXPR may have a throw as one operand, which
3216 then has void type. Just leave void operands
3218 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3219 VOID_TYPE_P (TREE_TYPE (arg1))
3220 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3221 VOID_TYPE_P (TREE_TYPE (arg2))
3222 ? arg2 : invert_truthvalue_loc (loc2, arg2));
/* (A, B) -> (A, !B): only the value operand is negated. */
3226 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3227 return build2_loc (loc, COMPOUND_EXPR, type,
3228 TREE_OPERAND (arg, 0),
3229 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3231 case NON_LVALUE_EXPR:
3232 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3233 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3236 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3237 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3239 /* ... fall through ... */
/* Push the negation through the (unary) wrapper node. */
3242 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3243 return build1_loc (loc, TREE_CODE (arg), type,
3244 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
/* Only (X & 1) can be negated as an equality test against zero. */
3247 if (!integer_onep (TREE_OPERAND (arg, 1)))
3249 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3252 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3254 case CLEANUP_POINT_EXPR:
3255 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3256 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3257 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3264 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3265 assume that ARG is an operation that returns a truth value (0 or 1
3266 for scalars, 0 or -1 for vectors). Return the folded expression if
3267 folding is successful. Otherwise, return NULL_TREE. */
3270 fold_invert_truthvalue (location_t loc, tree arg)
3272 tree type = TREE_TYPE (arg);
/* Vectors use bitwise NOT; the scalar tree code is elided in this excerpt
   (presumably TRUTH_NOT_EXPR — confirm against the full source). */
3273 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3279 /* Return a simplified tree node for the truth-negation of ARG. This
3280 never alters ARG itself. We assume that ARG is an operation that
3281 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3284 invert_truthvalue_loc (location_t loc, tree arg)
/* Propagate error nodes unchanged. */
3286 if (TREE_CODE (arg) == ERROR_MARK)
3289 tree type = TREE_TYPE (arg);
/* Unlike fold_invert_truthvalue, always builds a node (fold_build1_loc). */
3290 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3296 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3297 operands are another bit-wise operation with a common input. If so,
3298 distribute the bit operations to save an operation and possibly two if
3299 constants are involved. For example, convert
3300 (A | B) & (A | C) into A | (B & C)
3301 Further simplification will occur if B and C are constants.
3303 If this optimization cannot be done, 0 will be returned. */
3306 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3307 tree arg0, tree arg1)
/* Both operands must be the same kind of AND/IOR, and distinct from CODE
   (otherwise there is nothing to distribute). */
3312 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3313 || TREE_CODE (arg0) == code
3314 || (TREE_CODE (arg0) != BIT_AND_EXPR
3315 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; it may appear on either side of each tree. */
3318 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3320 common = TREE_OPERAND (arg0, 0);
3321 left = TREE_OPERAND (arg0, 1);
3322 right = TREE_OPERAND (arg1, 1);
3324 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3326 common = TREE_OPERAND (arg0, 0);
3327 left = TREE_OPERAND (arg0, 1);
3328 right = TREE_OPERAND (arg1, 0);
3330 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3332 common = TREE_OPERAND (arg0, 1);
3333 left = TREE_OPERAND (arg0, 0);
3334 right = TREE_OPERAND (arg1, 1);
3336 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3338 common = TREE_OPERAND (arg0, 1);
3339 left = TREE_OPERAND (arg0, 0);
3340 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op' (LEFT code RIGHT) in the requested TYPE. */
3345 common = fold_convert_loc (loc, type, common);
3346 left = fold_convert_loc (loc, type, left);
3347 right = fold_convert_loc (loc, type, right);
3348 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3349 fold_build2_loc (loc, code, type, left, right));
3352 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3353 with code CODE. This optimization is unsafe (it can change rounding). */
3355 distribute_real_division (location_t loc, enum tree_code code, tree type,
3356 tree arg0, tree arg1)
3358 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3359 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3361 /* (A / C) +- (B / C) -> (A +- B) / C. */
3363 && operand_equal_p (TREE_OPERAND (arg0, 1),
3364 TREE_OPERAND (arg1, 1), 0))
3365 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3366 fold_build2_loc (loc, code, type,
3367 TREE_OPERAND (arg0, 0),
3368 TREE_OPERAND (arg1, 0)),
3369 TREE_OPERAND (arg0, 1));
3371 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3372 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3373 TREE_OPERAND (arg1, 0), 0)
3374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3375 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3377 REAL_VALUE_TYPE r0, r1;
3378 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3379 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Compute (1/C1) CODE (1/C2) at compile time into r0. */
3381 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3383 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3384 real_arithmetic (&r0, code, &r0, &r1);
3385 return fold_build2_loc (loc, MULT_EXPR, type,
3386 TREE_OPERAND (arg0, 0),
3387 build_real (type, r0));
3393 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3394 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3397 make_bit_field_ref (location_t loc, tree inner, tree type,
3398 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3400 tree result, bftype;
/* If the requested bits cover the whole integral/pointer object, a plain
   conversion suffices — no BIT_FIELD_REF needed. */
3404 tree size = TYPE_SIZE (TREE_TYPE (inner));
3405 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3406 || POINTER_TYPE_P (TREE_TYPE (inner)))
3407 && tree_fits_shwi_p (size)
3408 && tree_to_shwi (size) == bitsize)
3409 return fold_convert_loc (loc, type, inner);
/* Otherwise pick an integer type of exactly BITSIZE bits with the
   requested signedness for the reference itself. */
3413 if (TYPE_PRECISION (bftype) != bitsize
3414 || TYPE_UNSIGNED (bftype) == !unsignedp)
3415 bftype = build_nonstandard_integer_type (bitsize, 0);
3417 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3418 size_int (bitsize), bitsize_int (bitpos));
3421 result = fold_convert_loc (loc, type, result);
3426 /* Optimize a bit-field compare.
3428 There are two cases: First is a compare against a constant and the
3429 second is a comparison of two items where the fields are at the same
3430 bit position relative to the start of a chunk (byte, halfword, word)
3431 large enough to contain it. In these cases we can avoid the shift
3432 implicit in bitfield extractions.
3434 For constants, we emit a compare of the shifted constant with the
3435 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3436 compared. For two fields at the same position, we do the ANDs with the
3437 similar mask and compare the result of the ANDs.
3439 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3440 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3441 are the left and right operands of the comparison, respectively.
3443 If the optimization described above can be done, we return the resulting
3444 tree. Otherwise we return zero. */
3447 optimize_bit_field_compare (location_t loc, enum tree_code code,
3448 tree compare_type, tree lhs, tree rhs)
3450 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3451 tree type = TREE_TYPE (lhs);
3453 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3454 machine_mode lmode, rmode, nmode;
3455 int lunsignedp, runsignedp;
3456 int lvolatilep = 0, rvolatilep = 0;
3457 tree linner, rinner = NULL_TREE;
3461 /* Get all the information about the extractions being done. If the bit size
3462 is the same as the size of the underlying object, we aren't doing an
3463 extraction at all and so can do nothing. We also don't want to
3464 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3465 then will no longer be able to replace it. */
3466 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3467 &lunsignedp, &lvolatilep, false);
3468 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3469 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3474 /* If this is not a constant, we can only do something if bit positions,
3475 sizes, and signedness are the same. */
3476 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3477 &runsignedp, &rvolatilep, false);
3479 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3480 || lunsignedp != runsignedp || offset != 0
3481 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3485 /* See if we can find a mode to refer to this field. We should be able to,
3486 but fail if we can't. */
3487 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3488 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3489 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3490 TYPE_ALIGN (TREE_TYPE (rinner))),
3492 if (nmode == VOIDmode)
3495 /* Set signed and unsigned types of the precision of this mode for the
3497 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3499 /* Compute the bit position and size for the new reference and our offset
3500 within it. If the new reference is the same size as the original, we
3501 won't optimize anything, so return zero. */
3502 nbitsize = GET_MODE_BITSIZE (nmode);
3503 nbitpos = lbitpos & ~ (nbitsize - 1);
3505 if (nbitsize == lbitsize)
3508 if (BYTES_BIG_ENDIAN)
3509 lbitpos = nbitsize - lbitsize - lbitpos;
3511 /* Make the mask to be used against the extracted field: all-ones shifted
3512 so that exactly the LBITSIZE bits at LBITPOS are set. */
3512 mask = build_int_cst_type (unsigned_type, -1);
3513 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3514 mask = const_binop (RSHIFT_EXPR, mask,
3515 size_int (nbitsize - lbitsize - lbitpos));
3518 /* If not comparing with constant, just rework the comparison
3520 return fold_build2_loc (loc, code, compare_type,
3521 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3522 make_bit_field_ref (loc, linner,
3527 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3528 make_bit_field_ref (loc, rinner,
3534 /* Otherwise, we are handling the constant case. See if the constant is too
3535 big for the field. Warn and return a tree for 0 (false) if so. We do
3536 this not only for its own sake, but to avoid having to test for this
3537 error case below. If we didn't, we might generate wrong code.
3539 For unsigned fields, the constant shifted right by the field length should
3540 be all zero. For signed fields, the high-order bits should agree with
3545 if (wi::lrshift (rhs, lbitsize) != 0)
3547 warning (0, "comparison is always %d due to width of bit-field",
3549 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: all bits above the sign bit must be a sign extension. */
3554 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3555 if (tem != 0 && tem != -1)
3557 warning (0, "comparison is always %d due to width of bit-field",
3559 return constant_boolean_node (code == NE_EXPR, compare_type);
3563 /* Single-bit compares should always be against zero. */
3564 if (lbitsize == 1 && ! integer_zerop (rhs))
3566 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3567 rhs = build_int_cst (type, 0);
3570 /* Make a new bitfield reference, shift the constant over the
3571 appropriate number of bits and mask it with the computed mask
3572 (in case this was a signed field). If we changed it, make a new one. */
3573 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3575 rhs = const_binop (BIT_AND_EXPR,
3576 const_binop (LSHIFT_EXPR,
3577 fold_convert_loc (loc, unsigned_type, rhs),
3578 size_int (lbitpos)),
3581 lhs = build2_loc (loc, code, compare_type,
3582 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3586 /* Subroutine for fold_truth_andor_1: decode a field reference.
3588 If EXP is a comparison reference, we return the innermost reference.
3590 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3591 set to the starting bit number.
3593 If the innermost field can be completely contained in a mode-sized
3594 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3596 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3597 otherwise it is not changed.
3599 *PUNSIGNEDP is set to the signedness of the field.
3601 *PMASK is set to the mask used. This is either contained in a
3602 BIT_AND_EXPR or derived from the width of the field.
3604 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3606 Return 0 if this is not a component reference or is one that we can't
3607 do anything with. */
3610 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3611 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3612 int *punsignedp, int *pvolatilep,
3613 tree *pmask, tree *pand_mask)
3615 tree outer_type = 0;
3617 tree mask, inner, offset;
3619 unsigned int precision;
3621 /* All the optimizations using this function assume integer fields.
3622 There are problems with FP fields since the type_for_size call
3623 below can fail for, e.g., XFmode. */
3624 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3627 /* We are interested in the bare arrangement of bits, so strip everything
3628 that doesn't affect the machine mode. However, record the type of the
3629 outermost expression if it may matter below. */
3630 if (CONVERT_EXPR_P (exp)
3631 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3632 outer_type = TREE_TYPE (exp);
/* Peel a constant BIT_AND mask off the expression, remembering it. */
3635 if (TREE_CODE (exp) == BIT_AND_EXPR)
3637 and_mask = TREE_OPERAND (exp, 1);
3638 exp = TREE_OPERAND (exp, 0);
3639 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3640 if (TREE_CODE (and_mask) != INTEGER_CST)
3644 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3645 punsignedp, pvolatilep, false);
3646 if ((inner == exp && and_mask == 0)
3647 || *pbitsize < 0 || offset != 0
3648 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3651 /* If the number of bits in the reference is the same as the bitsize of
3652 the outer type, then the outer type gives the signedness. Otherwise
3653 (in case of a small bitfield) the signedness is unchanged. */
3654 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3655 *punsignedp = TYPE_UNSIGNED (outer_type);
3657 /* Compute the mask to access the bitfield. */
3658 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3659 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value narrowed to exactly *PBITSIZE low-order bits. */
3661 mask = build_int_cst_type (unsigned_type, -1);
3663 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3664 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3666 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3668 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3669 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3672 *pand_mask = and_mask;
3676 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3677 bit positions and MASK is SIGNED. */
3680 all_ones_mask_p (const_tree mask, unsigned int size)
3682 tree type = TREE_TYPE (mask);
3683 unsigned int precision = TYPE_PRECISION (type);
3685 /* If this function returns true when the type of the mask is
3686 UNSIGNED, then there will be errors. In particular see
3687 gcc.c-torture/execute/990326-1.c. There does not appear to be
3688 any documentation paper trail as to why this is so. But the pre
3689 wide-int worked with that restriction and it has been preserved
3691 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
/* Compare MASK against a low-order mask of SIZE ones at this precision. */
3694 return wi::mask (size, false, precision) == mask;
3697 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3698 represents the sign bit of EXP's type. If EXP represents a sign
3699 or zero extension, also test VAL against the unextended type.
3700 The return value is the (sub)expression whose sign bit is VAL,
3701 or NULL_TREE otherwise. */
3704 sign_bit_p (tree exp, const_tree val)
3709   /* Tree EXP must have an integral type.  */
3710   t = TREE_TYPE (exp);
3711   if (! INTEGRAL_TYPE_P (t))
3714   /* Tree VAL must be an integer constant.  */
3715   if (TREE_CODE (val) != INTEGER_CST
3716       || TREE_OVERFLOW (val))
  /* Check whether VAL is exactly the sign bit at EXP's full precision.  */
3719   width = TYPE_PRECISION (t);
3720   if (wi::only_sign_bit_p (val, width))
3723   /* Handle extension from a narrower type.  */
  /* If EXP is a widening conversion, VAL may instead be the sign bit of
     the unextended operand; recurse on it.  */
3724   if (TREE_CODE (exp) == NOP_EXPR
3725       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3726     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3731 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3732 to be evaluated unconditionally. */
3735 simple_operand_p (const_tree exp)
3737   /* Strip any conversions that don't change the machine mode.  */
  /* Constants are trivially safe to evaluate unconditionally, and SSA
     names are side-effect free to re-read.  */
3740   return (CONSTANT_CLASS_P (exp)
3741 	  || TREE_CODE (exp) == SSA_NAME
  /* Otherwise require a plain, non-volatile, non-addressable local
     decl -- anything whose load cannot trap or be observed.  */
3743 	      && ! TREE_ADDRESSABLE (exp)
3744 	      && ! TREE_THIS_VOLATILE (exp)
3745 	      && ! DECL_NONLOCAL (exp)
3746 	      /* Don't regard global variables as simple.  They may be
3747 		 allocated in ways unknown to the compiler (shared memory,
3748 		 #pragma weak, etc).  */
3749 	      && ! TREE_PUBLIC (exp)
3750 	      && ! DECL_EXTERNAL (exp)
3751 	      /* Weakrefs are not safe to be read, since they can be NULL.
3752 		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3753 		 have DECL_WEAK flag set.  */
3754 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3755 	      /* Loading a static variable is unduly expensive, but global
3756 		 registers aren't expensive.  */
3757 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3760 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3761 to be evaluated unconditionally.
3762 I addition to simple_operand_p, we assume that comparisons, conversions,
3763 and logic-not operations are simple, if their operands are simple, too. */
3766 simple_operand_p_2 (tree exp)
3768   enum tree_code code;
  /* Anything with side effects or that may trap is never simple.  */
3770   if (TREE_SIDE_EFFECTS (exp)
3771       || tree_could_trap_p (exp))
  /* Look through conversions to the underlying operand.  */
3774   while (CONVERT_EXPR_P (exp))
3775     exp = TREE_OPERAND (exp, 0);
3777   code = TREE_CODE (exp);
  /* A comparison is simple iff both of its operands are simple.  */
3779   if (TREE_CODE_CLASS (code) == tcc_comparison)
3780     return (simple_operand_p (TREE_OPERAND (exp, 0))
3781 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
  /* Logical negation is simple iff its operand is (recursively).  */
3783   if (code == TRUTH_NOT_EXPR)
3784     return simple_operand_p_2 (TREE_OPERAND (exp, 0));
  /* Everything else falls back to the stricter simple_operand_p.  */
3786   return simple_operand_p (exp);
3790 /* The following functions are subroutines to fold_range_test and allow it to
3791 try to change a logical combination of comparisons into a range test.
3794 X == 2 || X == 3 || X == 4 || X == 5
3798 (unsigned) (X - 2) <= 3
3800 We describe each set of comparisons as being either inside or outside
3801 a range, using a variable named like IN_P, and then describe the
3802 range with a lower and upper bound. If one of the bounds is omitted,
3803 it represents either the highest or lowest value of the type.
3805 In the comments below, we represent a range by two numbers in brackets
3806 preceded by a "+" to designate being inside that range, or a "-" to
3807 designate being outside that range, so the condition can be inverted by
3808 flipping the prefix. An omitted bound is represented by a "-". For
3809 example, "- [-, 10]" means being outside the range starting at the lowest
3810 possible value and ending at 10, in other words, being greater than 10.
3811 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3814 We set up things so that the missing bounds are handled in a consistent
3815 manner so neither a missing bound nor "true" and "false" need to be
3816 handled using a special case. */
3818 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3819 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3820 and UPPER1_P are nonzero if the respective argument is an upper bound
3821 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3822 must be specified for a comparison. ARG1 will be converted to ARG0's
3823 type if both are specified. */
3826 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3827 	     tree arg1, int upper1_p)
3833   /* If neither arg represents infinity, do the normal operation.
3834      Else, if not a comparison, return infinity.  Else handle the special
3835      comparison rules.  Note that most of the cases below won't occur, but
3836      are handled for consistency.  */
3838   if (arg0 != 0 && arg1 != 0)
3840       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3841 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
  /* Only a fully-folded constant result is useful to callers.  */
3843       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3846   if (TREE_CODE_CLASS (code) != tcc_comparison)
3849   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3850      for neither.  In real maths, we cannot assume open ended ranges are
3851      the same. But, this is computer arithmetic, where numbers are finite.
3852      We can therefore make the transformation of any unbounded range with
3853      the value Z, Z being greater than any representable number. This permits
3854      us to treat unbounded ranges as equal.  */
3855   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3856   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  /* Dispatch on the comparison code; with omitted bounds mapped to the
     signs above, comparing the signs gives the comparison result.  */
3860       result = sgn0 == sgn1;
3863       result = sgn0 != sgn1;
3866       result = sgn0 < sgn1;
3869       result = sgn0 <= sgn1;
3872       result = sgn0 > sgn1;
3875       result = sgn0 >= sgn1;
  /* Package the boolean RESULT as a constant node of TYPE.  */
3881   return constant_boolean_node (result, type);
3884 /* Helper routine for make_range. Perform one step for it, return
3885 new expression if the loop should continue or NULL_TREE if it should
3889 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3890 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3891 		 bool *strict_overflow_p)
3893   tree arg0_type = TREE_TYPE (arg0);
3894   tree n_low, n_high, low = *p_low, high = *p_high;
3895   int in_p = *p_in_p, n_in_p;
  /* One refinement step: adjust (*P_LOW, *P_HIGH, *P_IN_P) for the
     expression CODE applied to ARG0 (and ARG1) of type EXP_TYPE, and
     return the sub-expression make_range should continue with, or
     NULL_TREE to stop.  *STRICT_OVERFLOW_P is set when a transform
     relies on signed overflow being undefined.  */
3899     case TRUTH_NOT_EXPR:
3900       /* We can only do something if the range is testing for zero.  */
3901       if (low == NULL_TREE || high == NULL_TREE
3902 	  || ! integer_zerop (low) || ! integer_zerop (high))
3907     case EQ_EXPR: case NE_EXPR:
3908     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3909       /* We can only do something if the range is testing for zero
3910 	 and if the second operand is an integer constant.  Note that
3911 	 saying something is "in" the range we make is done by
3912 	 complementing IN_P since it will set in the initial case of
3913 	 being not equal to zero; "out" is leaving it alone.  */
3914       if (low == NULL_TREE || high == NULL_TREE
3915 	  || ! integer_zerop (low) || ! integer_zerop (high)
3916 	  || TREE_CODE (arg1) != INTEGER_CST)
3921 	case NE_EXPR:  /* - [c, c]  */
3924 	case EQ_EXPR:  /* + [c, c]  */
3925 	  in_p = ! in_p, low = high = arg1;
3927 	case GT_EXPR:  /* - [-, c] */
3928 	  low = 0, high = arg1;
3930 	case GE_EXPR:  /* + [c, -] */
3931 	  in_p = ! in_p, low = arg1, high = 0;
3933 	case LT_EXPR:  /* - [c, -] */
3934 	  low = arg1, high = 0;
3936 	case LE_EXPR:  /* + [-, c] */
3937 	  in_p = ! in_p, low = 0, high = arg1;
3943       /* If this is an unsigned comparison, we also know that EXP is
3944 	 greater than or equal to zero.  We base the range tests we make
3945 	 on that fact, so we record it here so we can parse existing
3946 	 range tests.  We test arg0_type since often the return type
3947 	 of, e.g. EQ_EXPR, is boolean.  */
3948       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3950 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3952 			      build_int_cst (arg0_type, 0),
3956 	  in_p = n_in_p, low = n_low, high = n_high;
3958 	  /* If the high bound is missing, but we have a nonzero low
3959 	     bound, reverse the range so it goes from zero to the low bound
3961 	  if (high == 0 && low && ! integer_zerop (low))
  /* For unsigned X, "X >= low" == "X <= low - 1" complemented.  */
3964 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3965 				  build_int_cst (TREE_TYPE (low), 1), 0);
3966 	      low = build_int_cst (arg0_type, 0);
3976       /* If flag_wrapv and ARG0_TYPE is signed, make sure
3977 	 low and high are non-NULL, then normalize will DTRT.  */
3978       if (!TYPE_UNSIGNED (arg0_type)
3979 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3981 	  if (low == NULL_TREE)
3982 	    low = TYPE_MIN_VALUE (arg0_type);
3983 	  if (high == NULL_TREE)
3984 	    high = TYPE_MAX_VALUE (arg0_type);
3987       /* (-x) IN [a,b] -> x in [-b, -a]  */
3988       n_low = range_binop (MINUS_EXPR, exp_type,
3989 			   build_int_cst (exp_type, 0),
3991       n_high = range_binop (MINUS_EXPR, exp_type,
3992 			    build_int_cst (exp_type, 0),
3994       if (n_high != 0 && TREE_OVERFLOW (n_high))
  /* ~X is equivalent to -X - 1; rewrite and let folding recurse.  */
4000       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4001 			 build_int_cst (exp_type, 1));
4005       if (TREE_CODE (arg1) != INTEGER_CST)
4008       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4009 	 move a constant to the other side.  */
4010       if (!TYPE_UNSIGNED (arg0_type)
4011 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4014       /* If EXP is signed, any overflow in the computation is undefined,
4015 	 so we don't worry about it so long as our computations on
4016 	 the bounds don't overflow.  For unsigned, overflow is defined
4017 	 and this is exactly the right thing.  */
4018       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4019 			   arg0_type, low, 0, arg1, 0);
4020       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4021 			    arg0_type, high, 1, arg1, 0);
4022       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4023 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4026       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4027 	*strict_overflow_p = true;
4030       /* Check for an unsigned range which has wrapped around the maximum
4031 	 value thus making n_high < n_low, and normalize it.  */
4032       if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
  /* Invert the wrapped range: complement IN_P and use the gap
     (n_high + 1, n_low - 1) as the new bounds.  */
4034 	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4035 			     build_int_cst (TREE_TYPE (n_high), 1), 0);
4036 	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4037 			      build_int_cst (TREE_TYPE (n_low), 1), 0);
4039 	  /* If the range is of the form +/- [ x+1, x ], we won't
4040 	     be able to normalize it.  But then, it represents the
4041 	     whole range or the empty set, so make it
4043 	  if (tree_int_cst_equal (n_low, low)
4044 	      && tree_int_cst_equal (n_high, high))
4050 	low = n_low, high = n_high;
4058     case NON_LVALUE_EXPR:
  /* Conversions: only handle non-narrowing integral conversions where
     both bounds fit in the inner type.  */
4059       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4062       if (! INTEGRAL_TYPE_P (arg0_type)
4063 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4064 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4067       n_low = low, n_high = high;
4070 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4073 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4075       /* If we're converting arg0 from an unsigned type, to exp,
4076 	 a signed type,  we will be doing the comparison as unsigned.
4077 	 The tests above have already verified that LOW and HIGH
4080 	 So we have to ensure that we will handle large unsigned
4081 	 values the same way that the current signed bounds treat
4084       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4088 	  /* For fixed-point modes, we need to pass the saturating flag
4089 	     as the 2nd parameter.  */
4090 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4092 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4093 						TYPE_SATURATING (arg0_type));
4096 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4098 	  /* A range without an upper bound is, naturally, unbounded.
4099 	     Since convert would have cropped a very large value, use
4100 	     the max value for the destination type.  */
4102 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4103 	      : TYPE_MAX_VALUE (arg0_type);
4105 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
  /* Halving the max value yields the largest value that stays
     non-negative when reinterpreted as signed.  */
4106 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4107 				 fold_convert_loc (loc, arg0_type,
4109 				 build_int_cst (arg0_type, 1));
4111 	  /* If the low bound is specified, "and" the range with the
4112 	     range for which the original unsigned value will be
4116 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4117 				  1, fold_convert_loc (loc, arg0_type,
4122 	      in_p = (n_in_p == in_p);
4126 	      /* Otherwise, "or" the range with the range of the input
4127 		 that will be interpreted as negative.  */
4128 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4129 				  1, fold_convert_loc (loc, arg0_type,
4134 	      in_p = (in_p != n_in_p);
4148 /* Given EXP, a logical expression, set the range it is testing into
4149 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4150 actually being tested. *PLOW and *PHIGH will be made of the same
4151 type as the returned expression. If EXP is not a comparison, we
4152 will most likely not be returning a useful value and range. Set
4153 *STRICT_OVERFLOW_P to true if the return value is only valid
4154 because signed overflow is undefined; otherwise, do not change
4155 *STRICT_OVERFLOW_P. */
4158 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4159 	    bool *strict_overflow_p)
4161   enum tree_code code;
4162   tree arg0, arg1 = NULL_TREE;
4163   tree exp_type, nexp;
4166   location_t loc = EXPR_LOCATION (exp);
4168   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4169      and see if we can refine the range.  Some of the cases below may not
4170      happen, but it doesn't seem worth worrying about this.  We "continue"
4171      the outer loop when we've changed something; otherwise we "break"
4172      the switch, which will "break" the while.  */
  /* Initial range is the single point [0, 0], with IN_P inverted so the
     test reads "EXP != 0".  */
4175   low = high = build_int_cst (TREE_TYPE (exp), 0);
4179       code = TREE_CODE (exp);
4180       exp_type = TREE_TYPE (exp);
  /* Pick up one or two operands depending on the expression class so
     make_range_step can refine the range.  */
4183       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4185 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4186 	    arg0 = TREE_OPERAND (exp, 0);
4187 	  if (TREE_CODE_CLASS (code) == tcc_binary
4188 	      || TREE_CODE_CLASS (code) == tcc_comparison
4189 	      || (TREE_CODE_CLASS (code) == tcc_expression
4190 		  && TREE_OPERAND_LENGTH (exp) > 1))
4191 	    arg1 = TREE_OPERAND (exp, 1);
4193       if (arg0 == NULL_TREE)
  /* NULL result from make_range_step means no further refinement.  */
4196       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4197 			      &high, &in_p, strict_overflow_p);
4198       if (nexp == NULL_TREE)
4203   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4204   if (TREE_CODE (exp) == INTEGER_CST)
  /* Membership of a constant: low <= EXP && EXP <= high.  */
4206       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4208 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4214   *pin_p = in_p, *plow = low, *phigh = high;
4218 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4219 type, TYPE, return an expression to test if EXP is in (or out of, depending
4220 on IN_P) the range. Return 0 if the test couldn't be created. */
4223 build_range_check (location_t loc, tree type, tree exp, int in_p,
4224 		   tree low, tree high)
4226   tree etype = TREE_TYPE (exp), value;
4228 #ifdef HAVE_canonicalize_funcptr_for_compare
4229   /* Disable this optimization for function pointer expressions
4230      on targets that require function pointer canonicalization.  */
4231   if (HAVE_canonicalize_funcptr_for_compare
4232       && TREE_CODE (etype) == POINTER_TYPE
4233       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
  /* An "out of range" test is built as the inversion of the
     corresponding "in range" test.  */
4239       value = build_range_check (loc, type, exp, 1, low, high);
4241 	return invert_truthvalue_loc (loc, value);
  /* No bounds at all: the check is trivially true (but keep EXP for its
     side effects).  */
4246   if (low == 0 && high == 0)
4247     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
  /* One-sided ranges degenerate to a single comparison.  */
4250     return fold_build2_loc (loc, LE_EXPR, type, exp,
4251 			    fold_convert_loc (loc, etype, high));
4254     return fold_build2_loc (loc, GE_EXPR, type, exp,
4255 			    fold_convert_loc (loc, etype, low));
  /* A single-point range is an equality test.  */
4257   if (operand_equal_p (low, high, 0))
4258     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4259 			    fold_convert_loc (loc, etype, low));
4261   if (integer_zerop (low))
  /* [0, high] needs only the upper comparison once EXP is unsigned.  */
4263       if (! TYPE_UNSIGNED (etype))
4265 	  etype = unsigned_type_for (etype);
4266 	  high = fold_convert_loc (loc, etype, high);
4267 	  exp = fold_convert_loc (loc, etype, exp);
4269       return build_range_check (loc, type, exp, 1, 0, high);
4272   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4273   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4275       int prec = TYPE_PRECISION (etype);
  /* HIGH must be the signed maximum of the precision (all ones below
     the sign bit) for the sign test to be equivalent.  */
4277       if (wi::mask (prec - 1, false, prec) == high)
4279 	  if (TYPE_UNSIGNED (etype))
4281 	      tree signed_etype = signed_type_for (etype);
4282 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4284 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4286 		etype = signed_etype;
4287 	      exp = fold_convert_loc (loc, etype, exp);
4289 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4290 				  build_int_cst (etype, 0));
4294   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295      This requires wrap-around arithmetics for the type of the expression.
4296      First make sure that arithmetics in this type is valid, then make sure
4297      that it wraps around.  */
4298   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4299     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4300 					    TYPE_UNSIGNED (etype));
4302   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4304       tree utype, minv, maxv;
4306       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4307 	 for the type in question, as we rely on this here.  */
4308       utype = unsigned_type_for (etype);
4309       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4310       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4311 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4312       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4314       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
  /* Perform the shifted comparison in ETYPE (now known to wrap).  */
4321   high = fold_convert_loc (loc, etype, high);
4322   low = fold_convert_loc (loc, etype, low);
4323   exp = fold_convert_loc (loc, etype, exp);
4325   value = const_binop (MINUS_EXPR, high, low);
4328   if (POINTER_TYPE_P (etype))
  /* Pointers use pointer-plus with a negated offset instead of MINUS.  */
4330       if (value != 0 && !TREE_OVERFLOW (value))
4332 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4333 	  return build_range_check (loc, type,
4334 				    fold_build_pointer_plus_loc (loc, exp, low),
4335 				    1, build_int_cst (etype, 0), value);
4340   if (value != 0 && !TREE_OVERFLOW (value))
4341     return build_range_check (loc, type,
4342 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4343 			      1, build_int_cst (etype, 0), value);
4348 /* Return the predecessor of VAL in its type, handling the infinite case. */
4351 range_predecessor (tree val)
4353   tree type = TREE_TYPE (val);
  /* The minimum of an integral type has no predecessor; that is the
     "infinite" case the caller must handle.  */
4355   if (INTEGRAL_TYPE_P (type)
4356       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
  /* Otherwise simply compute VAL - 1 via range_binop.  */
4359     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4360 			build_int_cst (TREE_TYPE (val), 1), 0);
4363 /* Return the successor of VAL in its type, handling the infinite case. */
4366 range_successor (tree val)
4368   tree type = TREE_TYPE (val);
  /* The maximum of an integral type has no successor; that is the
     "infinite" case the caller must handle.  */
4370   if (INTEGRAL_TYPE_P (type)
4371       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
  /* Otherwise simply compute VAL + 1 via range_binop.  */
4374     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4375 			build_int_cst (TREE_TYPE (val), 1), 0);
4378 /* Given two ranges, see if we can merge them into one. Return 1 if we
4379 can, 0 if we can't. Set the output range into the specified parameters. */
4382 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4383 	      tree high0, int in1_p, tree low1, tree high1)
  /* Compare the bounds; a missing bound (0) equals another missing bound
     of the same kind by the unbounded-range convention of range_binop.  */
4391   int lowequal = ((low0 == 0 && low1 == 0)
4392 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 						low0, 0, low1, 0)));
4394   int highequal = ((high0 == 0 && high1 == 0)
4395 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4396 						 high0, 1, high1, 1)));
4398   /* Make range 0 be the range that starts first, or ends last if they
4399      start at the same value.  Swap them if it isn't.  */
4400   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4403 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4404 					high1, 1, high0, 1))))
4406       temp = in0_p, in0_p = in1_p, in1_p = temp;
4407       tem = low0, low0 = low1, low1 = tem;
4408       tem = high0, high0 = high1, high1 = tem;
4411   /* Now flag two cases, whether the ranges are disjoint or whether the
4412      second range is totally subsumed in the first.  Note that the tests
4413      below are simplified by the ones above.  */
4414   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4415 					  high0, 1, low1, 0));
4416   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 				      high1, 1, high0, 1));
4419   /* We now have four cases, depending on whether we are including or
4420      excluding the two ranges.  */
4423       /* If they don't overlap, the result is false.  If the second range
4424 	 is a subset it is the result.  Otherwise, the range is from the start
4425 	 of the second to the end of the first.  */
4427 	in_p = 0, low = high = 0;
4429 	in_p = 1, low = low1, high = high1;
4431 	in_p = 1, low = low1, high = high0;
4434   else if (in0_p && ! in1_p)
4436       /* If they don't overlap, the result is the first range.  If they are
4437 	 equal, the result is false.  If the second range is a subset of the
4438 	 first, and the ranges begin at the same place, we go from just after
4439 	 the end of the second range to the end of the first.  If the second
4440 	 range is not a subset of the first, or if it is a subset and both
4441 	 ranges end at the same place, the range starts at the start of the
4442 	 first range and ends just before the second range.
4443 	 Otherwise, we can't describe this as a single range.  */
4445 	in_p = 1, low = low0, high = high0;
4446       else if (lowequal && highequal)
4447 	in_p = 0, low = high = 0;
4448       else if (subset && lowequal)
4450 	  low = range_successor (high1);
4455 	      /* We are in the weird situation where high0 > high1 but
4456 		 high1 has no successor.  Punt.  */
4460       else if (! subset || highequal)
4463 	  high = range_predecessor (low1);
4467 	    /* low0 < low1 but low1 has no predecessor.  Punt.  */
4475   else if (! in0_p && in1_p)
4477       /* If they don't overlap, the result is the second range.  If the second
4478 	 is a subset of the first, the result is false.  Otherwise,
4479 	 the range starts just after the first range and ends at the
4480 	 end of the second.  */
4482 	in_p = 1, low = low1, high = high1;
4483       else if (subset || highequal)
4484 	in_p = 0, low = high = 0;
4487 	  low = range_successor (high0);
4492 	    /* high1 > high0 but high0 has no successor.  Punt.  */
4500       /* The case where we are excluding both ranges.  Here the complex case
4501 	 is if they don't overlap.  In that case, the only time we have a
4502 	 range is if they are adjacent.  If the second is a subset of the
4503 	 first, the result is the first.  Otherwise, the range to exclude
4504 	 starts at the beginning of the first range and ends at the end of the
  /* Adjacency test: high0 + 1 == low1 means the two excluded ranges
     fuse into a single excluded range.  */
4508 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4509 					 range_successor (high0),
4511 	    in_p = 0, low = low0, high = high1;
4514 	      /* Canonicalize - [min, x] into - [-, x].  */
4515 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4516 		switch (TREE_CODE (TREE_TYPE (low0)))
4519 		    if (TYPE_PRECISION (TREE_TYPE (low0))
4520 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4524 		    if (tree_int_cst_equal (low0,
4525 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4529 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4530 			&& integer_zerop (low0))
4537 	      /* Canonicalize - [x, max] into - [x, -].  */
4538 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4539 		switch (TREE_CODE (TREE_TYPE (high1)))
4542 		    if (TYPE_PRECISION (TREE_TYPE (high1))
4543 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4547 		    if (tree_int_cst_equal (high1,
4548 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4552 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4553 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4555 						       build_int_cst (TREE_TYPE (high1), 1),
4563 	      /* The ranges might be also adjacent between the maximum and
4564 	         minimum values of the given type.  For
4565 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566 	         return + [x + 1, y - 1].  */
4567 	      if (low0 == 0 && high1 == 0)
4569 		  low = range_successor (high0);
4570 		  high = range_predecessor (low1);
4571 		  if (low == 0 || high == 0)
4581 	in_p = 0, low = low0, high = high0;
4583 	in_p = 0, low = low0, high = high1;
4586   *pin_p = in_p, *plow = low, *phigh = high;
4591 /* Subroutine of fold, looking inside expressions of the form
4592 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593 of the COND_EXPR. This function is being used also to optimize
4594 A op B ? C : A, by reversing the comparison first.
4596 Return a folded expression whose code is not a COND_EXPR
4597 anymore, or NULL_TREE if no folding opportunity is found. */
4600 fold_cond_expr_with_comparison (location_t loc, tree type,
4601 				tree arg0, tree arg1, tree arg2)
4603   enum tree_code comp_code = TREE_CODE (arg0);
4604   tree arg00 = TREE_OPERAND (arg0, 0);
4605   tree arg01 = TREE_OPERAND (arg0, 1);
4606   tree arg1_type = TREE_TYPE (arg1);
4612   /* If we have A op 0 ? A : -A, consider applying the following
4615      A == 0? A : -A    same as -A
4616      A != 0? A : -A    same as A
4617      A >= 0? A : -A    same as abs (A)
4618      A > 0?  A : -A    same as abs (A)
4619      A <= 0? A : -A    same as -abs (A)
4620      A < 0?  A : -A    same as -abs (A)
4622      None of these transformations work for modes with signed
4623      zeros.  If A is +/-0, the first two transformations will
4624      change the sign of the result (from +0 to -0, or vice
4625      versa).  The last four will fix the sign of the result,
4626      even though the original expressions could be positive or
4627      negative, depending on the sign of A.
4629      Note that all these transformations are correct if A is
4630      NaN, since the two alternatives (A and -A) are also NaNs.  */
4631   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4633 	  ? real_zerop (arg01)
4634 	  : integer_zerop (arg01))
4635       && ((TREE_CODE (arg2) == NEGATE_EXPR
4636 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4637 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4638 	        have already been folded to Y-X, check for that. */
4639 	  || (TREE_CODE (arg1) == MINUS_EXPR
4640 	      && TREE_CODE (arg2) == MINUS_EXPR
4641 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4642 				  TREE_OPERAND (arg2, 1), 0)
4643 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4644 				  TREE_OPERAND (arg2, 0), 0))))
4649 	tem = fold_convert_loc (loc, arg1_type, arg1);
4650 	return pedantic_non_lvalue_loc (loc,
4651 					fold_convert_loc (loc, type,
4652 							  negate_expr (tem)));
4655 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4658 	if (flag_trapping_math)
  /* ABS_EXPR needs a signed operand; convert an unsigned A first.  */
4663 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4664 	  arg1 = fold_convert_loc (loc, signed_type_for
4665 			       (TREE_TYPE (arg1)), arg1);
4666 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4667 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4670 	if (flag_trapping_math)
4674 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4675 	  arg1 = fold_convert_loc (loc, signed_type_for
4676 			       (TREE_TYPE (arg1)), arg1);
4677 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4678 	return negate_expr (fold_convert_loc (loc, type, tem));
4680 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4684   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4685      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4686      both transformations are correct when A is NaN: A != 0
4687      is then true, and A == 0 is false.  */
4689   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4690       && integer_zerop (arg01) && integer_zerop (arg2))
4692       if (comp_code == NE_EXPR)
4693 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4694       else if (comp_code == EQ_EXPR)
4695 	return build_zero_cst (type);
4698   /* Try some transformations of A op B ? A : B.
4700      A == B? A : B    same as B
4701      A != B? A : B    same as A
4702      A >= B? A : B    same as max (A, B)
4703      A > B?  A : B    same as max (B, A)
4704      A <= B? A : B    same as min (A, B)
4705      A < B?  A : B    same as min (B, A)
4707      As above, these transformations don't work in the presence
4708      of signed zeros.  For example, if A and B are zeros of
4709      opposite sign, the first two transformations will change
4710      the sign of the result.  In the last four, the original
4711      expressions give different results for (A=+0, B=-0) and
4712      (A=-0, B=+0), but the transformed expressions do not.
4714      The first two transformations are correct if either A or B
4715      is a NaN.  In the first transformation, the condition will
4716      be false, and B will indeed be chosen.  In the case of the
4717      second transformation, the condition A != B will be true,
4718      and A will be chosen.
4720      The conversions to max() and min() are not correct if B is
4721      a number and A is not.  The conditions in the original
4722      expressions will be false, so all four give B.  The min()
4723      and max() versions would give a NaN instead.  */
4724   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4725       && operand_equal_for_comparison_p (arg01, arg2, arg00)
4726       /* Avoid these transformations if the COND_EXPR may be used
4727 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4729 	  || VECTOR_TYPE_P (type)
4730 	  || (strcmp (lang_hooks.name, "GNU C++") != 0
4731 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4732 	  || ! maybe_lvalue_p (arg1)
4733 	  || ! maybe_lvalue_p (arg2)))
4735       tree comp_op0 = arg00;
4736       tree comp_op1 = arg01;
4737       tree comp_type = TREE_TYPE (comp_op0);
4739       /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4740       if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4750 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4752 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4757 	  /* In C++ a ?: expression can be an lvalue, so put the
4758 	     operand which will be used if they are equal first
4759 	     so that we can convert this back to the
4760 	     corresponding COND_EXPR.  */
  /* min/max transforms are only safe when NaNs cannot occur.  */
4761 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4763 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4764 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4765 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4766 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4767 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
4768 				   comp_op1, comp_op0);
4769 	      return pedantic_non_lvalue_loc (loc,
4770 					  fold_convert_loc (loc, type, tem));
4777 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4779 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4780 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4781 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4782 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4783 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
4784 				   comp_op1, comp_op0);
4785 	      return pedantic_non_lvalue_loc (loc,
4786 					  fold_convert_loc (loc, type, tem));
4790 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4791 	    return pedantic_non_lvalue_loc (loc,
4792 					fold_convert_loc (loc, type, arg2));
4795 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4796 	    return pedantic_non_lvalue_loc (loc,
4797 					fold_convert_loc (loc, type, arg1));
4800 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4805   /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4806      we might still be able to simplify this.  For example,
4807      if C1 is one less or one more than C2, this might have started
4808      out as a MIN or MAX and been transformed by this function.
4809      Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4811   if (INTEGRAL_TYPE_P (type)
4812       && TREE_CODE (arg01) == INTEGER_CST
4813       && TREE_CODE (arg2) == INTEGER_CST)
4817 	if (TREE_CODE (arg1) == INTEGER_CST)
4819 	    /* We can replace A with C1 in this case.  */
4820 	    arg1 = fold_convert_loc (loc, type, arg01);
4821 	    return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4824 	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4825 	   MIN_EXPR, to preserve the signedness of the comparison.  */
4826 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4828 	    && operand_equal_p (arg01,
4829 				const_binop (PLUS_EXPR, arg2,
4830 					     build_int_cst (type, 1)),
4833 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4834 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4836 	    return pedantic_non_lvalue_loc (loc,
4837 					    fold_convert_loc (loc, type, tem));
4842 	/* If C1 is C2 - 1, this is min(A, C2), with the same care
4844 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4846 	    && operand_equal_p (arg01,
4847 				const_binop (MINUS_EXPR, arg2,
4848 					     build_int_cst (type, 1)),
4851 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4852 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4854 	    return pedantic_non_lvalue_loc (loc,
4855 					    fold_convert_loc (loc, type, tem));
4860 	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4861 	   MAX_EXPR, to preserve the signedness of the comparison.  */
4862 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4864 	    && operand_equal_p (arg01,
4865 				const_binop (MINUS_EXPR, arg2,
4866 					     build_int_cst (type, 1)),
4869 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4870 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4872 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4877 	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
4878 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4880 	    && operand_equal_p (arg01,
4881 				const_binop (PLUS_EXPR, arg2,
4882 					     build_int_cst (type, 1)),
4885 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4886 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4888 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4902 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4903 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4904 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4908 /* EXP is some logical combination of boolean tests. See if we can
4909 merge it into some range test. Return the new tree if so. */
/* NOTE(review): this view of the file is sampled -- the function's return
   type line, several branch bodies and closing braces are not visible;
   confirm against the full source before relying on control flow here.  */
4912 fold_range_test (location_t loc, enum tree_code code, tree type,
/* For OR-like codes we invert both ranges below and invert the merged
   result at the end, so one merging path handles both AND and OR.  */
4915 int or_op = (code == TRUTH_ORIF_EXPR
4916 || code == TRUTH_OR_EXPR);
4917 int in0_p, in1_p, in_p;
4918 tree low0, low1, low, high0, high1, high;
4919 bool strict_overflow_p = false;
4921 const char * const warnmsg = G_("assuming signed overflow does not occur "
4922 "when simplifying range test");
/* Range tests only make sense when the result type is integral.  */
4924 if (!INTEGRAL_TYPE_P (type))
/* Decompose each operand into an "IN_P [LOW, HIGH]" range description;
   STRICT_OVERFLOW_P records any reliance on undefined signed overflow.  */
4927 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4928 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4930 /* If this is an OR operation, invert both sides; we will invert
4931 again at the end. */
4933 in0_p = ! in0_p, in1_p = ! in1_p;
4935 /* If both expressions are the same, if we can merge the ranges, and we
4936 can build the range test, return it or it inverted. If one of the
4937 ranges is always true or always false, consider it to be the same
4938 expression as the other. */
4939 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4940 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4942 && 0 != (tem = (build_range_check (loc, type,
4944 : rhs != 0 ? rhs : integer_zero_node,
/* Only emit the overflow-assumption warning when we actually used it.  */
4947 if (strict_overflow_p)
4948 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4949 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4952 /* On machines where the branch cost is expensive, if this is a
4953 short-circuited branch and the underlying object on both sides
4954 is the same, make a non-short-circuit operation. */
4955 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4956 && lhs != 0 && rhs != 0
4957 && (code == TRUTH_ANDIF_EXPR
4958 || code == TRUTH_ORIF_EXPR)
4959 && operand_equal_p (lhs, rhs, 0))
4961 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4962 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4963 which cases we can't do this. */
4964 if (simple_operand_p (lhs))
4965 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4966 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4969 else if (!lang_hooks.decls.global_bindings_p ()
4970 && !CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the shared operand in a SAVE_EXPR so the non-short-circuit
   rewrite evaluates it only once on both sides.  */
4972 tree common = save_expr (lhs);
4974 if (0 != (lhs = build_range_check (loc, type, common,
4975 or_op ? ! in0_p : in0_p,
4977 && (0 != (rhs = build_range_check (loc, type, common,
4978 or_op ? ! in1_p : in1_p,
4981 if (strict_overflow_p)
4982 fold_overflow_warning (warnmsg,
4983 WARN_STRICT_OVERFLOW_COMPARISON);
4984 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4985 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4995 bit value. Arrange things so the extra bits will be set to zero if and
4996 only if C is signed-extended to its full width. If MASK is nonzero,
4997 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5000 unextend (tree c, int p, int unsignedp, tree mask)
5002 tree type = TREE_TYPE (c);
5003 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* No adjustment needed when the P-bit field already fills the whole mode
   or is unsigned.  NOTE(review): the taken branch is not visible in this
   sampled view -- presumably it returns C unchanged; confirm.  */
5006 if (p == modesize || unsignedp)
5009 /* We work by getting just the sign bit into the low-order bit, then
5010 into the high-order bit, then sign-extend. We then XOR that value
/* Extract bit P-1 of C, i.e. its sign bit when viewed as a P-bit value.  */
5012 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5014 /* We must use a signed type in order to get an arithmetic right shift.
5015 However, we must also avoid introducing accidental overflows, so that
5016 a subsequent call to integer_zerop will work. Hence we must
5017 do the type conversion here. At this point, the constant is either
5018 zero or one, and the conversion to a signed type can never overflow.
5019 We could get an overflow if this conversion is done anywhere else. */
5020 if (TYPE_UNSIGNED (type))
5021 temp = fold_convert (signed_type_for (type), temp);
/* Shift the sign bit to the top of the mode, then arithmetic-shift it
   back down so it fills every bit position above P-1.  */
5023 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5024 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
/* When a MASK was supplied, confine the smeared bits to it.  */
5026 temp = const_binop (BIT_AND_EXPR, temp,
5027 fold_convert (TREE_TYPE (c), mask));
5028 /* If necessary, convert the type back to match the type of C. */
5029 if (TYPE_UNSIGNED (type))
5030 temp = fold_convert (type, temp);
5032 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5035 /* For an expression that has the form
5039 we can drop one of the inner expressions and simplify to
5043 LOC is the location of the resulting expression. OP is the inner
5044 logical operation; the left-hand side in the examples above, while CMPOP
5045 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5046 removing a condition that guards another, as in
5047 (A != NULL && A->...) || A == NULL
5048 which we must not transform. If RHS_ONLY is true, only eliminate the
5049 right-most operand of the inner logical operation. */
5052 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5055 tree type = TREE_TYPE (cmpop);
5056 enum tree_code code = TREE_CODE (cmpop);
5057 enum tree_code truthop_code = TREE_CODE (op);
5058 tree lhs = TREE_OPERAND (op, 0);
5059 tree rhs = TREE_OPERAND (op, 1);
/* Keep the originals so OP is rebuilt only when something changed.  */
5060 tree orig_lhs = lhs, orig_rhs = rhs;
5061 enum tree_code rhs_code = TREE_CODE (rhs);
5062 enum tree_code lhs_code = TREE_CODE (lhs);
5063 enum tree_code inv_code;
/* Dropping an operand would change observable behavior if either tree
   has side effects, so refuse in that case.  */
5065 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5068 if (TREE_CODE_CLASS (code) != tcc_comparison)
/* Recurse into a nested logical operation of the same kind on the RHS.  */
5071 if (rhs_code == truthop_code)
5073 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5074 if (newrhs != NULL_TREE)
5077 rhs_code = TREE_CODE (rhs);
/* Likewise on the LHS, but only when RHS_ONLY does not forbid it.  */
5080 if (lhs_code == truthop_code && !rhs_only)
5082 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5083 if (newlhs != NULL_TREE)
5086 lhs_code = TREE_CODE (lhs);
/* An operand that is exactly the inverse comparison of CMPOP (same two
   operands, inverted code) is redundant here and can be dropped.  */
5090 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5091 if (inv_code == rhs_code
5092 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5093 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095 if (!rhs_only && inv_code == lhs_code
5096 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5097 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
/* Rebuild OP only when one of its operands was simplified above.  */
5099 if (rhs != orig_rhs || lhs != orig_lhs)
5100 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5105 /* Find ways of folding logical expressions of LHS and RHS:
5106 Try to merge two comparisons to the same innermost item.
5107 Look for range tests like "ch >= '0' && ch <= '9'".
5108 Look for combinations of simple terms on machines with expensive branches
5109 and evaluate the RHS unconditionally.
5111 For example, if we have p->a == 2 && p->b == 4 and we can make an
5112 object large enough to span both A and B, we can do this with a comparison
5113 against the object ANDed with the a mask.
5115 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5116 operations to do this with one comparison.
5118 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5119 function and the one above.
5121 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5122 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5124 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5127 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): this view of the file is sampled; various lines (branch
   bodies, closing braces, some argument lists) are missing.  Verify any
   structural change against the full source.  */
5130 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5133 /* If this is the "or" of two comparisons, we can do something if
5134 the comparisons are NE_EXPR. If this is the "and", we can do something
5135 if the comparisons are EQ_EXPR. I.e.,
5136 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5138 WANTED_CODE is this operation code. For single bit fields, we can
5139 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5140 comparison for one-bit fields. */
5142 enum tree_code wanted_code;
5143 enum tree_code lcode, rcode;
5144 tree ll_arg, lr_arg, rl_arg, rr_arg;
5145 tree ll_inner, lr_inner, rl_inner, rr_inner;
5146 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5147 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5148 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5149 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5150 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5151 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5152 machine_mode lnmode, rnmode;
5153 tree ll_mask, lr_mask, rl_mask, rr_mask;
5154 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5155 tree l_const, r_const;
5156 tree lntype, rntype, result;
5157 HOST_WIDE_INT first_bit, end_bit;
5160 /* Start by getting the comparison codes. Fail if anything is volatile.
5161 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5162 it were surrounded with a NE_EXPR. */
5164 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5167 lcode = TREE_CODE (lhs);
5168 rcode = TREE_CODE (rhs);
/* Normalize (x & 1) to (x & 1) != 0 on either side.  */
5170 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5172 lhs = build2 (NE_EXPR, truth_type, lhs,
5173 build_int_cst (TREE_TYPE (lhs), 0));
5177 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5179 rhs = build2 (NE_EXPR, truth_type, rhs,
5180 build_int_cst (TREE_TYPE (rhs), 0));
5184 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5185 || TREE_CODE_CLASS (rcode) != tcc_comparison)
/* Naming scheme: first letter l/r = left/right comparison, second
   letter l/r = left/right operand of that comparison.  */
5188 ll_arg = TREE_OPERAND (lhs, 0);
5189 lr_arg = TREE_OPERAND (lhs, 1);
5190 rl_arg = TREE_OPERAND (rhs, 0);
5191 rr_arg = TREE_OPERAND (rhs, 1);
5193 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5194 if (simple_operand_p (ll_arg)
5195 && simple_operand_p (lr_arg))
5197 if (operand_equal_p (ll_arg, rl_arg, 0)
5198 && operand_equal_p (lr_arg, rr_arg, 0))
5200 result = combine_comparisons (loc, code, lcode, rcode,
5201 truth_type, ll_arg, lr_arg)
5205 else if (operand_equal_p (ll_arg, rr_arg, 0)
5206 && operand_equal_p (lr_arg, rl_arg, 0))
/* Operands matched crosswise; swap the right comparison's sense.  */
5208 result = combine_comparisons (loc, code, lcode,
5209 swap_tree_comparison (rcode),
5210 truth_type, ll_arg, lr_arg);
/* From here on treat the short-circuit codes like their plain forms.  */
5216 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5217 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5219 /* If the RHS can be evaluated unconditionally and its operands are
5220 simple, it wins to evaluate the RHS unconditionally on machines
5221 with expensive branches. In this case, this isn't a comparison
5222 that can be merged. */
5224 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5226 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5227 && simple_operand_p (rl_arg)
5228 && simple_operand_p (rr_arg))
5230 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5231 if (code == TRUTH_OR_EXPR
5232 && lcode == NE_EXPR && integer_zerop (lr_arg)
5233 && rcode == NE_EXPR && integer_zerop (rr_arg)
5234 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5235 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5236 return build2_loc (loc, NE_EXPR, truth_type,
5237 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5239 build_int_cst (TREE_TYPE (ll_arg), 0));
5241 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5242 if (code == TRUTH_AND_EXPR
5243 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5244 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5245 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5246 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5247 return build2_loc (loc, EQ_EXPR, truth_type,
5248 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5250 build_int_cst (TREE_TYPE (ll_arg), 0));
5253 /* See if the comparisons can be merged. Then get all the parameters for
/* Only equality/inequality comparisons can be merged bitwise.  */
5256 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5257 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each comparison operand into inner object, bit position,
   size, mode, signedness and masks.  */
5261 ll_inner = decode_field_reference (loc, ll_arg,
5262 &ll_bitsize, &ll_bitpos, &ll_mode,
5263 &ll_unsignedp, &volatilep, &ll_mask,
5265 lr_inner = decode_field_reference (loc, lr_arg,
5266 &lr_bitsize, &lr_bitpos, &lr_mode,
5267 &lr_unsignedp, &volatilep, &lr_mask,
5269 rl_inner = decode_field_reference (loc, rl_arg,
5270 &rl_bitsize, &rl_bitpos, &rl_mode,
5271 &rl_unsignedp, &volatilep, &rl_mask,
5273 rr_inner = decode_field_reference (loc, rr_arg,
5274 &rr_bitsize, &rr_bitpos, &rr_mode,
5275 &rr_unsignedp, &volatilep, &rr_mask,
5278 /* It must be true that the inner operation on the lhs of each
5279 comparison must be the same if we are to be able to do anything.
5280 Then see if we have constants. If not, the same must be true for
5282 if (volatilep || ll_inner == 0 || rl_inner == 0
5283 || ! operand_equal_p (ll_inner, rl_inner, 0))
5286 if (TREE_CODE (lr_arg) == INTEGER_CST
5287 && TREE_CODE (rr_arg) == INTEGER_CST)
5288 l_const = lr_arg, r_const = rr_arg;
5289 else if (lr_inner == 0 || rr_inner == 0
5290 || ! operand_equal_p (lr_inner, rr_inner, 0))
5293 l_const = r_const = 0;
5295 /* If either comparison code is not correct for our logical operation,
5296 fail. However, we can convert a one-bit comparison against zero into
5297 the opposite comparison against that bit being set in the field. */
5299 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5300 if (lcode != wanted_code)
5302 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5304 /* Make the left operand unsigned, since we are only interested
5305 in the value of one bit. Otherwise we are doing the wrong
5314 /* This is analogous to the code for l_const above. */
5315 if (rcode != wanted_code)
5317 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 /* See if we can find a mode that contains both fields being compared on
5327 the left. If we can't, fail. Otherwise, update all constants and masks
5328 to be relative to a field of that size. */
5329 first_bit = MIN (ll_bitpos, rl_bitpos);
5330 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5331 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5332 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5334 if (lnmode == VOIDmode)
5337 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the combined field's start down to the mode's size.  */
5338 lnbitpos = first_bit & ~ (lnbitsize - 1);
5339 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5340 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5342 if (BYTES_BIG_ENDIAN)
5344 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5345 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Re-base the per-field masks onto the wider combined field.  */
5348 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5349 size_int (xll_bitpos));
5350 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5351 size_int (xrl_bitpos));
5355 l_const = fold_convert_loc (loc, lntype, l_const);
5356 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5357 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
/* A constant with bits set outside its field can never match.  */
5358 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5359 fold_build1_loc (loc, BIT_NOT_EXPR,
5362 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5364 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5369 r_const = fold_convert_loc (loc, lntype, r_const);
5370 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5371 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5372 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5373 fold_build1_loc (loc, BIT_NOT_EXPR,
5376 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5378 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5382 /* If the right sides are not constant, do the same for it. Also,
5383 disallow this optimization if a size or signedness mismatch occurs
5384 between the left and right sides. */
5387 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5388 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5389 /* Make sure the two fields on the right
5390 correspond to the left without being swapped. */
5391 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
/* Find a mode covering both right-hand fields, mirroring the lhs.  */
5394 first_bit = MIN (lr_bitpos, rr_bitpos);
5395 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5396 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5397 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5399 if (rnmode == VOIDmode)
5402 rnbitsize = GET_MODE_BITSIZE (rnmode);
5403 rnbitpos = first_bit & ~ (rnbitsize - 1);
5404 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5405 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5407 if (BYTES_BIG_ENDIAN)
5409 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5410 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5413 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5415 size_int (xlr_bitpos));
5416 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 size_int (xrr_bitpos));
5420 /* Make a mask that corresponds to both fields being compared.
5421 Do this for both items being compared. If the operands are the
5422 same size and the bits being compared are in the same position
5423 then we can do this by masking both and comparing the masked
5425 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5426 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5427 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5429 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5430 ll_unsignedp || rl_unsignedp);
/* Skip the AND when the mask covers the whole field anyway.  */
5431 if (! all_ones_mask_p (ll_mask, lnbitsize))
5432 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5434 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5435 lr_unsignedp || rr_unsignedp);
5436 if (! all_ones_mask_p (lr_mask, rnbitsize))
5437 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5439 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5442 /* There is still another way we can do something: If both pairs of
5443 fields being compared are adjacent, we may be able to make a wider
5444 field containing them both.
5446 Note that we still must mask the lhs/rhs expressions. Furthermore,
5447 the mask must be shifted to account for the shift done by
5448 make_bit_field_ref. */
5449 if ((ll_bitsize + ll_bitpos == rl_bitpos
5450 && lr_bitsize + lr_bitpos == rr_bitpos)
5451 || (ll_bitpos == rl_bitpos + rl_bitsize
5452 && lr_bitpos == rr_bitpos + rr_bitsize))
5456 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5457 ll_bitsize + rl_bitsize,
5458 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5459 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5460 lr_bitsize + rr_bitsize,
5461 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5463 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5464 size_int (MIN (xll_bitpos, xrl_bitpos)));
5465 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5466 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5468 /* Convert to the smaller type before masking out unwanted bits. */
5470 if (lntype != rntype)
5472 if (lnbitsize > rnbitsize)
5474 lhs = fold_convert_loc (loc, rntype, lhs);
5475 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5478 else if (lnbitsize < rnbitsize)
5480 rhs = fold_convert_loc (loc, lntype, rhs);
5481 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5486 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5487 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5489 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5490 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5492 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5498 /* Handle the case of comparisons with constants. If there is something in
5499 common between the masks, those bits of the constants must be the same.
5500 If not, the condition is always false. Test for this to avoid generating
5501 incorrect code below. */
5502 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5503 if (! integer_zerop (result)
5504 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5505 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5507 if (wanted_code == NE_EXPR)
5509 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5510 return constant_boolean_node (true, truth_type);
5514 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5515 return constant_boolean_node (false, truth_type);
5519 /* Construct the expression we will return. First get the component
5520 reference we will make. Unless the mask is all ones the width of
5521 that field, perform the mask operation. Then compare with the
5523 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5524 ll_unsignedp || rl_unsignedp);
5526 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5527 if (! all_ones_mask_p (ll_mask, lnbitsize))
5528 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5530 return build2_loc (loc, wanted_code, truth_type, result,
5531 const_binop (BIT_IOR_EXPR, l_const, r_const));
5534 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): sampled view -- the rest of this header comment, the
   return type, and several case labels/braces are not visible here.  */
5538 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5542 enum tree_code op_code;
5545 int consts_equal, consts_lt;
5548 STRIP_SIGN_NOPS (arg0);
5550 op_code = TREE_CODE (arg0);
/* MINMAX_CONST is the constant arm of the MIN/MAX; COMP_CONST is the
   value being compared against, converted to the same type.  */
5551 minmax_const = TREE_OPERAND (arg0, 1);
5552 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5553 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5554 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5555 inner = TREE_OPERAND (arg0, 0);
5557 /* If something does not permit us to optimize, return the original tree. */
5558 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5559 || TREE_CODE (comp_const) != INTEGER_CST
5560 || TREE_OVERFLOW (comp_const)
5561 || TREE_CODE (minmax_const) != INTEGER_CST
5562 || TREE_OVERFLOW (minmax_const))
5565 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5566 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE are handled by inverting and recursing on the opposite
   comparison, then inverting the result.  */
5570 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5573 = optimize_minmax_comparison (loc,
5574 invert_tree_comparison (code, false),
5577 return invert_truthvalue_loc (loc, tem);
/* GE is decomposed as (== || >), each handled recursively.  */
5583 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5584 optimize_minmax_comparison
5585 (loc, EQ_EXPR, type, arg0, comp_const),
5586 optimize_minmax_comparison
5587 (loc, GT_EXPR, type, arg0, comp_const));
5590 if (op_code == MAX_EXPR && consts_equal)
5591 /* MAX (X, 0) == 0 -> X <= 0 */
5592 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5594 else if (op_code == MAX_EXPR && consts_lt)
5595 /* MAX (X, 0) == 5 -> X == 5 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) == -1 -> false */
5600 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5602 else if (consts_equal)
5603 /* MIN (X, 0) == 0 -> X >= 0 */
5604 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5607 /* MIN (X, 0) == 5 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5611 /* MIN (X, 0) == -1 -> X == -1 */
5612 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5615 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5616 /* MAX (X, 0) > 0 -> X > 0
5617 MAX (X, 0) > 5 -> X > 5 */
5618 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5620 else if (op_code == MAX_EXPR)
5621 /* MAX (X, 0) > -1 -> true */
5622 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5624 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5625 /* MIN (X, 0) > 0 -> false
5626 MIN (X, 0) > 5 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5630 /* MIN (X, 0) > -1 -> X > -1 */
5631 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5638 /* T is an integer expression that is being multiplied, divided, or taken a
5639 modulus (CODE says which and what kind of divide or modulus) by a
5640 constant C. See if we can eliminate that operation by folding it with
5641 other operations already in T. WIDE_TYPE, if non-null, is a type that
5642 should be used for the computation if wider than our type.
5644 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5645 (X * 2) + (Y * 4). We must, however, be assured that either the original
5646 expression would not overflow or that overflow is undefined for the type
5647 in the language in question.
5649 If we return a non-null expression, it is an equivalent form of the
5650 original computation, but need not be in the original type.
5652 We set *STRICT_OVERFLOW_P to true if the return values depends on
5653 signed overflow being undefined. Otherwise we do not change
5654 *STRICT_OVERFLOW_P. */
5657 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5660 /* To avoid exponential search depth, refuse to allow recursion past
5661 three levels. Beyond that (1) it's highly unlikely that we'll find
5662 something interesting and (2) we've probably processed it before
5663 when we built the inner expression. */
/* Depth-limited wrapper: the real work happens in extract_muldiv_1.
   NOTE(review): the depth counter increment/decrement and early-return
   lines are not visible in this sampled view -- confirm against the
   full source.  */
5672 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5679 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5680 bool *strict_overflow_p)
5682 tree type = TREE_TYPE (t);
5683 enum tree_code tcode = TREE_CODE (t);
5684 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5685 > GET_MODE_SIZE (TYPE_MODE (type)))
5686 ? wide_type : type);
5688 int same_p = tcode == code;
5689 tree op0 = NULL_TREE, op1 = NULL_TREE;
5690 bool sub_strict_overflow_p;
5692 /* Don't deal with constants of zero here; they confuse the code below. */
5693 if (integer_zerop (c))
5696 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5697 op0 = TREE_OPERAND (t, 0);
5699 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5700 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5702 /* Note that we need not handle conditional operations here since fold
5703 already handles those cases. So just do arithmetic here. */
5707 /* For a constant, we can always simplify if we are a multiply
5708 or (for divide and modulus) if it is a multiple of our constant. */
5709 if (code == MULT_EXPR
5710 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5711 return const_binop (code, fold_convert (ctype, t),
5712 fold_convert (ctype, c));
5715 CASE_CONVERT: case NON_LVALUE_EXPR:
5716 /* If op0 is an expression ... */
5717 if ((COMPARISON_CLASS_P (op0)
5718 || UNARY_CLASS_P (op0)
5719 || BINARY_CLASS_P (op0)
5720 || VL_EXP_CLASS_P (op0)
5721 || EXPRESSION_CLASS_P (op0))
5722 /* ... and has wrapping overflow, and its type is smaller
5723 than ctype, then we cannot pass through as widening. */
5724 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5725 && (TYPE_PRECISION (ctype)
5726 > TYPE_PRECISION (TREE_TYPE (op0))))
5727 /* ... or this is a truncation (t is narrower than op0),
5728 then we cannot pass through this narrowing. */
5729 || (TYPE_PRECISION (type)
5730 < TYPE_PRECISION (TREE_TYPE (op0)))
5731 /* ... or signedness changes for division or modulus,
5732 then we cannot pass through this conversion. */
5733 || (code != MULT_EXPR
5734 && (TYPE_UNSIGNED (ctype)
5735 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5736 /* ... or has undefined overflow while the converted to
5737 type has not, we cannot do the operation in the inner type
5738 as that would introduce undefined overflow. */
5739 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5740 && !TYPE_OVERFLOW_UNDEFINED (type))))
5743 /* Pass the constant down and see if we can make a simplification. If
5744 we can, replace this expression with the inner simplification for
5745 possible later conversion to our or some other type. */
5746 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5747 && TREE_CODE (t2) == INTEGER_CST
5748 && !TREE_OVERFLOW (t2)
5749 && (0 != (t1 = extract_muldiv (op0, t2, code,
5751 ? ctype : NULL_TREE,
5752 strict_overflow_p))))
5757 /* If widening the type changes it from signed to unsigned, then we
5758 must avoid building ABS_EXPR itself as unsigned. */
5759 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5761 tree cstype = (*signed_type_for) (ctype);
5762 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5765 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5766 return fold_convert (ctype, t1);
5770 /* If the constant is negative, we cannot simplify this. */
5771 if (tree_int_cst_sgn (c) == -1)
5775 /* For division and modulus, type can't be unsigned, as e.g.
5776 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5777 For signed types, even with wrapping overflow, this is fine. */
5778 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5780 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5782 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5785 case MIN_EXPR: case MAX_EXPR:
5786 /* If widening the type changes the signedness, then we can't perform
5787 this optimization as that changes the result. */
5788 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5791 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5792 sub_strict_overflow_p = false;
5793 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5794 &sub_strict_overflow_p)) != 0
5795 && (t2 = extract_muldiv (op1, c, code, wide_type,
5796 &sub_strict_overflow_p)) != 0)
5798 if (tree_int_cst_sgn (c) < 0)
5799 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5800 if (sub_strict_overflow_p)
5801 *strict_overflow_p = true;
5802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5803 fold_convert (ctype, t2));
5807 case LSHIFT_EXPR: case RSHIFT_EXPR:
5808 /* If the second operand is constant, this is a multiplication
5809 or floor division, by a power of two, so we can treat it that
5810 way unless the multiplier or divisor overflows. Signed
5811 left-shift overflow is implementation-defined rather than
5812 undefined in C90, so do not convert signed left shift into
5814 if (TREE_CODE (op1) == INTEGER_CST
5815 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5816 /* const_binop may not detect overflow correctly,
5817 so check for it explicitly here. */
5818 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5819 && 0 != (t1 = fold_convert (ctype,
5820 const_binop (LSHIFT_EXPR,
5823 && !TREE_OVERFLOW (t1))
5824 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5825 ? MULT_EXPR : FLOOR_DIV_EXPR,
5827 fold_convert (ctype, op0),
5829 c, code, wide_type, strict_overflow_p);
5832 case PLUS_EXPR: case MINUS_EXPR:
5833 /* See if we can eliminate the operation on both sides. If we can, we
5834 can return a new PLUS or MINUS. If we can't, the only remaining
5835 cases where we can do anything are if the second operand is a
5837 sub_strict_overflow_p = false;
5838 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5839 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5840 if (t1 != 0 && t2 != 0
5841 && (code == MULT_EXPR
5842 /* If not multiplication, we can only do this if both operands
5843 are divisible by c. */
5844 || (multiple_of_p (ctype, op0, c)
5845 && multiple_of_p (ctype, op1, c))))
5847 if (sub_strict_overflow_p)
5848 *strict_overflow_p = true;
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, t2));
5853 /* If this was a subtraction, negate OP1 and set it to be an addition.
5854 This simplifies the logic below. */
5855 if (tcode == MINUS_EXPR)
5857 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5858 /* If OP1 was not easily negatable, the constant may be OP0. */
5859 if (TREE_CODE (op0) == INTEGER_CST)
5870 if (TREE_CODE (op1) != INTEGER_CST)
5873 /* If either OP1 or C are negative, this optimization is not safe for
5874 some of the division and remainder types while for others we need
5875 to change the code. */
5876 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5878 if (code == CEIL_DIV_EXPR)
5879 code = FLOOR_DIV_EXPR;
5880 else if (code == FLOOR_DIV_EXPR)
5881 code = CEIL_DIV_EXPR;
5882 else if (code != MULT_EXPR
5883 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5887 /* If it's a multiply or a division/modulus operation of a multiple
5888 of our constant, do the operation and verify it doesn't overflow. */
5889 if (code == MULT_EXPR
5890 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5892 op1 = const_binop (code, fold_convert (ctype, op1),
5893 fold_convert (ctype, c));
5894 /* We allow the constant to overflow with wrapping semantics. */
5896 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5902 /* If we have an unsigned type, we cannot widen the operation since it
5903 will change the result if the original computation overflowed. */
5904 if (TYPE_UNSIGNED (ctype) && ctype != type)
5907 /* If we were able to eliminate our operation from the first side,
5908 apply our operation to the second side and reform the PLUS. */
5909 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5910 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5912 /* The last case is if we are a multiply. In that case, we can
5913 apply the distributive law to commute the multiply and addition
5914 if the multiplication of the constants doesn't overflow
5915 and overflow is defined. With undefined overflow
5916 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5917 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5918 return fold_build2 (tcode, ctype,
5919 fold_build2 (code, ctype,
5920 fold_convert (ctype, op0),
5921 fold_convert (ctype, c)),
5927 /* We have a special case here if we are doing something like
5928 (C * 8) % 4 since we know that's zero. */
5929 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5930 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5931 /* If the multiplication can overflow we cannot optimize this. */
5932 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5933 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5934 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5936 *strict_overflow_p = true;
5937 return omit_one_operand (type, integer_zero_node, op0);
5940 /* ... fall through ... */
5942 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5943 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5944 /* If we can extract our operation from the LHS, do so and return a
5945 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5946 do something only if the second operand is a constant. */
5948 && (t1 = extract_muldiv (op0, c, code, wide_type,
5949 strict_overflow_p)) != 0)
5950 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5951 fold_convert (ctype, op1));
5952 else if (tcode == MULT_EXPR && code == MULT_EXPR
5953 && (t1 = extract_muldiv (op1, c, code, wide_type,
5954 strict_overflow_p)) != 0)
5955 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5956 fold_convert (ctype, t1));
5957 else if (TREE_CODE (op1) != INTEGER_CST)
5960 /* If these are the same operation types, we can associate them
5961 assuming no overflow. */
5964 bool overflow_p = false;
5965 bool overflow_mul_p;
5966 signop sign = TYPE_SIGN (ctype);
5967 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5968 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5970 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5974 wide_int_to_tree (ctype, mul));
5977 /* If these operations "cancel" each other, we have the main
5978 optimizations of this pass, which occur when either constant is a
5979 multiple of the other, in which case we replace this with either an
5980 operation or CODE or TCODE.
5982 If we have an unsigned type, we cannot do this since it will change
5983 the result if the original computation overflowed. */
5984 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5985 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5986 || (tcode == MULT_EXPR
5987 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5988 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5989 && code != MULT_EXPR)))
5991 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5993 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5994 *strict_overflow_p = true;
5995 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5996 fold_convert (ctype,
5997 const_binop (TRUNC_DIV_EXPR,
6000 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6002 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6003 *strict_overflow_p = true;
6004 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype,
6006 const_binop (TRUNC_DIV_EXPR,
6019 /* Return a node which has the indicated constant VALUE (either 0 or
6020 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6021 and is of the indicated TYPE. */
6024 constant_boolean_node (bool value, tree type)
/* Reuse the globally shared 0/1 and true/false nodes for the two most
   common result types.  */
6026 if (type == integer_type_node)
6027 return value ? integer_one_node : integer_zero_node;
6028 else if (type == boolean_type_node)
6029 return value ? boolean_true_node : boolean_false_node;
6030 else if (TREE_CODE (type) == VECTOR_TYPE)
/* Vector "true" is all-ones per element (see the header comment:
   {-1,-1,...}); the -1/0 selector line is elided in this excerpt.  */
6031 return build_vector_from_val (type,
6032 build_int_cst (TREE_TYPE (type),
/* Any other scalar type: build 0/1 and convert it to TYPE.  */
6035 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6039 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6040 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6041 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6042 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6043 COND is the first argument to CODE; otherwise (as in the example
6044 given here), it is the second argument. TYPE is the type of the
6045 original expression. Return NULL_TREE if no simplification is
6049 fold_binary_op_with_conditional_arg (location_t loc,
6050 enum tree_code code,
6051 tree type, tree op0, tree op1,
6052 tree cond, tree arg, int cond_first_p)
/* COND_TYPE is the type of the conditional operand and ARG_TYPE the type
   of the invariant operand, as positioned by COND_FIRST_P.  */
6054 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6055 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6056 tree test, true_value, false_value;
6057 tree lhs = NULL_TREE;
6058 tree rhs = NULL_TREE;
6059 enum tree_code cond_code = COND_EXPR;
/* Split COND into its test and two arm values.  A bare comparison
   (handled below) acts as a conditional with constant boolean arms.  */
6061 if (TREE_CODE (cond) == COND_EXPR
6062 || TREE_CODE (cond) == VEC_COND_EXPR)
6064 test = TREE_OPERAND (cond, 0);
6065 true_value = TREE_OPERAND (cond, 1);
6066 false_value = TREE_OPERAND (cond, 2);
6067 /* If this operand throws an expression, then it does not make
6068 sense to try to perform a logical or arithmetic operation
6069 involving it; bail out for void-typed arms (the early returns
6070 are elided in this excerpt).  */
6070 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6072 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6077 tree testtype = TREE_TYPE (cond);
/* COND is itself the test; synthesize constant true/false arms.  */
6079 true_value = constant_boolean_node (true, testtype);
6080 false_value = constant_boolean_node (false, testtype);
/* Use a VEC_COND_EXPR when the test is a vector comparison.  */
6083 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6084 cond_code = VEC_COND_EXPR;
6086 /* This transformation is only worthwhile if we don't have to wrap ARG
6087 in a SAVE_EXPR and the operation can be simplified without recursing
6088 on at least one of the branches once its pushed inside the COND_EXPR. */
6089 if (!TREE_CONSTANT (arg)
6090 && (TREE_SIDE_EFFECTS (arg)
6091 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6092 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6095 arg = fold_convert_loc (loc, arg_type, arg);
/* Apply CODE to each arm, honoring the original operand order given by
   COND_FIRST_P (the if/else around the two builds is elided here).  */
6098 true_value = fold_convert_loc (loc, cond_type, true_value);
6100 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6102 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6106 false_value = fold_convert_loc (loc, cond_type, false_value);
6108 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6110 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6113 /* Check that we have simplified at least one of the branches. */
6114 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6117 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6121 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6123 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6124 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6125 ADDEND is the same as X.
6127 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6128 and finite. The problematic cases are when X is zero, and its mode
6129 has signed zeros. In the case of rounding towards -infinity,
6130 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6131 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6134 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
/* Only a literal real zero addend is ever foldable.  */
6136 if (!real_zerop (addend))
6139 /* Don't allow the fold with -fsignaling-nans. */
6140 if (HONOR_SNANS (TYPE_MODE (type)))
6143 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6144 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6147 /* In a vector or complex, we would need to check the sign of all zeros. */
6148 if (TREE_CODE (addend) != REAL_CST)
6151 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6152 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
/* NOTE(review): the flip of NEGATE for a -0.0 addend is elided in this
   excerpt — confirm against the full source.  */
6155 /* The mode has signed zeros, and we have to honor their sign.
6156 In this situation, there is only one case we can return true for.
6157 X - 0 is the same as X unless rounding towards -infinity is
6158 important (see the header comment for why X + 0 never qualifies).  */
6159 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6162 /* Subroutine of fold() that checks comparisons of built-in math
6163 functions against real constants.
6165 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6166 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6167 is the type of the result and ARG0 and ARG1 are the operands of the
6168 comparison. ARG1 must be a TREE_REAL_CST.
6170 The function returns the constant folded tree if a simplification
6171 can be made, and NULL_TREE otherwise. */
6174 fold_mathfn_compare (location_t loc,
6175 enum built_in_function fcode, enum tree_code code,
6176 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled; ARG is sqrt's argument X in the
   comments below, and C (== ARG1) the real constant compared against.  */
6180 if (BUILTIN_SQRT_P (fcode))
6182 tree arg = CALL_EXPR_ARG (arg0, 0);
6183 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6185 c = TREE_REAL_CST (arg1);
/* Case 1: C is negative.  sqrt(x) is never negative (it is NaN for
   negative x), so the comparison is decidable up to NaN handling.  */
6186 if (REAL_VALUE_NEGATIVE (c))
6188 /* sqrt(x) < y is always false, if y is negative. */
6189 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6190 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6192 /* sqrt(x) > y is always true, if y is negative and we
6193 don't care about NaNs, i.e. negative values of x. */
6194 if (code == NE_EXPR || !HONOR_NANS (mode))
6195 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6197 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6198 return fold_build2_loc (loc, GE_EXPR, type, arg,
6199 build_real (TREE_TYPE (arg), dconst0));
/* Case 2: upper-bound comparisons.  Square C (in the target mode) so the
   comparison can be rewritten on X directly.  */
6201 else if (code == GT_EXPR || code == GE_EXPR)
6205 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6206 real_convert (&c2, mode, &c2);
6208 if (REAL_VALUE_ISINF (c2))
6210 /* sqrt(x) > y is x == +Inf, when y is very large. */
6211 if (HONOR_INFINITIES (mode))
6212 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6213 build_real (TREE_TYPE (arg), c2));
6215 /* sqrt(x) > y is always false, when y is very large
6216 and we don't care about infinities. */
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6220 /* sqrt(x) > c is the same as x > c*c. */
6221 return fold_build2_loc (loc, code, type, arg,
6222 build_real (TREE_TYPE (arg), c2));
/* Case 3: lower-bound comparisons, again via c*c.  */
6224 else if (code == LT_EXPR || code == LE_EXPR)
6228 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6229 real_convert (&c2, mode, &c2);
6231 if (REAL_VALUE_ISINF (c2))
6233 /* sqrt(x) < y is always true, when y is a very large
6234 value and we don't care about NaNs or Infinities. */
6235 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6236 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6238 /* sqrt(x) < y is x != +Inf when y is very large and we
6239 don't care about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, NE_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6244 /* sqrt(x) < y is x >= 0 when y is very large and we
6245 don't care about Infinities. */
6246 if (! HONOR_INFINITIES (mode))
6247 return fold_build2_loc (loc, GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg), dconst0));
6250 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below; save it so side effects run only once.  */
6251 arg = save_expr (arg);
6252 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6253 fold_build2_loc (loc, GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6256 fold_build2_loc (loc, NE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg),
6261 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6262 if (! HONOR_NANS (mode))
6263 return fold_build2_loc (loc, code, type, arg,
6264 build_real (TREE_TYPE (arg), c2));
6266 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6267 arg = save_expr (arg);
6268 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6269 fold_build2_loc (loc, GE_EXPR, type, arg,
6270 build_real (TREE_TYPE (arg),
6272 fold_build2_loc (loc, code, type, arg,
6273 build_real (TREE_TYPE (arg),
6281 /* Subroutine of fold() that optimizes comparisons against Infinities,
6282 either +Inf or -Inf.
6284 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6285 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6286 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6288 The function returns the constant folded tree if a simplification
6289 can be made, and NULL_TREE otherwise. */
6292 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6293 tree arg0, tree arg1)
/* NOTE(review): the switch (code) and its case labels are elided in this
   excerpt; the groups below appear to handle GT/LE, EQ & GE, LT, and NE
   respectively — confirm against the full source.  */
6296 REAL_VALUE_TYPE max;
6300 mode = TYPE_MODE (TREE_TYPE (arg0));
6302 /* For negative infinity swap the sense of the comparison. */
6303 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6305 code = swap_tree_comparison (code);
6310 /* x > +Inf is always false, if we ignore sNaNs. */
6311 if (HONOR_SNANS (mode))
6313 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6316 /* x <= +Inf is always true, if we don't care about NaNs. */
6317 if (! HONOR_NANS (mode))
6318 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6320 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
/* ARG0 appears twice in the self-comparison; save it first.  */
6321 arg0 = save_expr (arg0);
6322 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6326 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6327 real_maxval (&max, neg, mode);
6328 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6329 arg0, build_real (TREE_TYPE (arg0), max));
6332 /* x < +Inf is always equal to x <= DBL_MAX. */
6333 real_maxval (&max, neg, mode);
6334 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6335 arg0, build_real (TREE_TYPE (arg0), max));
6338 /* x != +Inf is always equal to !(x > DBL_MAX). */
6339 real_maxval (&max, neg, mode);
6340 if (! HONOR_NANS (mode))
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, x != +Inf must stay a negated x > DBL_MAX so a NaN input
   still yields true.  */
6344 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6345 arg0, build_real (TREE_TYPE (arg0), max));
6346 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6355 /* Subroutine of fold() that optimizes comparisons of a division by
6356 a nonzero integer constant against an integer constant, i.e.
6359 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6360 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6361 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6363 The function returns the constant folded tree if a simplification
6364 can be made, and NULL_TREE otherwise. */
6367 fold_div_compare (location_t loc,
6368 enum tree_code code, tree type, tree arg0, tree arg1)
/* ARG0 is the division X/C1 (arg00 = X, arg01 = C1) and ARG1 is C2.
   The fold rewrites the comparison as a range check [LO, HI] on X.  */
6370 tree prod, tmp, hi, lo;
6371 tree arg00 = TREE_OPERAND (arg0, 0);
6372 tree arg01 = TREE_OPERAND (arg0, 1);
6373 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6374 bool neg_overflow = false;
6377 /* We have to do this the hard way to detect unsigned overflow.
6378 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6379 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6380 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6381 neg_overflow = false;
6383 if (sign == UNSIGNED)
/* Unsigned: LO = C1*C2 (lo assignment elided here), HI = LO + (C1-1).  */
6385 tmp = int_const_binop (MINUS_EXPR, arg01,
6386 build_int_cst (TREE_TYPE (arg01), 1));
6389 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6390 val = wi::add (prod, tmp, sign, &overflow);
6391 hi = force_fit_type (TREE_TYPE (arg00), val,
6392 -1, overflow | TREE_OVERFLOW (prod));
/* Signed, positive divisor: bounds depend on the sign of C2.  */
6394 else if (tree_int_cst_sgn (arg01) >= 0)
6396 tmp = int_const_binop (MINUS_EXPR, arg01,
6397 build_int_cst (TREE_TYPE (arg01), 1));
6398 switch (tree_int_cst_sgn (arg1))
6401 neg_overflow = true;
6402 lo = int_const_binop (MINUS_EXPR, prod, tmp);
/* C2 == 0: truncation pulls the range symmetric around zero.  */
6407 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6412 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6422 /* A negative divisor reverses the relational operators. */
6423 code = swap_tree_comparison (code);
6425 tmp = int_const_binop (PLUS_EXPR, arg01,
6426 build_int_cst (TREE_TYPE (arg01), 1));
6427 switch (tree_int_cst_sgn (arg1))
6430 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6435 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6440 neg_overflow = true;
6441 lo = int_const_binop (PLUS_EXPR, prod, tmp);
/* Rewrite the comparison; TREE_OVERFLOW on a bound means that bound
   fell outside the type, making one side of the range vacuous.  */
6453 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6454 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6455 if (TREE_OVERFLOW (hi))
6456 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6457 if (TREE_OVERFLOW (lo))
6458 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6459 return build_range_check (loc, type, arg00, 1, lo, hi);
6462 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6463 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6464 if (TREE_OVERFLOW (hi))
6465 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6466 if (TREE_OVERFLOW (lo))
6467 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6468 return build_range_check (loc, type, arg00, 0, lo, hi);
/* LT: overflow in LO decides the comparison outright; NEG_OVERFLOW
   records which direction the bound escaped.  */
6471 if (TREE_OVERFLOW (lo))
6473 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6474 return omit_one_operand_loc (loc, type, tmp, arg00);
6476 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6479 if (TREE_OVERFLOW (hi))
6481 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6482 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6487 if (TREE_OVERFLOW (hi))
6489 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6495 if (TREE_OVERFLOW (lo))
6497 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6510 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6511 equality/inequality test, then return a simplified form of the test
6512 using a sign testing. Otherwise return NULL. TYPE is the desired
6516 fold_single_bit_test_into_sign_test (location_t loc,
6517 enum tree_code code, tree arg0, tree arg1,
6520 /* If this is testing a single bit, we can optimize the test. */
6521 if ((code == NE_EXPR || code == EQ_EXPR)
6522 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6523 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6525 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6526 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
/* sign_bit_p returns A when C is exactly A's sign bit, else NULL.  */
6527 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6529 if (arg00 != NULL_TREE
6530 /* This is only a win if casting to a signed type is cheap,
6531 i.e. when arg00's type is not a partial mode. */
6532 && TYPE_PRECISION (TREE_TYPE (arg00))
6533 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6535 tree stype = signed_type_for (TREE_TYPE (arg00));
/* The sign test needs a signed view of A; EQ becomes >= 0, NE becomes < 0.  */
6536 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6538 fold_convert_loc (loc, stype, arg00),
6539 build_int_cst (stype, 0));
6546 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6547 equality/inequality test, then return a simplified form of
6548 the test using shifts and logical operations. Otherwise return
6549 NULL. TYPE is the desired result type. */
6552 fold_single_bit_test (location_t loc, enum tree_code code,
6553 tree arg0, tree arg1, tree result_type)
6555 /* If this is testing a single bit, we can optimize the test. */
6556 if ((code == NE_EXPR || code == EQ_EXPR)
6557 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6558 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6560 tree inner = TREE_OPERAND (arg0, 0);
6561 tree type = TREE_TYPE (arg0);
/* BITNUM is log2 of the single-bit mask C.  */
6562 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6563 machine_mode operand_mode = TYPE_MODE (type);
6565 tree signed_type, unsigned_type, intermediate_type;
6568 /* First, see if we can fold the single bit test into a sign-bit
6569 test; fall through to the shift form only if that fails.  */
6570 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6575 /* Otherwise we have (A & C) != 0 where C is a single bit,
6576 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6577 Similarly for (A & C) == 0. */
6579 /* If INNER is a right shift of a constant and it plus BITNUM does
6580 not overflow, adjust BITNUM and INNER. */
6581 if (TREE_CODE (inner) == RSHIFT_EXPR
6582 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6583 && bitnum < TYPE_PRECISION (type)
6584 && wi::ltu_p (TREE_OPERAND (inner, 1),
6585 TYPE_PRECISION (type) - bitnum)
/* (A >> n) bit BITNUM is A's bit BITNUM+n, so the two shifts merge.  */
6587 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6588 inner = TREE_OPERAND (inner, 0);
6591 /* If we are going to be able to omit the AND below, we must do our
6592 operations as unsigned. If we must use the AND, we have a choice.
6593 Normally unsigned is faster, but for some machines signed is. */
6594 #ifdef LOAD_EXTEND_OP
6595 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6596 && !flag_syntax_only) ? 0 : 1;
/* (The #else arm defaulting ops_unsigned is elided in this excerpt.)  */
6601 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6602 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6603 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6604 inner = fold_convert_loc (loc, intermediate_type, inner);
/* Move the tested bit to position 0, then isolate it.  */
6607 inner = build2 (RSHIFT_EXPR, intermediate_type,
6608 inner, size_int (bitnum));
6610 one = build_int_cst (intermediate_type, 1);
/* For ==, invert the bit so the final value is 1 when the bit is clear.  */
6612 if (code == EQ_EXPR)
6613 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6615 /* Put the AND last so it can combine with more things. */
6616 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6618 /* Make sure to return the proper type. */
6619 inner = fold_convert_loc (loc, result_type, inner);
6626 /* Check whether we are allowed to reorder operands arg0 and arg1,
6627 such that the evaluation of arg1 occurs before arg0. */
6630 reorder_operands_p (const_tree arg0, const_tree arg1)
/* Reordering is unrestricted unless flag_evaluation_order mandates
   source-order evaluation, and constants can always be reordered.
   (The early 'return true' arms are elided in this excerpt.)  */
6632 if (! flag_evaluation_order)
6634 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise only side-effect-free operands may be evaluated out of order.  */
6636 return ! TREE_SIDE_EFFECTS (arg0)
6637 && ! TREE_SIDE_EFFECTS (arg1);
6640 /* Test whether it is preferable two swap two operands, ARG0 and
6641 ARG1, for example because ARG0 is an integer constant and ARG1
6642 isn't. If REORDER is true, only recommend swapping if we can
6643 evaluate the operands in reverse order. */
6646 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
/* The canonical order puts constants second: never swap a constant into
   first position; do swap when only ARG0 is constant.  (The return
   statements in each arm are elided in this excerpt.)  */
6648 if (CONSTANT_CLASS_P (arg1))
6650 if (CONSTANT_CLASS_P (arg0))
/* Look through sign-preserving conversions before the weaker
   TREE_CONSTANT test below.  */
6653 STRIP_SIGN_NOPS (arg0);
6654 STRIP_SIGN_NOPS (arg1);
6656 if (TREE_CONSTANT (arg1))
6658 if (TREE_CONSTANT (arg0))
/* Respect required evaluation order: no swap when either operand has
   side effects and REORDER is requested.  */
6661 if (reorder && flag_evaluation_order
6662 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6665 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6666 for commutative and comparison operators. Ensuring a canonical
6667 form allows the optimizers to find additional redundancies without
6668 having to explicitly check for both orderings. */
6669 if (TREE_CODE (arg0) == SSA_NAME
6670 && TREE_CODE (arg1) == SSA_NAME
6671 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6674 /* Put SSA_NAMEs last. */
6675 if (TREE_CODE (arg1) == SSA_NAME)
6677 if (TREE_CODE (arg0) == SSA_NAME)
6680 /* Put variables last. */
6689 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6690 ARG0 is extended to a wider type. */
6693 fold_widened_comparison (location_t loc, enum tree_code code,
6694 tree type, tree arg0, tree arg1)
/* ARG0_UNW is ARG0 with the widening conversion stripped; SHORTER_TYPE
   is its pre-widening type.  */
6696 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6698 tree shorter_type, outer_type;
/* If nothing was stripped there is no widening to undo.  */
6702 if (arg0_unw == arg0)
6704 shorter_type = TREE_TYPE (arg0_unw);
6706 #ifdef HAVE_canonicalize_funcptr_for_compare
6707 /* Disable this optimization if we're casting a function pointer
6708 type on targets that require function pointer canonicalization. */
6709 if (HAVE_canonicalize_funcptr_for_compare
6710 && TREE_CODE (shorter_type) == POINTER_TYPE
6711 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
/* Only a genuinely widening conversion is interesting.  */
6715 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6718 arg1_unw = get_unwidened (arg1, NULL_TREE);
6720 /* If possible, express the comparison in the shorter mode. */
6721 if ((code == EQ_EXPR || code == NE_EXPR
6722 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6723 && (TREE_TYPE (arg1_unw) == shorter_type
6724 || ((TYPE_PRECISION (shorter_type)
6725 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6726 && (TYPE_UNSIGNED (shorter_type)
6727 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6728 || (TREE_CODE (arg1_unw) == INTEGER_CST
6729 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6730 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6731 && int_fits_type_p (arg1_unw, shorter_type))))
6732 return fold_build2_loc (loc, code, type, arg0_unw,
6733 fold_convert_loc (loc, shorter_type, arg1_unw));
/* Beyond this point only a constant ARG1 outside SHORTER_TYPE's range
   can still be folded.  */
6735 if (TREE_CODE (arg1_unw) != INTEGER_CST
6736 || TREE_CODE (shorter_type) != INTEGER_TYPE
6737 || !int_fits_type_p (arg1_unw, shorter_type))
6740 /* If we are comparing with the integer that does not fit into the range
6741 of the shorter type, the result is known. */
6742 outer_type = TREE_TYPE (arg1_unw);
6743 min = lower_bound_in_type (outer_type, shorter_type);
6744 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE/BELOW classify the constant relative to SHORTER_TYPE's range.  */
6746 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6748 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* NOTE(review): the switch over CODE and its case labels are elided in
   this excerpt; each omit_one_operand_loc below resolves one comparison
   direction against the out-of-range constant.  */
6755 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6760 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6766 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6768 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6773 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6775 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6784 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6785 ARG0 just the signedness is changed. */
6788 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6789 tree arg0, tree arg1)
6792 tree inner_type, outer_type;
/* ARG0 must be a conversion; its operand's type differs only in sign.  */
6794 if (!CONVERT_EXPR_P (arg0))
6797 outer_type = TREE_TYPE (arg0);
6798 arg0_inner = TREE_OPERAND (arg0, 0);
6799 inner_type = TREE_TYPE (arg0_inner);
6801 #ifdef HAVE_canonicalize_funcptr_for_compare
6802 /* Disable this optimization if we're casting a function pointer
6803 type on targets that require function pointer canonicalization. */
6804 if (HAVE_canonicalize_funcptr_for_compare
6805 && TREE_CODE (inner_type) == POINTER_TYPE
6806 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Equal precision means the conversion changes at most signedness.  */
6810 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
/* ARG1 must be a constant or a matching conversion from INNER_TYPE.  */
6813 if (TREE_CODE (arg1) != INTEGER_CST
6814 && !(CONVERT_EXPR_P (arg1)
6815 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)
/* A sign change is only safe for EQ/NE-style codes (the code test on
   the line following this condition is elided in this excerpt).  */
6818 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6823 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
/* Re-fit the constant into INNER_TYPE, preserving any overflow flag.  */
6826 if (TREE_CODE (arg1) == INTEGER_CST)
6827 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6828 TREE_OVERFLOW (arg1));
6830 arg1 = fold_convert_loc (loc, inner_type, arg1);
6832 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6836 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6837 means A >= Y && A != MAX, but in this case we know that
6838 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6841 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6843 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound A < X (or X > A).  */
6845 if (TREE_CODE (bound) == LT_EXPR)
6846 a = TREE_OPERAND (bound, 0);
6847 else if (TREE_CODE (bound) == GT_EXPR)
6848 a = TREE_OPERAND (bound, 1);
6852 typea = TREE_TYPE (a);
/* The +1 arithmetic below is only meaningful for integers and pointers.  */
6853 if (!INTEGRAL_TYPE_P (typea)
6854 && !POINTER_TYPE_P (typea))
/* Extract A1 and Y from the inequality Y < A1 (or A1 > Y).  */
6857 if (TREE_CODE (ineq) == LT_EXPR)
6859 a1 = TREE_OPERAND (ineq, 1);
6860 y = TREE_OPERAND (ineq, 0);
6862 else if (TREE_CODE (ineq) == GT_EXPR)
6864 a1 = TREE_OPERAND (ineq, 0);
6865 y = TREE_OPERAND (ineq, 1);
6870 if (TREE_TYPE (a1) != typea)
6873 if (POINTER_TYPE_P (typea))
6875 /* Convert the pointer types into integer before taking the difference. */
6876 tree ta = fold_convert_loc (loc, ssizetype, a);
6877 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6878 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6881 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
/* The transformation only applies when A1 is exactly A + 1.  */
6883 if (!diff || !integer_onep (diff))
/* A + 1 > Y together with A < X (X <= MAX) implies A >= Y.  */
6886 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6889 /* Fold a sum or difference of at least one multiplication.
6890 Returns the folded tree or NULL if no simplification could be made. */
6893 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6894 tree arg0, tree arg1)
6896 tree arg00, arg01, arg10, arg11;
6897 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6899 /* (A * C) +- (B * C) -> (A+-B) * C.
6900 (A * C) +- A -> A * (C+-1).
6901 We are most concerned about the case where C is a constant,
6902 but other combinations show up during loop reduction. Since
6903 it is not difficult, try all four possibilities. */
/* Normalize ARG0 into factors arg00 * arg01; a non-MULT operand is
   treated as itself times one.  */
6905 if (TREE_CODE (arg0) == MULT_EXPR)
6907 arg00 = TREE_OPERAND (arg0, 0);
6908 arg01 = TREE_OPERAND (arg0, 1);
6910 else if (TREE_CODE (arg0) == INTEGER_CST)
6912 arg00 = build_one_cst (type);
6917 /* We cannot generate constant 1 for fract. */
6918 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6921 arg01 = build_one_cst (type);
/* Likewise normalize ARG1 into arg10 * arg11.  */
6923 if (TREE_CODE (arg1) == MULT_EXPR)
6925 arg10 = TREE_OPERAND (arg1, 0);
6926 arg11 = TREE_OPERAND (arg1, 1);
6928 else if (TREE_CODE (arg1) == INTEGER_CST)
6930 arg10 = build_one_cst (type);
6931 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6932 the purpose of this canonicalization. */
6933 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6934 && negate_expr_p (arg1)
6935 && code == PLUS_EXPR)
6937 arg11 = negate_expr (arg1);
6945 /* We cannot generate constant 1 for fract. */
6946 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6949 arg11 = build_one_cst (type);
/* Find the shared factor SAME; ALT0/ALT1 are the remaining factors.  */
6953 if (operand_equal_p (arg01, arg11, 0))
6954 same = arg01, alt0 = arg00, alt1 = arg10;
6955 else if (operand_equal_p (arg00, arg10, 0))
6956 same = arg00, alt0 = arg01, alt1 = arg11;
6957 else if (operand_equal_p (arg00, arg11, 0))
6958 same = arg00, alt0 = arg01, alt1 = arg10;
6959 else if (operand_equal_p (arg01, arg10, 0))
6960 same = arg01, alt0 = arg00, alt1 = arg11;
6962 /* No identical multiplicands; see if we can find a common
6963 power-of-two factor in non-power-of-two multiplies. This
6964 can help in multi-dimensional array access. */
6965 else if (tree_fits_shwi_p (arg01)
6966 && tree_fits_shwi_p (arg11))
6968 HOST_WIDE_INT int01, int11, tmp;
6971 int01 = tree_to_shwi (arg01);
6972 int11 = tree_to_shwi (arg11);
6974 /* Move min of absolute values to int11. */
6975 if (absu_hwi (int01) < absu_hwi (int11))
6977 tmp = int01, int01 = int11, int11 = tmp;
6978 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6985 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6986 /* The remainder should not be a constant, otherwise we
6987 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6988 increased the number of multiplications necessary. */
6989 && TREE_CODE (arg10) != INTEGER_CST)
/* Fold the larger coefficient's quotient into ALT0; INT11 becomes
   the shared factor.  */
6991 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6992 build_int_cst (TREE_TYPE (arg00),
/* If the operands were swapped above, swap them back so MINUS_EXPR
   keeps its original operand order.  */
6997 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Emit (ALT0 +- ALT1) * SAME.  */
7002 return fold_build2_loc (loc, MULT_EXPR, type,
7003 fold_build2_loc (loc, code, type,
7004 fold_convert_loc (loc, type, alt0),
7005 fold_convert_loc (loc, type, alt1)),
7006 fold_convert_loc (loc, type, same));
7011 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7012 specified by EXPR into the buffer PTR of length LEN bytes.
7013 Return the number of bytes placed in the buffer, or zero
7017 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7019 tree type = TREE_TYPE (expr);
7020 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7021 int byte, offset, word, words;
7022 unsigned char value;
7024 if ((off == -1 && total_bytes > len)
7025 || off >= total_bytes)
7029 words = total_bytes / UNITS_PER_WORD;
7031 for (byte = 0; byte < total_bytes; byte++)
7033 int bitpos = byte * BITS_PER_UNIT;
7034 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7036 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7038 if (total_bytes > UNITS_PER_WORD)
7040 word = byte / UNITS_PER_WORD;
7041 if (WORDS_BIG_ENDIAN)
7042 word = (words - 1) - word;
7043 offset = word * UNITS_PER_WORD;
7044 if (BYTES_BIG_ENDIAN)
7045 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7047 offset += byte % UNITS_PER_WORD;
7050 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7052 && offset - off < len)
7053 ptr[offset - off] = value;
7055 return MIN (len, total_bytes - off);
7059 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7060 specified by EXPR into the buffer PTR of length LEN bytes.
7061 Return the number of bytes placed in the buffer, or zero
7065 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7067 tree type = TREE_TYPE (expr);
7068 machine_mode mode = TYPE_MODE (type);
7069 int total_bytes = GET_MODE_SIZE (mode);
7070 FIXED_VALUE_TYPE value;
7071 tree i_value, i_type;
7073 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7076 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7078 if (NULL_TREE == i_type
7079 || TYPE_PRECISION (i_type) != total_bytes)
7082 value = TREE_FIXED_CST (expr);
7083 i_value = double_int_to_tree (i_type, value.data);
7085 return native_encode_int (i_value, ptr, len, off);
7089 /* Subroutine of native_encode_expr. Encode the REAL_CST
7090 specified by EXPR into the buffer PTR of length LEN bytes.
7091 Return the number of bytes placed in the buffer, or zero
7095 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7097 tree type = TREE_TYPE (expr);
7098 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7099 int byte, offset, word, words, bitpos;
7100 unsigned char value;
7102 /* There are always 32 bits in each long, no matter the size of
7103 the hosts long. We handle floating point representations with
7107 if ((off == -1 && total_bytes > len)
7108 || off >= total_bytes)
7112 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7114 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7116 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7117 bitpos += BITS_PER_UNIT)
7119 byte = (bitpos / BITS_PER_UNIT) & 3;
7120 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7122 if (UNITS_PER_WORD < 4)
7124 word = byte / UNITS_PER_WORD;
7125 if (WORDS_BIG_ENDIAN)
7126 word = (words - 1) - word;
7127 offset = word * UNITS_PER_WORD;
7128 if (BYTES_BIG_ENDIAN)
7129 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7131 offset += byte % UNITS_PER_WORD;
7134 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7135 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7137 && offset - off < len)
7138 ptr[offset - off] = value;
7140 return MIN (len, total_bytes - off);
7143 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7144 specified by EXPR into the buffer PTR of length LEN bytes.
7145 Return the number of bytes placed in the buffer, or zero
7149 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7154 part = TREE_REALPART (expr);
7155 rsize = native_encode_expr (part, ptr, len, off);
7159 part = TREE_IMAGPART (expr);
7161 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7162 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7166 return rsize + isize;
7170 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7171 specified by EXPR into the buffer PTR of length LEN bytes.
7172 Return the number of bytes placed in the buffer, or zero
7176 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7183 count = VECTOR_CST_NELTS (expr);
7184 itype = TREE_TYPE (TREE_TYPE (expr));
7185 size = GET_MODE_SIZE (TYPE_MODE (itype));
7186 for (i = 0; i < count; i++)
7193 elem = VECTOR_CST_ELT (expr, i);
7194 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7195 if ((off == -1 && res != size)
7208 /* Subroutine of native_encode_expr. Encode the STRING_CST
7209 specified by EXPR into the buffer PTR of length LEN bytes.
7210 Return the number of bytes placed in the buffer, or zero
7214 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7216 tree type = TREE_TYPE (expr);
7217 HOST_WIDE_INT total_bytes;
7219 if (TREE_CODE (type) != ARRAY_TYPE
7220 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7221 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7222 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7224 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7225 if ((off == -1 && total_bytes > len)
7226 || off >= total_bytes)
7230 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7233 if (off < TREE_STRING_LENGTH (expr))
7235 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7236 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7238 memset (ptr + written, 0,
7239 MIN (total_bytes - written, len - written));
7242 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7243 return MIN (total_bytes - off, len);
7247 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7248 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7249 buffer PTR of length LEN bytes. If OFF is not -1 then start
7250 the encoding at byte offset OFF and encode at most LEN bytes.
7251 Return the number of bytes placed in the buffer, or zero upon failure. */
7254 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7256 switch (TREE_CODE (expr))
7259 return native_encode_int (expr, ptr, len, off);
7262 return native_encode_real (expr, ptr, len, off);
7265 return native_encode_fixed (expr, ptr, len, off);
7268 return native_encode_complex (expr, ptr, len, off);
7271 return native_encode_vector (expr, ptr, len, off);
7274 return native_encode_string (expr, ptr, len, off);
7282 /* Subroutine of native_interpret_expr. Interpret the contents of
7283 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7284 If the buffer cannot be interpreted, return NULL_TREE. */
7287 native_interpret_int (tree type, const unsigned char *ptr, int len)
7289 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7291 if (total_bytes > len
7292 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7295 wide_int result = wi::from_buffer (ptr, total_bytes);
7297 return wide_int_to_tree (type, result);
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7306 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7308 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7310 FIXED_VALUE_TYPE fixed_value;
7312 if (total_bytes > len
7313 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7316 result = double_int::from_buffer (ptr, total_bytes);
7317 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7319 return build_fixed (type, fixed_value);
7323 /* Subroutine of native_interpret_expr. Interpret the contents of
7324 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7325 If the buffer cannot be interpreted, return NULL_TREE. */
7328 native_interpret_real (tree type, const unsigned char *ptr, int len)
7330 machine_mode mode = TYPE_MODE (type);
7331 int total_bytes = GET_MODE_SIZE (mode);
7332 int byte, offset, word, words, bitpos;
7333 unsigned char value;
7334 /* There are always 32 bits in each long, no matter the size of
7335 the hosts long. We handle floating point representations with
7340 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7341 if (total_bytes > len || total_bytes > 24)
7343 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7345 memset (tmp, 0, sizeof (tmp));
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 if (UNITS_PER_WORD < 4)
7352 word = byte / UNITS_PER_WORD;
7353 if (WORDS_BIG_ENDIAN)
7354 word = (words - 1) - word;
7355 offset = word * UNITS_PER_WORD;
7356 if (BYTES_BIG_ENDIAN)
7357 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7359 offset += byte % UNITS_PER_WORD;
7362 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7363 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7365 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7368 real_from_target (&r, tmp, mode);
7369 return build_real (type, r);
7373 /* Subroutine of native_interpret_expr. Interpret the contents of
7374 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7375 If the buffer cannot be interpreted, return NULL_TREE. */
7378 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7380 tree etype, rpart, ipart;
7383 etype = TREE_TYPE (type);
7384 size = GET_MODE_SIZE (TYPE_MODE (etype));
7387 rpart = native_interpret_expr (etype, ptr, size);
7390 ipart = native_interpret_expr (etype, ptr+size, size);
7393 return build_complex (type, rpart, ipart);
7397 /* Subroutine of native_interpret_expr. Interpret the contents of
7398 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7399 If the buffer cannot be interpreted, return NULL_TREE. */
7402 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7408 etype = TREE_TYPE (type);
7409 size = GET_MODE_SIZE (TYPE_MODE (etype));
7410 count = TYPE_VECTOR_SUBPARTS (type);
7411 if (size * count > len)
7414 elements = XALLOCAVEC (tree, count);
7415 for (i = count - 1; i >= 0; i--)
7417 elem = native_interpret_expr (etype, ptr+(i*size), size);
7422 return build_vector (type, elements);
7426 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a constant of type TYPE. For
7428 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7429 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7430 return NULL_TREE. */
7433 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7435 switch (TREE_CODE (type))
7441 case REFERENCE_TYPE:
7442 return native_interpret_int (type, ptr, len);
7445 return native_interpret_real (type, ptr, len);
7447 case FIXED_POINT_TYPE:
7448 return native_interpret_fixed (type, ptr, len);
7451 return native_interpret_complex (type, ptr, len);
7454 return native_interpret_vector (type, ptr, len);
7461 /* Returns true if we can interpret the contents of a native encoding
7465 can_native_interpret_type_p (tree type)
7467 switch (TREE_CODE (type))
7473 case REFERENCE_TYPE:
7474 case FIXED_POINT_TYPE:
7484 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7485 TYPE at compile-time. If we're unable to perform the conversion
7486 return NULL_TREE. */
7489 fold_view_convert_expr (tree type, tree expr)
7491 /* We support up to 512-bit values (for V8DFmode). */
7492 unsigned char buffer[64];
7495 /* Check that the host and target are sane. */
7496 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7499 len = native_encode_expr (expr, buffer, sizeof (buffer));
7503 return native_interpret_expr (type, buffer, len);
7506 /* Build an expression for the address of T. Folds away INDIRECT_REF
7507 to avoid confusing the gimplify process. */
7510 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7512 /* The size of the object is not relevant when talking about its address. */
7513 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7514 t = TREE_OPERAND (t, 0);
7516 if (TREE_CODE (t) == INDIRECT_REF)
7518 t = TREE_OPERAND (t, 0);
7520 if (TREE_TYPE (t) != ptrtype)
7521 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7523 else if (TREE_CODE (t) == MEM_REF
7524 && integer_zerop (TREE_OPERAND (t, 1)))
7525 return TREE_OPERAND (t, 0);
7526 else if (TREE_CODE (t) == MEM_REF
7527 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7528 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7529 TREE_OPERAND (t, 0),
7530 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7531 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7533 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7535 if (TREE_TYPE (t) != ptrtype)
7536 t = fold_convert_loc (loc, ptrtype, t);
7539 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7544 /* Build an expression for the address of T. */
7547 build_fold_addr_expr_loc (location_t loc, tree t)
7549 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7551 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7554 static bool vec_cst_ctor_to_array (tree, tree *);
/* NOTE(review): this listing is a line-sampled, number-prefixed paste of
   fold_unary_loc -- many interior lines (returns, braces, else-arms, case
   labels) are missing and a stray line number precedes each surviving
   line.  The code is left byte-identical; restore it from the canonical
   source before attempting any behavioral change.  */
7556 /* Fold a unary expression of code CODE and type TYPE with operand
7557 OP0. Return the folded expression if folding is successful.
7558 Otherwise, return NULL_TREE. */
7561 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7565 enum tree_code_class kind = TREE_CODE_CLASS (code);
7567 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7568 && TREE_CODE_LENGTH (code) == 1);
/* Try the generated match-and-simplify patterns first.  */
7570 tem = generic_simplify (loc, code, type, op0);
7577 if (CONVERT_EXPR_CODE_P (code)
7578 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7580 /* Don't use STRIP_NOPS, because signedness of argument type
7582 STRIP_SIGN_NOPS (arg0);
7586 /* Strip any conversions that don't change the mode. This
7587 is safe for every expression, except for a comparison
7588 expression because its signedness is derived from its
7591 Note that this is done as an internal manipulation within
7592 the constant folder, in order to find the simplest
7593 representation of the arguments so that their form can be
7594 studied. In any cases, the appropriate type conversions
7595 should be put back in the tree that will get out of the
/* Distribute the unary operation into COMPOUND_EXPR / COND_EXPR
   operands so the interesting sub-expression can fold.  */
7601 if (TREE_CODE_CLASS (code) == tcc_unary)
7603 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7604 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7605 fold_build1_loc (loc, code, type,
7606 fold_convert_loc (loc, TREE_TYPE (op0),
7607 TREE_OPERAND (arg0, 1))));
7608 else if (TREE_CODE (arg0) == COND_EXPR)
7610 tree arg01 = TREE_OPERAND (arg0, 1);
7611 tree arg02 = TREE_OPERAND (arg0, 2);
7612 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7613 arg01 = fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc,
7615 TREE_TYPE (op0), arg01));
7616 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7617 arg02 = fold_build1_loc (loc, code, type,
7618 fold_convert_loc (loc,
7619 TREE_TYPE (op0), arg02));
7620 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7623 /* If this was a conversion, and all we did was to move into
7624 inside the COND_EXPR, bring it back out. But leave it if
7625 it is a conversion from integer to integer and the
7626 result precision is no wider than a word since such a
7627 conversion is cheap and may be optimized away by combine,
7628 while it couldn't if it were outside the COND_EXPR. Then return
7629 so we don't get into an infinite recursion loop taking the
7630 conversion out and then back in. */
7632 if ((CONVERT_EXPR_CODE_P (code)
7633 || code == NON_LVALUE_EXPR)
7634 && TREE_CODE (tem) == COND_EXPR
7635 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7636 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7637 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7638 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7639 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7640 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7641 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7643 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7644 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7645 || flag_syntax_only))
7646 tem = build1_loc (loc, code, type,
7648 TREE_TYPE (TREE_OPERAND
7649 (TREE_OPERAND (tem, 1), 0)),
7650 TREE_OPERAND (tem, 0),
7651 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7652 TREE_OPERAND (TREE_OPERAND (tem, 2),
/* Per-tree-code folding cases begin here.  */
7660 case NON_LVALUE_EXPR:
7661 if (!maybe_lvalue_p (op0))
7662 return fold_convert_loc (loc, type, op0);
7667 case FIX_TRUNC_EXPR:
7668 if (COMPARISON_CLASS_P (op0))
7670 /* If we have (type) (a CMP b) and type is an integral type, return
7671 new expression involving the new type. Canonicalize
7672 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7674 Do not fold the result as that would not simplify further, also
7675 folding again results in recursions. */
7676 if (TREE_CODE (type) == BOOLEAN_TYPE)
7677 return build2_loc (loc, TREE_CODE (op0), type,
7678 TREE_OPERAND (op0, 0),
7679 TREE_OPERAND (op0, 1));
7680 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7681 && TREE_CODE (type) != VECTOR_TYPE)
7682 return build3_loc (loc, COND_EXPR, type, op0,
7683 constant_boolean_node (true, type),
7684 constant_boolean_node (false, type));
7687 /* Handle (T *)&A.B.C for A being of type T and B and C
7688 living at offset zero. This occurs frequently in
7689 C++ upcasting and then accessing the base. */
7690 if (TREE_CODE (op0) == ADDR_EXPR
7691 && POINTER_TYPE_P (type)
7692 && handled_component_p (TREE_OPERAND (op0, 0)))
7694 HOST_WIDE_INT bitsize, bitpos;
7697 int unsignedp, volatilep;
7698 tree base = TREE_OPERAND (op0, 0);
7699 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7700 &mode, &unsignedp, &volatilep, false);
7701 /* If the reference was to a (constant) zero offset, we can use
7702 the address of the base if it has the same base type
7703 as the result type and the pointer type is unqualified. */
7704 if (! offset && bitpos == 0
7705 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7706 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7707 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7708 return fold_convert_loc (loc, type,
7709 build_fold_addr_expr_loc (loc, base));
7712 if (TREE_CODE (op0) == MODIFY_EXPR
7713 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7714 /* Detect assigning a bitfield. */
7715 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7717 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7719 /* Don't leave an assignment inside a conversion
7720 unless assigning a bitfield. */
7721 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7722 /* First do the assignment, then return converted constant. */
7723 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7724 TREE_NO_WARNING (tem) = 1;
7725 TREE_USED (tem) = 1;
7729 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7730 constants (if x has signed type, the sign bit cannot be set
7731 in c). This folds extension into the BIT_AND_EXPR.
7732 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7733 very likely don't have maximal range for their precision and this
7734 transformation effectively doesn't preserve non-maximal ranges. */
7735 if (TREE_CODE (type) == INTEGER_TYPE
7736 && TREE_CODE (op0) == BIT_AND_EXPR
7737 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7739 tree and_expr = op0;
7740 tree and0 = TREE_OPERAND (and_expr, 0);
7741 tree and1 = TREE_OPERAND (and_expr, 1);
7744 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7745 || (TYPE_PRECISION (type)
7746 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7748 else if (TYPE_PRECISION (TREE_TYPE (and1))
7749 <= HOST_BITS_PER_WIDE_INT
7750 && tree_fits_uhwi_p (and1))
7752 unsigned HOST_WIDE_INT cst;
7754 cst = tree_to_uhwi (and1);
7755 cst &= HOST_WIDE_INT_M1U
7756 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7757 change = (cst == 0);
7758 #ifdef LOAD_EXTEND_OP
7760 && !flag_syntax_only
7761 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7764 tree uns = unsigned_type_for (TREE_TYPE (and0));
7765 and0 = fold_convert_loc (loc, uns, and0);
7766 and1 = fold_convert_loc (loc, uns, and1);
7772 tem = force_fit_type (type, wi::to_widest (and1), 0,
7773 TREE_OVERFLOW (and1));
7774 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7775 fold_convert_loc (loc, type, and0), tem);
7779 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7780 when one of the new casts will fold away. Conservatively we assume
7781 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7782 if (POINTER_TYPE_P (type)
7783 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7784 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7786 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7787 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7789 tree arg00 = TREE_OPERAND (arg0, 0);
7790 tree arg01 = TREE_OPERAND (arg0, 1);
7792 return fold_build_pointer_plus_loc
7793 (loc, fold_convert_loc (loc, type, arg00), arg01);
7796 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7797 of the same precision, and X is an integer type not narrower than
7798 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7799 if (INTEGRAL_TYPE_P (type)
7800 && TREE_CODE (op0) == BIT_NOT_EXPR
7801 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7802 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7803 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7805 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7806 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7807 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7808 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7809 fold_convert_loc (loc, type, tem));
7812 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7813 type of X and Y (integer types only). */
7814 if (INTEGRAL_TYPE_P (type)
7815 && TREE_CODE (op0) == MULT_EXPR
7816 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7817 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7819 /* Be careful not to introduce new overflows. */
7821 if (TYPE_OVERFLOW_WRAPS (type))
7824 mult_type = unsigned_type_for (type);
7826 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7828 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7829 fold_convert_loc (loc, mult_type,
7830 TREE_OPERAND (op0, 0)),
7831 fold_convert_loc (loc, mult_type,
7832 TREE_OPERAND (op0, 1)));
7833 return fold_convert_loc (loc, type, tem);
7837 tem = fold_convert_const (code, type, arg0);
7838 return tem ? tem : NULL_TREE;
7840 case ADDR_SPACE_CONVERT_EXPR:
7841 if (integer_zerop (arg0))
7842 return fold_convert_const (code, type, arg0);
7845 case FIXED_CONVERT_EXPR:
7846 tem = fold_convert_const (code, type, arg0);
7847 return tem ? tem : NULL_TREE;
7849 case VIEW_CONVERT_EXPR:
7850 if (TREE_CODE (op0) == MEM_REF)
7851 return fold_build2_loc (loc, MEM_REF, type,
7852 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7854 return fold_view_convert_expr (type, op0);
7857 tem = fold_negate_expr (loc, arg0);
7859 return fold_convert_loc (loc, type, tem);
7863 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7864 return fold_abs_const (arg0, type);
7865 /* Convert fabs((double)float) into (double)fabsf(float). */
7866 else if (TREE_CODE (arg0) == NOP_EXPR
7867 && TREE_CODE (type) == REAL_TYPE)
7869 tree targ0 = strip_float_extensions (arg0);
7871 return fold_convert_loc (loc, type,
7872 fold_build1_loc (loc, ABS_EXPR,
7876 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7877 else if (TREE_CODE (arg0) == ABS_EXPR)
7880 /* Strip sign ops from argument. */
7881 if (TREE_CODE (type) == REAL_TYPE)
7883 tem = fold_strip_sign_ops (arg0);
7885 return fold_build1_loc (loc, ABS_EXPR, type,
7886 fold_convert_loc (loc, type, tem));
7891 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7892 return fold_convert_loc (loc, type, arg0);
7893 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7895 tree itype = TREE_TYPE (type);
7896 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7897 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7898 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7899 negate_expr (ipart));
7901 if (TREE_CODE (arg0) == COMPLEX_CST)
7903 tree itype = TREE_TYPE (type);
7904 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7905 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7906 return build_complex (type, rpart, negate_expr (ipart));
7908 if (TREE_CODE (arg0) == CONJ_EXPR)
7909 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7913 if (TREE_CODE (arg0) == INTEGER_CST)
7914 return fold_not_const (arg0, type);
7915 /* Convert ~ (-A) to A - 1. */
7916 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7917 return fold_build2_loc (loc, MINUS_EXPR, type,
7918 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7919 build_int_cst (type, 1));
7920 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7921 else if (INTEGRAL_TYPE_P (type)
7922 && ((TREE_CODE (arg0) == MINUS_EXPR
7923 && integer_onep (TREE_OPERAND (arg0, 1)))
7924 || (TREE_CODE (arg0) == PLUS_EXPR
7925 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7926 return fold_build1_loc (loc, NEGATE_EXPR, type,
7927 fold_convert_loc (loc, type,
7928 TREE_OPERAND (arg0, 0)));
7929 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7932 fold_convert_loc (loc, type,
7933 TREE_OPERAND (arg0, 0)))))
7934 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7935 fold_convert_loc (loc, type,
7936 TREE_OPERAND (arg0, 1)));
7937 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7938 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7939 fold_convert_loc (loc, type,
7940 TREE_OPERAND (arg0, 1)))))
7941 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7942 fold_convert_loc (loc, type,
7943 TREE_OPERAND (arg0, 0)), tem);
7944 /* Perform BIT_NOT_EXPR on each element individually. */
7945 else if (TREE_CODE (arg0) == VECTOR_CST)
7949 unsigned count = VECTOR_CST_NELTS (arg0), i;
7951 elements = XALLOCAVEC (tree, count);
7952 for (i = 0; i < count; i++)
7954 elem = VECTOR_CST_ELT (arg0, i);
7955 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7956 if (elem == NULL_TREE)
7961 return build_vector (type, elements);
7963 else if (COMPARISON_CLASS_P (arg0)
7964 && (VECTOR_TYPE_P (type)
7965 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
7967 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
7968 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
7969 HONOR_NANS (TYPE_MODE (op_type)));
7970 if (subcode != ERROR_MARK)
7971 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
7972 TREE_OPERAND (arg0, 1));
7978 case TRUTH_NOT_EXPR:
7979 /* Note that the operand of this must be an int
7980 and its values must be 0 or 1.
7981 ("true" is a fixed value perhaps depending on the language,
7982 but we don't handle values other than 1 correctly yet.) */
7983 tem = fold_truth_not_expr (loc, arg0);
7986 return fold_convert_loc (loc, type, tem);
7989 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7990 return fold_convert_loc (loc, type, arg0);
7991 if (TREE_CODE (arg0) == COMPLEX_CST)
7992 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7993 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7995 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7996 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7997 fold_build1_loc (loc, REALPART_EXPR, itype,
7998 TREE_OPERAND (arg0, 0)),
7999 fold_build1_loc (loc, REALPART_EXPR, itype,
8000 TREE_OPERAND (arg0, 1)));
8001 return fold_convert_loc (loc, type, tem);
8003 if (TREE_CODE (arg0) == CONJ_EXPR)
8005 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8006 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8007 TREE_OPERAND (arg0, 0));
8008 return fold_convert_loc (loc, type, tem);
8010 if (TREE_CODE (arg0) == CALL_EXPR)
8012 tree fn = get_callee_fndecl (arg0);
8013 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8014 switch (DECL_FUNCTION_CODE (fn))
8016 CASE_FLT_FN (BUILT_IN_CEXPI):
8017 fn = mathfn_built_in (type, BUILT_IN_COS);
8019 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8029 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8030 return build_zero_cst (type);
8031 if (TREE_CODE (arg0) == COMPLEX_CST)
8032 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8033 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8035 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8036 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8037 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8038 TREE_OPERAND (arg0, 0)),
8039 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8040 TREE_OPERAND (arg0, 1)));
8041 return fold_convert_loc (loc, type, tem);
8043 if (TREE_CODE (arg0) == CONJ_EXPR)
8045 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8046 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8047 return fold_convert_loc (loc, type, negate_expr (tem));
8049 if (TREE_CODE (arg0) == CALL_EXPR)
8051 tree fn = get_callee_fndecl (arg0);
8052 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8053 switch (DECL_FUNCTION_CODE (fn))
8055 CASE_FLT_FN (BUILT_IN_CEXPI):
8056 fn = mathfn_built_in (type, BUILT_IN_SIN);
8058 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8068 /* Fold *&X to X if X is an lvalue. */
8069 if (TREE_CODE (op0) == ADDR_EXPR)
8071 tree op00 = TREE_OPERAND (op0, 0);
8072 if ((TREE_CODE (op00) == VAR_DECL
8073 || TREE_CODE (op00) == PARM_DECL
8074 || TREE_CODE (op00) == RESULT_DECL)
8075 && !TREE_READONLY (op00))
8080 case VEC_UNPACK_LO_EXPR:
8081 case VEC_UNPACK_HI_EXPR:
8082 case VEC_UNPACK_FLOAT_LO_EXPR:
8083 case VEC_UNPACK_FLOAT_HI_EXPR:
8085 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8087 enum tree_code subcode;
8089 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8090 if (TREE_CODE (arg0) != VECTOR_CST)
8093 elts = XALLOCAVEC (tree, nelts * 2);
8094 if (!vec_cst_ctor_to_array (arg0, elts))
8097 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8098 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8101 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8104 subcode = FLOAT_EXPR;
8106 for (i = 0; i < nelts; i++)
8108 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8109 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8113 return build_vector (type, elts);
8116 case REDUC_MIN_EXPR:
8117 case REDUC_MAX_EXPR:
8118 case REDUC_PLUS_EXPR:
8120 unsigned int nelts, i;
8122 enum tree_code subcode;
8124 if (TREE_CODE (op0) != VECTOR_CST)
8126 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8128 elts = XALLOCAVEC (tree, nelts);
8129 if (!vec_cst_ctor_to_array (op0, elts))
8134 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8135 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8136 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8137 default: gcc_unreachable ();
8140 for (i = 1; i < nelts; i++)
8142 elts[0] = const_binop (subcode, elts[0], elts[i]);
8143 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8152 } /* switch (code) */
8156 /* If the operation was a conversion do _not_ mark a resulting constant
8157 with TREE_OVERFLOW if the original constant was not. These conversions
8158 have implementation defined behavior and retaining the TREE_OVERFLOW
8159 flag here would confuse later passes such as VRP. */
8161 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8162 tree type, tree op0)
8164 tree res = fold_unary_loc (loc, code, type, op0);
/* If a conversion of an INTEGER_CST folded to an INTEGER_CST, mirror the
   operand's overflow flag onto the result, discarding any spurious
   TREE_OVERFLOW the conversion itself may have set.  */
8166 && TREE_CODE (res) == INTEGER_CST
8167 && TREE_CODE (op0) == INTEGER_CST
8168 && CONVERT_EXPR_CODE_P (code))
8169 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8174 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8175 operands OP0 and OP1. LOC is the location of the resulting expression.
8176 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8177 Return the folded expression if folding is successful. Otherwise,
8178 return NULL_TREE. */
8180 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8181 tree arg0, tree arg1, tree op0, tree op1)
8185 /* We only do these simplifications if we are optimizing. */
8189 /* Check for things like (A || B) && (A || C). We can convert this
8190 to A || (B && C). Note that either operator can be any of the four
8191 truth and/or operations and the transformation will still be
8192 valid. Also note that we only care about order for the
8193 ANDIF and ORIF operators. If B contains side effects, this
8194 might change the truth-value of A. */
8195 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8196 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8197 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8198 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8199 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8200 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8202 tree a00 = TREE_OPERAND (arg0, 0);
8203 tree a01 = TREE_OPERAND (arg0, 1);
8204 tree a10 = TREE_OPERAND (arg1, 0);
8205 tree a11 = TREE_OPERAND (arg1, 1);
/* Reassociating the operands is only safe when both the inner and outer
   operators are the non-short-circuit TRUTH_AND/TRUTH_OR forms.  */
8206 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8207 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8208 && (code == TRUTH_AND_EXPR
8209 || code == TRUTH_OR_EXPR));
8211 if (operand_equal_p (a00, a10, 0))
8212 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8213 fold_build2_loc (loc, code, type, a01, a11));
8214 else if (commutative && operand_equal_p (a00, a11, 0))
8215 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8216 fold_build2_loc (loc, code, type, a01, a10));
8217 else if (commutative && operand_equal_p (a01, a10, 0))
8218 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8219 fold_build2_loc (loc, code, type, a00, a11));
8221 /* This case is tricky because we must either have commutative
8222 operators or else A10 must not have side-effects. */
8224 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8225 && operand_equal_p (a01, a11, 0))
8226 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8227 fold_build2_loc (loc, code, type, a00, a10),
8231 /* See if we can build a range comparison. */
8232 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8235 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8236 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8238 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8240 return fold_build2_loc (loc, code, type, tem, arg1);
8243 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8244 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8246 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8248 return fold_build2_loc (loc, code, type, arg0, tem);
8251 /* Check for the possibility of merging component references. If our
8252 lhs is another similar operation, try to merge its rhs with our
8253 rhs. Then try to merge our lhs and rhs. */
8254 if (TREE_CODE (arg0) == code
8255 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8256 TREE_OPERAND (arg0, 1), arg1)))
8257 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8259 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
/* On targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds, prefer the
   non-short-circuit AND/OR form when the operands are simple enough.  */
8262 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8263 && (code == TRUTH_AND_EXPR
8264 || code == TRUTH_ANDIF_EXPR
8265 || code == TRUTH_OR_EXPR
8266 || code == TRUTH_ORIF_EXPR))
8268 enum tree_code ncode, icode;
8270 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8271 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8272 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8274 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8275 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8276 We don't want to pack more than two leafs to a non-IF AND/OR
8278 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8279 equal to IF-CODE, then we don't want to add right-hand operand.
8280 If the inner right-hand side of left-hand operand has
8281 side-effects, or isn't simple, then we can't add to it,
8282 as otherwise we might destroy if-sequence. */
8283 if (TREE_CODE (arg0) == icode
8284 && simple_operand_p_2 (arg1)
8285 /* Needed for sequence points to handle trappings, and side-effects. */
8287 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8289 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8291 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8294 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8295 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8296 else if (TREE_CODE (arg1) == icode
8297 && simple_operand_p_2 (arg0)
8298 /* Needed for sequence points to handle trappings, and side-effects. */
8300 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8302 tem = fold_build2_loc (loc, ncode, type,
8303 arg0, TREE_OPERAND (arg1, 0));
8304 return fold_build2_loc (loc, icode, type, tem,
8305 TREE_OPERAND (arg1, 1));
8307 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8309 For sequence point consistency, we need to check for trapping,
8310 and side-effects. */
8311 else if (code == icode && simple_operand_p_2 (arg0)
8312 && simple_operand_p_2 (arg1))
8313 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8319 /* Fold a binary expression of code CODE and type TYPE with operands
8320 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8321 Return the folded expression if folding is successful. Otherwise,
8322 return NULL_TREE. */
8325 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8327 enum tree_code compl_code;
8329 if (code == MIN_EXPR)
8330 compl_code = MAX_EXPR;
8331 else if (code == MAX_EXPR)
8332 compl_code = MIN_EXPR;
/* The comments below show the MIN (MAX ...) direction; the dual
   MAX (MIN ...) patterns are handled by the same code via COMPL_CODE.  */
8336 /* MIN (MAX (a, b), b) == b. */
8337 if (TREE_CODE (op0) == compl_code
8338 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8339 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8341 /* MIN (MAX (b, a), b) == b. */
8342 if (TREE_CODE (op0) == compl_code
8343 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8344 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8345 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8347 /* MIN (a, MAX (a, b)) == a. */
8348 if (TREE_CODE (op1) == compl_code
8349 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8350 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8351 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8353 /* MIN (a, MAX (b, a)) == a. */
8354 if (TREE_CODE (op1) == compl_code
8355 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8356 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8357 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8362 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8363 by changing CODE to reduce the magnitude of constants involved in
8364 ARG0 of the comparison.
8365 Returns a canonicalized comparison tree if a simplification was
8366 possible, otherwise returns NULL_TREE.
8367 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8368 valid if signed overflow is undefined. */
8371 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8372 tree arg0, tree arg1,
8373 bool *strict_overflow_p)
8375 enum tree_code code0 = TREE_CODE (arg0);
8376 tree t, cst0 = NULL_TREE;
8380 /* Match A +- CST code arg1 and CST code arg1. We can change the
8381 first form only if overflow is undefined. */
8382 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8383 /* In principle pointers also have undefined overflow behavior,
8384 but that causes problems elsewhere. */
8385 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8386 && (code0 == MINUS_EXPR
8387 || code0 == PLUS_EXPR)
8388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8389 || code0 == INTEGER_CST))
8392 /* Identify the constant in arg0 and its sign. */
8393 if (code0 == INTEGER_CST)
8396 cst0 = TREE_OPERAND (arg0, 1);
8397 sgn0 = tree_int_cst_sgn (cst0);
8399 /* Overflowed constants and zero will cause problems. */
8400 if (integer_zerop (cst0)
8401 || TREE_OVERFLOW (cst0))
8404 /* See if we can reduce the magnitude of the constant in
8405 arg0 by changing the comparison code. */
8406 if (code0 == INTEGER_CST)
8408 /* CST <= arg1 -> CST-1 < arg1. */
8409 if (code == LE_EXPR && sgn0 == 1)
8411 /* -CST < arg1 -> -CST-1 <= arg1. */
8412 else if (code == LT_EXPR && sgn0 == -1)
8414 /* CST > arg1 -> CST-1 >= arg1. */
8415 else if (code == GT_EXPR && sgn0 == 1)
8417 /* -CST >= arg1 -> -CST-1 > arg1. */
8418 else if (code == GE_EXPR && sgn0 == -1)
8422 /* arg1 code' CST' might be more canonical. */
8427 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8429 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8431 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8432 else if (code == GT_EXPR
8433 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8435 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8436 else if (code == LE_EXPR
8437 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8439 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8440 else if (code == GE_EXPR
8441 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* The A +- CST forms rely on signed overflow being undefined, so
   record that for the caller's -Wstrict-overflow diagnostics.  */
8445 *strict_overflow_p = true;
8448 /* Now build the constant reduced in magnitude. But not if that
8449 would produce one outside of its types range. */
8450 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8452 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8453 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8455 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8456 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8457 /* We cannot swap the comparison here as that would cause us to
8458 endlessly recurse. */
8461 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8462 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8463 if (code0 != INTEGER_CST)
8464 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8465 t = fold_convert (TREE_TYPE (arg1), t);
8467 /* If swapping might yield to a more canonical form, do so. */
8469 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8471 return fold_build2_loc (loc, code, type, t, arg1);
8474 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8475 overflow further. Try to decrease the magnitude of constants involved
8476 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8477 and put sole constants at the second argument position.
8478 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8481 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8482 tree arg0, tree arg1)
8485 bool strict_overflow_p;
8486 const char * const warnmsg = G_("assuming signed overflow does not occur "
8487 "when reducing constant in comparison");
8489 /* Try canonicalization by simplifying arg0. */
8490 strict_overflow_p = false;
8491 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8492 &strict_overflow_p);
8495 if (strict_overflow_p)
8496 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8500 /* Try canonicalization by simplifying arg1 using the swapped comparison. */
8502 code = swap_tree_comparison (code);
8503 strict_overflow_p = false;
8504 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8505 &strict_overflow_p);
8506 if (t && strict_overflow_p)
8507 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8511 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8512 space. This is used to avoid issuing overflow warnings for
8513 expressions like &p->x which can not wrap. */
8516 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8518 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8525 int precision = TYPE_PRECISION (TREE_TYPE (base));
8526 if (offset == NULL_TREE)
8527 wi_offset = wi::zero (precision);
8528 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
/* Add the byte equivalent of BITPOS to the offset, tracking unsigned
   overflow at pointer precision.  */
8534 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8535 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8539 if (!wi::fits_uhwi_p (total))
8542 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8546 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an array. */
8548 if (TREE_CODE (base) == ADDR_EXPR)
8550 HOST_WIDE_INT base_size;
8552 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8553 if (base_size > 0 && size < base_size)
/* A total byte offset beyond the object size may wrap the address.  */
8557 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8560 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8561 kind INTEGER_CST. This makes sure to properly sign-extend the constant. */
8564 static HOST_WIDE_INT
8565 size_low_cst (const_tree t)
8567 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8568 int prec = TYPE_PRECISION (TREE_TYPE (t));
8569 if (prec < HOST_BITS_PER_WIDE_INT)
/* Constant narrower than a HOST_WIDE_INT: sign-extend from its own
   precision so high bits are well-defined.  */
8570 return sext_hwi (w, prec);
8574 /* Subroutine of fold_binary. This routine performs all of the
8575 transformations that are common to the equality/inequality
8576 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8577 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8578 fold_binary should call fold_binary. Fold a comparison with
8579 tree code CODE and type TYPE with operands OP0 and OP1. Return
8580 the folded comparison or NULL_TREE. */
8583 fold_comparison (location_t loc, enum tree_code code, tree type,
8586 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8587 tree arg0, arg1, tem;
8592 STRIP_SIGN_NOPS (arg0);
8593 STRIP_SIGN_NOPS (arg1);
8595 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8596 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8597 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8598 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8599 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8600 && TREE_CODE (arg1) == INTEGER_CST
8601 && !TREE_OVERFLOW (arg1))
8603 const enum tree_code
8604 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8605 tree const1 = TREE_OPERAND (arg0, 1);
8606 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8607 tree variable = TREE_OPERAND (arg0, 0);
8608 tree new_const = int_const_binop (reverse_op, const2, const1);
8610 /* If the constant operation overflowed this can be
8611 simplified as a comparison against INT_MAX/INT_MIN. */
8612 if (TREE_OVERFLOW (new_const)
8613 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8615 int const1_sgn = tree_int_cst_sgn (const1);
8616 enum tree_code code2 = code;
8618 /* Get the sign of the constant on the lhs if the
8619 operation were VARIABLE + CONST1. */
8620 if (TREE_CODE (arg0) == MINUS_EXPR)
8621 const1_sgn = -const1_sgn;
8623 /* The sign of the constant determines if we overflowed
8624 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8625 Canonicalize to the INT_MIN overflow by swapping the comparison
8627 if (const1_sgn == -1)
8628 code2 = swap_tree_comparison (code);
8630 /* We now can look at the canonicalized case
8631 VARIABLE + 1 CODE2 INT_MIN
8632 and decide on the result. */
8639 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8645 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8654 fold_overflow_warning ("assuming signed overflow does not occur "
8655 "when changing X +- C1 cmp C2 to "
8657 WARN_STRICT_OVERFLOW_COMPARISON);
8658 return fold_build2_loc (loc, code, type, variable, new_const);
8662 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8663 if (TREE_CODE (arg0) == MINUS_EXPR
8665 && integer_zerop (arg1))
8667 /* ??? The transformation is valid for the other operators if overflow
8668 is undefined for the type, but performing it here badly interacts
8669 with the transformation in fold_cond_expr_with_comparison which
8670 attempts to synthetize ABS_EXPR. */
8672 fold_overflow_warning ("assuming signed overflow does not occur "
8673 "when changing X - Y cmp 0 to X cmp Y",
8674 WARN_STRICT_OVERFLOW_COMPARISON);
8675 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8676 TREE_OPERAND (arg0, 1));
8679 /* For comparisons of pointers we can decompose it to a compile time
8680 comparison of the base objects and the offsets into the object.
8681 This requires at least one operand being an ADDR_EXPR or a
8682 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8683 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8684 && (TREE_CODE (arg0) == ADDR_EXPR
8685 || TREE_CODE (arg1) == ADDR_EXPR
8686 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8687 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8689 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8690 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8692 int volatilep, unsignedp;
8693 bool indirect_base0 = false, indirect_base1 = false;
8695 /* Get base and offset for the access. Strip ADDR_EXPR for
8696 get_inner_reference, but put it back by stripping INDIRECT_REF
8697 off the base object if possible. indirect_baseN will be true
8698 if baseN is not an address but refers to the object itself. */
8700 if (TREE_CODE (arg0) == ADDR_EXPR)
8702 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8703 &bitsize, &bitpos0, &offset0, &mode,
8704 &unsignedp, &volatilep, false);
8705 if (TREE_CODE (base0) == INDIRECT_REF)
8706 base0 = TREE_OPERAND (base0, 0);
8708 indirect_base0 = true;
8710 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8712 base0 = TREE_OPERAND (arg0, 0);
8713 STRIP_SIGN_NOPS (base0);
8714 if (TREE_CODE (base0) == ADDR_EXPR)
8716 base0 = TREE_OPERAND (base0, 0);
8717 indirect_base0 = true;
8719 offset0 = TREE_OPERAND (arg0, 1);
8720 if (tree_fits_shwi_p (offset0))
8722 HOST_WIDE_INT off = size_low_cst (offset0);
8723 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8725 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8727 bitpos0 = off * BITS_PER_UNIT;
8728 offset0 = NULL_TREE;
8734 if (TREE_CODE (arg1) == ADDR_EXPR)
8736 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8737 &bitsize, &bitpos1, &offset1, &mode,
8738 &unsignedp, &volatilep, false);
8739 if (TREE_CODE (base1) == INDIRECT_REF)
8740 base1 = TREE_OPERAND (base1, 0);
8742 indirect_base1 = true;
8744 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8746 base1 = TREE_OPERAND (arg1, 0);
8747 STRIP_SIGN_NOPS (base1);
8748 if (TREE_CODE (base1) == ADDR_EXPR)
8750 base1 = TREE_OPERAND (base1, 0);
8751 indirect_base1 = true;
8753 offset1 = TREE_OPERAND (arg1, 1);
8754 if (tree_fits_shwi_p (offset1))
8756 HOST_WIDE_INT off = size_low_cst (offset1);
8757 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8759 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8761 bitpos1 = off * BITS_PER_UNIT;
8762 offset1 = NULL_TREE;
8767 /* A local variable can never be pointed to by
8768 the default SSA name of an incoming parameter. */
8769 if ((TREE_CODE (arg0) == ADDR_EXPR
8771 && TREE_CODE (base0) == VAR_DECL
8772 && auto_var_in_fn_p (base0, current_function_decl)
8774 && TREE_CODE (base1) == SSA_NAME
8775 && SSA_NAME_IS_DEFAULT_DEF (base1)
8776 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8777 || (TREE_CODE (arg1) == ADDR_EXPR
8779 && TREE_CODE (base1) == VAR_DECL
8780 && auto_var_in_fn_p (base1, current_function_decl)
8782 && TREE_CODE (base0) == SSA_NAME
8783 && SSA_NAME_IS_DEFAULT_DEF (base0)
8784 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8786 if (code == NE_EXPR)
8787 return constant_boolean_node (1, type);
8788 else if (code == EQ_EXPR)
8789 return constant_boolean_node (0, type);
8791 /* If we have equivalent bases we might be able to simplify. */
8792 else if (indirect_base0 == indirect_base1
8793 && operand_equal_p (base0, base1, 0))
8795 /* We can fold this expression to a constant if the non-constant
8796 offset parts are equal. */
8797 if ((offset0 == offset1
8798 || (offset0 && offset1
8799 && operand_equal_p (offset0, offset1, 0)))
8802 || (indirect_base0 && DECL_P (base0))
8803 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8807 && bitpos0 != bitpos1
8808 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8809 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8810 fold_overflow_warning (("assuming pointer wraparound does not "
8811 "occur when comparing P +- C1 with "
8813 WARN_STRICT_OVERFLOW_CONDITIONAL);
8818 return constant_boolean_node (bitpos0 == bitpos1, type);
8820 return constant_boolean_node (bitpos0 != bitpos1, type);
8822 return constant_boolean_node (bitpos0 < bitpos1, type);
8824 return constant_boolean_node (bitpos0 <= bitpos1, type);
8826 return constant_boolean_node (bitpos0 >= bitpos1, type);
8828 return constant_boolean_node (bitpos0 > bitpos1, type);
8832 /* We can simplify the comparison to a comparison of the variable
8833 offset parts if the constant offset parts are equal.
8834 Be careful to use signed sizetype here because otherwise we
8835 mess with array offsets in the wrong way. This is possible
8836 because pointer arithmetic is restricted to retain within an
8837 object and overflow on pointer differences is undefined as of
8838 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8839 else if (bitpos0 == bitpos1
8841 || (indirect_base0 && DECL_P (base0))
8842 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8844 /* By converting to signed sizetype we cover middle-end pointer
8845 arithmetic which operates on unsigned pointer types of size
8846 type size and ARRAY_REF offsets which are properly sign or
8847 zero extended from their type in case it is narrower than
8849 if (offset0 == NULL_TREE)
8850 offset0 = build_int_cst (ssizetype, 0);
8852 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8853 if (offset1 == NULL_TREE)
8854 offset1 = build_int_cst (ssizetype, 0);
8856 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8859 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8860 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8861 fold_overflow_warning (("assuming pointer wraparound does not "
8862 "occur when comparing P +- C1 with "
8864 WARN_STRICT_OVERFLOW_COMPARISON);
8866 return fold_build2_loc (loc, code, type, offset0, offset1);
8869 /* For non-equal bases we can simplify if they are addresses
8870 of local binding decls or constants. */
8871 else if (indirect_base0 && indirect_base1
8872 /* We know that !operand_equal_p (base0, base1, 0)
8873 because the if condition was false. But make
8874 sure two decls are not the same. */
8876 && TREE_CODE (arg0) == ADDR_EXPR
8877 && TREE_CODE (arg1) == ADDR_EXPR
8878 && (((TREE_CODE (base0) == VAR_DECL
8879 || TREE_CODE (base0) == PARM_DECL)
8880 && (targetm.binds_local_p (base0)
8881 || CONSTANT_CLASS_P (base1)))
8882 || CONSTANT_CLASS_P (base0))
8883 && (((TREE_CODE (base1) == VAR_DECL
8884 || TREE_CODE (base1) == PARM_DECL)
8885 && (targetm.binds_local_p (base1)
8886 || CONSTANT_CLASS_P (base0)))
8887 || CONSTANT_CLASS_P (base1)))
8889 if (code == EQ_EXPR)
8890 return omit_two_operands_loc (loc, type, boolean_false_node,
8892 else if (code == NE_EXPR)
8893 return omit_two_operands_loc (loc, type, boolean_true_node,
8896 /* For equal offsets we can simplify to a comparison of the
8898 else if (bitpos0 == bitpos1
8900 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8902 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8903 && ((offset0 == offset1)
8904 || (offset0 && offset1
8905 && operand_equal_p (offset0, offset1, 0))))
8908 base0 = build_fold_addr_expr_loc (loc, base0);
8910 base1 = build_fold_addr_expr_loc (loc, base1);
8911 return fold_build2_loc (loc, code, type, base0, base1);
8915 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8916 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8917 the resulting offset is smaller in absolute value than the
8918 original one and has the same sign. */
8919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8920 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8921 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8922 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8923 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8924 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8925 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8927 tree const1 = TREE_OPERAND (arg0, 1);
8928 tree const2 = TREE_OPERAND (arg1, 1);
8929 tree variable1 = TREE_OPERAND (arg0, 0);
8930 tree variable2 = TREE_OPERAND (arg1, 0);
8932 const char * const warnmsg = G_("assuming signed overflow does not "
8933 "occur when combining constants around "
8936 /* Put the constant on the side where it doesn't overflow and is
8937 of lower absolute value and of same sign than before. */
8938 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8939 ? MINUS_EXPR : PLUS_EXPR,
8941 if (!TREE_OVERFLOW (cst)
8942 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8943 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8945 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8946 return fold_build2_loc (loc, code, type,
8948 fold_build2_loc (loc, TREE_CODE (arg1),
8953 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8954 ? MINUS_EXPR : PLUS_EXPR,
8956 if (!TREE_OVERFLOW (cst)
8957 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8958 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8960 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8961 return fold_build2_loc (loc, code, type,
8962 fold_build2_loc (loc, TREE_CODE (arg0),
8969 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8970 signed arithmetic case. That form is created by the compiler
8971 often enough for folding it to be of value. One example is in
8972 computing loop trip counts after Operator Strength Reduction. */
8973 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8974 && TREE_CODE (arg0) == MULT_EXPR
8975 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8976 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8977 && integer_zerop (arg1))
8979 tree const1 = TREE_OPERAND (arg0, 1);
8980 tree const2 = arg1; /* zero */
8981 tree variable1 = TREE_OPERAND (arg0, 0);
8982 enum tree_code cmp_code = code;
8984 /* Handle unfolded multiplication by zero. */
8985 if (integer_zerop (const1))
8986 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8988 fold_overflow_warning (("assuming signed overflow does not occur when "
8989 "eliminating multiplication in comparison "
8991 WARN_STRICT_OVERFLOW_COMPARISON);
8993 /* If const1 is negative we swap the sense of the comparison. */
8994 if (tree_int_cst_sgn (const1) < 0)
8995 cmp_code = swap_tree_comparison (cmp_code);
8997 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9000 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9004 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9006 tree targ0 = strip_float_extensions (arg0);
9007 tree targ1 = strip_float_extensions (arg1);
9008 tree newtype = TREE_TYPE (targ0);
9010 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9011 newtype = TREE_TYPE (targ1);
9013 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9014 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9015 return fold_build2_loc (loc, code, type,
9016 fold_convert_loc (loc, newtype, targ0),
9017 fold_convert_loc (loc, newtype, targ1));
9019 /* (-a) CMP (-b) -> b CMP a */
9020 if (TREE_CODE (arg0) == NEGATE_EXPR
9021 && TREE_CODE (arg1) == NEGATE_EXPR)
9022 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9023 TREE_OPERAND (arg0, 0));
9025 if (TREE_CODE (arg1) == REAL_CST)
9027 REAL_VALUE_TYPE cst;
9028 cst = TREE_REAL_CST (arg1);
9030 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9031 if (TREE_CODE (arg0) == NEGATE_EXPR)
9032 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9033 TREE_OPERAND (arg0, 0),
9034 build_real (TREE_TYPE (arg1),
9035 real_value_negate (&cst)));
9037 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9038 /* a CMP (-0) -> a CMP 0 */
9039 if (REAL_VALUE_MINUS_ZERO (cst))
9040 return fold_build2_loc (loc, code, type, arg0,
9041 build_real (TREE_TYPE (arg1), dconst0));
9043 /* x != NaN is always true, other ops are always false. */
9044 if (REAL_VALUE_ISNAN (cst)
9045 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9047 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9048 return omit_one_operand_loc (loc, type, tem, arg0);
9051 /* Fold comparisons against infinity. */
9052 if (REAL_VALUE_ISINF (cst)
9053 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9055 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9056 if (tem != NULL_TREE)
9061 /* If this is a comparison of a real constant with a PLUS_EXPR
9062 or a MINUS_EXPR of a real constant, we can convert it into a
9063 comparison with a revised real constant as long as no overflow
9064 occurs when unsafe_math_optimizations are enabled. */
9065 if (flag_unsafe_math_optimizations
9066 && TREE_CODE (arg1) == REAL_CST
9067 && (TREE_CODE (arg0) == PLUS_EXPR
9068 || TREE_CODE (arg0) == MINUS_EXPR)
9069 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9070 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9071 ? MINUS_EXPR : PLUS_EXPR,
9072 arg1, TREE_OPERAND (arg0, 1)))
9073 && !TREE_OVERFLOW (tem))
9074 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9076 /* Likewise, we can simplify a comparison of a real constant with
9077 a MINUS_EXPR whose first operand is also a real constant, i.e.
9078 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9079 floating-point types only if -fassociative-math is set. */
9080 if (flag_associative_math
9081 && TREE_CODE (arg1) == REAL_CST
9082 && TREE_CODE (arg0) == MINUS_EXPR
9083 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9084 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9086 && !TREE_OVERFLOW (tem))
9087 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9088 TREE_OPERAND (arg0, 1), tem);
9090 /* Fold comparisons against built-in math functions. */
9091 if (TREE_CODE (arg1) == REAL_CST
9092 && flag_unsafe_math_optimizations
9093 && ! flag_errno_math)
9095 enum built_in_function fcode = builtin_mathfn_code (arg0);
9097 if (fcode != END_BUILTINS)
9099 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9100 if (tem != NULL_TREE)
9106 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9107 && CONVERT_EXPR_P (arg0))
9109 /* If we are widening one operand of an integer comparison,
9110 see if the other operand is similarly being widened. Perhaps we
9111 can do the comparison in the narrower type. */
9112 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9116 /* Or if we are changing signedness. */
9117 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9122 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9123 constant, we can simplify it. */
9124 if (TREE_CODE (arg1) == INTEGER_CST
9125 && (TREE_CODE (arg0) == MIN_EXPR
9126 || TREE_CODE (arg0) == MAX_EXPR)
9127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9129 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9134 /* Simplify comparison of something with itself. (For IEEE
9135 floating-point, we can only do some of these simplifications.) */
9136 if (operand_equal_p (arg0, arg1, 0))
9141 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9142 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9143 return constant_boolean_node (1, type);
9148 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9149 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9150 return constant_boolean_node (1, type);
9151 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9154 /* For NE, we can only do this simplification if integer
9155 or we don't honor IEEE floating point NaNs. */
9156 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9157 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9159 /* ... fall through ... */
9162 return constant_boolean_node (0, type);
9168 /* If we are comparing an expression that just has comparisons
9169 of two integer values, arithmetic expressions of those comparisons,
9170 and constants, we can simplify it. There are only three cases
9171 to check: the two values can either be equal, the first can be
9172 greater, or the second can be greater. Fold the expression for
9173 those three values. Since each value must be 0 or 1, we have
9174 eight possibilities, each of which corresponds to the constant 0
9175 or 1 or one of the six possible comparisons.
9177 This handles common cases like (a > b) == 0 but also handles
9178 expressions like ((x > y) - (y > x)) > 0, which supposedly
9179 occur in macroized code. */
9181 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9183 tree cval1 = 0, cval2 = 0;
9186 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9187 /* Don't handle degenerate cases here; they should already
9188 have been handled anyway. */
9189 && cval1 != 0 && cval2 != 0
9190 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9191 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9192 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9193 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9194 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9195 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9196 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9198 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9199 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9201 /* We can't just pass T to eval_subst in case cval1 or cval2
9202 was the same as ARG1. */
9205 = fold_build2_loc (loc, code, type,
9206 eval_subst (loc, arg0, cval1, maxval,
9210 = fold_build2_loc (loc, code, type,
9211 eval_subst (loc, arg0, cval1, maxval,
9215 = fold_build2_loc (loc, code, type,
9216 eval_subst (loc, arg0, cval1, minval,
9220 /* All three of these results should be 0 or 1. Confirm they are.
9221 Then use those values to select the proper code to use. */
9223 if (TREE_CODE (high_result) == INTEGER_CST
9224 && TREE_CODE (equal_result) == INTEGER_CST
9225 && TREE_CODE (low_result) == INTEGER_CST)
9227 /* Make a 3-bit mask with the high-order bit being the
9228 value for `>', the next for '=', and the low for '<'. */
9229 switch ((integer_onep (high_result) * 4)
9230 + (integer_onep (equal_result) * 2)
9231 + integer_onep (low_result))
9235 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9256 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9261 tem = save_expr (build2 (code, type, cval1, cval2));
9262 SET_EXPR_LOCATION (tem, loc);
9265 return fold_build2_loc (loc, code, type, cval1, cval2);
9270 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9271 into a single range test. */
9272 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9273 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9274 && TREE_CODE (arg1) == INTEGER_CST
9275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9276 && !integer_zerop (TREE_OPERAND (arg0, 1))
9277 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9278 && !TREE_OVERFLOW (arg1))
9280 tem = fold_div_compare (loc, code, type, arg0, arg1);
9281 if (tem != NULL_TREE)
9285 /* Fold ~X op ~Y as Y op X. */
9286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9287 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9289 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9290 return fold_build2_loc (loc, code, type,
9291 fold_convert_loc (loc, cmp_type,
9292 TREE_OPERAND (arg1, 0)),
9293 TREE_OPERAND (arg0, 0));
9296 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9297 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9298 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9300 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9301 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9302 TREE_OPERAND (arg0, 0),
9303 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9304 fold_convert_loc (loc, cmp_type, arg1)));
9311 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9312 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9313 argument EXPR represents the expression "z" of type TYPE.  */
9316 fold_mult_zconjz (location_t loc, tree type, tree expr)
/* ITYPE is the scalar component type of the complex type TYPE.  */
9318 tree itype = TREE_TYPE (type);
9319 tree rpart, ipart, tem;
/* When EXPR is already a COMPLEX_EXPR or COMPLEX_CST, pull the real and
   imaginary parts out directly instead of building REALPART_EXPR /
   IMAGPART_EXPR trees around it.  */
9321 if (TREE_CODE (expr) == COMPLEX_EXPR)
9323 rpart = TREE_OPERAND (expr, 0);
9324 ipart = TREE_OPERAND (expr, 1);
9326 else if (TREE_CODE (expr) == COMPLEX_CST)
9328 rpart = TREE_REALPART (expr);
9329 ipart = TREE_IMAGPART (expr);
/* Otherwise wrap EXPR in a SAVE_EXPR so it is evaluated only once even
   though both parts are extracted from it.  */
9333 expr = save_expr (expr);
9334 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9335 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
/* Each part appears twice in the product below; guard against double
   evaluation of a non-trivial subexpression.  */
9338 rpart = save_expr (rpart);
9339 ipart = save_expr (ipart);
/* z * conj(z) = (r*r + i*i) + 0i: build the real component...  */
9340 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9341 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9342 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
/* ...and pair it with a zero imaginary component.  */
9343 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9344 build_zero_cst (itype));
9348 /* Subroutine of fold_binary.  If P is the value of EXPR, computes
9349 power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9350 guarantees that P and N have the same least significant log2(M) bits.
9351 N is not otherwise constrained.  In particular, N is not normalized to
9352 0 <= N < M as is common.  In general, the precise value of P is unknown.
9353 M is chosen as large as possible such that constant N can be determined.
9355 Returns M and sets *RESIDUE to N.
9357 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9358 account.  This is not always possible due to PR 35705.
9361 static unsigned HOST_WIDE_INT
9362 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9363 bool allow_func_align)
9365 enum tree_code code;
9369 code = TREE_CODE (expr);
/* &object: the modulus is the object's byte alignment and the residue
   is its known byte offset within that alignment.  */
9370 if (code == ADDR_EXPR)
9372 unsigned int bitalign;
/* get_object_alignment_1 reports alignment and misalignment in bits;
   convert both to bytes since pointers count bytes.  */
9373 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9374 *residue /= BITS_PER_UNIT;
9375 return bitalign / BITS_PER_UNIT;
/* base +p offset: start from the base pointer's modulus/residue and
   refine with whatever is known about the offset.  */
9377 else if (code == POINTER_PLUS_EXPR)
9380 unsigned HOST_WIDE_INT modulus;
9381 enum tree_code inner_code;
9383 op0 = TREE_OPERAND (expr, 0);
/* Recurse on the base pointer.  (Remaining arguments of this call are
   elided in this view.)  */
9385 modulus = get_pointer_modulus_and_residue (op0, residue,
9388 op1 = TREE_OPERAND (expr, 1);
9390 inner_code = TREE_CODE (op1);
/* A constant offset shifts the residue but leaves the modulus alone.  */
9391 if (inner_code == INTEGER_CST)
9393 *residue += TREE_INT_CST_LOW (op1);
/* An offset of the form x * CST preserves divisibility by the largest
   power of two dividing CST.  */
9396 else if (inner_code == MULT_EXPR)
9398 op1 = TREE_OPERAND (op1, 1);
9399 if (TREE_CODE (op1) == INTEGER_CST)
9401 unsigned HOST_WIDE_INT align;
9403 /* Compute the greatest power-of-2 divisor of op1. */
9404 align = TREE_INT_CST_LOW (op1);
9407 /* If align is non-zero and less than *modulus, replace
9408 *modulus with align.  If align is 0, then either op1 is 0
9409 or the greatest power-of-2 divisor of op1 doesn't fit in an
9410 unsigned HOST_WIDE_INT.  In either case, no additional
9411 constraint is imposed.  */
9413 modulus = MIN (modulus, align);
9420 /* If we get here, we were unable to determine anything useful about the
9425 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9426 CONSTRUCTOR ARG into array ELTS and return true if successful.  */
9429 vec_cst_ctor_to_array (tree arg, tree *elts)
/* NELTS is the declared element count of ARG's vector type; ELTS must
   have room for at least that many trees.  */
9431 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
/* VECTOR_CST: copy each constant element straight across.  */
9433 if (TREE_CODE (arg) == VECTOR_CST)
9435 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9436 elts[i] = VECTOR_CST_ELT (arg, i);
/* CONSTRUCTOR: copy the initializers, rejecting out-of-range indices
   and nested vector-typed values (fail path elided in this view).  */
9438 else if (TREE_CODE (arg) == CONSTRUCTOR)
9440 constructor_elt *elt;
9442 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9443 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9446 elts[i] = elt->value;
/* Pad any trailing positions a short CONSTRUCTOR left uninitialized
   with zero of the element type.  */
9450 for (; i < nelts; i++)
9452 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9456 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9457 selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9458 NULL_TREE otherwise.  */
9461 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9463 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9465 bool need_ctor = false;
/* Both inputs must have the same element count as the result...  */
9467 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9468 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
/* ...and the same element type; otherwise give up.  */
9469 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9470 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
/* Scratch layout: [0,nelts) = arg0's elements, [nelts,2*nelts) =
   arg1's elements, [2*nelts,3*nelts) = the permuted result.  */
9473 elts = XALLOCAVEC (tree, nelts * 3);
9474 if (!vec_cst_ctor_to_array (arg0, elts)
9475 || !vec_cst_ctor_to_array (arg1, elts + nelts))
/* Gather the selected elements.  SEL[i] indexes the concatenation of
   ARG0 and ARG1; a non-constant selection forces a CONSTRUCTOR result
   (the need_ctor assignment is elided in this view).  */
9478 for (i = 0; i < nelts; i++)
9480 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9482 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
/* Some selected element was not a constant: build a CONSTRUCTOR.  */
9487 vec<constructor_elt, va_gc> *v;
9488 vec_alloc (v, nelts);
9489 for (i = 0; i < nelts; i++)
9490 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9491 return build_constructor (type, v);
/* All elements constant: a VECTOR_CST can represent the result.  */
9494 return build_vector (type, &elts[2 * nelts]);
9497 /* Try to fold a pointer difference of type TYPE two address expressions of
9498 array references AREF0 and AREF1 using location LOC.  Return a
9499 simplified expression for the difference or NULL_TREE.  */
9502 fold_addr_of_array_ref_difference (location_t loc, tree type,
9503 tree aref0, tree aref1)
/* Operand 0 of an ARRAY_REF is the array being indexed.  */
9505 tree base0 = TREE_OPERAND (aref0, 0);
9506 tree base1 = TREE_OPERAND (aref1, 0);
/* Difference contributed by the bases; zero when the bases are equal.  */
9507 tree base_offset = build_int_cst (type, 0);
9509 /* If the bases are array references as well, recurse.  If the bases
9510 are pointer indirections compute the difference of the pointers.
9511 If the bases are equal, we are set.  */
9512 if ((TREE_CODE (base0) == ARRAY_REF
9513 && TREE_CODE (base1) == ARRAY_REF
9515 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9516 || (INDIRECT_REF_P (base0)
9517 && INDIRECT_REF_P (base1)
9518 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9519 TREE_OPERAND (base0, 0),
9520 TREE_OPERAND (base1, 0))))
9521 || operand_equal_p (base0, base1, 0))
/* Operand 1 of an ARRAY_REF is the index; convert the indices and the
   element size to TYPE so the arithmetic below is homogeneous.  */
9523 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9524 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9525 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9526 tree diff = build2 (MINUS_EXPR, type, op0, op1);
/* Result is base_offset + (index0 - index1) * element_size (some
   operands of this expression are elided in this view).  */
9527 return fold_build2_loc (loc, PLUS_EXPR, type,
9529 fold_build2_loc (loc, MULT_EXPR, type,
9535 /* If the real or vector real constant CST of type TYPE has an exact
9536 inverse, return it, else return NULL.  */
9539 exact_inverse (tree type, tree cst)
9542 tree unit_type, *elts;
9544 unsigned vec_nelts, i;
9546 switch (TREE_CODE (cst))
/* Scalar REAL_CST: 1/CST folds exactly iff exact_real_inverse
   succeeds for TYPE's machine mode.  */
9549 r = TREE_REAL_CST (cst);
9551 if (exact_real_inverse (TYPE_MODE (type), &r))
9552 return build_real (type, r);
/* VECTOR_CST: every element must have an exact inverse, otherwise
   fail (the failure return is elided in this view).  */
9557 vec_nelts = VECTOR_CST_NELTS (cst);
9558 elts = XALLOCAVEC (tree, vec_nelts);
9559 unit_type = TREE_TYPE (type);
9560 mode = TYPE_MODE (unit_type);
9562 for (i = 0; i < vec_nelts; i++)
9564 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9565 if (!exact_real_inverse (mode, &r))
9567 elts[i] = build_real (unit_type, r);
/* All elements inverted exactly; rebuild the vector constant.  */
9570 return build_vector (type, elts);
9577 /* Mask out the tz least significant bits of X of type TYPE where
9578 tz is the number of trailing zeroes in Y.  */
9580 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
/* Count Y's trailing zero bits...  */
9582 int tz = wi::ctz (y);
/* ...and clear that many low bits of X: wi::mask with NEGATE_P true
   yields ones above bit TZ, so the AND keeps only X's high part.  */
9584 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9588 /* Return true when T is an address and is known to be nonzero.
9589 For floating point we further ensure that T is not denormal.
9590 Similar logic is present in nonzero_address in rtlanal.h.
9592 If the return value is based on the assumption that signed overflow
9593 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9594 change *STRICT_OVERFLOW_P.  */
9597 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9599 tree type = TREE_TYPE (t);
9600 enum tree_code code;
9602 /* Doing something useful for floating point would need more work.  */
9603 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
/* First dispatch on the broad class of the tree code, delegating to
   the unary/binary/single helpers.  */
9606 code = TREE_CODE (t);
9607 switch (TREE_CODE_CLASS (code))
9610 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9613 case tcc_comparison:
9614 return tree_binary_nonzero_warnv_p (code, type,
9615 TREE_OPERAND (t, 0),
9616 TREE_OPERAND (t, 1),
9619 case tcc_declaration:
9621 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* Specific codes not covered by the class dispatch above.  */
9629 case TRUTH_NOT_EXPR:
9630 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9633 case TRUTH_AND_EXPR:
9635 case TRUTH_XOR_EXPR:
9636 return tree_binary_nonzero_warnv_p (code, type,
9637 TREE_OPERAND (t, 0),
9638 TREE_OPERAND (t, 1),
9646 case WITH_SIZE_EXPR:
9648 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* Presumably COMPOUND_EXPR: its value is operand 1 (case label elided
   in this view).  */
9653 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9657 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
/* Calls: operator new (when null-pointer checks may be deleted and
   -fcheck-new is off), returns_nonnull functions, and alloca all
   yield nonzero pointers.  */
9662 tree fndecl = get_callee_fndecl (t);
9663 if (!fndecl) return false;
9664 if (flag_delete_null_pointer_checks && !flag_check_new
9665 && DECL_IS_OPERATOR_NEW (fndecl)
9666 && !TREE_NOTHROW (fndecl))
9668 if (flag_delete_null_pointer_checks
9669 && lookup_attribute ("returns_nonnull",
9670 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9672 return alloca_call_p (t);
9681 /* Return true when T is an address and is known to be nonzero.
9682 Handle warnings about undefined signed overflow.  */
9685 tree_expr_nonzero_p (tree t)
9687 bool ret, strict_overflow_p;
/* Wrapper around tree_expr_nonzero_warnv_p: if the answer relied on
   signed overflow being undefined, emit the -Wstrict-overflow
   warning here instead of threading the flag to the caller.  */
9689 strict_overflow_p = false;
9690 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9691 if (strict_overflow_p)
9692 fold_overflow_warning (("assuming signed overflow does not occur when "
9693 "determining that expression is always "
9695 WARN_STRICT_OVERFLOW_MISC);
9699 /* Fold a binary expression of code CODE and type TYPE with operands
9700 OP0 and OP1. LOC is the location of the resulting expression.
9701 Return the folded expression if folding is successful. Otherwise,
9702 return NULL_TREE. */
9705 fold_binary_loc (location_t loc,
9706 enum tree_code code, tree type, tree op0, tree op1)
9708 enum tree_code_class kind = TREE_CODE_CLASS (code);
9709 tree arg0, arg1, tem;
9710 tree t1 = NULL_TREE;
9711 bool strict_overflow_p;
9714 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9715 && TREE_CODE_LENGTH (code) == 2
9717 && op1 != NULL_TREE);
9722 /* Strip any conversions that don't change the mode. This is
9723 safe for every expression, except for a comparison expression
9724 because its signedness is derived from its operands. So, in
9725 the latter case, only strip conversions that don't change the
9726 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9729 Note that this is done as an internal manipulation within the
9730 constant folder, in order to find the simplest representation
9731 of the arguments so that their form can be studied. In any
9732 cases, the appropriate type conversions should be put back in
9733 the tree that will get out of the constant folder. */
9735 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9737 STRIP_SIGN_NOPS (arg0);
9738 STRIP_SIGN_NOPS (arg1);
9746 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9747 constant but we can't do arithmetic on them. */
9748 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9749 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9750 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9751 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9752 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9753 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9754 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9756 if (kind == tcc_binary)
9758 /* Make sure type and arg0 have the same saturating flag. */
9759 gcc_assert (TYPE_SATURATING (type)
9760 == TYPE_SATURATING (TREE_TYPE (arg0)));
9761 tem = const_binop (code, arg0, arg1);
9763 else if (kind == tcc_comparison)
9764 tem = fold_relational_const (code, type, arg0, arg1);
9768 if (tem != NULL_TREE)
9770 if (TREE_TYPE (tem) != type)
9771 tem = fold_convert_loc (loc, type, tem);
9776 /* If this is a commutative operation, and ARG0 is a constant, move it
9777 to ARG1 to reduce the number of tests below. */
9778 if (commutative_tree_code (code)
9779 && tree_swap_operands_p (arg0, arg1, true))
9780 return fold_build2_loc (loc, code, type, op1, op0);
9782 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9783 to ARG1 to reduce the number of tests below. */
9784 if (kind == tcc_comparison
9785 && tree_swap_operands_p (arg0, arg1, true))
9786 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9788 tem = generic_simplify (loc, code, type, op0, op1);
9792 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9794 First check for cases where an arithmetic operation is applied to a
9795 compound, conditional, or comparison operation. Push the arithmetic
9796 operation inside the compound or conditional to see if any folding
9797 can then be done. Convert comparison to conditional for this purpose.
9798 The also optimizes non-constant cases that used to be done in
9801 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9802 one of the operands is a comparison and the other is a comparison, a
9803 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9804 code below would make the expression more complex. Change it to a
9805 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9806 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9808 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9809 || code == EQ_EXPR || code == NE_EXPR)
9810 && TREE_CODE (type) != VECTOR_TYPE
9811 && ((truth_value_p (TREE_CODE (arg0))
9812 && (truth_value_p (TREE_CODE (arg1))
9813 || (TREE_CODE (arg1) == BIT_AND_EXPR
9814 && integer_onep (TREE_OPERAND (arg1, 1)))))
9815 || (truth_value_p (TREE_CODE (arg1))
9816 && (truth_value_p (TREE_CODE (arg0))
9817 || (TREE_CODE (arg0) == BIT_AND_EXPR
9818 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9820 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9821 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9824 fold_convert_loc (loc, boolean_type_node, arg0),
9825 fold_convert_loc (loc, boolean_type_node, arg1));
9827 if (code == EQ_EXPR)
9828 tem = invert_truthvalue_loc (loc, tem);
9830 return fold_convert_loc (loc, type, tem);
9833 if (TREE_CODE_CLASS (code) == tcc_binary
9834 || TREE_CODE_CLASS (code) == tcc_comparison)
9836 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9838 tem = fold_build2_loc (loc, code, type,
9839 fold_convert_loc (loc, TREE_TYPE (op0),
9840 TREE_OPERAND (arg0, 1)), op1);
9841 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9844 if (TREE_CODE (arg1) == COMPOUND_EXPR
9845 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9847 tem = fold_build2_loc (loc, code, type, op0,
9848 fold_convert_loc (loc, TREE_TYPE (op1),
9849 TREE_OPERAND (arg1, 1)));
9850 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9854 if (TREE_CODE (arg0) == COND_EXPR
9855 || TREE_CODE (arg0) == VEC_COND_EXPR
9856 || COMPARISON_CLASS_P (arg0))
9858 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9860 /*cond_first_p=*/1);
9861 if (tem != NULL_TREE)
9865 if (TREE_CODE (arg1) == COND_EXPR
9866 || TREE_CODE (arg1) == VEC_COND_EXPR
9867 || COMPARISON_CLASS_P (arg1))
9869 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9871 /*cond_first_p=*/0);
9872 if (tem != NULL_TREE)
9880 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9881 if (TREE_CODE (arg0) == ADDR_EXPR
9882 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9884 tree iref = TREE_OPERAND (arg0, 0);
9885 return fold_build2 (MEM_REF, type,
9886 TREE_OPERAND (iref, 0),
9887 int_const_binop (PLUS_EXPR, arg1,
9888 TREE_OPERAND (iref, 1)));
9891 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9892 if (TREE_CODE (arg0) == ADDR_EXPR
9893 && handled_component_p (TREE_OPERAND (arg0, 0)))
9896 HOST_WIDE_INT coffset;
9897 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9901 return fold_build2 (MEM_REF, type,
9902 build_fold_addr_expr (base),
9903 int_const_binop (PLUS_EXPR, arg1,
9904 size_int (coffset)));
9909 case POINTER_PLUS_EXPR:
9910 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9911 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9912 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9913 return fold_convert_loc (loc, type,
9914 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9915 fold_convert_loc (loc, sizetype,
9917 fold_convert_loc (loc, sizetype,
9920 /* PTR_CST +p CST -> CST1 */
9921 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9922 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9923 fold_convert_loc (loc, type, arg1));
9928 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9930 /* X + (X / CST) * -CST is X % CST. */
9931 if (TREE_CODE (arg1) == MULT_EXPR
9932 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9933 && operand_equal_p (arg0,
9934 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9936 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9937 tree cst1 = TREE_OPERAND (arg1, 1);
9938 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9940 if (sum && integer_zerop (sum))
9941 return fold_convert_loc (loc, type,
9942 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9943 TREE_TYPE (arg0), arg0,
9948 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9949 one. Make sure the type is not saturating and has the signedness of
9950 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9951 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9952 if ((TREE_CODE (arg0) == MULT_EXPR
9953 || TREE_CODE (arg1) == MULT_EXPR)
9954 && !TYPE_SATURATING (type)
9955 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9956 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9957 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9959 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9964 if (! FLOAT_TYPE_P (type))
9966 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9967 with a constant, and the two constants have no bits in common,
9968 we should treat this as a BIT_IOR_EXPR since this may produce more
9970 if (TREE_CODE (arg0) == BIT_AND_EXPR
9971 && TREE_CODE (arg1) == BIT_AND_EXPR
9972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9973 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9974 && wi::bit_and (TREE_OPERAND (arg0, 1),
9975 TREE_OPERAND (arg1, 1)) == 0)
9977 code = BIT_IOR_EXPR;
9981 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9982 (plus (plus (mult) (mult)) (foo)) so that we can
9983 take advantage of the factoring cases below. */
9984 if (TYPE_OVERFLOW_WRAPS (type)
9985 && (((TREE_CODE (arg0) == PLUS_EXPR
9986 || TREE_CODE (arg0) == MINUS_EXPR)
9987 && TREE_CODE (arg1) == MULT_EXPR)
9988 || ((TREE_CODE (arg1) == PLUS_EXPR
9989 || TREE_CODE (arg1) == MINUS_EXPR)
9990 && TREE_CODE (arg0) == MULT_EXPR)))
9992 tree parg0, parg1, parg, marg;
9993 enum tree_code pcode;
9995 if (TREE_CODE (arg1) == MULT_EXPR)
9996 parg = arg0, marg = arg1;
9998 parg = arg1, marg = arg0;
9999 pcode = TREE_CODE (parg);
10000 parg0 = TREE_OPERAND (parg, 0);
10001 parg1 = TREE_OPERAND (parg, 1);
10002 STRIP_NOPS (parg0);
10003 STRIP_NOPS (parg1);
10005 if (TREE_CODE (parg0) == MULT_EXPR
10006 && TREE_CODE (parg1) != MULT_EXPR)
10007 return fold_build2_loc (loc, pcode, type,
10008 fold_build2_loc (loc, PLUS_EXPR, type,
10009 fold_convert_loc (loc, type,
10011 fold_convert_loc (loc, type,
10013 fold_convert_loc (loc, type, parg1));
10014 if (TREE_CODE (parg0) != MULT_EXPR
10015 && TREE_CODE (parg1) == MULT_EXPR)
10017 fold_build2_loc (loc, PLUS_EXPR, type,
10018 fold_convert_loc (loc, type, parg0),
10019 fold_build2_loc (loc, pcode, type,
10020 fold_convert_loc (loc, type, marg),
10021 fold_convert_loc (loc, type,
10027 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10028 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10029 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10031 /* Likewise if the operands are reversed. */
10032 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10033 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10035 /* Convert X + -C into X - C. */
10036 if (TREE_CODE (arg1) == REAL_CST
10037 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10039 tem = fold_negate_const (arg1, type);
10040 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10041 return fold_build2_loc (loc, MINUS_EXPR, type,
10042 fold_convert_loc (loc, type, arg0),
10043 fold_convert_loc (loc, type, tem));
10046 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10047 to __complex__ ( x, y ). This is not the same for SNaNs or
10048 if signed zeros are involved. */
10049 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10050 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10051 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10053 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10054 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10055 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10056 bool arg0rz = false, arg0iz = false;
10057 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10058 || (arg0i && (arg0iz = real_zerop (arg0i))))
10060 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10061 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10062 if (arg0rz && arg1i && real_zerop (arg1i))
10064 tree rp = arg1r ? arg1r
10065 : build1 (REALPART_EXPR, rtype, arg1);
10066 tree ip = arg0i ? arg0i
10067 : build1 (IMAGPART_EXPR, rtype, arg0);
10068 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10070 else if (arg0iz && arg1r && real_zerop (arg1r))
10072 tree rp = arg0r ? arg0r
10073 : build1 (REALPART_EXPR, rtype, arg0);
10074 tree ip = arg1i ? arg1i
10075 : build1 (IMAGPART_EXPR, rtype, arg1);
10076 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10081 if (flag_unsafe_math_optimizations
10082 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10083 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10084 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10087 /* Convert x+x into x*2.0. */
10088 if (operand_equal_p (arg0, arg1, 0)
10089 && SCALAR_FLOAT_TYPE_P (type))
10090 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10091 build_real (type, dconst2));
10093 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10094 We associate floats only if the user has specified
10095 -fassociative-math. */
10096 if (flag_associative_math
10097 && TREE_CODE (arg1) == PLUS_EXPR
10098 && TREE_CODE (arg0) != MULT_EXPR)
10100 tree tree10 = TREE_OPERAND (arg1, 0);
10101 tree tree11 = TREE_OPERAND (arg1, 1);
10102 if (TREE_CODE (tree11) == MULT_EXPR
10103 && TREE_CODE (tree10) == MULT_EXPR)
10106 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10107 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10110 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10111 We associate floats only if the user has specified
10112 -fassociative-math. */
10113 if (flag_associative_math
10114 && TREE_CODE (arg0) == PLUS_EXPR
10115 && TREE_CODE (arg1) != MULT_EXPR)
10117 tree tree00 = TREE_OPERAND (arg0, 0);
10118 tree tree01 = TREE_OPERAND (arg0, 1);
10119 if (TREE_CODE (tree01) == MULT_EXPR
10120 && TREE_CODE (tree00) == MULT_EXPR)
10123 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10124 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10130 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10131 is a rotate of A by C1 bits. */
10132 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10133 is a rotate of A by B bits. */
10135 enum tree_code code0, code1;
10137 code0 = TREE_CODE (arg0);
10138 code1 = TREE_CODE (arg1);
10139 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10140 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10141 && operand_equal_p (TREE_OPERAND (arg0, 0),
10142 TREE_OPERAND (arg1, 0), 0)
10143 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10144 TYPE_UNSIGNED (rtype))
10145 /* Only create rotates in complete modes. Other cases are not
10146 expanded properly. */
10147 && (element_precision (rtype)
10148 == element_precision (TYPE_MODE (rtype))))
10150 tree tree01, tree11;
10151 enum tree_code code01, code11;
10153 tree01 = TREE_OPERAND (arg0, 1);
10154 tree11 = TREE_OPERAND (arg1, 1);
10155 STRIP_NOPS (tree01);
10156 STRIP_NOPS (tree11);
10157 code01 = TREE_CODE (tree01);
10158 code11 = TREE_CODE (tree11);
10159 if (code01 == INTEGER_CST
10160 && code11 == INTEGER_CST
10161 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10162 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10164 tem = build2_loc (loc, LROTATE_EXPR,
10165 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10166 TREE_OPERAND (arg0, 0),
10167 code0 == LSHIFT_EXPR ? tree01 : tree11);
10168 return fold_convert_loc (loc, type, tem);
10170 else if (code11 == MINUS_EXPR)
10172 tree tree110, tree111;
10173 tree110 = TREE_OPERAND (tree11, 0);
10174 tree111 = TREE_OPERAND (tree11, 1);
10175 STRIP_NOPS (tree110);
10176 STRIP_NOPS (tree111);
10177 if (TREE_CODE (tree110) == INTEGER_CST
10178 && 0 == compare_tree_int (tree110,
10180 (TREE_TYPE (TREE_OPERAND
10182 && operand_equal_p (tree01, tree111, 0))
10184 fold_convert_loc (loc, type,
10185 build2 ((code0 == LSHIFT_EXPR
10188 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10189 TREE_OPERAND (arg0, 0), tree01));
10191 else if (code01 == MINUS_EXPR)
10193 tree tree010, tree011;
10194 tree010 = TREE_OPERAND (tree01, 0);
10195 tree011 = TREE_OPERAND (tree01, 1);
10196 STRIP_NOPS (tree010);
10197 STRIP_NOPS (tree011);
10198 if (TREE_CODE (tree010) == INTEGER_CST
10199 && 0 == compare_tree_int (tree010,
10201 (TREE_TYPE (TREE_OPERAND
10203 && operand_equal_p (tree11, tree011, 0))
10204 return fold_convert_loc
10206 build2 ((code0 != LSHIFT_EXPR
10209 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10210 TREE_OPERAND (arg0, 0), tree11));
10216 /* In most languages, can't associate operations on floats through
10217 parentheses. Rather than remember where the parentheses were, we
10218 don't associate floats at all, unless the user has specified
10219 -fassociative-math.
10220 And, we need to make sure type is not saturating. */
10222 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10223 && !TYPE_SATURATING (type))
10225 tree var0, con0, lit0, minus_lit0;
10226 tree var1, con1, lit1, minus_lit1;
10230 /* Split both trees into variables, constants, and literals. Then
10231 associate each group together, the constants with literals,
10232 then the result with variables. This increases the chances of
10233 literals being recombined later and of generating relocatable
10234 expressions for the sum of a constant and literal. */
10235 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10236 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10237 code == MINUS_EXPR);
10239 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10240 if (code == MINUS_EXPR)
10243 /* With undefined overflow prefer doing association in a type
10244 which wraps on overflow, if that is one of the operand types. */
10245 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10246 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10248 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10249 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10250 atype = TREE_TYPE (arg0);
10251 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10252 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10253 atype = TREE_TYPE (arg1);
10254 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10257 /* With undefined overflow we can only associate constants with one
10258 variable, and constants whose association doesn't overflow. */
10259 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10260 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10267 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10268 tmp0 = TREE_OPERAND (tmp0, 0);
10269 if (CONVERT_EXPR_P (tmp0)
10270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10271 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10272 <= TYPE_PRECISION (atype)))
10273 tmp0 = TREE_OPERAND (tmp0, 0);
10274 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10275 tmp1 = TREE_OPERAND (tmp1, 0);
10276 if (CONVERT_EXPR_P (tmp1)
10277 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10278 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10279 <= TYPE_PRECISION (atype)))
10280 tmp1 = TREE_OPERAND (tmp1, 0);
10281 /* The only case we can still associate with two variables
10282 is if they are the same, modulo negation and bit-pattern
10283 preserving conversions. */
10284 if (!operand_equal_p (tmp0, tmp1, 0))
10289 /* Only do something if we found more than two objects. Otherwise,
10290 nothing has changed and we risk infinite recursion. */
10292 && (2 < ((var0 != 0) + (var1 != 0)
10293 + (con0 != 0) + (con1 != 0)
10294 + (lit0 != 0) + (lit1 != 0)
10295 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10297 bool any_overflows = false;
10298 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10299 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10300 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10301 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10302 var0 = associate_trees (loc, var0, var1, code, atype);
10303 con0 = associate_trees (loc, con0, con1, code, atype);
10304 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10305 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10308 /* Preserve the MINUS_EXPR if the negative part of the literal is
10309 greater than the positive part. Otherwise, the multiplicative
10310 folding code (i.e extract_muldiv) may be fooled in case
10311 unsigned constants are subtracted, like in the following
10312 example: ((X*2 + 4) - 8U)/2. */
10313 if (minus_lit0 && lit0)
10315 if (TREE_CODE (lit0) == INTEGER_CST
10316 && TREE_CODE (minus_lit0) == INTEGER_CST
10317 && tree_int_cst_lt (lit0, minus_lit0))
10319 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10320 MINUS_EXPR, atype);
10325 lit0 = associate_trees (loc, lit0, minus_lit0,
10326 MINUS_EXPR, atype);
10331 /* Don't introduce overflows through reassociation. */
10333 && ((lit0 && TREE_OVERFLOW (lit0))
10334 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10341 fold_convert_loc (loc, type,
10342 associate_trees (loc, var0, minus_lit0,
10343 MINUS_EXPR, atype));
10346 con0 = associate_trees (loc, con0, minus_lit0,
10347 MINUS_EXPR, atype);
10349 fold_convert_loc (loc, type,
10350 associate_trees (loc, var0, con0,
10351 PLUS_EXPR, atype));
10355 con0 = associate_trees (loc, con0, lit0, code, atype);
10357 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10365 /* Pointer simplifications for subtraction, simple reassociations. */
10366 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10368 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10369 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10370 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10372 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10373 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10374 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10375 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10376 return fold_build2_loc (loc, PLUS_EXPR, type,
10377 fold_build2_loc (loc, MINUS_EXPR, type,
10379 fold_build2_loc (loc, MINUS_EXPR, type,
10382 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10383 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10385 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10386 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10387 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10388 fold_convert_loc (loc, type, arg1));
10390 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10392 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10394 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10396 tree arg10 = fold_convert_loc (loc, type,
10397 TREE_OPERAND (arg1, 0));
10398 tree arg11 = fold_convert_loc (loc, type,
10399 TREE_OPERAND (arg1, 1));
10400 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10401 fold_convert_loc (loc, type, arg0),
10404 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10407 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10408 if (TREE_CODE (arg0) == NEGATE_EXPR
10409 && negate_expr_p (arg1)
10410 && reorder_operands_p (arg0, arg1))
10411 return fold_build2_loc (loc, MINUS_EXPR, type,
10412 fold_convert_loc (loc, type,
10413 negate_expr (arg1)),
10414 fold_convert_loc (loc, type,
10415 TREE_OPERAND (arg0, 0)));
10416 /* Convert -A - 1 to ~A. */
10417 if (TREE_CODE (arg0) == NEGATE_EXPR
10418 && integer_each_onep (arg1)
10419 && !TYPE_OVERFLOW_TRAPS (type))
10420 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10421 fold_convert_loc (loc, type,
10422 TREE_OPERAND (arg0, 0)));
10424 /* Convert -1 - A to ~A. */
10425 if (TREE_CODE (type) != COMPLEX_TYPE
10426 && integer_all_onesp (arg0))
10427 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10430 /* X - (X / Y) * Y is X % Y. */
10431 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10432 && TREE_CODE (arg1) == MULT_EXPR
10433 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10434 && operand_equal_p (arg0,
10435 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10436 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10437 TREE_OPERAND (arg1, 1), 0))
10439 fold_convert_loc (loc, type,
10440 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10441 arg0, TREE_OPERAND (arg1, 1)));
10443 if (! FLOAT_TYPE_P (type))
10445 if (integer_zerop (arg0))
10446 return negate_expr (fold_convert_loc (loc, type, arg1));
10448 /* Fold A - (A & B) into ~B & A. */
10449 if (!TREE_SIDE_EFFECTS (arg0)
10450 && TREE_CODE (arg1) == BIT_AND_EXPR)
10452 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10454 tree arg10 = fold_convert_loc (loc, type,
10455 TREE_OPERAND (arg1, 0));
10456 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10457 fold_build1_loc (loc, BIT_NOT_EXPR,
10459 fold_convert_loc (loc, type, arg0));
10461 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10463 tree arg11 = fold_convert_loc (loc,
10464 type, TREE_OPERAND (arg1, 1));
10465 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10466 fold_build1_loc (loc, BIT_NOT_EXPR,
10468 fold_convert_loc (loc, type, arg0));
10472 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10473 any power of 2 minus 1. */
10474 if (TREE_CODE (arg0) == BIT_AND_EXPR
10475 && TREE_CODE (arg1) == BIT_AND_EXPR
10476 && operand_equal_p (TREE_OPERAND (arg0, 0),
10477 TREE_OPERAND (arg1, 0), 0))
10479 tree mask0 = TREE_OPERAND (arg0, 1);
10480 tree mask1 = TREE_OPERAND (arg1, 1);
10481 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10483 if (operand_equal_p (tem, mask1, 0))
10485 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10486 TREE_OPERAND (arg0, 0), mask1);
10487 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10492 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10493 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10496 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10497 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10498 (-ARG1 + ARG0) reduces to -ARG1. */
10499 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10500 return negate_expr (fold_convert_loc (loc, type, arg1));
10502 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10503 __complex__ ( x, -y ). This is not the same for SNaNs or if
10504 signed zeros are involved. */
10505 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10506 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10507 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10509 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10510 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10511 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10512 bool arg0rz = false, arg0iz = false;
10513 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10514 || (arg0i && (arg0iz = real_zerop (arg0i))))
10516 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10517 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10518 if (arg0rz && arg1i && real_zerop (arg1i))
10520 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10522 : build1 (REALPART_EXPR, rtype, arg1));
10523 tree ip = arg0i ? arg0i
10524 : build1 (IMAGPART_EXPR, rtype, arg0);
10525 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10527 else if (arg0iz && arg1r && real_zerop (arg1r))
10529 tree rp = arg0r ? arg0r
10530 : build1 (REALPART_EXPR, rtype, arg0);
10531 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10533 : build1 (IMAGPART_EXPR, rtype, arg1));
10534 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10539 /* A - B -> A + (-B) if B is easily negatable. */
10540 if (negate_expr_p (arg1)
10541 && ((FLOAT_TYPE_P (type)
10542 /* Avoid this transformation if B is a positive REAL_CST. */
10543 && (TREE_CODE (arg1) != REAL_CST
10544 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10545 || INTEGRAL_TYPE_P (type)))
10546 return fold_build2_loc (loc, PLUS_EXPR, type,
10547 fold_convert_loc (loc, type, arg0),
10548 fold_convert_loc (loc, type,
10549 negate_expr (arg1)));
10551 /* Try folding difference of addresses. */
10553 HOST_WIDE_INT diff;
10555 if ((TREE_CODE (arg0) == ADDR_EXPR
10556 || TREE_CODE (arg1) == ADDR_EXPR)
10557 && ptr_difference_const (arg0, arg1, &diff))
10558 return build_int_cst_type (type, diff);
10561 /* Fold &a[i] - &a[j] to i-j. */
10562 if (TREE_CODE (arg0) == ADDR_EXPR
10563 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10564 && TREE_CODE (arg1) == ADDR_EXPR
10565 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10567 tree tem = fold_addr_of_array_ref_difference (loc, type,
10568 TREE_OPERAND (arg0, 0),
10569 TREE_OPERAND (arg1, 0));
10574 if (FLOAT_TYPE_P (type)
10575 && flag_unsafe_math_optimizations
10576 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10577 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10578 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10581 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10582 one. Make sure the type is not saturating and has the signedness of
10583 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10584 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10585 if ((TREE_CODE (arg0) == MULT_EXPR
10586 || TREE_CODE (arg1) == MULT_EXPR)
10587 && !TYPE_SATURATING (type)
10588 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10589 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10590 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10592 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10600 /* (-A) * (-B) -> A * B */
10601 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10602 return fold_build2_loc (loc, MULT_EXPR, type,
10603 fold_convert_loc (loc, type,
10604 TREE_OPERAND (arg0, 0)),
10605 fold_convert_loc (loc, type,
10606 negate_expr (arg1)));
10607 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10608 return fold_build2_loc (loc, MULT_EXPR, type,
10609 fold_convert_loc (loc, type,
10610 negate_expr (arg0)),
10611 fold_convert_loc (loc, type,
10612 TREE_OPERAND (arg1, 0)));
10614 if (! FLOAT_TYPE_P (type))
10616 /* Transform x * -1 into -x. Make sure to do the negation
10617 on the original operand with conversions not stripped
10618 because we can only strip non-sign-changing conversions. */
10619 if (integer_minus_onep (arg1))
10620 return fold_convert_loc (loc, type, negate_expr (op0));
10621 /* Transform x * -C into -x * C if x is easily negatable. */
10622 if (TREE_CODE (arg1) == INTEGER_CST
10623 && tree_int_cst_sgn (arg1) == -1
10624 && negate_expr_p (arg0)
10625 && (tem = negate_expr (arg1)) != arg1
10626 && !TREE_OVERFLOW (tem))
10627 return fold_build2_loc (loc, MULT_EXPR, type,
10628 fold_convert_loc (loc, type,
10629 negate_expr (arg0)),
10632 /* (a * (1 << b)) is (a << b) */
10633 if (TREE_CODE (arg1) == LSHIFT_EXPR
10634 && integer_onep (TREE_OPERAND (arg1, 0)))
10635 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10636 TREE_OPERAND (arg1, 1));
10637 if (TREE_CODE (arg0) == LSHIFT_EXPR
10638 && integer_onep (TREE_OPERAND (arg0, 0)))
10639 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10640 TREE_OPERAND (arg0, 1));
10642 /* (A + A) * C -> A * 2 * C */
10643 if (TREE_CODE (arg0) == PLUS_EXPR
10644 && TREE_CODE (arg1) == INTEGER_CST
10645 && operand_equal_p (TREE_OPERAND (arg0, 0),
10646 TREE_OPERAND (arg0, 1), 0))
10647 return fold_build2_loc (loc, MULT_EXPR, type,
10648 omit_one_operand_loc (loc, type,
10649 TREE_OPERAND (arg0, 0),
10650 TREE_OPERAND (arg0, 1)),
10651 fold_build2_loc (loc, MULT_EXPR, type,
10652 build_int_cst (type, 2) , arg1));
10654 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10655 sign-changing only. */
10656 if (TREE_CODE (arg1) == INTEGER_CST
10657 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10658 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10659 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10661 strict_overflow_p = false;
10662 if (TREE_CODE (arg1) == INTEGER_CST
10663 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10664 &strict_overflow_p)))
10666 if (strict_overflow_p)
10667 fold_overflow_warning (("assuming signed overflow does not "
10668 "occur when simplifying "
10670 WARN_STRICT_OVERFLOW_MISC);
10671 return fold_convert_loc (loc, type, tem);
10674 /* Optimize z * conj(z) for integer complex numbers. */
10675 if (TREE_CODE (arg0) == CONJ_EXPR
10676 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10677 return fold_mult_zconjz (loc, type, arg1);
10678 if (TREE_CODE (arg1) == CONJ_EXPR
10679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10680 return fold_mult_zconjz (loc, type, arg0);
10684 /* Maybe fold x * 0 to 0. The expressions aren't the same
10685 when x is NaN, since x * 0 is also NaN. Nor are they the
10686 same in modes with signed zeros, since multiplying a
10687 negative value by 0 gives -0, not +0. */
10688 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10689 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10690 && real_zerop (arg1))
10691 return omit_one_operand_loc (loc, type, arg1, arg0);
10692 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10693 Likewise for complex arithmetic with signed zeros. */
10694 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10695 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10696 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10697 && real_onep (arg1))
10698 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10700 /* Transform x * -1.0 into -x. */
10701 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10702 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10703 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10704 && real_minus_onep (arg1))
10705 return fold_convert_loc (loc, type, negate_expr (arg0));
10707 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10708 the result for floating point types due to rounding so it is applied
10709 only if -fassociative-math was specified. */
10710 if (flag_associative_math
10711 && TREE_CODE (arg0) == RDIV_EXPR
10712 && TREE_CODE (arg1) == REAL_CST
10713 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10715 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10718 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10719 TREE_OPERAND (arg0, 1));
10722 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10723 if (operand_equal_p (arg0, arg1, 0))
10725 tree tem = fold_strip_sign_ops (arg0);
10726 if (tem != NULL_TREE)
10728 tem = fold_convert_loc (loc, type, tem);
10729 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10733 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10734 This is not the same for NaNs or if signed zeros are
10736 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10737 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10738 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10739 && TREE_CODE (arg1) == COMPLEX_CST
10740 && real_zerop (TREE_REALPART (arg1)))
10742 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10743 if (real_onep (TREE_IMAGPART (arg1)))
10745 fold_build2_loc (loc, COMPLEX_EXPR, type,
10746 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10748 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10749 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10751 fold_build2_loc (loc, COMPLEX_EXPR, type,
10752 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10753 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10757 /* Optimize z * conj(z) for floating point complex numbers.
10758 Guarded by flag_unsafe_math_optimizations as non-finite
10759 imaginary components don't produce scalar results. */
10760 if (flag_unsafe_math_optimizations
10761 && TREE_CODE (arg0) == CONJ_EXPR
10762 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10763 return fold_mult_zconjz (loc, type, arg1);
10764 if (flag_unsafe_math_optimizations
10765 && TREE_CODE (arg1) == CONJ_EXPR
10766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10767 return fold_mult_zconjz (loc, type, arg0);
10769 if (flag_unsafe_math_optimizations)
10771 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10772 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10774 /* Optimizations of root(...)*root(...). */
10775 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10778 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10779 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10781 /* Optimize sqrt(x)*sqrt(x) as x. */
10782 if (BUILTIN_SQRT_P (fcode0)
10783 && operand_equal_p (arg00, arg10, 0)
10784 && ! HONOR_SNANS (TYPE_MODE (type)))
10787 /* Optimize root(x)*root(y) as root(x*y). */
10788 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10789 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10790 return build_call_expr_loc (loc, rootfn, 1, arg);
10793 /* Optimize expN(x)*expN(y) as expN(x+y). */
10794 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10796 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10797 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10798 CALL_EXPR_ARG (arg0, 0),
10799 CALL_EXPR_ARG (arg1, 0));
10800 return build_call_expr_loc (loc, expfn, 1, arg);
10803 /* Optimizations of pow(...)*pow(...). */
10804 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10805 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10806 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10808 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10809 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10810 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10811 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10813 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10814 if (operand_equal_p (arg01, arg11, 0))
10816 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10817 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10819 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10822 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10823 if (operand_equal_p (arg00, arg10, 0))
10825 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10826 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10828 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10832 /* Optimize tan(x)*cos(x) as sin(x). */
10833 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10834 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10835 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10836 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10837 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10838 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10839 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10840 CALL_EXPR_ARG (arg1, 0), 0))
10842 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10844 if (sinfn != NULL_TREE)
10845 return build_call_expr_loc (loc, sinfn, 1,
10846 CALL_EXPR_ARG (arg0, 0));
10849 /* Optimize x*pow(x,c) as pow(x,c+1). */
10850 if (fcode1 == BUILT_IN_POW
10851 || fcode1 == BUILT_IN_POWF
10852 || fcode1 == BUILT_IN_POWL)
10854 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10855 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10856 if (TREE_CODE (arg11) == REAL_CST
10857 && !TREE_OVERFLOW (arg11)
10858 && operand_equal_p (arg0, arg10, 0))
10860 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10864 c = TREE_REAL_CST (arg11);
10865 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10866 arg = build_real (type, c);
10867 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10871 /* Optimize pow(x,c)*x as pow(x,c+1). */
10872 if (fcode0 == BUILT_IN_POW
10873 || fcode0 == BUILT_IN_POWF
10874 || fcode0 == BUILT_IN_POWL)
10876 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10877 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10878 if (TREE_CODE (arg01) == REAL_CST
10879 && !TREE_OVERFLOW (arg01)
10880 && operand_equal_p (arg1, arg00, 0))
10882 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10886 c = TREE_REAL_CST (arg01);
10887 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10888 arg = build_real (type, c);
10889 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10893 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10894 if (!in_gimple_form
10896 && operand_equal_p (arg0, arg1, 0))
10898 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10902 tree arg = build_real (type, dconst2);
10903 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10912 /* ~X | X is -1. */
10913 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10914 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10916 t1 = build_zero_cst (type);
10917 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10918 return omit_one_operand_loc (loc, type, t1, arg1);
10921 /* X | ~X is -1. */
10922 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10923 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10925 t1 = build_zero_cst (type);
10926 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10927 return omit_one_operand_loc (loc, type, t1, arg0);
10930 /* Canonicalize (X & C1) | C2. */
10931 if (TREE_CODE (arg0) == BIT_AND_EXPR
10932 && TREE_CODE (arg1) == INTEGER_CST
10933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10935 int width = TYPE_PRECISION (type), w;
10936 wide_int c1 = TREE_OPERAND (arg0, 1);
10937 wide_int c2 = arg1;
10939 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10940 if ((c1 & c2) == c1)
10941 return omit_one_operand_loc (loc, type, arg1,
10942 TREE_OPERAND (arg0, 0));
10944 wide_int msk = wi::mask (width, false,
10945 TYPE_PRECISION (TREE_TYPE (arg1)));
10947 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10948 if (msk.and_not (c1 | c2) == 0)
10949 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10950 TREE_OPERAND (arg0, 0), arg1);
10952 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10953 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10954 mode which allows further optimizations. */
10957 wide_int c3 = c1.and_not (c2);
10958 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10960 wide_int mask = wi::mask (w, false,
10961 TYPE_PRECISION (type));
10962 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10970 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10971 fold_build2_loc (loc, BIT_AND_EXPR, type,
10972 TREE_OPERAND (arg0, 0),
10973 wide_int_to_tree (type,
10978 /* (X & ~Y) | (~X & Y) is X ^ Y */
10979 if (TREE_CODE (arg0) == BIT_AND_EXPR
10980 && TREE_CODE (arg1) == BIT_AND_EXPR)
10982 tree a0, a1, l0, l1, n0, n1;
10984 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10985 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10987 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10988 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10990 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10991 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10993 if ((operand_equal_p (n0, a0, 0)
10994 && operand_equal_p (n1, a1, 0))
10995 || (operand_equal_p (n0, a1, 0)
10996 && operand_equal_p (n1, a0, 0)))
10997 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11000 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11001 if (t1 != NULL_TREE)
11004 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11006 This results in more efficient code for machines without a NAND
11007 instruction. Combine will canonicalize to the first form
11008 which will allow use of NAND instructions provided by the
11009 backend if they exist. */
11010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11011 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11014 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11015 build2 (BIT_AND_EXPR, type,
11016 fold_convert_loc (loc, type,
11017 TREE_OPERAND (arg0, 0)),
11018 fold_convert_loc (loc, type,
11019 TREE_OPERAND (arg1, 0))));
11022 /* See if this can be simplified into a rotate first. If that
11023 is unsuccessful continue in the association code. */
11027 /* ~X ^ X is -1. */
11028 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11029 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11031 t1 = build_zero_cst (type);
11032 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11033 return omit_one_operand_loc (loc, type, t1, arg1);
11036 /* X ^ ~X is -1. */
11037 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11038 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11040 t1 = build_zero_cst (type);
11041 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11042 return omit_one_operand_loc (loc, type, t1, arg0);
11045 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11046 with a constant, and the two constants have no bits in common,
11047 we should treat this as a BIT_IOR_EXPR since this may produce more
11048 simplifications. */
11049 if (TREE_CODE (arg0) == BIT_AND_EXPR
11050 && TREE_CODE (arg1) == BIT_AND_EXPR
11051 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11052 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11053 && wi::bit_and (TREE_OPERAND (arg0, 1),
11054 TREE_OPERAND (arg1, 1)) == 0)
11056 code = BIT_IOR_EXPR;
11060 /* (X | Y) ^ X -> Y & ~X. */
11061 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11064 tree t2 = TREE_OPERAND (arg0, 1);
11065 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11067 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11068 fold_convert_loc (loc, type, t2),
11069 fold_convert_loc (loc, type, t1));
11073 /* (Y | X) ^ X -> Y & ~X. */
11074 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11077 tree t2 = TREE_OPERAND (arg0, 0);
11078 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11080 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11081 fold_convert_loc (loc, type, t2),
11082 fold_convert_loc (loc, type, t1));
11086 /* X ^ (X | Y) -> Y & ~X. */
11087 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11088 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11090 tree t2 = TREE_OPERAND (arg1, 1);
11091 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11093 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11094 fold_convert_loc (loc, type, t2),
11095 fold_convert_loc (loc, type, t1));
11099 /* X ^ (Y | X) -> Y & ~X. */
11100 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11101 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11103 tree t2 = TREE_OPERAND (arg1, 0);
11104 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11106 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11107 fold_convert_loc (loc, type, t2),
11108 fold_convert_loc (loc, type, t1));
11112 /* Convert ~X ^ ~Y to X ^ Y. */
11113 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11114 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11115 return fold_build2_loc (loc, code, type,
11116 fold_convert_loc (loc, type,
11117 TREE_OPERAND (arg0, 0)),
11118 fold_convert_loc (loc, type,
11119 TREE_OPERAND (arg1, 0)));
11121 /* Convert ~X ^ C to X ^ ~C. */
11122 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11123 && TREE_CODE (arg1) == INTEGER_CST)
11124 return fold_build2_loc (loc, code, type,
11125 fold_convert_loc (loc, type,
11126 TREE_OPERAND (arg0, 0)),
11127 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11129 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11130 if (TREE_CODE (arg0) == BIT_AND_EXPR
11131 && INTEGRAL_TYPE_P (type)
11132 && integer_onep (TREE_OPERAND (arg0, 1))
11133 && integer_onep (arg1))
11134 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11135 build_zero_cst (TREE_TYPE (arg0)));
11137 /* Fold (X & Y) ^ Y as ~X & Y. */
11138 if (TREE_CODE (arg0) == BIT_AND_EXPR
11139 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11141 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11142 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11143 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11144 fold_convert_loc (loc, type, arg1));
11146 /* Fold (X & Y) ^ X as ~Y & X. */
11147 if (TREE_CODE (arg0) == BIT_AND_EXPR
11148 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11149 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11151 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11152 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11153 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11154 fold_convert_loc (loc, type, arg1));
11156 /* Fold X ^ (X & Y) as X & ~Y. */
11157 if (TREE_CODE (arg1) == BIT_AND_EXPR
11158 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11160 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11161 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11162 fold_convert_loc (loc, type, arg0),
11163 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11165 /* Fold X ^ (Y & X) as ~Y & X. */
11166 if (TREE_CODE (arg1) == BIT_AND_EXPR
11167 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11168 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11170 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11171 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11172 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11173 fold_convert_loc (loc, type, arg0));
11176 /* See if this can be simplified into a rotate first. If that
11177 is unsuccessful continue in the association code. */
11181 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11182 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11183 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11184 || (TREE_CODE (arg0) == EQ_EXPR
11185 && integer_zerop (TREE_OPERAND (arg0, 1))))
11186 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11187 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11189 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11190 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11191 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11192 || (TREE_CODE (arg1) == EQ_EXPR
11193 && integer_zerop (TREE_OPERAND (arg1, 1))))
11194 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11195 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11197 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11198 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11199 && INTEGRAL_TYPE_P (type)
11200 && integer_onep (TREE_OPERAND (arg0, 1))
11201 && integer_onep (arg1))
11204 tem = TREE_OPERAND (arg0, 0);
11205 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11206 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11208 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11209 build_zero_cst (TREE_TYPE (tem)));
11211 /* Fold ~X & 1 as (X & 1) == 0. */
11212 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11213 && INTEGRAL_TYPE_P (type)
11214 && integer_onep (arg1))
11217 tem = TREE_OPERAND (arg0, 0);
11218 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11219 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11221 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11222 build_zero_cst (TREE_TYPE (tem)));
11224 /* Fold !X & 1 as X == 0. */
11225 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11226 && integer_onep (arg1))
11228 tem = TREE_OPERAND (arg0, 0);
11229 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11230 build_zero_cst (TREE_TYPE (tem)));
11233 /* Fold (X ^ Y) & Y as ~X & Y. */
11234 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11235 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11237 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11238 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11239 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11240 fold_convert_loc (loc, type, arg1));
11242 /* Fold (X ^ Y) & X as ~Y & X. */
11243 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11244 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11245 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11247 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11248 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11249 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11250 fold_convert_loc (loc, type, arg1));
11252 /* Fold X & (X ^ Y) as X & ~Y. */
11253 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11254 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11256 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11257 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11258 fold_convert_loc (loc, type, arg0),
11259 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11261 /* Fold X & (Y ^ X) as ~Y & X. */
11262 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11263 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11264 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11266 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11267 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11268 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11269 fold_convert_loc (loc, type, arg0));
11272 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11273 multiple of 1 << CST. */
11274 if (TREE_CODE (arg1) == INTEGER_CST)
11276 wide_int cst1 = arg1;
11277 wide_int ncst1 = -cst1;
11278 if ((cst1 & ncst1) == ncst1
11279 && multiple_of_p (type, arg0,
11280 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11281 return fold_convert_loc (loc, type, arg0);
11284 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11286 if (TREE_CODE (arg1) == INTEGER_CST
11287 && TREE_CODE (arg0) == MULT_EXPR
11288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11290 wide_int warg1 = arg1;
11291 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11294 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11296 else if (masked != warg1)
11298 /* Avoid the transform if arg1 is a mask of some
11299 mode which allows further optimizations. */
11300 int pop = wi::popcount (warg1);
11301 if (!(pop >= BITS_PER_UNIT
11302 && exact_log2 (pop) != -1
11303 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11304 return fold_build2_loc (loc, code, type, op0,
11305 wide_int_to_tree (type, masked));
11309 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11310 ((A & N) + B) & M -> (A + B) & M
11311 Similarly if (N & M) == 0,
11312 ((A | N) + B) & M -> (A + B) & M
11313 and for - instead of + (or unary - instead of +)
11314 and/or ^ instead of |.
11315 If B is constant and (B & M) == 0, fold into A & M. */
11316 if (TREE_CODE (arg1) == INTEGER_CST)
11318 wide_int cst1 = arg1;
11319 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11320 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11321 && (TREE_CODE (arg0) == PLUS_EXPR
11322 || TREE_CODE (arg0) == MINUS_EXPR
11323 || TREE_CODE (arg0) == NEGATE_EXPR)
11324 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11325 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11331 /* Now we know that arg0 is (C + D) or (C - D) or
11332 -C and arg1 (M) is == (1LL << cst) - 1.
11333 Store C into PMOP[0] and D into PMOP[1]. */
11334 pmop[0] = TREE_OPERAND (arg0, 0);
11336 if (TREE_CODE (arg0) != NEGATE_EXPR)
11338 pmop[1] = TREE_OPERAND (arg0, 1);
11342 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11345 for (; which >= 0; which--)
11346 switch (TREE_CODE (pmop[which]))
11351 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11354 cst0 = TREE_OPERAND (pmop[which], 1);
11356 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11361 else if (cst0 != 0)
11363 /* If C or D is of the form (A & N) where
11364 (N & M) == M, or of the form (A | N) or
11365 (A ^ N) where (N & M) == 0, replace it with A. */
11366 pmop[which] = TREE_OPERAND (pmop[which], 0);
11369 /* If C or D is a N where (N & M) == 0, it can be
11370 omitted (assumed 0). */
11371 if ((TREE_CODE (arg0) == PLUS_EXPR
11372 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11373 && (cst1 & pmop[which]) == 0)
11374 pmop[which] = NULL;
11380 /* Only build anything new if we optimized one or both arguments
11382 if (pmop[0] != TREE_OPERAND (arg0, 0)
11383 || (TREE_CODE (arg0) != NEGATE_EXPR
11384 && pmop[1] != TREE_OPERAND (arg0, 1)))
11386 tree utype = TREE_TYPE (arg0);
11387 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11389 /* Perform the operations in a type that has defined
11390 overflow behavior. */
11391 utype = unsigned_type_for (TREE_TYPE (arg0));
11392 if (pmop[0] != NULL)
11393 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11394 if (pmop[1] != NULL)
11395 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11398 if (TREE_CODE (arg0) == NEGATE_EXPR)
11399 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11400 else if (TREE_CODE (arg0) == PLUS_EXPR)
11402 if (pmop[0] != NULL && pmop[1] != NULL)
11403 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11405 else if (pmop[0] != NULL)
11407 else if (pmop[1] != NULL)
11410 return build_int_cst (type, 0);
11412 else if (pmop[0] == NULL)
11413 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11415 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11417 /* TEM is now the new binary +, - or unary - replacement. */
11418 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11419 fold_convert_loc (loc, utype, arg1));
11420 return fold_convert_loc (loc, type, tem);
11425 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11426 if (t1 != NULL_TREE)
11428 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11429 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11430 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11432 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11434 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11437 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11440 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11442 This results in more efficient code for machines without a NOR
11443 instruction. Combine will canonicalize to the first form
11444 which will allow use of NOR instructions provided by the
11445 backend if they exist. */
11446 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11447 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11449 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11450 build2 (BIT_IOR_EXPR, type,
11451 fold_convert_loc (loc, type,
11452 TREE_OPERAND (arg0, 0)),
11453 fold_convert_loc (loc, type,
11454 TREE_OPERAND (arg1, 0))));
11457 /* If arg0 is derived from the address of an object or function, we may
11458 be able to fold this expression using the object or function's
11460 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11462 unsigned HOST_WIDE_INT modulus, residue;
11463 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11465 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11466 integer_onep (arg1));
11468 /* This works because modulus is a power of 2. If this weren't the
11469 case, we'd have to replace it by its greatest power-of-2
11470 divisor: modulus & -modulus. */
11472 return build_int_cst (type, residue & low);
11475 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11476 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11477 if the new mask might be further optimized. */
11478 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11479 || TREE_CODE (arg0) == RSHIFT_EXPR)
11480 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11481 && TREE_CODE (arg1) == INTEGER_CST
11482 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11483 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11484 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11485 < TYPE_PRECISION (TREE_TYPE (arg0))))
11487 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11488 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11489 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11490 tree shift_type = TREE_TYPE (arg0);
11492 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11493 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11494 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11495 && TYPE_PRECISION (TREE_TYPE (arg0))
11496 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11498 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11499 tree arg00 = TREE_OPERAND (arg0, 0);
11500 /* See if more bits can be proven as zero because of
11502 if (TREE_CODE (arg00) == NOP_EXPR
11503 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11505 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11506 if (TYPE_PRECISION (inner_type)
11507 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11508 && TYPE_PRECISION (inner_type) < prec)
11510 prec = TYPE_PRECISION (inner_type);
11511 /* See if we can shorten the right shift. */
11513 shift_type = inner_type;
11514 /* Otherwise X >> C1 is all zeros, so we'll optimize
11515 it into (X, 0) later on by making sure zerobits
11519 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11522 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11523 zerobits <<= prec - shiftc;
11525 /* For arithmetic shift if sign bit could be set, zerobits
11526 can contain actually sign bits, so no transformation is
11527 possible, unless MASK masks them all away. In that
11528 case the shift needs to be converted into logical shift. */
11529 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11530 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11532 if ((mask & zerobits) == 0)
11533 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11539 /* ((X << 16) & 0xff00) is (X, 0). */
11540 if ((mask & zerobits) == mask)
11541 return omit_one_operand_loc (loc, type,
11542 build_int_cst (type, 0), arg0);
11544 newmask = mask | zerobits;
11545 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11547 /* Only do the transformation if NEWMASK is some integer
11549 for (prec = BITS_PER_UNIT;
11550 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11551 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11553 if (prec < HOST_BITS_PER_WIDE_INT
11554 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11558 if (shift_type != TREE_TYPE (arg0))
11560 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11561 fold_convert_loc (loc, shift_type,
11562 TREE_OPERAND (arg0, 0)),
11563 TREE_OPERAND (arg0, 1));
11564 tem = fold_convert_loc (loc, type, tem);
11568 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11569 if (!tree_int_cst_equal (newmaskt, arg1))
11570 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11578 /* Don't touch a floating-point divide by zero unless the mode
11579 of the constant can represent infinity. */
11580 if (TREE_CODE (arg1) == REAL_CST
11581 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11582 && real_zerop (arg1))
11585 /* Optimize A / A to 1.0 if we don't care about
11586 NaNs or Infinities. Skip the transformation
11587 for non-real operands. */
11588 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11589 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11590 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11591 && operand_equal_p (arg0, arg1, 0))
11593 tree r = build_real (TREE_TYPE (arg0), dconst1);
11595 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11598 /* The complex version of the above A / A optimization. */
11599 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11600 && operand_equal_p (arg0, arg1, 0))
11602 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11603 if (! HONOR_NANS (TYPE_MODE (elem_type))
11604 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11606 tree r = build_real (elem_type, dconst1);
11607 /* omit_two_operands will call fold_convert for us. */
11608 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11612 /* (-A) / (-B) -> A / B */
11613 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11614 return fold_build2_loc (loc, RDIV_EXPR, type,
11615 TREE_OPERAND (arg0, 0),
11616 negate_expr (arg1));
11617 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11618 return fold_build2_loc (loc, RDIV_EXPR, type,
11619 negate_expr (arg0),
11620 TREE_OPERAND (arg1, 0));
11622 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11623 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11624 && real_onep (arg1))
11625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11627 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11628 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11629 && real_minus_onep (arg1))
11630 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11631 negate_expr (arg0)));
11633 /* If ARG1 is a constant, we can convert this to a multiply by the
11634 reciprocal. This does not have the same rounding properties,
11635 so only do this if -freciprocal-math. We can actually
11636 always safely do it if ARG1 is a power of two, but it's hard to
11637 tell if it is or not in a portable manner. */
11639 && (TREE_CODE (arg1) == REAL_CST
11640 || (TREE_CODE (arg1) == COMPLEX_CST
11641 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11642 || (TREE_CODE (arg1) == VECTOR_CST
11643 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11645 if (flag_reciprocal_math
11646 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11647 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11648 /* Find the reciprocal if optimizing and the result is exact.
11649 TODO: Complex reciprocal not implemented. */
11650 if (TREE_CODE (arg1) != COMPLEX_CST)
11652 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11655 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11658 /* Convert A/B/C to A/(B*C). */
11659 if (flag_reciprocal_math
11660 && TREE_CODE (arg0) == RDIV_EXPR)
11661 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11662 fold_build2_loc (loc, MULT_EXPR, type,
11663 TREE_OPERAND (arg0, 1), arg1));
11665 /* Convert A/(B/C) to (A/B)*C. */
11666 if (flag_reciprocal_math
11667 && TREE_CODE (arg1) == RDIV_EXPR)
11668 return fold_build2_loc (loc, MULT_EXPR, type,
11669 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11670 TREE_OPERAND (arg1, 0)),
11671 TREE_OPERAND (arg1, 1));
11673 /* Convert C1/(X*C2) into (C1/C2)/X. */
11674 if (flag_reciprocal_math
11675 && TREE_CODE (arg1) == MULT_EXPR
11676 && TREE_CODE (arg0) == REAL_CST
11677 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11679 tree tem = const_binop (RDIV_EXPR, arg0,
11680 TREE_OPERAND (arg1, 1));
11682 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11683 TREE_OPERAND (arg1, 0));
11686 if (flag_unsafe_math_optimizations)
11688 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11689 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11691 /* Optimize sin(x)/cos(x) as tan(x). */
11692 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11693 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11694 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11695 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11696 CALL_EXPR_ARG (arg1, 0), 0))
11698 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11700 if (tanfn != NULL_TREE)
11701 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11704 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11705 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11706 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11707 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11708 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11709 CALL_EXPR_ARG (arg1, 0), 0))
11711 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11713 if (tanfn != NULL_TREE)
11715 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11716 CALL_EXPR_ARG (arg0, 0));
11717 return fold_build2_loc (loc, RDIV_EXPR, type,
11718 build_real (type, dconst1), tmp);
11722 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11723 NaNs or Infinities. */
11724 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11725 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11726 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11728 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11729 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11731 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11732 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11733 && operand_equal_p (arg00, arg01, 0))
11735 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11737 if (cosfn != NULL_TREE)
11738 return build_call_expr_loc (loc, cosfn, 1, arg00);
11742 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11743 NaNs or Infinities. */
11744 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11745 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11746 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11748 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11749 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11751 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11752 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11753 && operand_equal_p (arg00, arg01, 0))
11755 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11757 if (cosfn != NULL_TREE)
11759 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11760 return fold_build2_loc (loc, RDIV_EXPR, type,
11761 build_real (type, dconst1),
11767 /* Optimize pow(x,c)/x as pow(x,c-1). */
11768 if (fcode0 == BUILT_IN_POW
11769 || fcode0 == BUILT_IN_POWF
11770 || fcode0 == BUILT_IN_POWL)
11772 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11773 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11774 if (TREE_CODE (arg01) == REAL_CST
11775 && !TREE_OVERFLOW (arg01)
11776 && operand_equal_p (arg1, arg00, 0))
11778 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11782 c = TREE_REAL_CST (arg01);
11783 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11784 arg = build_real (type, c);
11785 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11789 /* Optimize a/root(b/c) into a*root(c/b). */
11790 if (BUILTIN_ROOT_P (fcode1))
11792 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11794 if (TREE_CODE (rootarg) == RDIV_EXPR)
11796 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11797 tree b = TREE_OPERAND (rootarg, 0);
11798 tree c = TREE_OPERAND (rootarg, 1);
11800 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11802 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11803 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11807 /* Optimize x/expN(y) into x*expN(-y). */
11808 if (BUILTIN_EXPONENT_P (fcode1))
11810 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11811 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11812 arg1 = build_call_expr_loc (loc,
11814 fold_convert_loc (loc, type, arg));
11815 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11818 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11819 if (fcode1 == BUILT_IN_POW
11820 || fcode1 == BUILT_IN_POWF
11821 || fcode1 == BUILT_IN_POWL)
11823 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11824 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11825 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11826 tree neg11 = fold_convert_loc (loc, type,
11827 negate_expr (arg11));
11828 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11829 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11834 case TRUNC_DIV_EXPR:
11835 /* Optimize (X & (-A)) / A where A is a power of 2,
11837 if (TREE_CODE (arg0) == BIT_AND_EXPR
11838 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11839 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11841 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11842 arg1, TREE_OPERAND (arg0, 1));
11843 if (sum && integer_zerop (sum)) {
11844 tree pow2 = build_int_cst (integer_type_node,
11845 wi::exact_log2 (arg1));
11846 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11847 TREE_OPERAND (arg0, 0), pow2);
11853 case FLOOR_DIV_EXPR:
11854 /* Simplify A / (B << N) where A and B are positive and B is
11855 a power of 2, to A >> (N + log2(B)). */
11856 strict_overflow_p = false;
11857 if (TREE_CODE (arg1) == LSHIFT_EXPR
11858 && (TYPE_UNSIGNED (type)
11859 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11861 tree sval = TREE_OPERAND (arg1, 0);
11862 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11864 tree sh_cnt = TREE_OPERAND (arg1, 1);
11865 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11866 wi::exact_log2 (sval));
11868 if (strict_overflow_p)
11869 fold_overflow_warning (("assuming signed overflow does not "
11870 "occur when simplifying A / (B << N)"),
11871 WARN_STRICT_OVERFLOW_MISC);
11873 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11875 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11876 fold_convert_loc (loc, type, arg0), sh_cnt);
11880 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11881 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11882 if (INTEGRAL_TYPE_P (type)
11883 && TYPE_UNSIGNED (type)
11884 && code == FLOOR_DIV_EXPR)
11885 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11889 case ROUND_DIV_EXPR:
11890 case CEIL_DIV_EXPR:
11891 case EXACT_DIV_EXPR:
11892 if (integer_zerop (arg1))
11894 /* X / -1 is -X. */
11895 if (!TYPE_UNSIGNED (type)
11896 && TREE_CODE (arg1) == INTEGER_CST
11897 && wi::eq_p (arg1, -1))
11898 return fold_convert_loc (loc, type, negate_expr (arg0));
11900 /* Convert -A / -B to A / B when the type is signed and overflow is
11902 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11903 && TREE_CODE (arg0) == NEGATE_EXPR
11904 && negate_expr_p (arg1))
11906 if (INTEGRAL_TYPE_P (type))
11907 fold_overflow_warning (("assuming signed overflow does not occur "
11908 "when distributing negation across "
11910 WARN_STRICT_OVERFLOW_MISC);
11911 return fold_build2_loc (loc, code, type,
11912 fold_convert_loc (loc, type,
11913 TREE_OPERAND (arg0, 0)),
11914 fold_convert_loc (loc, type,
11915 negate_expr (arg1)));
11917 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11918 && TREE_CODE (arg1) == NEGATE_EXPR
11919 && negate_expr_p (arg0))
11921 if (INTEGRAL_TYPE_P (type))
11922 fold_overflow_warning (("assuming signed overflow does not occur "
11923 "when distributing negation across "
11925 WARN_STRICT_OVERFLOW_MISC);
11926 return fold_build2_loc (loc, code, type,
11927 fold_convert_loc (loc, type,
11928 negate_expr (arg0)),
11929 fold_convert_loc (loc, type,
11930 TREE_OPERAND (arg1, 0)));
11933 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11934 operation, EXACT_DIV_EXPR.
11936 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11937 At one time others generated faster code, it's not clear if they do
11938 after the last round to changes to the DIV code in expmed.c. */
11939 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11940 && multiple_of_p (type, arg0, arg1))
11941 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11943 strict_overflow_p = false;
11944 if (TREE_CODE (arg1) == INTEGER_CST
11945 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11946 &strict_overflow_p)))
11948 if (strict_overflow_p)
11949 fold_overflow_warning (("assuming signed overflow does not occur "
11950 "when simplifying division"),
11951 WARN_STRICT_OVERFLOW_MISC);
11952 return fold_convert_loc (loc, type, tem);
11957 case CEIL_MOD_EXPR:
11958 case FLOOR_MOD_EXPR:
11959 case ROUND_MOD_EXPR:
11960 case TRUNC_MOD_EXPR:
11961 /* X % -1 is zero. */
11962 if (!TYPE_UNSIGNED (type)
11963 && TREE_CODE (arg1) == INTEGER_CST
11964 && wi::eq_p (arg1, -1))
11965 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11967 /* X % -C is the same as X % C. */
11968 if (code == TRUNC_MOD_EXPR
11969 && TYPE_SIGN (type) == SIGNED
11970 && TREE_CODE (arg1) == INTEGER_CST
11971 && !TREE_OVERFLOW (arg1)
11972 && wi::neg_p (arg1)
11973 && !TYPE_OVERFLOW_TRAPS (type)
11974 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11975 && !sign_bit_p (arg1, arg1))
11976 return fold_build2_loc (loc, code, type,
11977 fold_convert_loc (loc, type, arg0),
11978 fold_convert_loc (loc, type,
11979 negate_expr (arg1)));
11981 /* X % -Y is the same as X % Y. */
11982 if (code == TRUNC_MOD_EXPR
11983 && !TYPE_UNSIGNED (type)
11984 && TREE_CODE (arg1) == NEGATE_EXPR
11985 && !TYPE_OVERFLOW_TRAPS (type))
11986 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11987 fold_convert_loc (loc, type,
11988 TREE_OPERAND (arg1, 0)));
11990 strict_overflow_p = false;
11991 if (TREE_CODE (arg1) == INTEGER_CST
11992 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11993 &strict_overflow_p)))
11995 if (strict_overflow_p)
11996 fold_overflow_warning (("assuming signed overflow does not occur "
11997 "when simplifying modulus"),
11998 WARN_STRICT_OVERFLOW_MISC);
11999 return fold_convert_loc (loc, type, tem);
12002 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12003 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12004 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12005 && (TYPE_UNSIGNED (type)
12006 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12009 /* Also optimize A % (C << N) where C is a power of 2,
12010 to A & ((C << N) - 1). */
12011 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12012 c = TREE_OPERAND (arg1, 0);
12014 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12017 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12018 build_int_cst (TREE_TYPE (arg1), 1));
12019 if (strict_overflow_p)
12020 fold_overflow_warning (("assuming signed overflow does not "
12021 "occur when simplifying "
12022 "X % (power of two)"),
12023 WARN_STRICT_OVERFLOW_MISC);
12024 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12025 fold_convert_loc (loc, type, arg0),
12026 fold_convert_loc (loc, type, mask));
12034 if (integer_all_onesp (arg0))
12035 return omit_one_operand_loc (loc, type, arg0, arg1);
12039 /* Optimize -1 >> x for arithmetic right shifts. */
12040 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12041 && tree_expr_nonnegative_p (arg1))
12042 return omit_one_operand_loc (loc, type, arg0, arg1);
12043 /* ... fall through ... */
12047 if (integer_zerop (arg1))
12048 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12049 if (integer_zerop (arg0))
12050 return omit_one_operand_loc (loc, type, arg0, arg1);
12052 /* Prefer vector1 << scalar to vector1 << vector2
12053 if vector2 is uniform. */
12054 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12055 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12056 return fold_build2_loc (loc, code, type, op0, tem);
12058 /* Since negative shift count is not well-defined,
12059 don't try to compute it in the compiler. */
12060 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12063 prec = element_precision (type);
12065 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12066 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12067 && tree_to_uhwi (arg1) < prec
12068 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12069 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12071 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12072 + tree_to_uhwi (arg1));
12074 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12075 being well defined. */
12078 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12080 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12081 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12082 TREE_OPERAND (arg0, 0));
12087 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12088 build_int_cst (TREE_TYPE (arg1), low));
12091 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12092 into x & ((unsigned)-1 >> c) for unsigned types. */
12093 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12094 || (TYPE_UNSIGNED (type)
12095 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12096 && tree_fits_uhwi_p (arg1)
12097 && tree_to_uhwi (arg1) < prec
12098 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12099 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12101 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12102 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12108 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12110 lshift = build_minus_one_cst (type);
12111 lshift = const_binop (code, lshift, arg1);
12113 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12117 /* Rewrite an LROTATE_EXPR by a constant into an
12118 RROTATE_EXPR by a new constant. */
12119 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12121 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12122 tem = const_binop (MINUS_EXPR, tem, arg1);
12123 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12126 /* If we have a rotate of a bit operation with the rotate count and
12127 the second operand of the bit operation both constant,
12128 permute the two operations. */
12129 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12130 && (TREE_CODE (arg0) == BIT_AND_EXPR
12131 || TREE_CODE (arg0) == BIT_IOR_EXPR
12132 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12133 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12134 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12135 fold_build2_loc (loc, code, type,
12136 TREE_OPERAND (arg0, 0), arg1),
12137 fold_build2_loc (loc, code, type,
12138 TREE_OPERAND (arg0, 1), arg1));
12140 /* Two consecutive rotates adding up to the some integer
12141 multiple of the precision of the type can be ignored. */
12142 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12143 && TREE_CODE (arg0) == RROTATE_EXPR
12144 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12145 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12147 return TREE_OPERAND (arg0, 0);
12149 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12150 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12151 if the latter can be further optimized. */
12152 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12153 && TREE_CODE (arg0) == BIT_AND_EXPR
12154 && TREE_CODE (arg1) == INTEGER_CST
12155 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12157 tree mask = fold_build2_loc (loc, code, type,
12158 fold_convert_loc (loc, type,
12159 TREE_OPERAND (arg0, 1)),
12161 tree shift = fold_build2_loc (loc, code, type,
12162 fold_convert_loc (loc, type,
12163 TREE_OPERAND (arg0, 0)),
12165 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12173 if (operand_equal_p (arg0, arg1, 0))
12174 return omit_one_operand_loc (loc, type, arg0, arg1);
12175 if (INTEGRAL_TYPE_P (type)
12176 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12177 return omit_one_operand_loc (loc, type, arg1, arg0);
12178 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12184 if (operand_equal_p (arg0, arg1, 0))
12185 return omit_one_operand_loc (loc, type, arg0, arg1);
12186 if (INTEGRAL_TYPE_P (type)
12187 && TYPE_MAX_VALUE (type)
12188 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12189 return omit_one_operand_loc (loc, type, arg1, arg0);
12190 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12195 case TRUTH_ANDIF_EXPR:
12196 /* Note that the operands of this must be ints
12197 and their values must be 0 or 1.
12198 ("true" is a fixed value perhaps depending on the language.) */
12199 /* If first arg is constant zero, return it. */
12200 if (integer_zerop (arg0))
12201 return fold_convert_loc (loc, type, arg0);
12202 case TRUTH_AND_EXPR:
12203 /* If either arg is constant true, drop it. */
12204 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12205 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12206 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12207 /* Preserve sequence points. */
12208 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12210 /* If second arg is constant zero, result is zero, but first arg
12211 must be evaluated. */
12212 if (integer_zerop (arg1))
12213 return omit_one_operand_loc (loc, type, arg1, arg0);
12214 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12215 case will be handled here. */
12216 if (integer_zerop (arg0))
12217 return omit_one_operand_loc (loc, type, arg0, arg1);
12219 /* !X && X is always false. */
12220 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12221 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12222 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12223 /* X && !X is always false. */
12224 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12225 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12226 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12228 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12229 means A >= Y && A != MAX, but in this case we know that
12232 if (!TREE_SIDE_EFFECTS (arg0)
12233 && !TREE_SIDE_EFFECTS (arg1))
12235 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12236 if (tem && !operand_equal_p (tem, arg0, 0))
12237 return fold_build2_loc (loc, code, type, tem, arg1);
12239 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12240 if (tem && !operand_equal_p (tem, arg1, 0))
12241 return fold_build2_loc (loc, code, type, arg0, tem);
12244 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12250 case TRUTH_ORIF_EXPR:
12251 /* Note that the operands of this must be ints
12252 and their values must be 0 or true.
12253 ("true" is a fixed value perhaps depending on the language.) */
12254 /* If first arg is constant true, return it. */
12255 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12256 return fold_convert_loc (loc, type, arg0);
12257 case TRUTH_OR_EXPR:
12258 /* If either arg is constant zero, drop it. */
12259 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12261 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12262 /* Preserve sequence points. */
12263 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12264 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12265 /* If second arg is constant true, result is true, but we must
12266 evaluate first arg. */
12267 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12268 return omit_one_operand_loc (loc, type, arg1, arg0);
12269 /* Likewise for first arg, but note this only occurs here for
12271 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12272 return omit_one_operand_loc (loc, type, arg0, arg1);
12274 /* !X || X is always true. */
12275 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12277 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12278 /* X || !X is always true. */
12279 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12281 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12283 /* (X && !Y) || (!X && Y) is X ^ Y */
12284 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12285 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12287 tree a0, a1, l0, l1, n0, n1;
12289 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12290 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12292 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12293 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12295 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12296 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12298 if ((operand_equal_p (n0, a0, 0)
12299 && operand_equal_p (n1, a1, 0))
12300 || (operand_equal_p (n0, a1, 0)
12301 && operand_equal_p (n1, a0, 0)))
12302 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12305 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12311 case TRUTH_XOR_EXPR:
12312 /* If the second arg is constant zero, drop it. */
12313 if (integer_zerop (arg1))
12314 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12315 /* If the second arg is constant true, this is a logical inversion. */
12316 if (integer_onep (arg1))
12318 tem = invert_truthvalue_loc (loc, arg0);
12319 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12321 /* Identical arguments cancel to zero. */
12322 if (operand_equal_p (arg0, arg1, 0))
12323 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12325 /* !X ^ X is always true. */
12326 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12327 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12328 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12330 /* X ^ !X is always true. */
12331 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12332 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12333 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12342 tem = fold_comparison (loc, code, type, op0, op1);
12343 if (tem != NULL_TREE)
12346 /* bool_var != 0 becomes bool_var. */
12347 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12348 && code == NE_EXPR)
12349 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12351 /* bool_var == 1 becomes bool_var. */
12352 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12353 && code == EQ_EXPR)
12354 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12356 /* bool_var != 1 becomes !bool_var. */
12357 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12358 && code == NE_EXPR)
12359 return fold_convert_loc (loc, type,
12360 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12361 TREE_TYPE (arg0), arg0));
12363 /* bool_var == 0 becomes !bool_var. */
12364 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12365 && code == EQ_EXPR)
12366 return fold_convert_loc (loc, type,
12367 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12368 TREE_TYPE (arg0), arg0));
12370 /* !exp != 0 becomes !exp */
12371 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12372 && code == NE_EXPR)
12373 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12375 /* If this is an equality comparison of the address of two non-weak,
12376 unaliased symbols neither of which are extern (since we do not
12377 have access to attributes for externs), then we know the result. */
12378 if (TREE_CODE (arg0) == ADDR_EXPR
12379 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12380 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12381 && ! lookup_attribute ("alias",
12382 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12383 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12384 && TREE_CODE (arg1) == ADDR_EXPR
12385 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12386 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12387 && ! lookup_attribute ("alias",
12388 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12389 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12391 /* We know that we're looking at the address of two
12392 non-weak, unaliased, static _DECL nodes.
12394 It is both wasteful and incorrect to call operand_equal_p
12395 to compare the two ADDR_EXPR nodes. It is wasteful in that
12396 all we need to do is test pointer equality for the arguments
12397 to the two ADDR_EXPR nodes. It is incorrect to use
12398 operand_equal_p as that function is NOT equivalent to a
12399 C equality test. It can in fact return false for two
12400 objects which would test as equal using the C equality
12402 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12403 return constant_boolean_node (equal
12404 ? code == EQ_EXPR : code != EQ_EXPR,
12408 /* Similarly for a NEGATE_EXPR. */
12409 if (TREE_CODE (arg0) == NEGATE_EXPR
12410 && TREE_CODE (arg1) == INTEGER_CST
12411 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12413 && TREE_CODE (tem) == INTEGER_CST
12414 && !TREE_OVERFLOW (tem))
12415 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12417 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12418 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12419 && TREE_CODE (arg1) == INTEGER_CST
12420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12421 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12422 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12423 fold_convert_loc (loc,
12426 TREE_OPERAND (arg0, 1)));
12428 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12429 if ((TREE_CODE (arg0) == PLUS_EXPR
12430 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12431 || TREE_CODE (arg0) == MINUS_EXPR)
12432 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12435 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12436 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12438 tree val = TREE_OPERAND (arg0, 1);
12439 return omit_two_operands_loc (loc, type,
12440 fold_build2_loc (loc, code, type,
12442 build_int_cst (TREE_TYPE (val),
12444 TREE_OPERAND (arg0, 0), arg1);
12447 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12448 if (TREE_CODE (arg0) == MINUS_EXPR
12449 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12450 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12453 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12455 return omit_two_operands_loc (loc, type,
12457 ? boolean_true_node : boolean_false_node,
12458 TREE_OPERAND (arg0, 1), arg1);
12461 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12462 if (TREE_CODE (arg0) == ABS_EXPR
12463 && (integer_zerop (arg1) || real_zerop (arg1)))
12464 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12466 /* If this is an EQ or NE comparison with zero and ARG0 is
12467 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12468 two operations, but the latter can be done in one less insn
12469 on machines that have only two-operand insns or on which a
12470 constant cannot be the first operand. */
12471 if (TREE_CODE (arg0) == BIT_AND_EXPR
12472 && integer_zerop (arg1))
12474 tree arg00 = TREE_OPERAND (arg0, 0);
12475 tree arg01 = TREE_OPERAND (arg0, 1);
12476 if (TREE_CODE (arg00) == LSHIFT_EXPR
12477 && integer_onep (TREE_OPERAND (arg00, 0)))
12479 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12480 arg01, TREE_OPERAND (arg00, 1));
12481 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12482 build_int_cst (TREE_TYPE (arg0), 1));
12483 return fold_build2_loc (loc, code, type,
12484 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12487 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12488 && integer_onep (TREE_OPERAND (arg01, 0)))
12490 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12491 arg00, TREE_OPERAND (arg01, 1));
12492 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12493 build_int_cst (TREE_TYPE (arg0), 1));
12494 return fold_build2_loc (loc, code, type,
12495 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12500 /* If this is an NE or EQ comparison of zero against the result of a
12501 signed MOD operation whose second operand is a power of 2, make
12502 the MOD operation unsigned since it is simpler and equivalent. */
12503 if (integer_zerop (arg1)
12504 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12505 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12506 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12507 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12508 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12509 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12511 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12512 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12513 fold_convert_loc (loc, newtype,
12514 TREE_OPERAND (arg0, 0)),
12515 fold_convert_loc (loc, newtype,
12516 TREE_OPERAND (arg0, 1)));
12518 return fold_build2_loc (loc, code, type, newmod,
12519 fold_convert_loc (loc, newtype, arg1));
12522 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12523 C1 is a valid shift constant, and C2 is a power of two, i.e.
12525 if (TREE_CODE (arg0) == BIT_AND_EXPR
12526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12527 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12529 && integer_pow2p (TREE_OPERAND (arg0, 1))
12530 && integer_zerop (arg1))
12532 tree itype = TREE_TYPE (arg0);
12533 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12534 prec = TYPE_PRECISION (itype);
12536 /* Check for a valid shift count. */
12537 if (wi::ltu_p (arg001, prec))
12539 tree arg01 = TREE_OPERAND (arg0, 1);
12540 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12541 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12542 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12543 can be rewritten as (X & (C2 << C1)) != 0. */
12544 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12546 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12547 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12548 return fold_build2_loc (loc, code, type, tem,
12549 fold_convert_loc (loc, itype, arg1));
12551 /* Otherwise, for signed (arithmetic) shifts,
12552 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12553 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12554 else if (!TYPE_UNSIGNED (itype))
12555 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12556 arg000, build_int_cst (itype, 0));
12557 /* Otherwise, for unsigned (logical) shifts,
12558 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12559 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12561 return omit_one_operand_loc (loc, type,
12562 code == EQ_EXPR ? integer_one_node
12563 : integer_zero_node,
12568 /* If we have (A & C) == C where C is a power of 2, convert this into
12569 (A & C) != 0. Similarly for NE_EXPR. */
12570 if (TREE_CODE (arg0) == BIT_AND_EXPR
12571 && integer_pow2p (TREE_OPERAND (arg0, 1))
12572 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12573 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12574 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12575 integer_zero_node));
12577 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12578 bit, then fold the expression into A < 0 or A >= 0. */
12579 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12583 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12584 Similarly for NE_EXPR. */
12585 if (TREE_CODE (arg0) == BIT_AND_EXPR
12586 && TREE_CODE (arg1) == INTEGER_CST
12587 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12589 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12590 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12591 TREE_OPERAND (arg0, 1));
12593 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12594 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12596 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12597 if (integer_nonzerop (dandnotc))
12598 return omit_one_operand_loc (loc, type, rslt, arg0);
12601 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12602 Similarly for NE_EXPR. */
12603 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12604 && TREE_CODE (arg1) == INTEGER_CST
12605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12607 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12609 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12610 TREE_OPERAND (arg0, 1),
12611 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12612 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12613 if (integer_nonzerop (candnotd))
12614 return omit_one_operand_loc (loc, type, rslt, arg0);
12617 /* If this is a comparison of a field, we may be able to simplify it. */
12618 if ((TREE_CODE (arg0) == COMPONENT_REF
12619 || TREE_CODE (arg0) == BIT_FIELD_REF)
12620 /* Handle the constant case even without -O
12621 to make sure the warnings are given. */
12622 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12624 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12629 /* Optimize comparisons of strlen vs zero to a compare of the
12630 first character of the string vs zero. To wit,
12631 strlen(ptr) == 0 => *ptr == 0
12632 strlen(ptr) != 0 => *ptr != 0
12633 Other cases should reduce to one of these two (or a constant)
12634 due to the return value of strlen being unsigned. */
12635 if (TREE_CODE (arg0) == CALL_EXPR
12636 && integer_zerop (arg1))
12638 tree fndecl = get_callee_fndecl (arg0);
12641 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12642 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12643 && call_expr_nargs (arg0) == 1
12644 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12646 tree iref = build_fold_indirect_ref_loc (loc,
12647 CALL_EXPR_ARG (arg0, 0));
12648 return fold_build2_loc (loc, code, type, iref,
12649 build_int_cst (TREE_TYPE (iref), 0));
12653 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12654 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12655 if (TREE_CODE (arg0) == RSHIFT_EXPR
12656 && integer_zerop (arg1)
12657 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12659 tree arg00 = TREE_OPERAND (arg0, 0);
12660 tree arg01 = TREE_OPERAND (arg0, 1);
12661 tree itype = TREE_TYPE (arg00);
12662 if (wi::eq_p (arg01, element_precision (itype) - 1))
12664 if (TYPE_UNSIGNED (itype))
12666 itype = signed_type_for (itype);
12667 arg00 = fold_convert_loc (loc, itype, arg00);
12669 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12670 type, arg00, build_zero_cst (itype));
12674 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12675 if (integer_zerop (arg1)
12676 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12677 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12678 TREE_OPERAND (arg0, 1));
12680 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12681 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12682 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12683 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12684 build_zero_cst (TREE_TYPE (arg0)));
12685 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12686 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12688 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12689 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12690 build_zero_cst (TREE_TYPE (arg0)));
12692 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12693 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12694 && TREE_CODE (arg1) == INTEGER_CST
12695 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12696 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12697 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12698 TREE_OPERAND (arg0, 1), arg1));
12700 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12701 (X & C) == 0 when C is a single bit. */
12702 if (TREE_CODE (arg0) == BIT_AND_EXPR
12703 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12704 && integer_zerop (arg1)
12705 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12707 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12708 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12709 TREE_OPERAND (arg0, 1));
12710 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12712 fold_convert_loc (loc, TREE_TYPE (arg0),
12716 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12717 constant C is a power of two, i.e. a single bit. */
12718 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12719 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12720 && integer_zerop (arg1)
12721 && integer_pow2p (TREE_OPERAND (arg0, 1))
12722 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12723 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12725 tree arg00 = TREE_OPERAND (arg0, 0);
12726 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12727 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12730 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12731 when C is a power of two, i.e. a single bit. */
12732 if (TREE_CODE (arg0) == BIT_AND_EXPR
12733 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12734 && integer_zerop (arg1)
12735 && integer_pow2p (TREE_OPERAND (arg0, 1))
12736 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12737 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12739 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12740 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12741 arg000, TREE_OPERAND (arg0, 1));
12742 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12743 tem, build_int_cst (TREE_TYPE (tem), 0));
12746 if (integer_zerop (arg1)
12747 && tree_expr_nonzero_p (arg0))
12749 tree res = constant_boolean_node (code==NE_EXPR, type);
12750 return omit_one_operand_loc (loc, type, res, arg0);
12753 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12754 if (TREE_CODE (arg0) == NEGATE_EXPR
12755 && TREE_CODE (arg1) == NEGATE_EXPR)
12756 return fold_build2_loc (loc, code, type,
12757 TREE_OPERAND (arg0, 0),
12758 fold_convert_loc (loc, TREE_TYPE (arg0),
12759 TREE_OPERAND (arg1, 0)));
12761 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12762 if (TREE_CODE (arg0) == BIT_AND_EXPR
12763 && TREE_CODE (arg1) == BIT_AND_EXPR)
12765 tree arg00 = TREE_OPERAND (arg0, 0);
12766 tree arg01 = TREE_OPERAND (arg0, 1);
12767 tree arg10 = TREE_OPERAND (arg1, 0);
12768 tree arg11 = TREE_OPERAND (arg1, 1);
12769 tree itype = TREE_TYPE (arg0);
12771 if (operand_equal_p (arg01, arg11, 0))
12772 return fold_build2_loc (loc, code, type,
12773 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12774 fold_build2_loc (loc,
12775 BIT_XOR_EXPR, itype,
12778 build_zero_cst (itype));
12780 if (operand_equal_p (arg01, arg10, 0))
12781 return fold_build2_loc (loc, code, type,
12782 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12783 fold_build2_loc (loc,
12784 BIT_XOR_EXPR, itype,
12787 build_zero_cst (itype));
12789 if (operand_equal_p (arg00, arg11, 0))
12790 return fold_build2_loc (loc, code, type,
12791 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12792 fold_build2_loc (loc,
12793 BIT_XOR_EXPR, itype,
12796 build_zero_cst (itype));
12798 if (operand_equal_p (arg00, arg10, 0))
12799 return fold_build2_loc (loc, code, type,
12800 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12801 fold_build2_loc (loc,
12802 BIT_XOR_EXPR, itype,
12805 build_zero_cst (itype));
12808 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12809 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12811 tree arg00 = TREE_OPERAND (arg0, 0);
12812 tree arg01 = TREE_OPERAND (arg0, 1);
12813 tree arg10 = TREE_OPERAND (arg1, 0);
12814 tree arg11 = TREE_OPERAND (arg1, 1);
12815 tree itype = TREE_TYPE (arg0);
12817 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12818 operand_equal_p guarantees no side-effects so we don't need
12819 to use omit_one_operand on Z. */
12820 if (operand_equal_p (arg01, arg11, 0))
12821 return fold_build2_loc (loc, code, type, arg00,
12822 fold_convert_loc (loc, TREE_TYPE (arg00),
12824 if (operand_equal_p (arg01, arg10, 0))
12825 return fold_build2_loc (loc, code, type, arg00,
12826 fold_convert_loc (loc, TREE_TYPE (arg00),
12828 if (operand_equal_p (arg00, arg11, 0))
12829 return fold_build2_loc (loc, code, type, arg01,
12830 fold_convert_loc (loc, TREE_TYPE (arg01),
12832 if (operand_equal_p (arg00, arg10, 0))
12833 return fold_build2_loc (loc, code, type, arg01,
12834 fold_convert_loc (loc, TREE_TYPE (arg01),
12837 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12838 if (TREE_CODE (arg01) == INTEGER_CST
12839 && TREE_CODE (arg11) == INTEGER_CST)
12841 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12842 fold_convert_loc (loc, itype, arg11));
12843 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12844 return fold_build2_loc (loc, code, type, tem,
12845 fold_convert_loc (loc, itype, arg10));
12849 /* Attempt to simplify equality/inequality comparisons of complex
12850 values. Only lower the comparison if the result is known or
12851 can be simplified to a single scalar comparison. */
12852 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12853 || TREE_CODE (arg0) == COMPLEX_CST)
12854 && (TREE_CODE (arg1) == COMPLEX_EXPR
12855 || TREE_CODE (arg1) == COMPLEX_CST))
12857 tree real0, imag0, real1, imag1;
12860 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12862 real0 = TREE_OPERAND (arg0, 0);
12863 imag0 = TREE_OPERAND (arg0, 1);
12867 real0 = TREE_REALPART (arg0);
12868 imag0 = TREE_IMAGPART (arg0);
12871 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12873 real1 = TREE_OPERAND (arg1, 0);
12874 imag1 = TREE_OPERAND (arg1, 1);
12878 real1 = TREE_REALPART (arg1);
12879 imag1 = TREE_IMAGPART (arg1);
12882 rcond = fold_binary_loc (loc, code, type, real0, real1);
12883 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12885 if (integer_zerop (rcond))
12887 if (code == EQ_EXPR)
12888 return omit_two_operands_loc (loc, type, boolean_false_node,
12890 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12894 if (code == NE_EXPR)
12895 return omit_two_operands_loc (loc, type, boolean_true_node,
12897 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12901 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12902 if (icond && TREE_CODE (icond) == INTEGER_CST)
12904 if (integer_zerop (icond))
12906 if (code == EQ_EXPR)
12907 return omit_two_operands_loc (loc, type, boolean_false_node,
12909 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12913 if (code == NE_EXPR)
12914 return omit_two_operands_loc (loc, type, boolean_true_node,
12916 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12927 tem = fold_comparison (loc, code, type, op0, op1);
12928 if (tem != NULL_TREE)
12931 /* Transform comparisons of the form X +- C CMP X. */
12932 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12933 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12934 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12935 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12936 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12939 tree arg01 = TREE_OPERAND (arg0, 1);
12940 enum tree_code code0 = TREE_CODE (arg0);
12943 if (TREE_CODE (arg01) == REAL_CST)
12944 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12946 is_positive = tree_int_cst_sgn (arg01);
12948 /* (X - c) > X becomes false. */
12949 if (code == GT_EXPR
12950 && ((code0 == MINUS_EXPR && is_positive >= 0)
12951 || (code0 == PLUS_EXPR && is_positive <= 0)))
12953 if (TREE_CODE (arg01) == INTEGER_CST
12954 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12955 fold_overflow_warning (("assuming signed overflow does not "
12956 "occur when assuming that (X - c) > X "
12957 "is always false"),
12958 WARN_STRICT_OVERFLOW_ALL);
12959 return constant_boolean_node (0, type);
12962 /* Likewise (X + c) < X becomes false. */
12963 if (code == LT_EXPR
12964 && ((code0 == PLUS_EXPR && is_positive >= 0)
12965 || (code0 == MINUS_EXPR && is_positive <= 0)))
12967 if (TREE_CODE (arg01) == INTEGER_CST
12968 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12969 fold_overflow_warning (("assuming signed overflow does not "
12970 "occur when assuming that "
12971 "(X + c) < X is always false"),
12972 WARN_STRICT_OVERFLOW_ALL);
12973 return constant_boolean_node (0, type);
12976 /* Convert (X - c) <= X to true. */
12977 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12979 && ((code0 == MINUS_EXPR && is_positive >= 0)
12980 || (code0 == PLUS_EXPR && is_positive <= 0)))
12982 if (TREE_CODE (arg01) == INTEGER_CST
12983 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12984 fold_overflow_warning (("assuming signed overflow does not "
12985 "occur when assuming that "
12986 "(X - c) <= X is always true"),
12987 WARN_STRICT_OVERFLOW_ALL);
12988 return constant_boolean_node (1, type);
12991 /* Convert (X + c) >= X to true. */
12992 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12994 && ((code0 == PLUS_EXPR && is_positive >= 0)
12995 || (code0 == MINUS_EXPR && is_positive <= 0)))
12997 if (TREE_CODE (arg01) == INTEGER_CST
12998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12999 fold_overflow_warning (("assuming signed overflow does not "
13000 "occur when assuming that "
13001 "(X + c) >= X is always true"),
13002 WARN_STRICT_OVERFLOW_ALL);
13003 return constant_boolean_node (1, type);
13006 if (TREE_CODE (arg01) == INTEGER_CST)
13008 /* Convert X + c > X and X - c < X to true for integers. */
13009 if (code == GT_EXPR
13010 && ((code0 == PLUS_EXPR && is_positive > 0)
13011 || (code0 == MINUS_EXPR && is_positive < 0)))
13013 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13014 fold_overflow_warning (("assuming signed overflow does "
13015 "not occur when assuming that "
13016 "(X + c) > X is always true"),
13017 WARN_STRICT_OVERFLOW_ALL);
13018 return constant_boolean_node (1, type);
13021 if (code == LT_EXPR
13022 && ((code0 == MINUS_EXPR && is_positive > 0)
13023 || (code0 == PLUS_EXPR && is_positive < 0)))
13025 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13026 fold_overflow_warning (("assuming signed overflow does "
13027 "not occur when assuming that "
13028 "(X - c) < X is always true"),
13029 WARN_STRICT_OVERFLOW_ALL);
13030 return constant_boolean_node (1, type);
13033 /* Convert X + c <= X and X - c >= X to false for integers. */
13034 if (code == LE_EXPR
13035 && ((code0 == PLUS_EXPR && is_positive > 0)
13036 || (code0 == MINUS_EXPR && is_positive < 0)))
13038 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13039 fold_overflow_warning (("assuming signed overflow does "
13040 "not occur when assuming that "
13041 "(X + c) <= X is always false"),
13042 WARN_STRICT_OVERFLOW_ALL);
13043 return constant_boolean_node (0, type);
13046 if (code == GE_EXPR
13047 && ((code0 == MINUS_EXPR && is_positive > 0)
13048 || (code0 == PLUS_EXPR && is_positive < 0)))
13050 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13051 fold_overflow_warning (("assuming signed overflow does "
13052 "not occur when assuming that "
13053 "(X - c) >= X is always false"),
13054 WARN_STRICT_OVERFLOW_ALL);
13055 return constant_boolean_node (0, type);
13060 /* Comparisons with the highest or lowest possible integer of
13061 the specified precision will have known values. */
13063 tree arg1_type = TREE_TYPE (arg1);
13064 unsigned int prec = TYPE_PRECISION (arg1_type);
13066 if (TREE_CODE (arg1) == INTEGER_CST
13067 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13069 wide_int max = wi::max_value (arg1_type);
13070 wide_int signed_max = wi::max_value (prec, SIGNED);
13071 wide_int min = wi::min_value (arg1_type);
13073 if (wi::eq_p (arg1, max))
13077 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13080 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13083 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13086 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13088 /* The GE_EXPR and LT_EXPR cases above are not normally
13089 reached because of previous transformations. */
13094 else if (wi::eq_p (arg1, max - 1))
13098 arg1 = const_binop (PLUS_EXPR, arg1,
13099 build_int_cst (TREE_TYPE (arg1), 1));
13100 return fold_build2_loc (loc, EQ_EXPR, type,
13101 fold_convert_loc (loc,
13102 TREE_TYPE (arg1), arg0),
13105 arg1 = const_binop (PLUS_EXPR, arg1,
13106 build_int_cst (TREE_TYPE (arg1), 1));
13107 return fold_build2_loc (loc, NE_EXPR, type,
13108 fold_convert_loc (loc, TREE_TYPE (arg1),
13114 else if (wi::eq_p (arg1, min))
13118 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13121 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13124 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13127 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13132 else if (wi::eq_p (arg1, min + 1))
13136 arg1 = const_binop (MINUS_EXPR, arg1,
13137 build_int_cst (TREE_TYPE (arg1), 1));
13138 return fold_build2_loc (loc, NE_EXPR, type,
13139 fold_convert_loc (loc,
13140 TREE_TYPE (arg1), arg0),
13143 arg1 = const_binop (MINUS_EXPR, arg1,
13144 build_int_cst (TREE_TYPE (arg1), 1));
13145 return fold_build2_loc (loc, EQ_EXPR, type,
13146 fold_convert_loc (loc, TREE_TYPE (arg1),
13153 else if (wi::eq_p (arg1, signed_max)
13154 && TYPE_UNSIGNED (arg1_type)
13155 /* We will flip the signedness of the comparison operator
13156 associated with the mode of arg1, so the sign bit is
13157 specified by this mode. Check that arg1 is the signed
13158 max associated with this sign bit. */
13159 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13160 /* signed_type does not work on pointer types. */
13161 && INTEGRAL_TYPE_P (arg1_type))
13163 /* The following case also applies to X < signed_max+1
13164 and X >= signed_max+1 because previous transformations. */
13165 if (code == LE_EXPR || code == GT_EXPR)
13167 tree st = signed_type_for (arg1_type);
13168 return fold_build2_loc (loc,
13169 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13170 type, fold_convert_loc (loc, st, arg0),
13171 build_int_cst (st, 0));
13177 /* If we are comparing an ABS_EXPR with a constant, we can
13178 convert all the cases into explicit comparisons, but they may
13179 well not be faster than doing the ABS and one comparison.
13180 But ABS (X) <= C is a range comparison, which becomes a subtraction
13181 and a comparison, and is probably faster. */
13182 if (code == LE_EXPR
13183 && TREE_CODE (arg1) == INTEGER_CST
13184 && TREE_CODE (arg0) == ABS_EXPR
13185 && ! TREE_SIDE_EFFECTS (arg0)
13186 && (0 != (tem = negate_expr (arg1)))
13187 && TREE_CODE (tem) == INTEGER_CST
13188 && !TREE_OVERFLOW (tem))
13189 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13190 build2 (GE_EXPR, type,
13191 TREE_OPERAND (arg0, 0), tem),
13192 build2 (LE_EXPR, type,
13193 TREE_OPERAND (arg0, 0), arg1));
13195 /* Convert ABS_EXPR<x> >= 0 to true. */
13196 strict_overflow_p = false;
13197 if (code == GE_EXPR
13198 && (integer_zerop (arg1)
13199 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13200 && real_zerop (arg1)))
13201 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13203 if (strict_overflow_p)
13204 fold_overflow_warning (("assuming signed overflow does not occur "
13205 "when simplifying comparison of "
13206 "absolute value and zero"),
13207 WARN_STRICT_OVERFLOW_CONDITIONAL);
13208 return omit_one_operand_loc (loc, type,
13209 constant_boolean_node (true, type),
13213 /* Convert ABS_EXPR<x> < 0 to false. */
13214 strict_overflow_p = false;
13215 if (code == LT_EXPR
13216 && (integer_zerop (arg1) || real_zerop (arg1))
13217 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13219 if (strict_overflow_p)
13220 fold_overflow_warning (("assuming signed overflow does not occur "
13221 "when simplifying comparison of "
13222 "absolute value and zero"),
13223 WARN_STRICT_OVERFLOW_CONDITIONAL);
13224 return omit_one_operand_loc (loc, type,
13225 constant_boolean_node (false, type),
13229 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13230 and similarly for >= into !=. */
13231 if ((code == LT_EXPR || code == GE_EXPR)
13232 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13233 && TREE_CODE (arg1) == LSHIFT_EXPR
13234 && integer_onep (TREE_OPERAND (arg1, 0)))
13235 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13236 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13237 TREE_OPERAND (arg1, 1)),
13238 build_zero_cst (TREE_TYPE (arg0)));
13240 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13241 otherwise Y might be >= # of bits in X's type and thus e.g.
13242 (unsigned char) (1 << Y) for Y 15 might be 0.
13243 If the cast is widening, then 1 << Y should have unsigned type,
13244 otherwise if Y is number of bits in the signed shift type minus 1,
13245 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13246 31 might be 0xffffffff80000000. */
13247 if ((code == LT_EXPR || code == GE_EXPR)
13248 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13249 && CONVERT_EXPR_P (arg1)
13250 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13251 && (TYPE_PRECISION (TREE_TYPE (arg1))
13252 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13253 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13254 || (TYPE_PRECISION (TREE_TYPE (arg1))
13255 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13256 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13258 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13259 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13260 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13261 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13262 build_zero_cst (TREE_TYPE (arg0)));
13267 case UNORDERED_EXPR:
13275 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13277 t1 = fold_relational_const (code, type, arg0, arg1);
13278 if (t1 != NULL_TREE)
13282 /* If the first operand is NaN, the result is constant. */
13283 if (TREE_CODE (arg0) == REAL_CST
13284 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13285 && (code != LTGT_EXPR || ! flag_trapping_math))
13287 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13288 ? integer_zero_node
13289 : integer_one_node;
13290 return omit_one_operand_loc (loc, type, t1, arg1);
13293 /* If the second operand is NaN, the result is constant. */
13294 if (TREE_CODE (arg1) == REAL_CST
13295 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13296 && (code != LTGT_EXPR || ! flag_trapping_math))
13298 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13299 ? integer_zero_node
13300 : integer_one_node;
13301 return omit_one_operand_loc (loc, type, t1, arg0);
13304 /* Simplify unordered comparison of something with itself. */
13305 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13306 && operand_equal_p (arg0, arg1, 0))
13307 return constant_boolean_node (1, type);
13309 if (code == LTGT_EXPR
13310 && !flag_trapping_math
13311 && operand_equal_p (arg0, arg1, 0))
13312 return constant_boolean_node (0, type);
13314 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13316 tree targ0 = strip_float_extensions (arg0);
13317 tree targ1 = strip_float_extensions (arg1);
13318 tree newtype = TREE_TYPE (targ0);
13320 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13321 newtype = TREE_TYPE (targ1);
13323 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13324 return fold_build2_loc (loc, code, type,
13325 fold_convert_loc (loc, newtype, targ0),
13326 fold_convert_loc (loc, newtype, targ1));
13331 case COMPOUND_EXPR:
13332 /* When pedantic, a compound expression can be neither an lvalue
13333 nor an integer constant expression. */
13334 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13336 /* Don't let (0, 0) be null pointer constant. */
13337 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13338 : fold_convert_loc (loc, type, arg1);
13339 return pedantic_non_lvalue_loc (loc, tem);
13342 if ((TREE_CODE (arg0) == REAL_CST
13343 && TREE_CODE (arg1) == REAL_CST)
13344 || (TREE_CODE (arg0) == INTEGER_CST
13345 && TREE_CODE (arg1) == INTEGER_CST))
13346 return build_complex (type, arg0, arg1);
13350 /* An ASSERT_EXPR should never be passed to fold_binary. */
13351 gcc_unreachable ();
13353 case VEC_PACK_TRUNC_EXPR:
13354 case VEC_PACK_FIX_TRUNC_EXPR:
13356 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13359 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13360 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13361 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13364 elts = XALLOCAVEC (tree, nelts);
13365 if (!vec_cst_ctor_to_array (arg0, elts)
13366 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13369 for (i = 0; i < nelts; i++)
13371 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13372 ? NOP_EXPR : FIX_TRUNC_EXPR,
13373 TREE_TYPE (type), elts[i]);
13374 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13378 return build_vector (type, elts);
13381 case VEC_WIDEN_MULT_LO_EXPR:
13382 case VEC_WIDEN_MULT_HI_EXPR:
13383 case VEC_WIDEN_MULT_EVEN_EXPR:
13384 case VEC_WIDEN_MULT_ODD_EXPR:
13386 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13387 unsigned int out, ofs, scale;
13390 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13391 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13392 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13395 elts = XALLOCAVEC (tree, nelts * 4);
13396 if (!vec_cst_ctor_to_array (arg0, elts)
13397 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13400 if (code == VEC_WIDEN_MULT_LO_EXPR)
13401 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13402 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13403 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13404 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13405 scale = 1, ofs = 0;
13406 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13407 scale = 1, ofs = 1;
13409 for (out = 0; out < nelts; out++)
13411 unsigned int in1 = (out << scale) + ofs;
13412 unsigned int in2 = in1 + nelts * 2;
13415 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13416 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13418 if (t1 == NULL_TREE || t2 == NULL_TREE)
13420 elts[out] = const_binop (MULT_EXPR, t1, t2);
13421 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13425 return build_vector (type, elts);
13430 } /* switch (code) */
13433 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13434 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
/* walk_tree callback used by contains_label_p: returns *TP when it is a
   LABEL_EXPR, NULL_TREE otherwise; clears *WALK_SUBTREES for node kinds
   whose labels cannot be reached from outside.  NOTE(review): this view
   of the function is elided (case labels and returns are missing) --
   confirm the full switch body against the complete source.  */
13438 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13440 switch (TREE_CODE (*tp))
13446 *walk_subtrees = 0;
13448 /* ... fall through ... */
13455 /* Return whether the sub-tree ST contains a label which is accessible from
13456 outside the sub-tree. */
/* Returns nonzero iff the sub-tree ST contains a LABEL_EXPR, by walking
   ST without visiting duplicates and applying contains_label_1.  */
13459 contains_label_p (tree st)
13462 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13465 /* Fold a ternary expression of code CODE and type TYPE with operands
13466 OP0, OP1, and OP2. Return the folded expression if folding is
13467 successful. Otherwise, return NULL_TREE. */
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, OP2 at location LOC.  Returns the simplified tree, or (per
   the comment preceding this function) NULL_TREE when no folding
   applies.  NOTE(review): many interior lines of this function are
   elided in this view; the code below is preserved byte-for-byte and
   only comments were added -- verify annotations against the complete
   source before relying on them.  */
13470 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13471 tree op0, tree op1, tree op2)
13474 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13475 enum tree_code_class kind = TREE_CODE_CLASS (code);
13477 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13478 && TREE_CODE_LENGTH (code) == 3);
13480 /* If this is a commutative operation, and OP0 is a constant, move it
13481 to OP1 to reduce the number of tests below. */
13482 if (commutative_ternary_tree_code (code)
13483 && tree_swap_operands_p (op0, op1, true))
13484 return fold_build3_loc (loc, code, type, op1, op0, op2);
/* First try the generated match-and-simplify machinery.  */
13486 tem = generic_simplify (loc, code, type, op0, op1, op2);
13490 /* Strip any conversions that don't change the mode. This is safe
13491 for every expression, except for a comparison expression because
13492 its signedness is derived from its operands. So, in the latter
13493 case, only strip conversions that don't change the signedness.
13495 Note that this is done as an internal manipulation within the
13496 constant folder, in order to find the simplest representation of
13497 the arguments so that their form can be studied. In any cases,
13498 the appropriate type conversions should be put back in the tree
13499 that will get out of the constant folder. */
/* COMPONENT_REF of a CONSTRUCTOR: look up the referenced field's
   constant value directly.  */
13520 case COMPONENT_REF:
13521 if (TREE_CODE (arg0) == CONSTRUCTOR
13522 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13524 unsigned HOST_WIDE_INT idx;
13526 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13533 case VEC_COND_EXPR:
13534 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13535 so all simple results must be passed through pedantic_non_lvalue. */
13536 if (TREE_CODE (arg0) == INTEGER_CST)
13538 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13539 tem = integer_zerop (arg0) ? op2 : op1;
13540 /* Only optimize constant conditions when the selected branch
13541 has the same type as the COND_EXPR. This avoids optimizing
13542 away "c ? x : throw", where the throw has a void type.
13543 Avoid throwing away that operand which contains label. */
13544 if ((!TREE_SIDE_EFFECTS (unused_op)
13545 || !contains_label_p (unused_op))
13546 && (! VOID_TYPE_P (TREE_TYPE (tem))
13547 || VOID_TYPE_P (type)))
13548 return pedantic_non_lvalue_loc (loc, tem);
/* A constant vector condition selects elements from op1/op2;
   all-ones picks op1, all-zeros picks op2, otherwise build a
   VEC_PERM-style selector.  */
13551 else if (TREE_CODE (arg0) == VECTOR_CST)
13553 if (integer_all_onesp (arg0))
13554 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13555 if (integer_zerop (arg0))
13556 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13558 if ((TREE_CODE (arg1) == VECTOR_CST
13559 || TREE_CODE (arg1) == CONSTRUCTOR)
13560 && (TREE_CODE (arg2) == VECTOR_CST
13561 || TREE_CODE (arg2) == CONSTRUCTOR))
13563 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13564 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13565 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13566 for (i = 0; i < nelts; i++)
13568 tree val = VECTOR_CST_ELT (arg0, i);
13569 if (integer_all_onesp (val))
13571 else if (integer_zerop (val))
13572 sel[i] = nelts + i;
13573 else /* Currently unreachable. */
13576 tree t = fold_vec_perm (type, arg1, arg2, sel);
13577 if (t != NULL_TREE)
13582 if (operand_equal_p (arg1, op2, 0))
13583 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13585 /* If we have A op B ? A : C, we may be able to convert this to a
13586 simpler expression, depending on the operation and the values
13587 of B and C. Signed zeros prevent all of these transformations,
13588 for reasons given above each one.
13590 Also try swapping the arguments and inverting the conditional. */
13591 if (COMPARISON_CLASS_P (arg0)
13592 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13593 arg1, TREE_OPERAND (arg0, 1))
13594 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13596 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13601 if (COMPARISON_CLASS_P (arg0)
13602 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13604 TREE_OPERAND (arg0, 1))
13605 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13607 location_t loc0 = expr_location_or (arg0, loc);
13608 tem = fold_invert_truthvalue (loc0, arg0);
13609 if (tem && COMPARISON_CLASS_P (tem))
13611 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13617 /* If the second operand is simpler than the third, swap them
13618 since that produces better jump optimization results. */
13619 if (truth_value_p (TREE_CODE (arg0))
13620 && tree_swap_operands_p (op1, op2, false))
13622 location_t loc0 = expr_location_or (arg0, loc);
13623 /* See if this can be inverted. If it can't, possibly because
13624 it was a floating-point inequality comparison, don't do
13626 tem = fold_invert_truthvalue (loc0, arg0);
13628 return fold_build3_loc (loc, code, type, tem, op2, op1);
13631 /* Convert A ? 1 : 0 to simply A. */
13632 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13633 : (integer_onep (op1)
13634 && !VECTOR_TYPE_P (type)))
13635 && integer_zerop (op2)
13636 /* If we try to convert OP0 to our type, the
13637 call to fold will try to move the conversion inside
13638 a COND, which will recurse. In that case, the COND_EXPR
13639 is probably the best choice, so leave it alone. */
13640 && type == TREE_TYPE (arg0))
13641 return pedantic_non_lvalue_loc (loc, arg0);
13643 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13644 over COND_EXPR in cases such as floating point comparisons. */
13645 if (integer_zerop (op1)
13646 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13647 : (integer_onep (op2)
13648 && !VECTOR_TYPE_P (type)))
13649 && truth_value_p (TREE_CODE (arg0)))
13650 return pedantic_non_lvalue_loc (loc,
13651 fold_convert_loc (loc, type,
13652 invert_truthvalue_loc (loc,
13655 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13656 if (TREE_CODE (arg0) == LT_EXPR
13657 && integer_zerop (TREE_OPERAND (arg0, 1))
13658 && integer_zerop (op2)
13659 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13661 /* sign_bit_p looks through both zero and sign extensions,
13662 but for this optimization only sign extensions are
13664 tree tem2 = TREE_OPERAND (arg0, 0);
13665 while (tem != tem2)
13667 if (TREE_CODE (tem2) != NOP_EXPR
13668 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13673 tem2 = TREE_OPERAND (tem2, 0);
13675 /* sign_bit_p only checks ARG1 bits within A's precision.
13676 If <sign bit of A> has wider type than A, bits outside
13677 of A's precision in <sign bit of A> need to be checked.
13678 If they are all 0, this optimization needs to be done
13679 in unsigned A's type, if they are all 1 in signed A's type,
13680 otherwise this can't be done. */
13682 && TYPE_PRECISION (TREE_TYPE (tem))
13683 < TYPE_PRECISION (TREE_TYPE (arg1))
13684 && TYPE_PRECISION (TREE_TYPE (tem))
13685 < TYPE_PRECISION (type))
13687 int inner_width, outer_width;
13690 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13691 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13692 if (outer_width > TYPE_PRECISION (type))
13693 outer_width = TYPE_PRECISION (type);
/* Mask covering the bits of ARG1 above TEM's precision; used to decide
   whether the transform must be done in the signed or unsigned
   variant of TEM's type.  */
13695 wide_int mask = wi::shifted_mask
13696 (inner_width, outer_width - inner_width, false,
13697 TYPE_PRECISION (TREE_TYPE (arg1)));
13699 wide_int common = mask & arg1;
13700 if (common == mask)
13702 tem_type = signed_type_for (TREE_TYPE (tem));
13703 tem = fold_convert_loc (loc, tem_type, tem);
13705 else if (common == 0)
13707 tem_type = unsigned_type_for (TREE_TYPE (tem));
13708 tem = fold_convert_loc (loc, tem_type, tem);
13716 fold_convert_loc (loc, type,
13717 fold_build2_loc (loc, BIT_AND_EXPR,
13718 TREE_TYPE (tem), tem,
13719 fold_convert_loc (loc,
13724 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13725 already handled above. */
13726 if (TREE_CODE (arg0) == BIT_AND_EXPR
13727 && integer_onep (TREE_OPERAND (arg0, 1))
13728 && integer_zerop (op2)
13729 && integer_pow2p (arg1))
13731 tree tem = TREE_OPERAND (arg0, 0);
13733 if (TREE_CODE (tem) == RSHIFT_EXPR
13734 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13735 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13736 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13737 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13738 TREE_OPERAND (tem, 0), arg1);
13741 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13742 is probably obsolete because the first operand should be a
13743 truth value (that's why we have the two cases above), but let's
13744 leave it in until we can confirm this for all front-ends. */
13745 if (integer_zerop (op2)
13746 && TREE_CODE (arg0) == NE_EXPR
13747 && integer_zerop (TREE_OPERAND (arg0, 1))
13748 && integer_pow2p (arg1)
13749 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13750 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13751 arg1, OEP_ONLY_CONST))
13752 return pedantic_non_lvalue_loc (loc,
13753 fold_convert_loc (loc, type,
13754 TREE_OPERAND (arg0, 0)));
13756 /* Disable the transformations below for vectors, since
13757 fold_binary_op_with_conditional_arg may undo them immediately,
13758 yielding an infinite loop. */
13759 if (code == VEC_COND_EXPR)
13762 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13763 if (integer_zerop (op2)
13764 && truth_value_p (TREE_CODE (arg0))
13765 && truth_value_p (TREE_CODE (arg1))
13766 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13767 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13768 : TRUTH_ANDIF_EXPR,
13769 type, fold_convert_loc (loc, type, arg0), arg1);
13771 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13772 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13773 && truth_value_p (TREE_CODE (arg0))
13774 && truth_value_p (TREE_CODE (arg1))
13775 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13777 location_t loc0 = expr_location_or (arg0, loc);
13778 /* Only perform transformation if ARG0 is easily inverted. */
13779 tem = fold_invert_truthvalue (loc0, arg0);
13781 return fold_build2_loc (loc, code == VEC_COND_EXPR
13784 type, fold_convert_loc (loc, type, tem),
13788 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13789 if (integer_zerop (arg1)
13790 && truth_value_p (TREE_CODE (arg0))
13791 && truth_value_p (TREE_CODE (op2))
13792 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13794 location_t loc0 = expr_location_or (arg0, loc);
13795 /* Only perform transformation if ARG0 is easily inverted. */
13796 tem = fold_invert_truthvalue (loc0, arg0);
13798 return fold_build2_loc (loc, code == VEC_COND_EXPR
13799 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13800 type, fold_convert_loc (loc, type, tem),
13804 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13805 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13806 && truth_value_p (TREE_CODE (arg0))
13807 && truth_value_p (TREE_CODE (op2))
13808 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13809 return fold_build2_loc (loc, code == VEC_COND_EXPR
13810 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13811 type, fold_convert_loc (loc, type, arg0), op2);
13816 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13817 of fold_ternary on them. */
13818 gcc_unreachable ();
/* BIT_FIELD_REF on constant vectors/constructors: extract the
   referenced element(s) when the access is element-aligned.  */
13820 case BIT_FIELD_REF:
13821 if ((TREE_CODE (arg0) == VECTOR_CST
13822 || (TREE_CODE (arg0) == CONSTRUCTOR
13823 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13824 && (type == TREE_TYPE (TREE_TYPE (arg0))
13825 || (TREE_CODE (type) == VECTOR_TYPE
13826 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13828 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13829 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13830 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13831 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13834 && (idx % width) == 0
13835 && (n % width) == 0
13836 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13841 if (TREE_CODE (arg0) == VECTOR_CST)
13844 return VECTOR_CST_ELT (arg0, idx);
13846 tree *vals = XALLOCAVEC (tree, n);
13847 for (unsigned i = 0; i < n; ++i)
13848 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13849 return build_vector (type, vals);
13852 /* Constructor elements can be subvectors. */
13853 unsigned HOST_WIDE_INT k = 1;
13854 if (CONSTRUCTOR_NELTS (arg0) != 0)
13856 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13857 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13858 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13861 /* We keep an exact subset of the constructor elements. */
13862 if ((idx % k) == 0 && (n % k) == 0)
13864 if (CONSTRUCTOR_NELTS (arg0) == 0)
13865 return build_constructor (type, NULL);
13870 if (idx < CONSTRUCTOR_NELTS (arg0))
13871 return CONSTRUCTOR_ELT (arg0, idx)->value;
13872 return build_zero_cst (type);
13875 vec<constructor_elt, va_gc> *vals;
13876 vec_alloc (vals, n);
13877 for (unsigned i = 0;
13878 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13880 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13882 (arg0, idx + i)->value);
13883 return build_constructor (type, vals);
13885 /* The bitfield references a single constructor element. */
13886 else if (idx + n <= (idx / k + 1) * k)
13888 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13889 return build_zero_cst (type);
13891 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13893 return fold_build3_loc (loc, code, type,
13894 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13895 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13900 /* A bit-field-ref that referenced the full argument can be stripped. */
13901 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13902 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13903 && integer_zerop (op2))
13904 return fold_convert_loc (loc, type, arg0);
13906 /* On constants we can use native encode/interpret to constant
13907 fold (nearly) all BIT_FIELD_REFs. */
13908 if (CONSTANT_CLASS_P (arg0)
13909 && can_native_interpret_type_p (type)
13910 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13911 /* This limitation should not be necessary, we just need to
13912 round this up to mode size. */
13913 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13914 /* Need bit-shifting of the buffer to relax the following. */
13915 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13917 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13918 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13919 unsigned HOST_WIDE_INT clen;
13920 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13921 /* ??? We cannot tell native_encode_expr to start at
13922 some random byte only. So limit us to a reasonable amount
13926 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13927 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13929 && len * BITS_PER_UNIT >= bitpos + bitsize)
13931 tree v = native_interpret_expr (type,
13932 b + bitpos / BITS_PER_UNIT,
13933 bitsize / BITS_PER_UNIT);
13943 /* For integers we can decompose the FMA if possible. */
13944 if (TREE_CODE (arg0) == INTEGER_CST
13945 && TREE_CODE (arg1) == INTEGER_CST)
13946 return fold_build2_loc (loc, PLUS_EXPR, type,
13947 const_binop (MULT_EXPR, arg0, arg1), arg2);
13948 if (integer_zerop (arg2))
13949 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13951 return fold_fma (loc, type, arg0, arg1, arg2);
/* VEC_PERM_EXPR with a constant selector: canonicalize the mask,
   detect identity / single-input permutations, and constant-fold
   when both inputs are constant.  */
13953 case VEC_PERM_EXPR:
13954 if (TREE_CODE (arg2) == VECTOR_CST)
13956 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13957 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13958 unsigned char *sel2 = sel + nelts;
13959 bool need_mask_canon = false;
13960 bool need_mask_canon2 = false;
13961 bool all_in_vec0 = true;
13962 bool all_in_vec1 = true;
13963 bool maybe_identity = true;
13964 bool single_arg = (op0 == op1);
13965 bool changed = false;
13967 mask2 = 2 * nelts - 1;
13968 mask = single_arg ? (nelts - 1) : mask2;
13969 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13970 for (i = 0; i < nelts; i++)
13972 tree val = VECTOR_CST_ELT (arg2, i);
13973 if (TREE_CODE (val) != INTEGER_CST)
13976 /* Make sure that the perm value is in an acceptable
13979 need_mask_canon |= wi::gtu_p (t, mask);
13980 need_mask_canon2 |= wi::gtu_p (t, mask2);
13981 sel[i] = t.to_uhwi () & mask;
13982 sel2[i] = t.to_uhwi () & mask2;
13984 if (sel[i] < nelts)
13985 all_in_vec1 = false;
13987 all_in_vec0 = false;
13989 if ((sel[i] & (nelts-1)) != i)
13990 maybe_identity = false;
13993 if (maybe_identity)
14003 else if (all_in_vec1)
14006 for (i = 0; i < nelts; i++)
14008 need_mask_canon = true;
14011 if ((TREE_CODE (op0) == VECTOR_CST
14012 || TREE_CODE (op0) == CONSTRUCTOR)
14013 && (TREE_CODE (op1) == VECTOR_CST
14014 || TREE_CODE (op1) == CONSTRUCTOR))
14016 tree t = fold_vec_perm (type, op0, op1, sel);
14017 if (t != NULL_TREE)
14021 if (op0 == op1 && !single_arg)
14024 /* Some targets are deficient and fail to expand a single
14025 argument permutation while still allowing an equivalent
14026 2-argument version. */
14027 if (need_mask_canon && arg2 == op2
14028 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14029 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14031 need_mask_canon = need_mask_canon2;
14035 if (need_mask_canon && arg2 == op2)
14037 tree *tsel = XALLOCAVEC (tree, nelts);
14038 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14039 for (i = 0; i < nelts; i++)
14040 tsel[i] = build_int_cst (eltype, sel[i]);
14041 op2 = build_vector (TREE_TYPE (arg2), tsel);
14046 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14052 } /* switch (code) */
14055 /* Perform constant folding and related simplification of EXPR.
14056 The related simplifications include x*1 => x, x*0 => 0, etc.,
14057 and application of the associative law.
14058 NOP_EXPR conversions may be removed freely (as long as we
14059 are careful not to change the type of the overall expression).
14060 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14061 but we can constant-fold them if they have constant operands. */
14063 #ifdef ENABLE_FOLD_CHECKING
14064 # define fold(x) fold_1 (x)
14065 static tree fold_1 (tree);
/* Body of fold (renamed fold_1 under ENABLE_FOLD_CHECKING, see the
   #define above): dispatch EXPR to fold_unary_loc / fold_binary_loc /
   fold_ternary_loc by arity, with special handling for constants,
   CALL_EXPRs, constant ARRAY_REF lookups and CONSTRUCTORs.
   NOTE(review): the signature line and several interior lines are
   elided in this view; code is preserved byte-for-byte.  */
14071 const tree t = expr;
14072 enum tree_code code = TREE_CODE (t);
14073 enum tree_code_class kind = TREE_CODE_CLASS (code);
14075 location_t loc = EXPR_LOCATION (expr);
14077 /* Return right away if a constant. */
14078 if (kind == tcc_constant)
14081 /* CALL_EXPR-like objects with variable numbers of operands are
14082 treated specially. */
14083 if (kind == tcc_vl_exp)
14085 if (code == CALL_EXPR)
14087 tem = fold_call_expr (loc, expr, false);
14088 return tem ? tem : expr;
/* Ordinary expressions: dispatch on operand count.  */
14093 if (IS_EXPR_CODE_CLASS (kind))
14095 tree type = TREE_TYPE (t);
14096 tree op0, op1, op2;
14098 switch (TREE_CODE_LENGTH (code))
14101 op0 = TREE_OPERAND (t, 0);
14102 tem = fold_unary_loc (loc, code, type, op0);
14103 return tem ? tem : expr;
14105 op0 = TREE_OPERAND (t, 0);
14106 op1 = TREE_OPERAND (t, 1);
14107 tem = fold_binary_loc (loc, code, type, op0, op1);
14108 return tem ? tem : expr;
14110 op0 = TREE_OPERAND (t, 0);
14111 op1 = TREE_OPERAND (t, 1);
14112 op2 = TREE_OPERAND (t, 2);
14113 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14114 return tem ? tem : expr;
/* Constant-index ARRAY_REF into a CONSTRUCTOR: binary-search the
   (sorted) element list for a matching index or RANGE_EXPR.  */
14124 tree op0 = TREE_OPERAND (t, 0);
14125 tree op1 = TREE_OPERAND (t, 1);
14127 if (TREE_CODE (op1) == INTEGER_CST
14128 && TREE_CODE (op0) == CONSTRUCTOR
14129 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14131 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14132 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14133 unsigned HOST_WIDE_INT begin = 0;
14135 /* Find a matching index by means of a binary search. */
14136 while (begin != end)
14138 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14139 tree index = (*elts)[middle].index;
14141 if (TREE_CODE (index) == INTEGER_CST
14142 && tree_int_cst_lt (index, op1))
14143 begin = middle + 1;
14144 else if (TREE_CODE (index) == INTEGER_CST
14145 && tree_int_cst_lt (op1, index))
14147 else if (TREE_CODE (index) == RANGE_EXPR
14148 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14149 begin = middle + 1;
14150 else if (TREE_CODE (index) == RANGE_EXPR
14151 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14154 return (*elts)[middle].value;
14161 /* Return a VECTOR_CST if possible. */
14164 tree type = TREE_TYPE (t);
14165 if (TREE_CODE (type) != VECTOR_TYPE)
14168 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14169 unsigned HOST_WIDE_INT idx, pos = 0;
14172 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14174 if (!CONSTANT_CLASS_P (value))
14176 if (TREE_CODE (value) == VECTOR_CST)
14178 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14179 vec[pos++] = VECTOR_CST_ELT (value, i);
14182 vec[pos++] = value;
/* Missing trailing elements default to zero.  */
14184 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14185 vec[pos] = build_zero_cst (TREE_TYPE (type));
14187 return build_vector (type, vec);
14191 return fold (DECL_INITIAL (t));
14195 } /* switch (code) */
14198 #ifdef ENABLE_FOLD_CHECKING
14201 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14202 hash_table<pointer_hash<const tree_node> > *);
14203 static void fold_check_failed (const_tree, const_tree);
14204 void print_fold_checksum (const_tree);
14206 /* When --enable-checking=fold, compute a digest of expr before
14207 and after actual fold call to see if fold did not accidentally
14208 change original expr. */
14214 struct md5_ctx ctx;
14215 unsigned char checksum_before[16], checksum_after[16];
14216 hash_table<pointer_hash<const tree_node> > ht (32);
14218 md5_init_ctx (&ctx);
14219 fold_checksum_tree (expr, &ctx, &ht);
14220 md5_finish_ctx (&ctx, checksum_before);
14223 ret = fold_1 (expr);
14225 md5_init_ctx (&ctx);
14226 fold_checksum_tree (expr, &ctx, &ht);
14227 md5_finish_ctx (&ctx, checksum_after);
14229 if (memcmp (checksum_before, checksum_after, 16))
14230 fold_check_failed (expr, ret);
/* Debugging helper: compute the md5 digest of EXPR (via
   fold_checksum_tree) and print it to stderr as 16 two-digit hex
   bytes followed by a newline.  */
14236 print_fold_checksum (const_tree expr)
14238 struct md5_ctx ctx;
14239 unsigned char checksum[16], cnt;
14240 hash_table<pointer_hash<const tree_node> > ht (32);
14242 md5_init_ctx (&ctx);
14243 fold_checksum_tree (expr, &ctx, &ht);
14244 md5_finish_ctx (&ctx, checksum);
/* Emit the 16 digest bytes in hex, then a newline.  */
14245 for (cnt = 0; cnt < 16; ++cnt)
14246 fprintf (stderr, "%02x", checksum[cnt]);
14247 putc ('\n', stderr);
/* Called when the digests taken before and after folding EXPR differ,
   i.e. fold modified its input tree in place.  Aborts compilation
   with an internal error; EXPR and RET are only here for inspection
   in a debugger.  */
14251 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14253 internal_error ("fold check: original tree changed by fold");
/* Feed the contents of tree EXPR into the md5 context CTX, recursing
   into its type, chain and operands.  HT records already-visited
   nodes so shared subtrees are hashed only once.  Fields that fold is
   legitimately allowed to mutate (DECL_ASSEMBLER_NAME, a type's
   pointer-to/reference-to caches, cached values and variant links)
   are cleared in a stack copy BUF before hashing, so that permitted
   in-place changes do not trip the before/after comparison.  */
14257 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14258 hash_table<pointer_hash <const tree_node> > *ht)
14260 const tree_node **slot;
14261 enum tree_code code;
14262 union tree_node buf;
/* Register EXPR in HT; the duplicate-visit early return that uses
   SLOT is elided in this excerpt — presumably a non-NULL *slot means
   the node was already hashed.  TODO(review): confirm.  */
14268 slot = ht->find_slot (expr, INSERT);
14272 code = TREE_CODE (expr);
14273 if (TREE_CODE_CLASS (code) == tcc_declaration
14274 && DECL_ASSEMBLER_NAME_SET_P (expr))
14276 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14277 memcpy ((char *) &buf, expr, tree_size (expr))
14278 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14279 expr = (tree) &buf;
14281 else if (TREE_CODE_CLASS (code) == tcc_type
14282 && (TYPE_POINTER_TO (expr)
14283 || TYPE_REFERENCE_TO (expr)
14284 || TYPE_CACHED_VALUES_P (expr)
14285 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14286 || TYPE_NEXT_VARIANT (expr)))
14288 /* Allow these fields to be modified. */
14290 memcpy ((char *) &buf, expr, tree_size (expr));
14291 expr = tmp = (tree) &buf;
14292 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14293 TYPE_POINTER_TO (tmp) = NULL;
14294 TYPE_REFERENCE_TO (tmp) = NULL;
14295 TYPE_NEXT_VARIANT (tmp) = NULL;
14296 if (TYPE_CACHED_VALUES_P (tmp))
14298 TYPE_CACHED_VALUES_P (tmp) = 0;
14299 TYPE_CACHED_VALUES (tmp) = NULL;
/* Hash the (possibly masked) node representation itself, then its
   type.  */
14302 md5_process_bytes (expr, tree_size (expr), ctx);
14303 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14304 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
/* Follow TREE_CHAIN, except for node kinds whose chain is either
   meaningless here or handled structurally below (TREE_LIST walks
   its own chain via the goto).  */
14305 if (TREE_CODE_CLASS (code) != tcc_type
14306 && TREE_CODE_CLASS (code) != tcc_declaration
14307 && code != TREE_LIST
14308 && code != SSA_NAME
14309 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14310 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Recurse into the sub-trees specific to each tree-code class.
   Several case labels are elided in this excerpt.  */
14311 switch (TREE_CODE_CLASS (code))
14317 md5_process_bytes (TREE_STRING_POINTER (expr),
14318 TREE_STRING_LENGTH (expr), ctx);
14321 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14322 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14325 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14326 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14332 case tcc_exceptional:
/* TREE_LIST: hash purpose/value, then iterate down the chain by
   jumping back to the (elided) recursive_label at function entry
   instead of recursing — avoids deep recursion on long lists.  */
14336 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14337 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14338 expr = TREE_CHAIN (expr);
14339 goto recursive_label;
14342 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14343 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14349 case tcc_expression:
14350 case tcc_reference:
14351 case tcc_comparison:
14354 case tcc_statement:
14356 len = TREE_OPERAND_LENGTH (expr);
14357 for (i = 0; i < len; ++i)
14358 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14360 case tcc_declaration:
14361 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14362 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14363 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14365 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14366 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14367 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14368 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14369 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14372 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14374 if (TREE_CODE (expr) == FUNCTION_DECL)
14376 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14377 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14379 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
/* tcc_type (case label elided): hash the type's size, attributes,
   name, bounds, main variant, binfo and context.  */
14383 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14384 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14385 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14386 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14387 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14388 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14389 if (INTEGRAL_TYPE_P (expr)
14390 || SCALAR_FLOAT_TYPE_P (expr))
14392 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14393 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14395 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14396 if (TREE_CODE (expr) == RECORD_TYPE
14397 || TREE_CODE (expr) == UNION_TYPE
14398 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14399 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14400 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14407 /* Helper function for outputting the checksum of a tree T. When
14408 debugging with gdb, you can "define mynext" to be "next" followed
14409 by "call debug_fold_checksum (op0)", then just trace down till the
/* Print the md5 digest of T to stderr as 16 space-separated decimal
   bytes (unlike print_fold_checksum, which prints hex).  */
14412 DEBUG_FUNCTION void
14413 debug_fold_checksum (const_tree t)
14416 unsigned char checksum[16];
14417 struct md5_ctx ctx;
14418 hash_table<pointer_hash<const tree_node> > ht (32);
14420 md5_init_ctx (&ctx);
14421 fold_checksum_tree (t, &ctx, &ht);
14422 md5_finish_ctx (&ctx, checksum);
14425 for (i = 0; i < 16; i++)
14426 fprintf (stderr, "%d ", checksum[i]);
14428 fprintf (stderr, "\n");
14433 /* Fold a unary tree expression with code CODE of type TYPE with an
14434 operand OP0. LOC is the location of the resulting expression.
14435 Return a folded expression if successful. Otherwise, return a tree
14436 expression with code CODE of type TYPE with an operand OP0. */
14439 fold_build1_stat_loc (location_t loc,
14440 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
/* With --enable-checking=fold, digest OP0 before and after the fold
   attempt so any in-place mutation of the operand is detected.  */
14443 #ifdef ENABLE_FOLD_CHECKING
14444 unsigned char checksum_before[16], checksum_after[16];
14445 struct md5_ctx ctx;
14446 hash_table<pointer_hash<const tree_node> > ht (32);
14448 md5_init_ctx (&ctx);
14449 fold_checksum_tree (op0, &ctx, &ht);
14450 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; otherwise build a plain unary node.  NOTE(review):
   the guard choosing between these two assignments (presumably
   "if (!tem)") is elided from this excerpt.  */
14454 tem = fold_unary_loc (loc, code, type, op0);
14456 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14458 #ifdef ENABLE_FOLD_CHECKING
14459 md5_init_ctx (&ctx);
14460 fold_checksum_tree (op0, &ctx, &ht);
14461 md5_finish_ctx (&ctx, checksum_after);
14463 if (memcmp (checksum_before, checksum_after, 16))
14464 fold_check_failed (op0, tem);
14469 /* Fold a binary tree expression with code CODE of type TYPE with
14470 operands OP0 and OP1. LOC is the location of the resulting
14471 expression. Return a folded expression if successful. Otherwise,
14472 return a tree expression with code CODE of type TYPE with operands
14476 fold_build2_stat_loc (location_t loc,
14477 enum tree_code code, tree type, tree op0, tree op1
/* With --enable-checking=fold, digest each operand before and after
   the fold attempt; a changed digest means fold mutated its input.  */
14481 #ifdef ENABLE_FOLD_CHECKING
14482 unsigned char checksum_before_op0[16],
14483 checksum_before_op1[16],
14484 checksum_after_op0[16],
14485 checksum_after_op1[16];
14486 struct md5_ctx ctx;
14487 hash_table<pointer_hash<const tree_node> > ht (32);
14489 md5_init_ctx (&ctx);
14490 fold_checksum_tree (op0, &ctx, &ht);
14491 md5_finish_ctx (&ctx, checksum_before_op0);
14494 md5_init_ctx (&ctx);
14495 fold_checksum_tree (op1, &ctx, &ht);
14496 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; otherwise build a plain binary node.  NOTE(review):
   the selecting guard between the two assignments is elided here.  */
14500 tem = fold_binary_loc (loc, code, type, op0, op1);
14502 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14504 #ifdef ENABLE_FOLD_CHECKING
14505 md5_init_ctx (&ctx);
14506 fold_checksum_tree (op0, &ctx, &ht);
14507 md5_finish_ctx (&ctx, checksum_after_op0);
14510 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14511 fold_check_failed (op0, tem);
14513 md5_init_ctx (&ctx);
14514 fold_checksum_tree (op1, &ctx, &ht);
14515 md5_finish_ctx (&ctx, checksum_after_op1);
14517 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14518 fold_check_failed (op1, tem);
14523 /* Fold a ternary tree expression with code CODE of type TYPE with
14524 operands OP0, OP1, and OP2. Return a folded expression if
14525 successful. Otherwise, return a tree expression with code CODE of
14526 type TYPE with operands OP0, OP1, and OP2. */
14529 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14530 tree op0, tree op1, tree op2 MEM_STAT_DECL)
/* With --enable-checking=fold, digest all three operands before and
   after the fold attempt to catch in-place mutation.  */
14533 #ifdef ENABLE_FOLD_CHECKING
14534 unsigned char checksum_before_op0[16],
14535 checksum_before_op1[16],
14536 checksum_before_op2[16],
14537 checksum_after_op0[16],
14538 checksum_after_op1[16],
14539 checksum_after_op2[16];
14540 struct md5_ctx ctx;
14541 hash_table<pointer_hash<const tree_node> > ht (32);
14543 md5_init_ctx (&ctx);
14544 fold_checksum_tree (op0, &ctx, &ht);
14545 md5_finish_ctx (&ctx, checksum_before_op0);
14548 md5_init_ctx (&ctx);
14549 fold_checksum_tree (op1, &ctx, &ht);
14550 md5_finish_ctx (&ctx, checksum_before_op1);
14553 md5_init_ctx (&ctx);
14554 fold_checksum_tree (op2, &ctx, &ht);
14555 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length expressions (calls) must go through
   fold_build_call_array_loc instead of this 3-operand entry point.  */
14559 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14560 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14562 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14564 #ifdef ENABLE_FOLD_CHECKING
14565 md5_init_ctx (&ctx);
14566 fold_checksum_tree (op0, &ctx, &ht);
14567 md5_finish_ctx (&ctx, checksum_after_op0);
14570 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14571 fold_check_failed (op0, tem);
14573 md5_init_ctx (&ctx);
14574 fold_checksum_tree (op1, &ctx, &ht);
14575 md5_finish_ctx (&ctx, checksum_after_op1);
14578 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14579 fold_check_failed (op1, tem);
14581 md5_init_ctx (&ctx);
14582 fold_checksum_tree (op2, &ctx, &ht);
14583 md5_finish_ctx (&ctx, checksum_after_op2);
14585 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14586 fold_check_failed (op2, tem);
14591 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14592 arguments in ARGARRAY, and a null static chain.
14593 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14594 of type TYPE from the given operands as constructed by build_call_array. */
14597 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14598 int nargs, tree *argarray)
/* With --enable-checking=fold, digest FN and the whole argument list
   before and after folding; the argument list is hashed into one
   combined digest rather than one per argument.  */
14601 #ifdef ENABLE_FOLD_CHECKING
14602 unsigned char checksum_before_fn[16],
14603 checksum_before_arglist[16],
14604 checksum_after_fn[16],
14605 checksum_after_arglist[16];
14606 struct md5_ctx ctx;
14607 hash_table<pointer_hash<const tree_node> > ht (32);
14610 md5_init_ctx (&ctx);
14611 fold_checksum_tree (fn, &ctx, &ht);
14612 md5_finish_ctx (&ctx, checksum_before_fn);
14615 md5_init_ctx (&ctx);
14616 for (i = 0; i < nargs; i++)
14617 fold_checksum_tree (argarray[i], &ctx, &ht);
14618 md5_finish_ctx (&ctx, checksum_before_arglist);
14622 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14624 #ifdef ENABLE_FOLD_CHECKING
14625 md5_init_ctx (&ctx);
14626 fold_checksum_tree (fn, &ctx, &ht);
14627 md5_finish_ctx (&ctx, checksum_after_fn);
14630 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14631 fold_check_failed (fn, tem);
14633 md5_init_ctx (&ctx);
14634 for (i = 0; i < nargs; i++)
14635 fold_checksum_tree (argarray[i], &ctx, &ht);
14636 md5_finish_ctx (&ctx, checksum_after_arglist);
/* No single tree identifies the arglist, so report with NULL_TREE.  */
14638 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14639 fold_check_failed (NULL_TREE, tem);
14644 /* Perform constant folding and related simplification of initializer
14645 expression EXPR. These behave identically to "fold_buildN" but ignore
14646 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap/rounding-related flags into locals and clear them, so
   that folding of static initializers ignores run-time traps and
   exceptions.  Declares locals, so it must open a scope that a
   matching END_FOLD_INIT closes out.  */
14648 #define START_FOLD_INIT \
14649 int saved_signaling_nans = flag_signaling_nans;\
14650 int saved_trapping_math = flag_trapping_math;\
14651 int saved_rounding_math = flag_rounding_math;\
14652 int saved_trapv = flag_trapv;\
14653 int saved_folding_initializer = folding_initializer;\
14654 flag_signaling_nans = 0;\
14655 flag_trapping_math = 0;\
14656 flag_rounding_math = 0;\
14658 folding_initializer = 1;
/* Restore the flags saved by START_FOLD_INIT in the same scope.  */
14660 #define END_FOLD_INIT \
14661 flag_signaling_nans = saved_signaling_nans;\
14662 flag_trapping_math = saved_trapping_math;\
14663 flag_rounding_math = saved_rounding_math;\
14664 flag_trapv = saved_trapv;\
14665 folding_initializer = saved_folding_initializer;
/* Like fold_build1_loc, but fold as an initializer: run-time traps
   and exceptions are ignored (the START_FOLD_INIT / END_FOLD_INIT
   bracketing lines are elided from this excerpt).  */
14668 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14669 tree type, tree op)
14674 result = fold_build1_loc (loc, code, type, op);
/* Like fold_build2_loc, but fold as an initializer: run-time traps
   and exceptions are ignored (the START_FOLD_INIT / END_FOLD_INIT
   bracketing lines are elided from this excerpt).  */
14681 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14682 tree type, tree op0, tree op1)
14687 result = fold_build2_loc (loc, code, type, op0, op1);
/* Like fold_build_call_array_loc, but fold as an initializer:
   run-time traps and exceptions are ignored (the START_FOLD_INIT /
   END_FOLD_INIT bracketing lines are elided from this excerpt).  */
14694 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14695 int nargs, tree *argarray)
14700 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14706 #undef START_FOLD_INIT
14707 #undef END_FOLD_INIT
14709 /* Determine if first argument is a multiple of second argument. Return 0 if
14710 it is not, or we cannot easily determine it to be.
14712 An example of the sort of thing we care about (at this point; this routine
14713 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14714 fold cases do now) is discovering that
14716 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14722 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14724 This code also handles discovering that
14726 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14728 is a multiple of 8 so we don't have to worry about dealing with a
14729 possible remainder.
14731 Note that we *look* inside a SAVE_EXPR only to determine how it was
14732 calculated; it is not safe for fold to do much of anything else with the
14733 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14734 at run time. For example, the latter example above *cannot* be implemented
14735 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14736 evaluation time of the original SAVE_EXPR is not necessarily the same at
14737 the time the new expression is evaluated. The only optimization of this
14738 sort that would be valid is changing
14740 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14744 SAVE_EXPR (I) * SAVE_EXPR (J)
14746 (where the same SAVE_EXPR (J) is used in the original and the
14747 transformed version). */
/* See the block comment above: returns nonzero when TOP is provably a
   multiple of BOTTOM (conservative — 0 also means "don't know").
   Several case labels of the switch are elided from this excerpt.  */
14750 multiple_of_p (tree type, const_tree top, const_tree bottom)
/* Anything is trivially a multiple of itself.  */
14752 if (operand_equal_p (top, bottom, 0))
/* Only plain integer types are analyzed.  */
14755 if (TREE_CODE (type) != INTEGER_TYPE)
14758 switch (TREE_CODE (top))
14761 /* Bitwise and provides a power of two multiple. If the mask is
14762 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14763 if (!integer_pow2p (bottom))
/* One factor being a multiple suffices (multiplication-style case;
   its label is elided — presumably MULT_EXPR, confirm).  */
14768 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14769 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Both terms must be multiples (addition/subtraction-style case;
   label elided).  */
14773 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14774 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as an explicit multiplication by
   the shifted-in power of two and recurse on the product.  */
14777 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14781 op1 = TREE_OPERAND (top, 1);
14782 /* const_binop may not detect overflow correctly,
14783 so check for it explicitly here. */
14784 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14785 && 0 != (t1 = fold_convert (type,
14786 const_binop (LSHIFT_EXPR,
14789 && !TREE_OVERFLOW (t1))
14790 return multiple_of_p (type, t1, bottom);
14795 /* Can't handle conversions from non-integral or wider integral type. */
14796 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14797 || (TYPE_PRECISION (type)
14798 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14801 /* .. fall through ... */
14804 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Both arms (operands 1 and 2) must be multiples — presumably the
   COND_EXPR case; label elided, confirm.  */
14807 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14808 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom))
/* Constant TOP: decide arithmetically, but refuse division by zero,
   non-constant BOTTOM, and negative operands in unsigned types.  */
14811 if (TREE_CODE (bottom) != INTEGER_CST
14812 || integer_zerop (bottom)
14813 || (TYPE_UNSIGNED (type)
14814 && (tree_int_cst_sgn (top) < 0
14815 || tree_int_cst_sgn (bottom) < 0)))
14817 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14825 /* Return true if CODE or TYPE is known to be non-negative. */
14828 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
/* Truth-valued codes are non-negative except for the signed one-bit
   type case; the actual return statements are elided here.  */
14830 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14831 && truth_value_p (code))
14832 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14833 have a signed:1 type (where the value is -1 and 0). */
14838 /* Return true if (CODE OP0) is known to be non-negative. If the return
14839 value is based on the assumption that signed overflow is undefined,
14840 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14841 *STRICT_OVERFLOW_P. */
/* Return true if (CODE OP0) of TYPE is known non-negative; sets
   *STRICT_OVERFLOW_P when the answer relies on signed overflow being
   undefined.  Several case labels of the switch are elided from this
   excerpt.  */
14844 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14845 bool *strict_overflow_p)
/* Unsigned results are trivially non-negative.  */
14847 if (TYPE_UNSIGNED (type))
14853 /* We can't return 1 if flag_wrapv is set because
14854 ABS_EXPR<INT_MIN> = INT_MIN. */
14855 if (!INTEGRAL_TYPE_P (type))
14857 if (TYPE_OVERFLOW_UNDEFINED (type))
14859 *strict_overflow_p = true;
14864 case NON_LVALUE_EXPR:
14866 case FIX_TRUNC_EXPR:
14867 return tree_expr_nonnegative_warnv_p (op0,
14868 strict_overflow_p);
/* Conversion case: result sign depends on the inner/outer type pair.  */
14872 tree inner_type = TREE_TYPE (op0);
14873 tree outer_type = type;
14875 if (TREE_CODE (outer_type) == REAL_TYPE)
14877 if (TREE_CODE (inner_type) == REAL_TYPE)
14878 return tree_expr_nonnegative_warnv_p (op0,
14879 strict_overflow_p);
14880 if (INTEGRAL_TYPE_P (inner_type))
14882 if (TYPE_UNSIGNED (inner_type))
14884 return tree_expr_nonnegative_warnv_p (op0,
14885 strict_overflow_p);
14888 else if (INTEGRAL_TYPE_P (outer_type))
14890 if (TREE_CODE (inner_type) == REAL_TYPE)
14891 return tree_expr_nonnegative_warnv_p (op0,
14892 strict_overflow_p);
/* A widening conversion from an unsigned inner type zero-extends.  */
14893 if (INTEGRAL_TYPE_P (inner_type))
14894 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14895 && TYPE_UNSIGNED (inner_type);
14901 return tree_simple_nonnegative_warnv_p (code, type);
14904 /* We don't know sign of `t', so be conservative and return false. */
14908 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14909 value is based on the assumption that signed overflow is undefined,
14910 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14911 *STRICT_OVERFLOW_P. */
/* Return true if (CODE OP0 OP1) of TYPE is known non-negative; sets
   *STRICT_OVERFLOW_P when the answer assumes undefined signed
   overflow.  Several case labels are elided from this excerpt.  */
14914 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14915 tree op1, bool *strict_overflow_p)
/* Unsigned results are trivially non-negative.  */
14917 if (TYPE_UNSIGNED (type))
14922 case POINTER_PLUS_EXPR:
14924 if (FLOAT_TYPE_P (type))
14925 return (tree_expr_nonnegative_warnv_p (op0,
14927 && tree_expr_nonnegative_warnv_p (op1,
14928 strict_overflow_p));
14930 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14931 both unsigned and at least 2 bits shorter than the result. */
14932 if (TREE_CODE (type) == INTEGER_TYPE
14933 && TREE_CODE (op0) == NOP_EXPR
14934 && TREE_CODE (op1) == NOP_EXPR)
14936 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14937 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14938 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14939 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* +1 accounts for the carry bit of the sum.  */
14941 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14942 TYPE_PRECISION (inner2)) + 1;
14943 return prec < TYPE_PRECISION (type);
/* Multiplication case (label elided).  */
14949 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14951 /* x * x is always non-negative for floating point x
14952 or without overflow. */
14953 if (operand_equal_p (op0, op1, 0)
14954 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14955 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14957 if (TYPE_OVERFLOW_UNDEFINED (type))
14958 *strict_overflow_p = true;
14963 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14964 both unsigned and their total bits is shorter than the result. */
14965 if (TREE_CODE (type) == INTEGER_TYPE
14966 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14967 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14969 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14970 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14972 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14973 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14976 bool unsigned0 = TYPE_UNSIGNED (inner0);
14977 bool unsigned1 = TYPE_UNSIGNED (inner1);
/* A non-negative constant operand counts as "unsigned" here.  */
14979 if (TREE_CODE (op0) == INTEGER_CST)
14980 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14982 if (TREE_CODE (op1) == INTEGER_CST)
14983 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14985 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14986 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
/* For constants, use the value's minimal precision rather than the
   full type precision.  */
14988 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14989 ? tree_int_cst_min_precision (op0, UNSIGNED)
14990 : TYPE_PRECISION (inner0);
14992 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14993 ? tree_int_cst_min_precision (op1, UNSIGNED)
14994 : TYPE_PRECISION (inner1);
14996 return precision0 + precision1 < TYPE_PRECISION (type);
/* Either operand non-negative suffices here (case label elided —
   presumably a MAX/bit-or style code; confirm against full source).  */
15003 return (tree_expr_nonnegative_warnv_p (op0,
15005 || tree_expr_nonnegative_warnv_p (op1,
15006 strict_overflow_p));
15012 case TRUNC_DIV_EXPR:
15013 case CEIL_DIV_EXPR:
15014 case FLOOR_DIV_EXPR:
15015 case ROUND_DIV_EXPR:
15016 return (tree_expr_nonnegative_warnv_p (op0,
15018 && tree_expr_nonnegative_warnv_p (op1,
15019 strict_overflow_p));
/* Remainder takes the sign of the dividend, so only OP0 matters.  */
15021 case TRUNC_MOD_EXPR:
15022 case CEIL_MOD_EXPR:
15023 case FLOOR_MOD_EXPR:
15024 case ROUND_MOD_EXPR:
15025 return tree_expr_nonnegative_warnv_p (op0,
15026 strict_overflow_p);
15028 return tree_simple_nonnegative_warnv_p (code, type);
15031 /* We don't know sign of `t', so be conservative and return false. */
15035 /* Return true if T is known to be non-negative. If the return
15036 value is based on the assumption that signed overflow is undefined,
15037 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15038 *STRICT_OVERFLOW_P. */
/* Return true if single node T (constants, COND-like codes) is known
   non-negative; sets *STRICT_OVERFLOW_P via the recursive calls.
   Case labels of the switch are elided from this excerpt.  */
15041 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15043 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15046 switch (TREE_CODE (t))
15049 return tree_int_cst_sgn (t) >= 0;
15052 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15055 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
/* Conditional: both arms (operands 1 and 2) must be non-negative.  */
15058 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15060 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15061 strict_overflow_p));
15063 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15066 /* We don't know sign of `t', so be conservative and return false. */
15070 /* Return true if T is known to be non-negative. If the return
15071 value is based on the assumption that signed overflow is undefined,
15072 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15073 *STRICT_OVERFLOW_P. */
/* Return true if a call to built-in FNDECL with arguments ARG0/ARG1
   yielding TYPE is known non-negative; sets *STRICT_OVERFLOW_P via
   the recursive argument checks.  */
15076 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15077 tree arg0, tree arg1, bool *strict_overflow_p)
15079 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15080 switch (DECL_FUNCTION_CODE (fndecl))
/* Builtins whose result is always non-negative by definition.  */
15082 CASE_FLT_FN (BUILT_IN_ACOS):
15083 CASE_FLT_FN (BUILT_IN_ACOSH):
15084 CASE_FLT_FN (BUILT_IN_CABS):
15085 CASE_FLT_FN (BUILT_IN_COSH):
15086 CASE_FLT_FN (BUILT_IN_ERFC):
15087 CASE_FLT_FN (BUILT_IN_EXP):
15088 CASE_FLT_FN (BUILT_IN_EXP10):
15089 CASE_FLT_FN (BUILT_IN_EXP2):
15090 CASE_FLT_FN (BUILT_IN_FABS):
15091 CASE_FLT_FN (BUILT_IN_FDIM):
15092 CASE_FLT_FN (BUILT_IN_HYPOT):
15093 CASE_FLT_FN (BUILT_IN_POW10):
15094 CASE_INT_FN (BUILT_IN_FFS):
15095 CASE_INT_FN (BUILT_IN_PARITY):
15096 CASE_INT_FN (BUILT_IN_POPCOUNT):
15097 CASE_INT_FN (BUILT_IN_CLZ):
15098 CASE_INT_FN (BUILT_IN_CLRSB):
15099 case BUILT_IN_BSWAP32:
15100 case BUILT_IN_BSWAP64:
15104 CASE_FLT_FN (BUILT_IN_SQRT):
15105 /* sqrt(-0.0) is -0.0. */
15106 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15108 return tree_expr_nonnegative_warnv_p (arg0,
15109 strict_overflow_p);
/* Builtins that preserve the sign of their first argument.  */
15111 CASE_FLT_FN (BUILT_IN_ASINH):
15112 CASE_FLT_FN (BUILT_IN_ATAN):
15113 CASE_FLT_FN (BUILT_IN_ATANH):
15114 CASE_FLT_FN (BUILT_IN_CBRT):
15115 CASE_FLT_FN (BUILT_IN_CEIL):
15116 CASE_FLT_FN (BUILT_IN_ERF):
15117 CASE_FLT_FN (BUILT_IN_EXPM1):
15118 CASE_FLT_FN (BUILT_IN_FLOOR):
15119 CASE_FLT_FN (BUILT_IN_FMOD):
15120 CASE_FLT_FN (BUILT_IN_FREXP):
15121 CASE_FLT_FN (BUILT_IN_ICEIL):
15122 CASE_FLT_FN (BUILT_IN_IFLOOR):
15123 CASE_FLT_FN (BUILT_IN_IRINT):
15124 CASE_FLT_FN (BUILT_IN_IROUND):
15125 CASE_FLT_FN (BUILT_IN_LCEIL):
15126 CASE_FLT_FN (BUILT_IN_LDEXP):
15127 CASE_FLT_FN (BUILT_IN_LFLOOR):
15128 CASE_FLT_FN (BUILT_IN_LLCEIL):
15129 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15130 CASE_FLT_FN (BUILT_IN_LLRINT):
15131 CASE_FLT_FN (BUILT_IN_LLROUND):
15132 CASE_FLT_FN (BUILT_IN_LRINT):
15133 CASE_FLT_FN (BUILT_IN_LROUND):
15134 CASE_FLT_FN (BUILT_IN_MODF):
15135 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15136 CASE_FLT_FN (BUILT_IN_RINT):
15137 CASE_FLT_FN (BUILT_IN_ROUND):
15138 CASE_FLT_FN (BUILT_IN_SCALB):
15139 CASE_FLT_FN (BUILT_IN_SCALBLN):
15140 CASE_FLT_FN (BUILT_IN_SCALBN):
15141 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15142 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15143 CASE_FLT_FN (BUILT_IN_SINH):
15144 CASE_FLT_FN (BUILT_IN_TANH):
15145 CASE_FLT_FN (BUILT_IN_TRUNC):
15146 /* True if the 1st argument is nonnegative. */
15147 return tree_expr_nonnegative_warnv_p (arg0,
15148 strict_overflow_p);
15150 CASE_FLT_FN (BUILT_IN_FMAX):
15151 /* True if the 1st OR 2nd arguments are nonnegative. */
15152 return (tree_expr_nonnegative_warnv_p (arg0,
15154 || (tree_expr_nonnegative_warnv_p (arg1,
15155 strict_overflow_p)));
15157 CASE_FLT_FN (BUILT_IN_FMIN):
15158 /* True if the 1st AND 2nd arguments are nonnegative. */
15159 return (tree_expr_nonnegative_warnv_p (arg0,
15161 && (tree_expr_nonnegative_warnv_p (arg1,
15162 strict_overflow_p)));
15164 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15165 /* True if the 2nd argument is nonnegative. */
15166 return tree_expr_nonnegative_warnv_p (arg1,
15167 strict_overflow_p);
15169 CASE_FLT_FN (BUILT_IN_POWI):
15170 /* True if the 1st argument is nonnegative or the second
15171 argument is an even integer. */
15172 if (TREE_CODE (arg1) == INTEGER_CST
15173 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15175 return tree_expr_nonnegative_warnv_p (arg0,
15176 strict_overflow_p);
15178 CASE_FLT_FN (BUILT_IN_POW):
15179 /* True if the 1st argument is nonnegative or the second
15180 argument is an even integer valued real. */
15181 if (TREE_CODE (arg1) == REAL_CST)
15186 c = TREE_REAL_CST (arg1);
15187 n = real_to_integer (&c);
/* Round-trip the exponent through an integer to verify it is an
   exact integral value; the evenness test on N is elided here.  */
15190 REAL_VALUE_TYPE cint;
15191 real_from_integer (&cint, VOIDmode, n, SIGNED);
15192 if (real_identical (&c, &cint))
15196 return tree_expr_nonnegative_warnv_p (arg0,
15197 strict_overflow_p);
/* Not a recognized builtin: fall back to the generic code/type test.  */
15202 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15206 /* Return true if T is known to be non-negative. If the return
15207 value is based on the assumption that signed overflow is undefined,
15208 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15209 *STRICT_OVERFLOW_P. */
/* Handle the "invalid for general folding" codes (TARGET_EXPR,
   CALL_EXPR, COMPOUND_EXPR, ...): return true if T is known
   non-negative.  Several case labels are elided from this excerpt.  */
15212 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15214 enum tree_code code = TREE_CODE (t);
15215 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* TARGET_EXPR (label elided): analyze what ends up in the slot.  */
15222 tree temp = TARGET_EXPR_SLOT (t);
15223 t = TARGET_EXPR_INITIAL (t);
15225 /* If the initializer is non-void, then it's a normal expression
15226 that will be assigned to the slot. */
15227 if (!VOID_TYPE_P (t))
15228 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15230 /* Otherwise, the initializer sets the slot in some way. One common
15231 way is an assignment statement at the end of the initializer. */
15234 if (TREE_CODE (t) == BIND_EXPR)
15235 t = expr_last (BIND_EXPR_BODY (t));
15236 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15237 || TREE_CODE (t) == TRY_CATCH_EXPR)
15238 t = expr_last (TREE_OPERAND (t, 0));
15239 else if (TREE_CODE (t) == STATEMENT_LIST)
/* If the final statement assigns to the slot, test its RHS.  */
15244 if (TREE_CODE (t) == MODIFY_EXPR
15245 && TREE_OPERAND (t, 0) == temp)
15246 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15247 strict_overflow_p);
/* CALL_EXPR (label elided): delegate to the builtin-call analysis,
   passing at most the first two arguments.  */
15254 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15255 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15257 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15258 get_callee_fndecl (t),
15261 strict_overflow_p);
15263 case COMPOUND_EXPR:
15265 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15266 strict_overflow_p);
/* The value of a statement sequence is its last expression.  */
15268 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15269 strict_overflow_p);
15271 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15272 strict_overflow_p);
15275 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15279 /* We don't know sign of `t', so be conservative and return false. */
15283 /* Return true if T is known to be non-negative. If the return
15284 value is based on the assumption that signed overflow is undefined,
15285 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15286 *STRICT_OVERFLOW_P. */
/* Top-level dispatcher: return true if T is known non-negative,
   routing to the unary/binary/single/invalid helpers by tree-code
   class.  Sets *STRICT_OVERFLOW_P when the answer assumes undefined
   signed overflow.  Some case labels are elided from this excerpt.  */
15289 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15291 enum tree_code code;
/* error_mark_node carries no usable sign information.  */
15292 if (t == error_mark_node)
15295 code = TREE_CODE (t);
15296 switch (TREE_CODE_CLASS (code))
15299 case tcc_comparison:
15300 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15302 TREE_OPERAND (t, 0),
15303 TREE_OPERAND (t, 1),
15304 strict_overflow_p);
15307 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15309 TREE_OPERAND (t, 0),
15310 strict_overflow_p);
15313 case tcc_declaration:
15314 case tcc_reference:
15315 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15323 case TRUTH_AND_EXPR:
15324 case TRUTH_OR_EXPR:
15325 case TRUTH_XOR_EXPR:
15326 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15328 TREE_OPERAND (t, 0),
15329 TREE_OPERAND (t, 1),
15330 strict_overflow_p);
15331 case TRUTH_NOT_EXPR:
15332 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15334 TREE_OPERAND (t, 0),
15335 strict_overflow_p);
15342 case WITH_SIZE_EXPR:
15344 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Codes fold cannot analyze generically (TARGET_EXPR, CALL_EXPR, ...).  */
15347 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15351 /* Return true if `t' is known to be non-negative. Handle warnings
15352 about undefined signed overflow. */
/* Convenience wrapper around tree_expr_nonnegative_warnv_p that also
   emits the -Wstrict-overflow diagnostic when the answer depended on
   assuming undefined signed overflow.  */
15355 tree_expr_nonnegative_p (tree t)
15357 bool ret, strict_overflow_p;
15359 strict_overflow_p = false;
15360 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
/* Warn only when the positive answer rests on the overflow assumption.  */
15361 if (strict_overflow_p)
15362 fold_overflow_warning (("assuming signed overflow does not occur when "
15363 "determining that expression is always "
15365 WARN_STRICT_OVERFLOW_MISC);
15370 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15371 For floating point we further ensure that T is not denormal.
15372 Similar logic is present in nonzero_address in rtlanal.h.
15374 If the return value is based on the assumption that signed overflow
15375 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15376 change *STRICT_OVERFLOW_P. */
/* Return true when (CODE OP0) of TYPE is known nonzero; sets
   *STRICT_OVERFLOW_P via the recursive calls.  The switch statement
   and several case labels are elided from this excerpt.  */
15379 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15380 bool *strict_overflow_p)
15385 return tree_expr_nonzero_warnv_p (op0,
15386 strict_overflow_p);
/* Conversion case: nonzero survives only a non-narrowing conversion
   (narrowing could truncate a nonzero value to zero).  */
15390 tree inner_type = TREE_TYPE (op0);
15391 tree outer_type = type;
15393 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15394 && tree_expr_nonzero_warnv_p (op0,
15395 strict_overflow_p));
15399 case NON_LVALUE_EXPR:
15400 return tree_expr_nonzero_warnv_p (op0,
15401 strict_overflow_p);
15410 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15411 For floating point we further ensure that T is not denormal.
15412 Similar logic is present in nonzero_address in rtlanal.h.
15414 If the return value is based on the assumption that signed overflow
15415 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15416 change *STRICT_OVERFLOW_P. */
/* NOTE(review): this extract is missing many original lines (the case
   labels, braces and several returns), so the switch structure below is
   incomplete.  Comments added here only describe what the visible code
   establishes; hedged labels must be verified against the full source.  */
15419 tree_binary_nonzero_warnv_p (enum tree_code code,
15422 tree op1, bool *strict_overflow_p)
15424 bool sub_strict_overflow_p;
15427 case POINTER_PLUS_EXPR:
/* Presumably also PLUS_EXPR here (label missing from extract): a sum of
   a positive and a non-negative operand cannot be zero.  */
15429 if (TYPE_OVERFLOW_UNDEFINED (type))
15431 /* With the presence of negative values it is hard
15432 to say something. */
15433 sub_strict_overflow_p = false;
15434 if (!tree_expr_nonnegative_warnv_p (op0,
15435 &sub_strict_overflow_p)
15436 || !tree_expr_nonnegative_warnv_p (op1,
15437 &sub_strict_overflow_p))
15439 /* One of operands must be positive and the other non-negative. */
15440 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15441 overflows, on a twos-complement machine the sum of two
15442 nonnegative numbers can never be zero. */
15443 return (tree_expr_nonzero_warnv_p (op0,
15445 || tree_expr_nonzero_warnv_p (op1,
15446 strict_overflow_p));
/* Presumably the MULT_EXPR case (label missing): a product of two
   nonzero operands is nonzero only when overflow is undefined, and
   relying on that must be recorded in *STRICT_OVERFLOW_P.  */
15451 if (TYPE_OVERFLOW_UNDEFINED (type))
15453 if (tree_expr_nonzero_warnv_p (op0,
15455 && tree_expr_nonzero_warnv_p (op1,
15456 strict_overflow_p))
15458 *strict_overflow_p = true;
/* Presumably MIN_EXPR (label missing): nonzero when both operands are.  */
15465 sub_strict_overflow_p = false;
15466 if (tree_expr_nonzero_warnv_p (op0,
15467 &sub_strict_overflow_p)
15468 && tree_expr_nonzero_warnv_p (op1,
15469 &sub_strict_overflow_p))
15471 if (sub_strict_overflow_p)
15472 *strict_overflow_p = true;
/* Presumably MAX_EXPR (label missing).  */
15477 sub_strict_overflow_p = false;
15478 if (tree_expr_nonzero_warnv_p (op0,
15479 &sub_strict_overflow_p))
15481 if (sub_strict_overflow_p)
15482 *strict_overflow_p = true;
15484 /* When both operands are nonzero, then MAX must be too. */
15485 if (tree_expr_nonzero_warnv_p (op1,
15486 strict_overflow_p))
15489 /* MAX where operand 0 is positive is positive. */
15490 return tree_expr_nonnegative_warnv_p (op0,
15491 strict_overflow_p);
15493 /* MAX where operand 1 is positive is positive. */
15494 else if (tree_expr_nonzero_warnv_p (op1,
15495 &sub_strict_overflow_p)
15496 && tree_expr_nonnegative_warnv_p (op1,
15497 &sub_strict_overflow_p))
15499 if (sub_strict_overflow_p)
15500 *strict_overflow_p = true;
/* Presumably BIT_IOR_EXPR (label missing): an OR is nonzero if either
   operand is nonzero.  */
15506 return (tree_expr_nonzero_warnv_p (op1,
15508 || tree_expr_nonzero_warnv_p (op0,
15509 strict_overflow_p));
15518 /* Return true when T is an address and is known to be nonzero.
15519 For floating point we further ensure that T is not denormal.
15520 Similar logic is present in nonzero_address in rtlanal.h.
15522 If the return value is based on the assumption that signed overflow
15523 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15524 change *STRICT_OVERFLOW_P. */
/* NOTE(review): several original lines (case labels, braces, returns)
   are missing from this extract; hedged labels below need verification
   against the full source.  */
15527 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15529 bool sub_strict_overflow_p;
15530 switch (TREE_CODE (t))
/* Presumably case INTEGER_CST (label missing): a literal is nonzero
   unless it is the zero constant.  */
15533 return !integer_zerop (t);
/* Presumably case ADDR_EXPR (label missing): decide from the base of
   the object whose address is taken.  */
15537 tree base = TREE_OPERAND (t, 0);
15539 if (!DECL_P (base))
15540 base = get_base_address (base);
15545 /* For objects in symbol table check if we know they are non-zero.
15546 Don't do anything for variables and functions before symtab is built;
15547 it is quite possible that they will be declared weak later. */
15548 if (DECL_P (base) && decl_in_symtab_p (base))
15550 struct symtab_node *symbol;
15552 symbol = symtab_node::get_create (base);
15554 return symbol->nonzero_address ();
15559 /* Function local objects are never NULL. */
15561 && (DECL_CONTEXT (base)
15562 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15563 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15566 /* Constants are never weak. */
15567 if (CONSTANT_CLASS_P (base))
/* Presumably case COND_EXPR (label missing): the conditional is nonzero
   when both arms are, propagating any strict-overflow assumption.  */
15574 sub_strict_overflow_p = false;
15575 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15576 &sub_strict_overflow_p)
15577 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15578 &sub_strict_overflow_p))
15580 if (sub_strict_overflow_p)
15581 *strict_overflow_p = true;
15592 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15593 attempt to fold the expression to a constant without modifying TYPE,
15596 If the expression could be simplified to a constant, then return
15597 the constant. If the expression would not be simplified to a
15598 constant, then return NULL_TREE. */
15601 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15603 tree tem = fold_binary (code, type, op0, op1);
15604 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15607 /* Given the components of a unary expression CODE, TYPE and OP0,
15608 attempt to fold the expression to a constant without modifying
15611 If the expression could be simplified to a constant, then return
15612 the constant. If the expression would not be simplified to a
15613 constant, then return NULL_TREE. */
15616 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15618 tree tem = fold_unary (code, type, op0);
15619 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15622 /* If EXP represents referencing an element in a constant string
15623 (either via pointer arithmetic or array indexing), return the
15624 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): this extract is missing lines — among them the
   declarations of `string'/`index', the else-branch header for the
   ARRAY_REF path, and the start of the final guard condition — so the
   function body below is incomplete.  */
15627 fold_read_from_constant_string (tree exp)
15629 if ((TREE_CODE (exp) == INDIRECT_REF
15630 || TREE_CODE (exp) == ARRAY_REF)
15631 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15633 tree exp1 = TREE_OPERAND (exp, 0);
15636 location_t loc = EXPR_LOCATION (exp);
/* Pointer-arithmetic form: let string_constant decompose EXP1 into a
   STRING_CST plus byte index.  */
15638 if (TREE_CODE (exp) == INDIRECT_REF)
15639 string = string_constant (exp1, &index);
/* Array-indexing form (the `else' header is missing from the extract):
   normalize the index against the array's lower bound.  */
15642 tree low_bound = array_ref_low_bound (exp);
15643 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15645 /* Optimize the special-case of a zero lower bound.
15647 We convert the low_bound to sizetype to avoid some problems
15648 with constant folding. (E.g. suppose the lower bound is 1,
15649 and its mode is QI. Without the conversion,l (ARRAY
15650 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15651 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15652 if (! integer_zerop (low_bound))
15653 index = size_diffop_loc (loc, index,
15654 fold_convert_loc (loc, sizetype, low_bound));
/* Final guard (its opening lines are missing): the element must be a
   single-byte integer-mode constant inside the string's bounds.  */
15660 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15661 && TREE_CODE (string) == STRING_CST
15662 && TREE_CODE (index) == INTEGER_CST
15663 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15664 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15666 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15667 return build_int_cst_type (TREE_TYPE (exp),
15668 (TREE_STRING_POINTER (string)
15669 [TREE_INT_CST_LOW (index)]));
15674 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15675 an integer constant, real, or fixed-point constant.
15677 TYPE is the type of the result. */
/* NOTE(review): case labels, braces and the final `return t;' are
   missing from this extract; hedged labels below need verification.  */
15680 fold_negate_const (tree arg0, tree type)
15682 tree t = NULL_TREE;
15684 switch (TREE_CODE (arg0))
/* Presumably case INTEGER_CST (label and `overflow' declaration
   missing): negate in wide-int arithmetic and refit to TYPE, marking
   overflow only for signed types.  */
15689 wide_int val = wi::neg (arg0, &overflow);
15690 t = force_fit_type (type, val, 1,
15691 (overflow | TREE_OVERFLOW (arg0))
15692 && !TYPE_UNSIGNED (type));
/* Presumably case REAL_CST (label missing).  */
15697 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
/* Presumably case FIXED_CST (label missing): negate via fixed-point
   arithmetic, honoring saturation.  */
15702 FIXED_VALUE_TYPE f;
15703 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15704 &(TREE_FIXED_CST (arg0)), NULL,
15705 TYPE_SATURATING (type));
15706 t = build_fixed (type, f);
15707 /* Propagate overflow flags. */
15708 if (overflow_p | TREE_OVERFLOW (arg0))
15709 TREE_OVERFLOW (t) = 1;
/* Any other constant kind is a caller error.  */
15714 gcc_unreachable ();
15720 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15721 an integer constant or real constant.
15723 TYPE is the type of the result. */
/* NOTE(review): case labels, braces and the final return are missing
   from this extract; hedged labels below need verification.  */
15726 fold_abs_const (tree arg0, tree type)
15728 tree t = NULL_TREE;
15730 switch (TREE_CODE (arg0))
/* Presumably case INTEGER_CST (label missing).  */
15734 /* If the value is unsigned or non-negative, then the absolute value
15735 is the same as the ordinary value. */
15736 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15739 /* If the value is negative, then the absolute value is
/* (rest of the above comment is missing from the extract) */
15744 wide_int val = wi::neg (arg0, &overflow);
15745 t = force_fit_type (type, val, -1,
15746 overflow | TREE_OVERFLOW (arg0));
/* Presumably case REAL_CST (label missing): flip the sign only when
   the constant is negative.  */
15752 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15753 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
/* Any other constant kind is a caller error.  */
15759 gcc_unreachable ();
15765 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15766 constant. TYPE is the type of the result. */
15769 fold_not_const (const_tree arg0, tree type)
15771 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15773 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15776 /* Given CODE, a relational operator, the target type, TYPE and two
15777 constant operands OP0 and OP1, return the result of the
15778 relational operation. If the result is not a compile time
15779 constant, then return NULL_TREE. */
/* NOTE(review): this extract is missing many original lines (the NaN
   switch's cases and result assignments, braces, several returns, the
   swap/invert bookkeeping of `invert', and the final NULL_TREE return),
   so the body below is incomplete.  */
15782 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15784 int result, invert;
15786 /* From here on, the only cases we handle are when the result is
15787 known to be a constant. */
15789 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15791 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15792 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15794 /* Handle the cases where either operand is a NaN. */
15795 if (real_isnan (c0) || real_isnan (c1))
/* The switch over CODE for NaN operands is mostly missing from this
   extract; only fragments remain below.  */
15805 case UNORDERED_EXPR:
15819 if (flag_trapping_math)
15825 gcc_unreachable ();
15828 return constant_boolean_node (result, type);
/* No NaNs involved: compare the two reals directly.  */
15831 return constant_boolean_node (real_compare (code, c0, c1), type);
15834 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15836 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15837 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15838 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15841 /* Handle equality/inequality of complex constants. */
15842 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15844 tree rcond = fold_relational_const (code, type,
15845 TREE_REALPART (op0),
15846 TREE_REALPART (op1));
15847 tree icond = fold_relational_const (code, type,
15848 TREE_IMAGPART (op0),
15849 TREE_IMAGPART (op1));
15850 if (code == EQ_EXPR)
15851 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15852 else if (code == NE_EXPR)
15853 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
/* Element-wise comparison of vector constants.  */
15858 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15860 unsigned count = VECTOR_CST_NELTS (op0);
15861 tree *elts = XALLOCAVEC (tree, count);
15862 gcc_assert (VECTOR_CST_NELTS (op1) == count
15863 && TYPE_VECTOR_SUBPARTS (type) == count);
15865 for (unsigned i = 0; i < count; i++)
15867 tree elem_type = TREE_TYPE (type);
15868 tree elem0 = VECTOR_CST_ELT (op0, i);
15869 tree elem1 = VECTOR_CST_ELT (op1, i);
15871 tree tem = fold_relational_const (code, elem_type,
15874 if (tem == NULL_TREE)
/* Vector comparisons use 0 / all-ones (-1) as false / true.  */
15877 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15880 return build_vector (type, elts);
15883 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15885 To compute GT, swap the arguments and do LT.
15886 To compute GE, do LT and invert the result.
15887 To compute LE, swap the arguments, do LT and invert the result.
15888 To compute NE, do EQ and invert the result.
15890 Therefore, the code below must handle only EQ and LT. */
15892 if (code == LE_EXPR || code == GT_EXPR)
15897 code = swap_tree_comparison (code);
15900 /* Note that it is safe to invert for real values here because we
15901 have already handled the one case that it matters. */
15904 if (code == NE_EXPR || code == GE_EXPR)
15907 code = invert_tree_comparison (code, false);
15910 /* Compute a result for LT or EQ if args permit;
15911 Otherwise return T. */
15912 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15914 if (code == EQ_EXPR)
15915 result = tree_int_cst_equal (op0, op1);
15917 result = tree_int_cst_lt (op0, op1);
/* The `invert'-handling line is missing from the extract before this
   final conversion of RESULT to a boolean node.  */
15924 return constant_boolean_node (result, type);
15927 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15928 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15932 fold_build_cleanup_point_expr (tree type, tree expr)
15934 /* If the expression does not have side effects then we don't have to wrap
15935 it with a cleanup point expression. */
15936 if (!TREE_SIDE_EFFECTS (expr))
15939 /* If the expression is a return, check to see if the expression inside the
15940 return has no side effects or the right hand side of the modify expression
15941 inside the return. If either don't have side effects set we don't need to
15942 wrap the expression in a cleanup point expression. Note we don't check the
15943 left hand side of the modify because it should always be a return decl. */
15944 if (TREE_CODE (expr) == RETURN_EXPR)
15946 tree op = TREE_OPERAND (expr, 0);
15947 if (!op || !TREE_SIDE_EFFECTS (op))
15949 op = TREE_OPERAND (op, 1);
15950 if (!TREE_SIDE_EFFECTS (op))
15954 return build1 (CLEANUP_POINT_EXPR, type, expr);
15957 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15958 of an indirection through OP0, or NULL_TREE if no simplification is
/* NOTE(review): this extract is missing many original lines (the local
   declarations of `sub'/`subtype'/`op00type', STRIP_NOPS, braces,
   several returns and the final `return NULL_TREE;'), so the body below
   is incomplete.  */
15962 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15968 subtype = TREE_TYPE (sub);
/* Only pointer dereferences can be simplified here.  */
15969 if (!POINTER_TYPE_P (subtype))
/* Case 1: dereferencing a taken address, *&OP.  */
15972 if (TREE_CODE (sub) == ADDR_EXPR)
15974 tree op = TREE_OPERAND (sub, 0);
15975 tree optype = TREE_TYPE (op);
15976 /* *&CONST_DECL -> to the value of the const decl. */
15977 if (TREE_CODE (op) == CONST_DECL)
15978 return DECL_INITIAL (op);
15979 /* *&p => p; make sure to handle *&"str"[cst] here. */
15980 if (type == optype)
15982 tree fop = fold_read_from_constant_string (op);
15988 /* *(foo *)&fooarray => fooarray[0] */
15989 else if (TREE_CODE (optype) == ARRAY_TYPE
15990 && type == TREE_TYPE (optype)
15991 && (!in_gimple_form
15992 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15994 tree type_domain = TYPE_DOMAIN (optype);
15995 tree min_val = size_zero_node;
15996 if (type_domain && TYPE_MIN_VALUE (type_domain))
15997 min_val = TYPE_MIN_VALUE (type_domain);
/* (the guard this condition fragment belongs to is missing above) */
15999 && TREE_CODE (min_val) != INTEGER_CST)
16001 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16002 NULL_TREE, NULL_TREE);
16004 /* *(foo *)&complexfoo => __real__ complexfoo */
16005 else if (TREE_CODE (optype) == COMPLEX_TYPE
16006 && type == TREE_TYPE (optype))
16007 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16008 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16009 else if (TREE_CODE (optype) == VECTOR_TYPE
16010 && type == TREE_TYPE (optype))
16012 tree part_width = TYPE_SIZE (type);
16013 tree index = bitsize_int (0);
16014 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
/* Case 2: dereferencing pointer arithmetic with a constant offset.  */
16018 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16019 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16021 tree op00 = TREE_OPERAND (sub, 0);
16022 tree op01 = TREE_OPERAND (sub, 1);
16025 if (TREE_CODE (op00) == ADDR_EXPR)
16028 op00 = TREE_OPERAND (op00, 0);
16029 op00type = TREE_TYPE (op00);
16031 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16032 if (TREE_CODE (op00type) == VECTOR_TYPE
16033 && type == TREE_TYPE (op00type))
16035 HOST_WIDE_INT offset = tree_to_shwi (op01);
16036 tree part_width = TYPE_SIZE (type);
16037 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16038 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16039 tree index = bitsize_int (indexi);
/* Only fold when the offset stays within the vector's elements.  */
16041 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16042 return fold_build3_loc (loc,
16043 BIT_FIELD_REF, type, op00,
16044 part_width, index);
16047 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16048 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16049 && type == TREE_TYPE (op00type))
16051 tree size = TYPE_SIZE_UNIT (type);
16052 if (tree_int_cst_equal (size, op01))
16053 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16055 /* ((foo *)&fooarray)[1] => fooarray[1] */
16056 else if (TREE_CODE (op00type) == ARRAY_TYPE
16057 && type == TREE_TYPE (op00type))
16059 tree type_domain = TYPE_DOMAIN (op00type);
16060 tree min_val = size_zero_node;
16061 if (type_domain && TYPE_MIN_VALUE (type_domain))
16062 min_val = TYPE_MIN_VALUE (type_domain);
/* Convert the byte offset into an element index, then rebase it on the
   array's lower bound.  */
16063 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16064 TYPE_SIZE_UNIT (type));
16065 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16066 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16067 NULL_TREE, NULL_TREE);
16072 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16073 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16074 && type == TREE_TYPE (TREE_TYPE (subtype))
16075 && (!in_gimple_form
16076 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16079 tree min_val = size_zero_node;
16080 sub = build_fold_indirect_ref_loc (loc, sub);
16081 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16082 if (type_domain && TYPE_MIN_VALUE (type_domain))
16083 min_val = TYPE_MIN_VALUE (type_domain);
/* (the guard this condition fragment belongs to is missing above) */
16085 && TREE_CODE (min_val) != INTEGER_CST)
16087 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16094 /* Builds an expression for an indirection through T, simplifying some
16098 build_fold_indirect_ref_loc (location_t loc, tree t)
16100 tree type = TREE_TYPE (TREE_TYPE (t));
16101 tree sub = fold_indirect_ref_1 (loc, type, t);
16106 return build1_loc (loc, INDIRECT_REF, type, t);
16109 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16112 fold_indirect_ref_loc (location_t loc, tree t)
16114 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16122 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16123 whose result is ignored. The type of the returned tree need not be
16124 the same as the original expression. */
/* NOTE(review): this extract is missing lines — the surrounding
   `for (;;)' (or equivalent) loop, several case labels, `break's and
   default returns — so the control structure below is incomplete.  */
16127 fold_ignored_result (tree t)
16129 if (!TREE_SIDE_EFFECTS (t))
16130 return integer_zero_node;
16133 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Presumably tcc_unary (label missing): strip to the operand.  */
16136 t = TREE_OPERAND (t, 0);
16140 case tcc_comparison:
/* Keep only the operand that actually has side effects.  */
16141 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16142 t = TREE_OPERAND (t, 0);
16143 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16144 t = TREE_OPERAND (t, 1);
16149 case tcc_expression:
16150 switch (TREE_CODE (t))
16152 case COMPOUND_EXPR:
16153 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16155 t = TREE_OPERAND (t, 0);
/* Presumably COND_EXPR (label missing): keep just the condition when
   neither arm has side effects.  */
16159 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16160 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16162 t = TREE_OPERAND (t, 0);
16175 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
/* NOTE(review): this extract is missing lines — the return type, the
   early-exit `return value;' statements, the declarations of `t' and
   `overflow_p', braces, and the final return — so the body below is
   incomplete.  */
16178 round_up_loc (location_t loc, tree value, unsigned int divisor)
16180 tree div = NULL_TREE;
16185 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16186 have to do anything. Only do this when we are not given a const,
16187 because in that case, this check is more expensive than just
/* (end of the above comment is missing from the extract) */
16189 if (TREE_CODE (value) != INTEGER_CST)
16191 div = build_int_cst (TREE_TYPE (value), divisor);
16193 if (multiple_of_p (TREE_TYPE (value), value, div))
16197 /* If divisor is a power of two, simplify this to bit manipulation. */
16198 if (divisor == (divisor & -divisor))
/* Constant operand: round up numerically in wide-int arithmetic.  */
16200 if (TREE_CODE (value) == INTEGER_CST)
16202 wide_int val = value;
16205 if ((val & (divisor - 1)) == 0)
16208 overflow_p = TREE_OVERFLOW (value);
16209 val &= ~(divisor - 1);
16214 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
/* Non-constant operand: (value + divisor-1) & -divisor.  */
16220 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16221 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16222 t = build_int_cst (TREE_TYPE (value), -divisor);
16223 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
16229 div = build_int_cst (TREE_TYPE (value), divisor);
16230 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16231 value = size_binop_loc (loc, MULT_EXPR, value, div);
16237 /* Likewise, but round down. */
/* NOTE(review): this extract is missing lines — the return type, the
   early `return value;' statements, the declaration of `t', braces,
   and the final return — so the body below is incomplete.  */
16240 round_down_loc (location_t loc, tree value, int divisor)
16242 tree div = NULL_TREE;
16244 gcc_assert (divisor > 0);
16248 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16249 have to do anything. Only do this when we are not given a const,
16250 because in that case, this check is more expensive than just
/* (end of the above comment is missing from the extract) */
16252 if (TREE_CODE (value) != INTEGER_CST)
16254 div = build_int_cst (TREE_TYPE (value), divisor);
16256 if (multiple_of_p (TREE_TYPE (value), value, div))
16260 /* If divisor is a power of two, simplify this to bit manipulation. */
16261 if (divisor == (divisor & -divisor))
/* Power-of-two divisor: value & -divisor clears the low bits.  */
16265 t = build_int_cst (TREE_TYPE (value), -divisor);
16266 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
16271 div = build_int_cst (TREE_TYPE (value), divisor);
16272 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16273 value = size_binop_loc (loc, MULT_EXPR, value, div);
16279 /* Returns the pointer to the base of the object addressed by EXP and
16280 extracts the information about the offset of the access, storing it
16281 to PBITPOS and POFFSET. */
/* NOTE(review): this extract is missing lines — the return type, the
   declarations of `core' and the machine-mode local, braces, the else
   branch assigning `core = exp; *pbitpos = 0;', and the final
   `return core;' — so the body below is incomplete.  */
16284 split_address_to_core_and_offset (tree exp,
16285 HOST_WIDE_INT *pbitpos, tree *poffset)
16289 int unsignedp, volatilep;
16290 HOST_WIDE_INT bitsize;
16291 location_t loc = EXPR_LOCATION (exp);
/* For an ADDR_EXPR, peel the reference apart with get_inner_reference
   and re-take the address of the innermost core object.  */
16293 if (TREE_CODE (exp) == ADDR_EXPR)
16295 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16296 poffset, &mode, &unsignedp, &volatilep,
16298 core = build_fold_addr_expr_loc (loc, core);
/* Fallback (else branch missing from extract): no variable offset.  */
16304 *poffset = NULL_TREE;
16310 /* Returns true if addresses of E1 and E2 differ by a constant, false
16311 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16314 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16317 HOST_WIDE_INT bitpos1, bitpos2;
16318 tree toffset1, toffset2, tdiff, type;
16320 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16321 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16323 if (bitpos1 % BITS_PER_UNIT != 0
16324 || bitpos2 % BITS_PER_UNIT != 0
16325 || !operand_equal_p (core1, core2, 0))
16328 if (toffset1 && toffset2)
16330 type = TREE_TYPE (toffset1);
16331 if (type != TREE_TYPE (toffset2))
16332 toffset2 = fold_convert (type, toffset2);
16334 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16335 if (!cst_and_fits_in_hwi (tdiff))
16338 *diff = int_cst_value (tdiff);
16340 else if (toffset1 || toffset2)
16342 /* If only one of the offsets is non-constant, the difference cannot
16349 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16353 /* Simplify the floating point expression EXP when the sign of the
16354 result is not significant. Return NULL_TREE if no simplification
16358 fold_strip_sign_ops (tree exp)
16361 location_t loc = EXPR_LOCATION (exp);
16363 switch (TREE_CODE (exp))
16367 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16368 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16372 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16374 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16375 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16376 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16377 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16378 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16379 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16382 case COMPOUND_EXPR:
16383 arg0 = TREE_OPERAND (exp, 0);
16384 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16386 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16390 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16391 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16393 return fold_build3_loc (loc,
16394 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16395 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16396 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16401 const enum built_in_function fcode = builtin_mathfn_code (exp);
16404 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16405 /* Strip copysign function call, return the 1st argument. */
16406 arg0 = CALL_EXPR_ARG (exp, 0);
16407 arg1 = CALL_EXPR_ARG (exp, 1);
16408 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16411 /* Strip sign ops from the argument of "odd" math functions. */
16412 if (negate_mathfn_p (fcode))
16414 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16416 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);