1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero
66    otherwise.  */
67 int folding_initializer = 0;
69 /* The following constants represent a bit-based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145 Otherwise, return LOC. */
147 static location_t
148 expr_location_or (tree t, location_t loc)
150 location_t tloc = EXPR_LOCATION (t);
151 return tloc != UNKNOWN_LOCATION ? tloc : loc;
154 /* Similar to protected_set_expr_location, but never modify x in place;
155    if the location can and needs to be set, unshare it.  */
157 static tree
158 protected_set_expr_location_unshare (tree x, location_t loc)
160 if (CAN_HAVE_LOCATION_P (x)
161 && EXPR_LOCATION (x) != loc
162 && !(TREE_CODE (x) == SAVE_EXPR
163 || TREE_CODE (x) == TARGET_EXPR
164 || TREE_CODE (x) == BIND_EXPR))
167 SET_EXPR_LOCATION (x, loc);
173 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
174 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
175 and SUM1.  Then this yields nonzero if overflow occurred during the
176 addition.
178 Overflow occurs if A and B have the same sign, but A and SUM differ in
179 sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
180 sign bit.  */
181 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
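/* Illustrative sketch (not part of the original file; the helper name is
   hypothetical): how the sign-bit trick above detects overflow of a
   two's complement addition.  */
static int
sum_overflows_p (HOST_WIDE_INT a, HOST_WIDE_INT b)
{
  /* Compute the wrapped sum; unsigned arithmetic avoids undefined
     behavior on signed overflow.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
				       + (unsigned HOST_WIDE_INT) b);
  /* Nonzero iff A and B agree in sign but SUM does not.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}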
183 /* If ARG2 divides ARG1 with zero remainder, carries out the division
184 of type CODE and returns the quotient.
185 Otherwise returns NULL_TREE. */
188 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
193 /* The sign of the division is taken from operand two; that does
194 the right thing for POINTER_PLUS_EXPR, where we want
195 a signed division.  */
196 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
197 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
198 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
201 quo = double_int_divmod (tree_to_double_int (arg1),
202 tree_to_double_int (arg2),
205 if (double_int_zero_p (rem))
206 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
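/* Illustrative sketch (not part of the original file; the helper name is
   hypothetical): the same exact-division idea for plain host integers.
   Returns nonzero and stores the quotient through QUO only when B
   divides A with zero remainder.  */
static int
host_div_if_zero_remainder (HOST_WIDE_INT a, HOST_WIDE_INT b,
			    HOST_WIDE_INT *quo)
{
  /* Reject a zero divisor; a real implementation would also need to
     consider the most-negative-value / -1 corner case.  */
  if (b == 0 || a % b != 0)
    return 0;
  *quo = a / b;
  return 1;
}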
211 /* This is nonzero if we should defer warnings about undefined
212 overflow. This facility exists because these warnings are a
213 special case. The code to estimate loop iterations does not want
214 to issue any warnings, since it works with expressions which do not
215 occur in user code. Various bits of cleanup code call fold(), but
216 only use the result if it has certain characteristics (e.g., is a
217 constant); that code only wants to issue a warning if the result is
218 used.  */
220 static int fold_deferring_overflow_warnings;
222 /* If a warning about undefined overflow is deferred, this is the
223 warning. Note that this may cause us to turn two warnings into
224 one, but that is fine since it is sufficient to only give one
225 warning per expression. */
227 static const char* fold_deferred_overflow_warning;
229 /* If a warning about undefined overflow is deferred, this is the
230 level at which the warning should be emitted. */
232 static enum warn_strict_overflow_code fold_deferred_overflow_code;
234 /* Start deferring overflow warnings. We could use a stack here to
235 permit nested calls, but at present it is not necessary. */
238 fold_defer_overflow_warnings (void)
240 ++fold_deferring_overflow_warnings;
243 /* Stop deferring overflow warnings. If there is a pending warning,
244 and ISSUE is true, then issue the warning if appropriate. STMT is
245 the statement with which the warning should be associated (used for
246 location information); STMT may be NULL. CODE is the level of the
247 warning--a warn_strict_overflow_code value. This function will use
248 the smaller of CODE and the deferred code when deciding whether to
249 issue the warning.  CODE may be zero to mean to always use the
250 deferred code.  */
253 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
258 gcc_assert (fold_deferring_overflow_warnings > 0);
259 --fold_deferring_overflow_warnings;
260 if (fold_deferring_overflow_warnings > 0)
262 if (fold_deferred_overflow_warning != NULL
264 && code < (int) fold_deferred_overflow_code)
265 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
269 warnmsg = fold_deferred_overflow_warning;
270 fold_deferred_overflow_warning = NULL;
272 if (!issue || warnmsg == NULL)
275 if (gimple_no_warning_p (stmt))
278 /* Use the smallest code level when deciding to issue the
279 warning.  */
280 if (code == 0 || code > (int) fold_deferred_overflow_code)
281 code = fold_deferred_overflow_code;
283 if (!issue_strict_overflow_warning (code))
287 locus = input_location;
289 locus = gimple_location (stmt);
290 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
293 /* Stop deferring overflow warnings, ignoring any deferred
294 warnings.  */
297 fold_undefer_and_ignore_overflow_warnings (void)
299 fold_undefer_overflow_warnings (false, NULL, 0);
302 /* Whether we are deferring overflow warnings. */
305 fold_deferring_overflow_warnings_p (void)
307 return fold_deferring_overflow_warnings > 0;
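/* Illustrative usage sketch (hypothetical caller, not part of the
   original file): the deferral API above is used in matched pairs
   around folding whose result might be discarded.  The "res != expr"
   heuristic for ISSUE is an assumption for this example only.  */
static tree
fold_quietly (tree expr)
{
  tree res;
  fold_defer_overflow_warnings ();
  res = fold (expr);
  /* Only keep a deferred overflow warning if folding changed EXPR,
     i.e. the folded result will actually be used.  */
  fold_undefer_overflow_warnings (res != expr, NULL, 0);
  return res;
}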
310 /* This is called when we fold something based on the fact that signed
311 overflow is undefined. */
314 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
316 if (fold_deferring_overflow_warnings > 0)
318 if (fold_deferred_overflow_warning == NULL
319 || wc < fold_deferred_overflow_code)
321 fold_deferred_overflow_warning = gmsgid;
322 fold_deferred_overflow_code = wc;
325 else if (issue_strict_overflow_warning (wc))
326 warning (OPT_Wstrict_overflow, gmsgid);
329 /* Return true if the built-in mathematical function specified by CODE
330 is odd, i.e. -f(x) == f(-x). */
333 negate_mathfn_p (enum built_in_function code)
337 CASE_FLT_FN (BUILT_IN_ASIN):
338 CASE_FLT_FN (BUILT_IN_ASINH):
339 CASE_FLT_FN (BUILT_IN_ATAN):
340 CASE_FLT_FN (BUILT_IN_ATANH):
341 CASE_FLT_FN (BUILT_IN_CASIN):
342 CASE_FLT_FN (BUILT_IN_CASINH):
343 CASE_FLT_FN (BUILT_IN_CATAN):
344 CASE_FLT_FN (BUILT_IN_CATANH):
345 CASE_FLT_FN (BUILT_IN_CBRT):
346 CASE_FLT_FN (BUILT_IN_CPROJ):
347 CASE_FLT_FN (BUILT_IN_CSIN):
348 CASE_FLT_FN (BUILT_IN_CSINH):
349 CASE_FLT_FN (BUILT_IN_CTAN):
350 CASE_FLT_FN (BUILT_IN_CTANH):
351 CASE_FLT_FN (BUILT_IN_ERF):
352 CASE_FLT_FN (BUILT_IN_LLROUND):
353 CASE_FLT_FN (BUILT_IN_LROUND):
354 CASE_FLT_FN (BUILT_IN_ROUND):
355 CASE_FLT_FN (BUILT_IN_SIN):
356 CASE_FLT_FN (BUILT_IN_SINH):
357 CASE_FLT_FN (BUILT_IN_TAN):
358 CASE_FLT_FN (BUILT_IN_TANH):
359 CASE_FLT_FN (BUILT_IN_TRUNC):
362 CASE_FLT_FN (BUILT_IN_LLRINT):
363 CASE_FLT_FN (BUILT_IN_LRINT):
364 CASE_FLT_FN (BUILT_IN_NEARBYINT):
365 CASE_FLT_FN (BUILT_IN_RINT):
366 return !flag_rounding_math;
374 /* Check whether we may negate an integer constant T without causing
375 overflow.  */
378 may_negate_without_overflow_p (const_tree t)
380 unsigned HOST_WIDE_INT val;
384 gcc_assert (TREE_CODE (t) == INTEGER_CST);
386 type = TREE_TYPE (t);
387 if (TYPE_UNSIGNED (type))
390 prec = TYPE_PRECISION (type);
391 if (prec > HOST_BITS_PER_WIDE_INT)
393 if (TREE_INT_CST_LOW (t) != 0)
395 prec -= HOST_BITS_PER_WIDE_INT;
396 val = TREE_INT_CST_HIGH (t);
399 val = TREE_INT_CST_LOW (t);
400 if (prec < HOST_BITS_PER_WIDE_INT)
401 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
402 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
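/* Illustrative sketch (not part of the original file; the helper name
   is hypothetical): the same check for a plain int.  In two's
   complement only INT_MIN lacks a representable negation, since
   -INT_MIN would be INT_MAX + 1.  */
static int
int_may_negate_p (int v)
{
  return v != INT_MIN;  /* INT_MIN as defined in <limits.h>.  */
}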
405 /* Determine whether an expression T can be cheaply negated using
406 the function negate_expr without introducing undefined overflow. */
409 negate_expr_p (tree t)
416 type = TREE_TYPE (t);
419 switch (TREE_CODE (t))
422 if (TYPE_OVERFLOW_WRAPS (type))
425 /* Check that -CST will not overflow type. */
426 return may_negate_without_overflow_p (t);
428 return (INTEGRAL_TYPE_P (type)
429 && TYPE_OVERFLOW_WRAPS (type));
436 /* We want to canonicalize to positive real constants. Pretend
437 that only negative ones can be easily negated. */
438 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
441 return negate_expr_p (TREE_REALPART (t))
442 && negate_expr_p (TREE_IMAGPART (t));
445 return negate_expr_p (TREE_OPERAND (t, 0))
446 && negate_expr_p (TREE_OPERAND (t, 1));
449 return negate_expr_p (TREE_OPERAND (t, 0));
452 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
453 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
455 /* -(A + B) -> (-B) - A. */
456 if (negate_expr_p (TREE_OPERAND (t, 1))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1)))
460 /* -(A + B) -> (-A) - B. */
461 return negate_expr_p (TREE_OPERAND (t, 0));
464 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
465 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
466 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
467 && reorder_operands_p (TREE_OPERAND (t, 0),
468 TREE_OPERAND (t, 1));
471 if (TYPE_UNSIGNED (TREE_TYPE (t)))
477 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
478 return negate_expr_p (TREE_OPERAND (t, 1))
479 || negate_expr_p (TREE_OPERAND (t, 0));
487 /* In general we can't negate A / B, because if A is INT_MIN and
488 B is 1, we may turn this into INT_MIN / -1 which is undefined
489 and actually traps on some architectures. But if overflow is
490 undefined, we can negate, because - (INT_MIN / 1) is an
491 overflow.  */
492 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
493 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
495 return negate_expr_p (TREE_OPERAND (t, 1))
496 || negate_expr_p (TREE_OPERAND (t, 0));
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
502 tree tem = strip_float_extensions (t);
504 return negate_expr_p (tem);
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (builtin_mathfn_code (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
518 tree op1 = TREE_OPERAND (t, 1);
519 if (TREE_INT_CST_HIGH (op1) == 0
520 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
521 == TREE_INT_CST_LOW (op1))
532 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
533 simplification is possible.
534 If negate_expr_p would return true for T, NULL_TREE will never be
535 returned.  */
538 fold_negate_expr (location_t loc, tree t)
540 tree type = TREE_TYPE (t);
543 switch (TREE_CODE (t))
545 /* Convert - (~A) to A + 1. */
547 if (INTEGRAL_TYPE_P (type))
548 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
549 build_int_cst (type, 1));
553 tem = fold_negate_const (t, type);
554 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
555 || !TYPE_OVERFLOW_TRAPS (type))
560 tem = fold_negate_const (t, type);
561 /* Two's complement FP formats, such as c4x, may overflow. */
562 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
567 tem = fold_negate_const (t, type);
572 tree rpart = negate_expr (TREE_REALPART (t));
573 tree ipart = negate_expr (TREE_IMAGPART (t));
575 if ((TREE_CODE (rpart) == REAL_CST
576 && TREE_CODE (ipart) == REAL_CST)
577 || (TREE_CODE (rpart) == INTEGER_CST
578 && TREE_CODE (ipart) == INTEGER_CST))
579 return build_complex (type, rpart, ipart);
584 if (negate_expr_p (t))
585 return fold_build2_loc (loc, COMPLEX_EXPR, type,
586 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
587 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 if (negate_expr_p (t))
592 return fold_build1_loc (loc, CONJ_EXPR, type,
593 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 return TREE_OPERAND (t, 0);
600 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
601 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
603 /* -(A + B) -> (-B) - A. */
604 if (negate_expr_p (TREE_OPERAND (t, 1))
605 && reorder_operands_p (TREE_OPERAND (t, 0),
606 TREE_OPERAND (t, 1)))
608 tem = negate_expr (TREE_OPERAND (t, 1));
609 return fold_build2_loc (loc, MINUS_EXPR, type,
610 tem, TREE_OPERAND (t, 0));
613 /* -(A + B) -> (-A) - B. */
614 if (negate_expr_p (TREE_OPERAND (t, 0)))
616 tem = negate_expr (TREE_OPERAND (t, 0));
617 return fold_build2_loc (loc, MINUS_EXPR, type,
618 tem, TREE_OPERAND (t, 1));
624 /* - (A - B) -> B - A */
625 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
627 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
633 if (TYPE_UNSIGNED (type))
639 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
641 tem = TREE_OPERAND (t, 1);
642 if (negate_expr_p (tem))
643 return fold_build2_loc (loc, TREE_CODE (t), type,
644 TREE_OPERAND (t, 0), negate_expr (tem));
645 tem = TREE_OPERAND (t, 0);
646 if (negate_expr_p (tem))
647 return fold_build2_loc (loc, TREE_CODE (t), type,
648 negate_expr (tem), TREE_OPERAND (t, 1));
657 /* In general we can't negate A / B, because if A is INT_MIN and
658 B is 1, we may turn this into INT_MIN / -1 which is undefined
659 and actually traps on some architectures. But if overflow is
660 undefined, we can negate, because - (INT_MIN / 1) is an
661 overflow.  */
662 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
664 const char * const warnmsg = G_("assuming signed overflow does not "
665 "occur when negating a division");
666 tem = TREE_OPERAND (t, 1);
667 if (negate_expr_p (tem))
669 if (INTEGRAL_TYPE_P (type)
670 && (TREE_CODE (tem) != INTEGER_CST
671 || integer_onep (tem)))
672 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
673 return fold_build2_loc (loc, TREE_CODE (t), type,
674 TREE_OPERAND (t, 0), negate_expr (tem));
676 tem = TREE_OPERAND (t, 0);
677 if (negate_expr_p (tem))
679 if (INTEGRAL_TYPE_P (type)
680 && (TREE_CODE (tem) != INTEGER_CST
681 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 negate_expr (tem), TREE_OPERAND (t, 1));
690 /* Convert -((double)float) into (double)(-float). */
691 if (TREE_CODE (type) == REAL_TYPE)
693 tem = strip_float_extensions (t);
694 if (tem != t && negate_expr_p (tem))
695 return fold_convert_loc (loc, type, negate_expr (tem));
700 /* Negate -f(x) as f(-x). */
701 if (negate_mathfn_p (builtin_mathfn_code (t))
702 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
706 fndecl = get_callee_fndecl (t);
707 arg = negate_expr (CALL_EXPR_ARG (t, 0));
708 return build_call_expr_loc (loc, fndecl, 1, arg);
713 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
714 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
716 tree op1 = TREE_OPERAND (t, 1);
717 if (TREE_INT_CST_HIGH (op1) == 0
718 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
719 == TREE_INT_CST_LOW (op1))
721 tree ntype = TYPE_UNSIGNED (type)
722 ? signed_type_for (type)
723 : unsigned_type_for (type);
724 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
725 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
726 return fold_convert_loc (loc, type, temp);
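/* Worked example (illustrative): with 32-bit int, ((int) x >> 31) is 0
   for x >= 0 and -1 for x < 0, so -((int) x >> 31) is 0 or 1; the
   rewritten (unsigned) x >> 31 computes those same two values
   directly, which is why the negation can be dropped by retyping the
   shift.  */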
738 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
739 negated in a simpler way.  Also allow for T to be NULL_TREE, in which
740 case return NULL_TREE.  */
751 loc = EXPR_LOCATION (t);
752 type = TREE_TYPE (t);
755 tem = fold_negate_expr (loc, t);
757 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
758 return fold_convert_loc (loc, type, tem);
761 /* Split a tree IN into constant, literal and variable parts that could be
762 combined with CODE to make IN. "constant" means an expression with
763 TREE_CONSTANT but that isn't an actual constant. CODE must be a
764 commutative arithmetic operation. Store the constant part into *CONP,
765 the literal in *LITP and return the variable part. If a part isn't
766 present, set it to null. If the tree does not decompose in this way,
767 return the entire tree as the variable part and the other parts as null.
769 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
770 case, we negate an operand that was subtracted. Except if it is a
771 literal for which we use *MINUS_LITP instead.
773 If NEGATE_P is true, we are negating all of IN, again except a literal
774 for which we use *MINUS_LITP instead.
776 If IN is itself a literal or constant, return it as appropriate.
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
782 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
783 tree *minus_litp, int negate_p)
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
832 var = op1, neg_var_p = neg1_p;
834 /* Now do any needed negations. */
836 *minus_litp = *litp, *litp = 0;
838 *conp = negate_expr (*conp);
840 var = negate_expr (var);
842 else if (TREE_CONSTANT (in))
850 *minus_litp = *litp, *litp = 0;
851 else if (*minus_litp)
852 *litp = *minus_litp, *minus_litp = 0;
853 *conp = negate_expr (*conp);
854 var = negate_expr (var);
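/* Worked example (illustrative): splitting IN = x + 4 with CODE ==
   PLUS_EXPR stores 4 in *LITP, leaves *CONP null, and returns x as
   the variable part; for IN = x - 4 the literal goes to *MINUS_LITP
   instead, recording that it was subtracted.  */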
860 /* Re-associate trees split by the above function. T1 and T2 are
861 either expressions to associate or null. Return the new
862 expression, if any. LOC is the location of the new expression. If
863 we build an operation, do it in TYPE and with CODE. */
866 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
873 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
874 try to fold this since we will have infinite recursion. But do
875 deal with any NEGATE_EXPRs. */
876 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
877 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 if (code == PLUS_EXPR)
881 if (TREE_CODE (t1) == NEGATE_EXPR)
882 return build2_loc (loc, MINUS_EXPR, type,
883 fold_convert_loc (loc, type, t2),
884 fold_convert_loc (loc, type,
885 TREE_OPERAND (t1, 0)));
886 else if (TREE_CODE (t2) == NEGATE_EXPR)
887 return build2_loc (loc, MINUS_EXPR, type,
888 fold_convert_loc (loc, type, t1),
889 fold_convert_loc (loc, type,
890 TREE_OPERAND (t2, 0)));
891 else if (integer_zerop (t2))
892 return fold_convert_loc (loc, type, t1);
894 else if (code == MINUS_EXPR)
896 if (integer_zerop (t2))
897 return fold_convert_loc (loc, type, t1);
900 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
901 fold_convert_loc (loc, type, t2));
904 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type, t2));
908 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
909 for use in int_const_binop, size_binop and size_diffop. */
912 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
916 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
931 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
932 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
933 && TYPE_MODE (type1) == TYPE_MODE (type2);
937 /* Combine two integer constants ARG1 and ARG2 under operation CODE
938 to produce a new constant. Return NULL_TREE if we don't know how
939 to evaluate CODE at compile-time. */
942 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
944 double_int op1, op2, res, tmp;
946 tree type = TREE_TYPE (arg1);
947 bool uns = TYPE_UNSIGNED (type);
949 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
950 bool overflow = false;
952 op1 = tree_to_double_int (arg1);
953 op2 = tree_to_double_int (arg2);
958 res = double_int_ior (op1, op2);
962 res = double_int_xor (op1, op2);
966 res = double_int_and (op1, op2);
970 res = double_int_rshift (op1, double_int_to_shwi (op2),
971 TYPE_PRECISION (type), !uns);
975 /* It's unclear from the C standard whether shifts can overflow.
976 The following code ignores overflow; perhaps a C standard
977 interpretation ruling is needed. */
978 res = double_int_lshift (op1, double_int_to_shwi (op2),
979 TYPE_PRECISION (type), !uns);
983 res = double_int_rrotate (op1, double_int_to_shwi (op2),
984 TYPE_PRECISION (type));
988 res = double_int_lrotate (op1, double_int_to_shwi (op2),
989 TYPE_PRECISION (type));
993 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
994 &res.low, &res.high);
998 neg_double (op2.low, op2.high, &res.low, &res.high);
999 add_double (op1.low, op1.high, res.low, res.high,
1000 &res.low, &res.high);
1001 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
1005 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1006 &res.low, &res.high);
1009 case TRUNC_DIV_EXPR:
1010 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1011 case EXACT_DIV_EXPR:
1012 /* This is a shortcut for a common special case. */
1013 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1014 && !TREE_OVERFLOW (arg1)
1015 && !TREE_OVERFLOW (arg2)
1016 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1018 if (code == CEIL_DIV_EXPR)
1019 op1.low += op2.low - 1;
1021 res.low = op1.low / op2.low, res.high = 0;
1025 /* ... fall through ... */
1027 case ROUND_DIV_EXPR:
1028 if (double_int_zero_p (op2))
1030 if (double_int_one_p (op2))
1035 if (double_int_equal_p (op1, op2)
1036 && ! double_int_zero_p (op1))
1038 res = double_int_one;
1041 overflow = div_and_round_double (code, uns,
1042 op1.low, op1.high, op2.low, op2.high,
1043 &res.low, &res.high,
1044 &tmp.low, &tmp.high);
1047 case TRUNC_MOD_EXPR:
1048 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1049 /* This is a shortcut for a common special case. */
1050 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1051 && !TREE_OVERFLOW (arg1)
1052 && !TREE_OVERFLOW (arg2)
1053 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1055 if (code == CEIL_MOD_EXPR)
1056 op1.low += op2.low - 1;
1057 res.low = op1.low % op2.low, res.high = 0;
1061 /* ... fall through ... */
1063 case ROUND_MOD_EXPR:
1064 if (double_int_zero_p (op2))
1066 overflow = div_and_round_double (code, uns,
1067 op1.low, op1.high, op2.low, op2.high,
1068 &tmp.low, &tmp.high,
1069 &res.low, &res.high);
1073 res = double_int_min (op1, op2, uns);
1077 res = double_int_max (op1, op2, uns);
1084 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1085 ((!uns || is_sizetype) && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
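/* Worked example (illustrative): with 32-bit int constants,
   int_const_binop (PLUS_EXPR, INT_MAX, 1) wraps to INT_MIN and sets
   TREE_OVERFLOW on the result, whereas the same addition on unsigned
   int yields 0x80000000 with no overflow recorded, since overflow is
   only significant for signed (and sizetype) results.  */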
1091 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1092 constant. We assume ARG1 and ARG2 have the same data type, or at least
1093 are the same kind of constant and the same machine mode. Return zero if
1094 combining the constants is not allowed in the current operating mode. */
1097 const_binop (enum tree_code code, tree arg1, tree arg2)
1099 /* Sanity check for the recursive cases. */
1106 if (TREE_CODE (arg1) == INTEGER_CST)
1107 return int_const_binop (code, arg1, arg2);
1109 if (TREE_CODE (arg1) == REAL_CST)
1111 enum machine_mode mode;
1114 REAL_VALUE_TYPE value;
1115 REAL_VALUE_TYPE result;
1119 /* The following codes are handled by real_arithmetic. */
1134 d1 = TREE_REAL_CST (arg1);
1135 d2 = TREE_REAL_CST (arg2);
1137 type = TREE_TYPE (arg1);
1138 mode = TYPE_MODE (type);
1140 /* Don't perform operation if we honor signaling NaNs and
1141 either operand is a NaN. */
1142 if (HONOR_SNANS (mode)
1143 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1146 /* Don't perform operation if it would raise a division
1147 by zero exception. */
1148 if (code == RDIV_EXPR
1149 && REAL_VALUES_EQUAL (d2, dconst0)
1150 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1153 /* If either operand is a NaN, just return it. Otherwise, set up
1154 for floating-point trap; we return an overflow. */
1155 if (REAL_VALUE_ISNAN (d1))
1157 else if (REAL_VALUE_ISNAN (d2))
1160 inexact = real_arithmetic (&value, code, &d1, &d2);
1161 real_convert (&result, mode, &value);
1163 /* Don't constant fold this floating point operation if
1164 the result has overflowed and flag_trapping_math is set.  */
1165 if (flag_trapping_math
1166 && MODE_HAS_INFINITIES (mode)
1167 && REAL_VALUE_ISINF (result)
1168 && !REAL_VALUE_ISINF (d1)
1169 && !REAL_VALUE_ISINF (d2))
1172 /* Don't constant fold this floating point operation if the
1173 result may depend upon the run-time rounding mode and
1174 flag_rounding_math is set, or if GCC's software emulation
1175 is unable to accurately represent the result. */
1176 if ((flag_rounding_math
1177 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1178 && (inexact || !real_identical (&result, &value)))
1181 t = build_real (type, result);
1183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1187 if (TREE_CODE (arg1) == FIXED_CST)
1189 FIXED_VALUE_TYPE f1;
1190 FIXED_VALUE_TYPE f2;
1191 FIXED_VALUE_TYPE result;
1196 /* The following codes are handled by fixed_arithmetic. */
1202 case TRUNC_DIV_EXPR:
1203 f2 = TREE_FIXED_CST (arg2);
1208 f2.data.high = TREE_INT_CST_HIGH (arg2);
1209 f2.data.low = TREE_INT_CST_LOW (arg2);
1217 f1 = TREE_FIXED_CST (arg1);
1218 type = TREE_TYPE (arg1);
1219 sat_p = TYPE_SATURATING (type);
1220 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1221 t = build_fixed (type, result);
1222 /* Propagate overflow flags. */
1223 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1224 TREE_OVERFLOW (t) = 1;
1228 if (TREE_CODE (arg1) == COMPLEX_CST)
1230 tree type = TREE_TYPE (arg1);
1231 tree r1 = TREE_REALPART (arg1);
1232 tree i1 = TREE_IMAGPART (arg1);
1233 tree r2 = TREE_REALPART (arg2);
1234 tree i2 = TREE_IMAGPART (arg2);
1241 real = const_binop (code, r1, r2);
1242 imag = const_binop (code, i1, i2);
1246 if (COMPLEX_FLOAT_TYPE_P (type))
1247 return do_mpc_arg2 (arg1, arg2, type,
1248 /* do_nonfinite= */ folding_initializer,
1251 real = const_binop (MINUS_EXPR,
1252 const_binop (MULT_EXPR, r1, r2),
1253 const_binop (MULT_EXPR, i1, i2));
1254 imag = const_binop (PLUS_EXPR,
1255 const_binop (MULT_EXPR, r1, i2),
1256 const_binop (MULT_EXPR, i1, r2));
1260 if (COMPLEX_FLOAT_TYPE_P (type))
1261 return do_mpc_arg2 (arg1, arg2, type,
1262 /* do_nonfinite= */ folding_initializer,
1265 case TRUNC_DIV_EXPR:
1267 case FLOOR_DIV_EXPR:
1268 case ROUND_DIV_EXPR:
1269 if (flag_complex_method == 0)
1271 /* Keep this algorithm in sync with
1272 tree-complex.c:expand_complex_div_straight().
1274 Expand complex division to scalars, straightforward algorithm.
1275 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1276 with t = br*br + bi*bi.  */
1279 = const_binop (PLUS_EXPR,
1280 const_binop (MULT_EXPR, r2, r2),
1281 const_binop (MULT_EXPR, i2, i2));
1283 = const_binop (PLUS_EXPR,
1284 const_binop (MULT_EXPR, r1, r2),
1285 const_binop (MULT_EXPR, i1, i2));
1287 = const_binop (MINUS_EXPR,
1288 const_binop (MULT_EXPR, i1, r2),
1289 const_binop (MULT_EXPR, r1, i2));
1291 real = const_binop (code, t1, magsquared);
1292 imag = const_binop (code, t2, magsquared);
1296 /* Keep this algorithm in sync with
1297 tree-complex.c:expand_complex_div_wide().
1299 Expand complex division to scalars, modified algorithm to minimize
1300 overflow with wide input ranges. */
1301 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1302 fold_abs_const (r2, TREE_TYPE (type)),
1303 fold_abs_const (i2, TREE_TYPE (type)));
1305 if (integer_nonzerop (compare))
1307 /* In the TRUE branch, we compute
1308 ratio = br/bi;
1309 div = (br * ratio) + bi;
1310 tr = (ar * ratio) + ai;
1311 ti = (ai * ratio) - ar;
1312 */
1314 tree ratio = const_binop (code, r2, i2);
1315 tree div = const_binop (PLUS_EXPR, i2,
1316 const_binop (MULT_EXPR, r2, ratio));
1317 real = const_binop (MULT_EXPR, r1, ratio);
1318 real = const_binop (PLUS_EXPR, real, i1);
1319 real = const_binop (code, real, div);
1321 imag = const_binop (MULT_EXPR, i1, ratio);
1322 imag = const_binop (MINUS_EXPR, imag, r1);
1323 imag = const_binop (code, imag, div);
1327 /* In the FALSE branch, we compute
1328 ratio = d/c;
1329 divisor = (d * ratio) + c;
1330 tr = (b * ratio) + a;
1331 ti = b - (a * ratio);
1332 */
1334 tree ratio = const_binop (code, i2, r2);
1335 tree div = const_binop (PLUS_EXPR, r2,
1336 const_binop (MULT_EXPR, i2, ratio));
1338 real = const_binop (MULT_EXPR, i1, ratio);
1339 real = const_binop (PLUS_EXPR, real, r1);
1340 real = const_binop (code, real, div);
1342 imag = const_binop (MULT_EXPR, r1, ratio);
1343 imag = const_binop (MINUS_EXPR, i1, imag);
1344 imag = const_binop (code, imag, div);
1354 return build_complex (type, real, imag);
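/* Worked example (illustrative) of the wide-range branches above:
   dividing 1 + 2i by 3 + 4i has |3| < |4|, so the TRUE branch runs
   with ratio = 3/4, div = 4 + 3*ratio = 25/4, giving
   real = (1*ratio + 2) / div = 11/25 and
   imag = (2*ratio - 1) / div = 2/25, matching
   (1 + 2i)(3 - 4i) / 25 = (11 + 2i) / 25 computed directly.
   (Exact rational arithmetic is used here for clarity; the integer
   division codes above would truncate each step.)  */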
1357 if (TREE_CODE (arg1) == VECTOR_CST)
1359 tree type = TREE_TYPE (arg1);
1360 int count = TYPE_VECTOR_SUBPARTS (type), i;
1361 tree elements1, elements2, list = NULL_TREE;
1363 if (TREE_CODE (arg2) != VECTOR_CST)
1366 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1367 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1369 for (i = 0; i < count; i++)
1371 tree elem1, elem2, elem;
1373 /* The trailing elements can be empty and should be treated as 0.  */
1375 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1378 elem1 = TREE_VALUE (elements1);
1379 elements1 = TREE_CHAIN (elements1);
1383 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1386 elem2 = TREE_VALUE (elements2);
1387 elements2 = TREE_CHAIN (elements2);
1390 elem = const_binop (code, elem1, elem2);
1392 /* It is possible that const_binop cannot handle the given
1393 code and returns NULL_TREE.  */
1394 if (elem == NULL_TREE)
1397 list = tree_cons (NULL_TREE, elem, list);
1399 return build_vector (type, nreverse (list));
1404 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1405 indicates which particular sizetype to create. */
1408 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1410 return build_int_cst (sizetype_tab[(int) kind], number);
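/* Usage sketch (illustrative): the size_int and bitsize_int macros
   from tree.h expand to calls of this function, so size_int (4)
   builds the sizetype constant 4 and bitsize_int (32) builds the
   bitsizetype constant 32.  */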
1413 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1414 is a tree code. The type of the result is taken from the operands.
1415 Both must be equivalent integer types, ala int_binop_types_match_p.
1416 If the operands are constant, so is the result. */
1419 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1421 tree type = TREE_TYPE (arg0);
1423 if (arg0 == error_mark_node || arg1 == error_mark_node)
1424 return error_mark_node;
1426 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1429 /* Handle the special case of two integer constants faster. */
1430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1432 /* And some specific cases even faster than that. */
1433 if (code == PLUS_EXPR)
1435 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1437 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1440 else if (code == MINUS_EXPR)
1442 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1445 else if (code == MULT_EXPR)
1447 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1451 /* Handle general case of two integer constants. */
1452 return int_const_binop (code, arg0, arg1);
1455 return fold_build2_loc (loc, code, type, arg0, arg1);
1458 /* Given two values, either both of sizetype or both of bitsizetype,
1459 compute the difference between the two values. Return the value
1460 in signed type corresponding to the type of the operands. */
1463 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1465 tree type = TREE_TYPE (arg0);
1468 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1471 /* If the type is already signed, just do the simple thing. */
1472 if (!TYPE_UNSIGNED (type))
1473 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1475 if (type == sizetype)
1477 else if (type == bitsizetype)
1478 ctype = sbitsizetype;
1480 ctype = signed_type_for (type);
1482 /* If either operand is not a constant, do the conversions to the signed
1483 type and subtract. The hardware will do the right thing with any
1484 overflow in the subtraction. */
1485 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1486 return size_binop_loc (loc, MINUS_EXPR,
1487 fold_convert_loc (loc, ctype, arg0),
1488 fold_convert_loc (loc, ctype, arg1));
1490 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1491 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1492 overflow) and negate (which can't either). Special-case a result
1493 of zero while we're here. */
1494 if (tree_int_cst_equal (arg0, arg1))
1495 return build_int_cst (ctype, 0);
1496 else if (tree_int_cst_lt (arg1, arg0))
1497 return fold_convert_loc (loc, ctype,
1498 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1500 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1501 fold_convert_loc (loc, ctype,
1502 size_binop_loc (loc,
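/* Worked example (illustrative): for sizetype constants ARG0 = 2 and
   ARG1 = 5, the difference 2 - 5 is not representable in the unsigned
   type, so the code computes 5 - 2 = 3, converts it to ssizetype, and
   negates, yielding the ssizetype constant -3.  */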
1507 /* A subroutine of fold_convert_const handling conversions of an
1508 INTEGER_CST to another integer type. */
1511 fold_convert_const_int_from_int (tree type, const_tree arg1)
1515 /* Given an integer constant, make new constant with new type,
1516 appropriately sign-extended or truncated. */
1517 t = force_fit_type_double (type, tree_to_double_int (arg1),
1518 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1519 (TREE_INT_CST_HIGH (arg1) < 0
1520 && (TYPE_UNSIGNED (type)
1521 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1522 | TREE_OVERFLOW (arg1));
1527 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1528 to an integer type. */
1531 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1536 /* The following code implements the floating point to integer
1537 conversion rules required by the Java Language Specification,
1538 that IEEE NaNs are mapped to zero and values that overflow
1539 the target precision saturate, i.e. values greater than
1540 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1541 are mapped to INT_MIN. These semantics are allowed by the
1542 C and C++ standards that simply state that the behavior of
1543 FP-to-integer conversion is unspecified upon overflow. */
1547 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1551 case FIX_TRUNC_EXPR:
1552 real_trunc (&r, VOIDmode, &x);
1559 /* If R is NaN, return zero and show we have an overflow. */
1560 if (REAL_VALUE_ISNAN (r))
1563 val = double_int_zero;
1566 /* See if R is less than the lower bound or greater than the
1567 upper bound.  */
1571 tree lt = TYPE_MIN_VALUE (type);
1572 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1573 if (REAL_VALUES_LESS (r, l))
1576 val = tree_to_double_int (lt);
1582 tree ut = TYPE_MAX_VALUE (type);
1585 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1586 if (REAL_VALUES_LESS (u, r))
1589 val = tree_to_double_int (ut);
1595 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1597 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
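/* Worked example (illustrative): converting the REAL_CST 1e30 to a
   32-bit int saturates to INT_MAX with TREE_OVERFLOW set on the
   result, and a NaN converts to 0, likewise flagged as overflow, per
   the saturating semantics described above.  */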
1601 /* A subroutine of fold_convert_const handling conversions of a
1602 FIXED_CST to an integer type. */
1605 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1608 double_int temp, temp_trunc;
1611 /* Right shift FIXED_CST to temp by fbit. */
1612 temp = TREE_FIXED_CST (arg1).data;
1613 mode = TREE_FIXED_CST (arg1).mode;
1614 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1616 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1617 HOST_BITS_PER_DOUBLE_INT,
1618 SIGNED_FIXED_POINT_MODE_P (mode));
1620 /* Left shift temp to temp_trunc by fbit. */
1621 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1622 HOST_BITS_PER_DOUBLE_INT,
1623 SIGNED_FIXED_POINT_MODE_P (mode));
1627 temp = double_int_zero;
1628 temp_trunc = double_int_zero;
1631 /* If FIXED_CST is negative, we need to round the value toward 0:
1632 if the fractional bits are not all zero, add 1 to temp.  */
1633 if (SIGNED_FIXED_POINT_MODE_P (mode)
1634 && double_int_negative_p (temp_trunc)
1635 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1636 temp = double_int_add (temp, double_int_one);
1638 /* Given a fixed-point constant, make new constant with new type,
1639 appropriately sign-extended or truncated. */
1640 t = force_fit_type_double (type, temp, -1,
1641 (double_int_negative_p (temp)
1642 && (TYPE_UNSIGNED (type)
1643 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1644 | TREE_OVERFLOW (arg1));
1649 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1650 to another floating point type. */
1653 fold_convert_const_real_from_real (tree type, const_tree arg1)
1655 REAL_VALUE_TYPE value;
1658 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1659 t = build_real (type, value);
1661 /* If converting an infinity or NAN to a representation that doesn't
1662 have one, set the overflow bit so that we can produce some kind of
1663 error message at the appropriate point if necessary. It's not the
1664 most user-friendly message, but it's better than nothing. */
1665 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1669 && !MODE_HAS_NANS (TYPE_MODE (type)))
1670 TREE_OVERFLOW (t) = 1;
1671 /* Regular overflow, conversion produced an infinity in a mode that
1672 can't represent them. */
1673 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1674 && REAL_VALUE_ISINF (value)
1675 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1676 TREE_OVERFLOW (t) = 1;
1678 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1682 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1683 to a floating point type. */
1686 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1688 REAL_VALUE_TYPE value;
1691 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1692 t = build_real (type, value);
1694 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1698 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1699 to another fixed-point type. */
1702 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1704 FIXED_VALUE_TYPE value;
1708 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1709 TYPE_SATURATING (type));
1710 t = build_fixed (type, value);
1712 /* Propagate overflow flags. */
1713 if (overflow_p | TREE_OVERFLOW (arg1))
1714 TREE_OVERFLOW (t) = 1;
1718 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1719 to a fixed-point type. */
1722 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1724 FIXED_VALUE_TYPE value;
1728 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1729 TREE_INT_CST (arg1),
1730 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1731 TYPE_SATURATING (type));
1732 t = build_fixed (type, value);
1734 /* Propagate overflow flags. */
1735 if (overflow_p | TREE_OVERFLOW (arg1))
1736 TREE_OVERFLOW (t) = 1;
1740 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1741 to a fixed-point type. */
1744 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1746 FIXED_VALUE_TYPE value;
1750 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1751 &TREE_REAL_CST (arg1),
1752 TYPE_SATURATING (type));
1753 t = build_fixed (type, value);
1755 /* Propagate overflow flags. */
1756 if (overflow_p | TREE_OVERFLOW (arg1))
1757 TREE_OVERFLOW (t) = 1;
1761 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1762 type TYPE. If no simplification can be done return NULL_TREE. */
1765 fold_convert_const (enum tree_code code, tree type, tree arg1)
1767 if (TREE_TYPE (arg1) == type)
1770 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1771 || TREE_CODE (type) == OFFSET_TYPE)
1773 if (TREE_CODE (arg1) == INTEGER_CST)
1774 return fold_convert_const_int_from_int (type, arg1);
1775 else if (TREE_CODE (arg1) == REAL_CST)
1776 return fold_convert_const_int_from_real (code, type, arg1);
1777 else if (TREE_CODE (arg1) == FIXED_CST)
1778 return fold_convert_const_int_from_fixed (type, arg1);
1780 else if (TREE_CODE (type) == REAL_TYPE)
1782 if (TREE_CODE (arg1) == INTEGER_CST)
1783 return build_real_from_int_cst (type, arg1);
1784 else if (TREE_CODE (arg1) == REAL_CST)
1785 return fold_convert_const_real_from_real (type, arg1);
1786 else if (TREE_CODE (arg1) == FIXED_CST)
1787 return fold_convert_const_real_from_fixed (type, arg1);
1789 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1791 if (TREE_CODE (arg1) == FIXED_CST)
1792 return fold_convert_const_fixed_from_fixed (type, arg1);
1793 else if (TREE_CODE (arg1) == INTEGER_CST)
1794 return fold_convert_const_fixed_from_int (type, arg1);
1795 else if (TREE_CODE (arg1) == REAL_CST)
1796 return fold_convert_const_fixed_from_real (type, arg1);
1801 /* Construct a vector of zero elements of vector type TYPE. */
1804 build_zero_vector (tree type)
1808 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1809 return build_vector_from_val (type, t);
1812 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1815 fold_convertible_p (const_tree type, const_tree arg)
1817 tree orig = TREE_TYPE (arg);
1822 if (TREE_CODE (arg) == ERROR_MARK
1823 || TREE_CODE (type) == ERROR_MARK
1824 || TREE_CODE (orig) == ERROR_MARK)
1827 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1830 switch (TREE_CODE (type))
1832 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1833 case POINTER_TYPE: case REFERENCE_TYPE:
1835 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1836 || TREE_CODE (orig) == OFFSET_TYPE)
1838 return (TREE_CODE (orig) == VECTOR_TYPE
1839 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1842 case FIXED_POINT_TYPE:
1846 return TREE_CODE (type) == TREE_CODE (orig);
1853 /* Convert expression ARG to type TYPE. Used by the middle-end for
1854 simple conversions in preference to calling the front-end's convert. */
1857 fold_convert_loc (location_t loc, tree type, tree arg)
1859 tree orig = TREE_TYPE (arg);
1865 if (TREE_CODE (arg) == ERROR_MARK
1866 || TREE_CODE (type) == ERROR_MARK
1867 || TREE_CODE (orig) == ERROR_MARK)
1868 return error_mark_node;
1870 switch (TREE_CODE (type))
1873 case REFERENCE_TYPE:
1874 /* Handle conversions between pointers to different address spaces. */
1875 if (POINTER_TYPE_P (orig)
1876 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1877 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1878 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1881 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1883 if (TREE_CODE (arg) == INTEGER_CST)
1885 tem = fold_convert_const (NOP_EXPR, type, arg);
1886 if (tem != NULL_TREE)
1889 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1890 || TREE_CODE (orig) == OFFSET_TYPE)
1891 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1892 if (TREE_CODE (orig) == COMPLEX_TYPE)
1893 return fold_convert_loc (loc, type,
1894 fold_build1_loc (loc, REALPART_EXPR,
1895 TREE_TYPE (orig), arg));
1896 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1897 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1898 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1901 if (TREE_CODE (arg) == INTEGER_CST)
1903 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1904 if (tem != NULL_TREE)
1907 else if (TREE_CODE (arg) == REAL_CST)
1909 tem = fold_convert_const (NOP_EXPR, type, arg);
1910 if (tem != NULL_TREE)
1913 else if (TREE_CODE (arg) == FIXED_CST)
1915 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1916 if (tem != NULL_TREE)
1920 switch (TREE_CODE (orig))
1923 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1924 case POINTER_TYPE: case REFERENCE_TYPE:
1925 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1928 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1930 case FIXED_POINT_TYPE:
1931 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1934 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1935 return fold_convert_loc (loc, type, tem);
1941 case FIXED_POINT_TYPE:
1942 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1943 || TREE_CODE (arg) == REAL_CST)
1945 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1946 if (tem != NULL_TREE)
1947 goto fold_convert_exit;
1950 switch (TREE_CODE (orig))
1952 case FIXED_POINT_TYPE:
1957 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1960 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1961 return fold_convert_loc (loc, type, tem);
1968 switch (TREE_CODE (orig))
1971 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1972 case POINTER_TYPE: case REFERENCE_TYPE:
1974 case FIXED_POINT_TYPE:
1975 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1976 fold_convert_loc (loc, TREE_TYPE (type), arg),
1977 fold_convert_loc (loc, TREE_TYPE (type),
1978 integer_zero_node));
1983 if (TREE_CODE (arg) == COMPLEX_EXPR)
1985 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1986 TREE_OPERAND (arg, 0));
1987 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1988 TREE_OPERAND (arg, 1));
1989 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1992 arg = save_expr (arg);
1993 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1994 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1995 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1996 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1997 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2005 if (integer_zerop (arg))
2006 return build_zero_vector (type);
2007 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2008 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2009 || TREE_CODE (orig) == VECTOR_TYPE);
2010 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2013 tem = fold_ignored_result (arg);
2014 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2017 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2018 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2022 protected_set_expr_location_unshare (tem, loc);
2026 /* Return false if expr can be assumed not to be an lvalue, true
2027 otherwise.  */
2030 maybe_lvalue_p (const_tree x)
2032 /* We only need to wrap lvalue tree codes. */
2033 switch (TREE_CODE (x))
2046 case ARRAY_RANGE_REF:
2052 case PREINCREMENT_EXPR:
2053 case PREDECREMENT_EXPR:
2055 case TRY_CATCH_EXPR:
2056 case WITH_CLEANUP_EXPR:
2065 /* Assume the worst for front-end tree codes. */
2066 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2074 /* Return an expr equal to X but certainly not valid as an lvalue. */
2077 non_lvalue_loc (location_t loc, tree x)
2079 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2080 us.  */
2084 if (! maybe_lvalue_p (x))
2086 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2089 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2090 Zero means allow extended lvalues. */
2092 int pedantic_lvalues;
2094 /* When pedantic, return an expr equal to X but certainly not valid as a
2095 pedantic lvalue. Otherwise, return X. */
2098 pedantic_non_lvalue_loc (location_t loc, tree x)
2100 if (pedantic_lvalues)
2101 return non_lvalue_loc (loc, x);
2103 return protected_set_expr_location_unshare (x, loc);
2106 /* Given a tree comparison code, return the code that is the logical inverse
2107 of the given code. It is not safe to do this for floating-point
2108 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2109 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2112 invert_tree_comparison (enum tree_code code, bool honor_nans)
2114 if (honor_nans && flag_trapping_math)
2124 return honor_nans ? UNLE_EXPR : LE_EXPR;
2126 return honor_nans ? UNLT_EXPR : LT_EXPR;
2128 return honor_nans ? UNGE_EXPR : GE_EXPR;
2130 return honor_nans ? UNGT_EXPR : GT_EXPR;
2144 return UNORDERED_EXPR;
2145 case UNORDERED_EXPR:
2146 return ORDERED_EXPR;
2152 /* Similar, but return the comparison that results if the operands are
2153 swapped. This is safe for floating-point. */
2156 swap_tree_comparison (enum tree_code code)
2163 case UNORDERED_EXPR:
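/* Worked example (illustrative): swapping the operands of a < b gives
   b > a, so LT_EXPR maps to GT_EXPR and vice versa, while symmetric
   codes such as EQ_EXPR and UNORDERED_EXPR map to themselves.  */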
2189 /* Convert a comparison tree code from an enum tree_code representation
2190 into a compcode bit-based encoding. This function is the inverse of
2191 compcode_to_comparison. */
2193 static enum comparison_code
2194 comparison_to_compcode (enum tree_code code)
2211 return COMPCODE_ORD;
2212 case UNORDERED_EXPR:
2213 return COMPCODE_UNORD;
2215 return COMPCODE_UNLT;
2217 return COMPCODE_UNEQ;
2219 return COMPCODE_UNLE;
2221 return COMPCODE_UNGT;
2223 return COMPCODE_LTGT;
2225 return COMPCODE_UNGE;
2231 /* Convert a compcode bit-based encoding of a comparison operator back
2232 to GCC's enum tree_code representation. This function is the
2233 inverse of comparison_to_compcode. */
2235 static enum tree_code
2236 compcode_to_comparison (enum comparison_code code)
2253 return ORDERED_EXPR;
2254 case COMPCODE_UNORD:
2255 return UNORDERED_EXPR;
2273 /* Return a tree for the comparison which is the combination of
2274 doing the AND or OR (depending on CODE) of the two operations LCODE
2275 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2276 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2277 if this makes the transformation invalid. */
2280 combine_comparisons (location_t loc,
2281 enum tree_code code, enum tree_code lcode,
2282 enum tree_code rcode, tree truth_type,
2283 tree ll_arg, tree lr_arg)
2285 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2286 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2287 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2292 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2293 compcode = lcompcode & rcompcode;
2296 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2297 compcode = lcompcode | rcompcode;
2306 /* Eliminate unordered comparisons, as well as LTGT and ORD
2307 which are not used unless the mode has NaNs. */
2308 compcode &= ~COMPCODE_UNORD;
2309 if (compcode == COMPCODE_LTGT)
2310 compcode = COMPCODE_NE;
2311 else if (compcode == COMPCODE_ORD)
2312 compcode = COMPCODE_TRUE;
2314 else if (flag_trapping_math)
2316 /* Check that the original operation and the optimized ones will trap
2317 under the same condition. */
2318 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2319 && (lcompcode != COMPCODE_EQ)
2320 && (lcompcode != COMPCODE_ORD);
2321 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2322 && (rcompcode != COMPCODE_EQ)
2323 && (rcompcode != COMPCODE_ORD);
2324 bool trap = (compcode & COMPCODE_UNORD) == 0
2325 && (compcode != COMPCODE_EQ)
2326 && (compcode != COMPCODE_ORD);
2328 /* In a short-circuited boolean expression the LHS might be
2329 such that the RHS, if evaluated, will never trap. For
2330 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2331 if neither x nor y is NaN. (This is a mixed blessing: for
2332 example, the expression above will never trap, hence
2333 optimizing it to x < y would be invalid). */
2334 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2335 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2338 /* If the comparison was short-circuited, and only the RHS
2339 trapped, we may now generate a spurious trap.  */
2341 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2344 /* If we changed the conditions that cause a trap, we lose. */
2345 if ((ltrap || rtrap) != trap)
2349 if (compcode == COMPCODE_TRUE)
2350 return constant_boolean_node (true, truth_type);
2351 else if (compcode == COMPCODE_FALSE)
2352 return constant_boolean_node (false, truth_type);
2355 enum tree_code tcode;
2357 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2358 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
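/* For illustration: a minimal standalone sketch of why the bit-based
   encoding makes the combination above a plain bit operation.  The
   enum values here are local to the sketch and merely assume the
   documented scheme of one bit each for "less", "equal" and
   "greater"; they are not the declarations used elsewhere in this
   file.  */
#if 0
#include <assert.h>

enum cc { CC_LT = 1, CC_EQ = 2, CC_LE = 3, CC_GT = 4, CC_GE = 6 };

int
main (void)
{
  /* (x < y) || (x == y) has exactly the truth bits of x <= y,
     so TRUTH_OR_EXPR becomes "|".  */
  assert ((CC_LT | CC_EQ) == CC_LE);
  /* (x <= y) && (x >= y) is x == y, so TRUTH_AND_EXPR becomes "&".  */
  assert ((CC_LE & CC_GE) == CC_EQ);
  return 0;
}
#endif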
2362 /* Return nonzero if two operands (typically of the same tree node)
2363 are necessarily equal. If either argument has side-effects this
2364 function returns zero. FLAGS modifies behavior as follows:
2366 If OEP_ONLY_CONST is set, only return nonzero for constants.
2367 This function tests whether the operands are indistinguishable;
2368 it does not test whether they are equal using C's == operation.
2369 The distinction is important for IEEE floating point, because
2370 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2371 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2373 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2374 even though it may hold multiple values during a function.
2375 This is because a GCC tree node guarantees that nothing else is
2376 executed between the evaluation of its "operands" (which may often
2377 be evaluated in arbitrary order). Hence if the operands themselves
2378 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2379 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2380 unset means assuming isochronic (or instantaneous) tree equivalence.
2381 Unless comparing arbitrary expression trees, such as from different
2382 statements, this flag can usually be left unset.
2384 If OEP_PURE_SAME is set, then pure functions with identical arguments
2385 are considered the same. It is used when the caller has other ways
2386 to ensure that global memory is unchanged in between. */
2389 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2391 /* If either is ERROR_MARK, they aren't equal. */
2392 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2393 || TREE_TYPE (arg0) == error_mark_node
2394 || TREE_TYPE (arg1) == error_mark_node)
2397 /* Similar, if either does not have a type (like a released SSA name),
2398 they aren't equal. */
2399 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2402 /* Check equality of integer constants before bailing out due to
2403 precision differences. */
2404 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2405 return tree_int_cst_equal (arg0, arg1);
2407 /* If both types don't have the same signedness, then we can't consider
2408 them equal. We must check this before the STRIP_NOPS calls
2409 because they may change the signedness of the arguments. As pointers
2410 strictly don't have a signedness, require either two pointers or
2411 two non-pointers as well. */
2412 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2413 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2416 /* We cannot consider pointers to different address spaces equal. */
2417 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2418 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2419 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2422 /* If both types don't have the same precision, then it is not safe
2424 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2430 /* In case both args are comparisons but with different comparison
2431 code, try to swap the comparison operands of one arg to produce
2432 a match and compare that variant. */
2433 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2434 && COMPARISON_CLASS_P (arg0)
2435 && COMPARISON_CLASS_P (arg1))
2437 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2439 if (TREE_CODE (arg0) == swap_code)
2440 return operand_equal_p (TREE_OPERAND (arg0, 0),
2441 TREE_OPERAND (arg1, 1), flags)
2442 && operand_equal_p (TREE_OPERAND (arg0, 1),
2443 TREE_OPERAND (arg1, 0), flags);
2446 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2447 /* This is needed for conversions and for COMPONENT_REF.
2448 Might as well play it safe and always test this. */
2449 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2450 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2451 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2454 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2455 We don't care about side effects in that case because the SAVE_EXPR
2456 takes care of that for us. In all other cases, two expressions are
2457 equal if they have no side effects. If we have two identical
2458 expressions with side effects that should be treated the same due
2459 to the only side effects being identical SAVE_EXPR's, that will
2460 be detected in the recursive calls below.
2461 If we are taking an invariant address of two identical objects
2462 they are necessarily equal as well. */
2463 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2464 && (TREE_CODE (arg0) == SAVE_EXPR
2465 || (flags & OEP_CONSTANT_ADDRESS_OF)
2466 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2469 /* Next handle constant cases, those for which we can return 1 even
2470 if ONLY_CONST is set. */
2471 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2472 switch (TREE_CODE (arg0))
2475 return tree_int_cst_equal (arg0, arg1);
2478 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2479 TREE_FIXED_CST (arg1));
2482 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2483 TREE_REAL_CST (arg1)))
2487 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2489 /* If we do not distinguish between signed and unsigned zero,
2490 consider them equal. */
2491 if (real_zerop (arg0) && real_zerop (arg1))
2500 v1 = TREE_VECTOR_CST_ELTS (arg0);
2501 v2 = TREE_VECTOR_CST_ELTS (arg1);
2504 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2507 v1 = TREE_CHAIN (v1);
2508 v2 = TREE_CHAIN (v2);
2515 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2517 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2521 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2522 && ! memcmp (TREE_STRING_POINTER (arg0),
2523 TREE_STRING_POINTER (arg1),
2524 TREE_STRING_LENGTH (arg0)));
2527 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2528 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2529 ? OEP_CONSTANT_ADDRESS_OF : 0);
2534 if (flags & OEP_ONLY_CONST)
2537 /* Define macros to test an operand from arg0 and arg1 for equality and a
2538 variant that allows null and views null as being different from any
2539 non-null value. In the latter case, if either is null, then both
2540 must be; otherwise, do the normal comparison. */
2541 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2542 TREE_OPERAND (arg1, N), flags)
2544 #define OP_SAME_WITH_NULL(N) \
2545 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2546 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2548 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2551 /* Two conversions are equal only if signedness and modes match. */
2552 switch (TREE_CODE (arg0))
2555 case FIX_TRUNC_EXPR:
2556 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2557 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2567 case tcc_comparison:
2569 if (OP_SAME (0) && OP_SAME (1))
2572 /* For commutative ops, allow the other order. */
2573 return (commutative_tree_code (TREE_CODE (arg0))
2574 && operand_equal_p (TREE_OPERAND (arg0, 0),
2575 TREE_OPERAND (arg1, 1), flags)
2576 && operand_equal_p (TREE_OPERAND (arg0, 1),
2577 TREE_OPERAND (arg1, 0), flags));
2580 /* If either of the pointer (or reference) expressions we are
2581 dereferencing contain a side effect, these cannot be equal. */
2582 if (TREE_SIDE_EFFECTS (arg0)
2583 || TREE_SIDE_EFFECTS (arg1))
2586 switch (TREE_CODE (arg0))
2594 /* Require equal access sizes, and similar pointer types.
2595 We can have incomplete types for array references of
2596 variable-sized arrays from the Fortran frontend
2598 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2599 || (TYPE_SIZE (TREE_TYPE (arg0))
2600 && TYPE_SIZE (TREE_TYPE (arg1))
2601 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2602 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2603 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2604 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2605 && OP_SAME (0) && OP_SAME (1));
2608 case ARRAY_RANGE_REF:
2609 /* Operands 2 and 3 may be null.
2610 Compare the array index by value if it is constant first as we
2611 may have different types but same value here. */
2613 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2614 TREE_OPERAND (arg1, 1))
2616 && OP_SAME_WITH_NULL (2)
2617 && OP_SAME_WITH_NULL (3));
2620 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2621 may be NULL when we're called to compare MEM_EXPRs. */
2622 return OP_SAME_WITH_NULL (0)
2624 && OP_SAME_WITH_NULL (2);
2627 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2633 case tcc_expression:
2634 switch (TREE_CODE (arg0))
2637 case TRUTH_NOT_EXPR:
2640 case TRUTH_ANDIF_EXPR:
2641 case TRUTH_ORIF_EXPR:
2642 return OP_SAME (0) && OP_SAME (1);
2645 case WIDEN_MULT_PLUS_EXPR:
2646 case WIDEN_MULT_MINUS_EXPR:
2649 /* The multiplication operands are commutative. */
2652 case TRUTH_AND_EXPR:
2654 case TRUTH_XOR_EXPR:
2655 if (OP_SAME (0) && OP_SAME (1))
2658 /* Otherwise take into account this is a commutative operation. */
2659 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2660 TREE_OPERAND (arg1, 1), flags)
2661 && operand_equal_p (TREE_OPERAND (arg0, 1),
2662 TREE_OPERAND (arg1, 0), flags));
2667 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2674 switch (TREE_CODE (arg0))
2677 /* If the CALL_EXPRs call different functions, then they
2678 clearly can not be equal. */
2679 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2684 unsigned int cef = call_expr_flags (arg0);
2685 if (flags & OEP_PURE_SAME)
2686 cef &= ECF_CONST | ECF_PURE;
2693 /* Now see if all the arguments are the same. */
2695 const_call_expr_arg_iterator iter0, iter1;
2697 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2698 a1 = first_const_call_expr_arg (arg1, &iter1);
2700 a0 = next_const_call_expr_arg (&iter0),
2701 a1 = next_const_call_expr_arg (&iter1))
2702 if (! operand_equal_p (a0, a1, flags))
2705 /* If we get here and both argument lists are exhausted
2706 then the CALL_EXPRs are equal. */
2707 return ! (a0 || a1);
2713 case tcc_declaration:
2714 /* Consider __builtin_sqrt equal to sqrt. */
2715 return (TREE_CODE (arg0) == FUNCTION_DECL
2716 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2717 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2718 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2725 #undef OP_SAME_WITH_NULL
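/* A standalone illustration of the IEEE subtlety documented before
   operand_equal_p: "==" is not the same relation as
   "indistinguishable".  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  /* -0.0 and 0.0 compare equal with "==" ...  */
  assert (pz == nz);
  /* ... yet they are distinguishable, e.g. through copysign.  */
  assert (copysign (1.0, pz) != copysign (1.0, nz));
  /* Conversely, a NaN never compares equal to itself, even when the
     two operands are bit-for-bit identical.  */
  double n = nan ("");
  assert (n != n);
  return 0;
}
#endif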
2728 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2729 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2731 When in doubt, return 0. */
2734 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2736 int unsignedp1, unsignedpo;
2737 tree primarg0, primarg1, primother;
2738 unsigned int correct_width;
2740 if (operand_equal_p (arg0, arg1, 0))
2743 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2744 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2747 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2748 and see if the inner values are the same. This removes any
2749 signedness comparison, which doesn't matter here. */
2750 primarg0 = arg0, primarg1 = arg1;
2751 STRIP_NOPS (primarg0);
2752 STRIP_NOPS (primarg1);
2753 if (operand_equal_p (primarg0, primarg1, 0))
2756 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2757 actual comparison operand, ARG0.
2759 First throw away any conversions to wider types
2760 already present in the operands. */
2762 primarg1 = get_narrower (arg1, &unsignedp1);
2763 primother = get_narrower (other, &unsignedpo);
2765 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2766 if (unsignedp1 == unsignedpo
2767 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2768 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2770 tree type = TREE_TYPE (arg0);
2772 /* Make sure shorter operand is extended the right way
2773 to match the longer operand. */
2774 primarg1 = fold_convert (signed_or_unsigned_type_for
2775 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2777 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2784 /* See if ARG is an expression that is either a comparison or is performing
2785 arithmetic on comparisons. The comparisons must only be comparing
2786 two different values, which will be stored in *CVAL1 and *CVAL2; if
2787 they are nonzero it means that some operands have already been found.
2788 No variables may be used anywhere else in the expression except in the
2789 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2790 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2792 If this is true, return 1. Otherwise, return zero. */
2795 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2797 enum tree_code code = TREE_CODE (arg);
2798 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2800 /* We can handle some of the tcc_expression cases here. */
2801 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2803 else if (tclass == tcc_expression
2804 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2805 || code == COMPOUND_EXPR))
2806 tclass = tcc_binary;
2808 else if (tclass == tcc_expression && code == SAVE_EXPR
2809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2811 /* If we've already found a CVAL1 or CVAL2, this expression is
2812 too complex to handle. */
2813 if (*cval1 || *cval2)
2823 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2826 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2828 cval1, cval2, save_p));
2833 case tcc_expression:
2834 if (code == COND_EXPR)
2835 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2836 cval1, cval2, save_p)
2837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2838 cval1, cval2, save_p)
2839 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2840 cval1, cval2, save_p));
2843 case tcc_comparison:
2844 /* First see if we can handle the first operand, then the second. For
2845 the second operand, we know *CVAL1 can't be zero. It must be that
2846 one side of the comparison is each of the values; test for the
2847 case where this isn't true by failing if the two operands
2850 if (operand_equal_p (TREE_OPERAND (arg, 0),
2851 TREE_OPERAND (arg, 1), 0))
2855 *cval1 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2858 else if (*cval2 == 0)
2859 *cval2 = TREE_OPERAND (arg, 0);
2860 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2865 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2867 else if (*cval2 == 0)
2868 *cval2 = TREE_OPERAND (arg, 1);
2869 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2881 /* ARG is a tree that is known to contain just arithmetic operations and
2882 comparisons. Evaluate the operations in the tree substituting NEW0 for
2883 any occurrence of OLD0 as an operand of a comparison and likewise for
2887 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2888 tree old1, tree new1)
2890 tree type = TREE_TYPE (arg);
2891 enum tree_code code = TREE_CODE (arg);
2892 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2894 /* We can handle some of the tcc_expression cases here. */
2895 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2897 else if (tclass == tcc_expression
2898 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2899 tclass = tcc_binary;
2904 return fold_build1_loc (loc, code, type,
2905 eval_subst (loc, TREE_OPERAND (arg, 0),
2906 old0, new0, old1, new1));
2909 return fold_build2_loc (loc, code, type,
2910 eval_subst (loc, TREE_OPERAND (arg, 0),
2911 old0, new0, old1, new1),
2912 eval_subst (loc, TREE_OPERAND (arg, 1),
2913 old0, new0, old1, new1));
2915 case tcc_expression:
2919 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2923 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2927 return fold_build3_loc (loc, code, type,
2928 eval_subst (loc, TREE_OPERAND (arg, 0),
2929 old0, new0, old1, new1),
2930 eval_subst (loc, TREE_OPERAND (arg, 1),
2931 old0, new0, old1, new1),
2932 eval_subst (loc, TREE_OPERAND (arg, 2),
2933 old0, new0, old1, new1));
2937 /* Fall through - ??? */
2939 case tcc_comparison:
2941 tree arg0 = TREE_OPERAND (arg, 0);
2942 tree arg1 = TREE_OPERAND (arg, 1);
2944 /* We need to check both for exact equality and tree equality. The
2945 former will be true if the operand has a side-effect. In that
2946 case, we know the operand occurred exactly once. */
2948 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2950 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2953 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2955 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2958 return fold_build2_loc (loc, code, type, arg0, arg1);
2966 /* Return a tree for the case when the result of an expression is RESULT
2967 converted to TYPE and OMITTED was previously an operand of the expression
2968 but is now not needed (e.g., we folded OMITTED * 0).
2970 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2971 the conversion of RESULT to TYPE. */
2974 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2976 tree t = fold_convert_loc (loc, type, result);
2978 /* If the resulting operand is an empty statement, just return the omitted
2979 statement cast to void. */
2980 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2981 return build1_loc (loc, NOP_EXPR, void_type_node,
2982 fold_ignored_result (omitted));
2984 if (TREE_SIDE_EFFECTS (omitted))
2985 return build2_loc (loc, COMPOUND_EXPR, type,
2986 fold_ignored_result (omitted), t);
2988 return non_lvalue_loc (loc, t);
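/* Source-level picture of what omit_one_operand_loc builds: when an
   expression such as "f () * 0" folds to 0 but F has side effects,
   the result is the COMPOUND_EXPR "(f (), 0)".  F below is only a
   stand-in for an arbitrary side-effecting operand.  */
#if 0
#include <assert.h>

static int calls;

static int
f (void)
{
  calls++;
  return 42;
}

int
main (void)
{
  int r = (f (), 0);	/* what the folder emits for "f () * 0" */
  assert (r == 0 && calls == 1);
  return 0;
}
#endif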
2991 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2994 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2997 tree t = fold_convert_loc (loc, type, result);
2999 /* If the resulting operand is an empty statement, just return the omitted
3000 statement cast to void. */
3001 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3002 return build1_loc (loc, NOP_EXPR, void_type_node,
3003 fold_ignored_result (omitted));
3005 if (TREE_SIDE_EFFECTS (omitted))
3006 return build2_loc (loc, COMPOUND_EXPR, type,
3007 fold_ignored_result (omitted), t);
3009 return pedantic_non_lvalue_loc (loc, t);
3012 /* Return a tree for the case when the result of an expression is RESULT
3013 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3014 of the expression but are now not needed.
3016 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3017 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3018 evaluated before OMITTED2. Otherwise, if neither has side effects,
3019 just do the conversion of RESULT to TYPE. */
3022 omit_two_operands_loc (location_t loc, tree type, tree result,
3023 tree omitted1, tree omitted2)
3025 tree t = fold_convert_loc (loc, type, result);
3027 if (TREE_SIDE_EFFECTS (omitted2))
3028 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3029 if (TREE_SIDE_EFFECTS (omitted1))
3030 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3032 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3036 /* Return a simplified tree node for the truth-negation of ARG. This
3037 never alters ARG itself. We assume that ARG is an operation that
3038 returns a truth value (0 or 1).
3040 FIXME: one would think we would fold the result, but it causes
3041 problems with the dominator optimizer. */
3044 fold_truth_not_expr (location_t loc, tree arg)
3046 tree type = TREE_TYPE (arg);
3047 enum tree_code code = TREE_CODE (arg);
3048 location_t loc1, loc2;
3050 /* If this is a comparison, we can simply invert it, except for
3051 floating-point non-equality comparisons, in which case we just
3052 enclose a TRUTH_NOT_EXPR around what we have. */
3054 if (TREE_CODE_CLASS (code) == tcc_comparison)
3056 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3057 if (FLOAT_TYPE_P (op_type)
3058 && flag_trapping_math
3059 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3060 && code != NE_EXPR && code != EQ_EXPR)
3063 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3064 if (code == ERROR_MARK)
3067 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3068 TREE_OPERAND (arg, 1));
3074 return constant_boolean_node (integer_zerop (arg), type);
3076 case TRUTH_AND_EXPR:
3077 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3078 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3079 return build2_loc (loc, TRUTH_OR_EXPR, type,
3080 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3081 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3084 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3085 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3086 return build2_loc (loc, TRUTH_AND_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090 case TRUTH_XOR_EXPR:
3091 /* Here we can invert either operand. We invert the first operand
3092 unless the second operand is a TRUTH_NOT_EXPR in which case our
3093 result is the XOR of the first operand with the inside of the
3094 negation of the second operand. */
3096 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3097 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3098 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3100 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3101 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3102 TREE_OPERAND (arg, 1));
3104 case TRUTH_ANDIF_EXPR:
3105 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3106 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3107 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3108 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3109 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3111 case TRUTH_ORIF_EXPR:
3112 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3113 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3114 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3115 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3116 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3118 case TRUTH_NOT_EXPR:
3119 return TREE_OPERAND (arg, 0);
3123 tree arg1 = TREE_OPERAND (arg, 1);
3124 tree arg2 = TREE_OPERAND (arg, 2);
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3129 /* A COND_EXPR may have a throw as one operand, which
3130 then has void type. Just leave void operands
3132 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3133 VOID_TYPE_P (TREE_TYPE (arg1))
3134 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3135 VOID_TYPE_P (TREE_TYPE (arg2))
3136 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3140 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3141 return build2_loc (loc, COMPOUND_EXPR, type,
3142 TREE_OPERAND (arg, 0),
3143 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3145 case NON_LVALUE_EXPR:
3146 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3147 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3150 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3151 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3153 /* ... fall through ... */
3156 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3157 return build1_loc (loc, TREE_CODE (arg), type,
3158 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3161 if (!integer_onep (TREE_OPERAND (arg, 1)))
3163 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3166 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3168 case CLEANUP_POINT_EXPR:
3169 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3170 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3171 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
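/* A quick host-side check of the inversions above: De Morgan for the
   TRUTH_* codes, and the NaN caveat for comparisons - !(x < y) is
   x >= y only when neither operand is a NaN.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (!(a && b) == (!a || !b));	/* TRUTH_AND -> TRUTH_OR */
	assert (!(a || b) == (!a && !b));	/* TRUTH_OR -> TRUTH_AND */
	assert (!(a != b) == (a == b));		/* TRUTH_XOR inversion */
      }
  /* With a NaN operand, !(x < 1.0) is true but x >= 1.0 is false,
     which is why trapping float comparisons are left alone above.  */
  double x = nan ("");
  assert (!(x < 1.0) && !(x >= 1.0));
  return 0;
}
#endif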
3178 /* Return a simplified tree node for the truth-negation of ARG. This
3179 never alters ARG itself. We assume that ARG is an operation that
3180 returns a truth value (0 or 1).
3182 FIXME: one would think we would fold the result, but it causes
3183 problems with the dominator optimizer. */
3186 invert_truthvalue_loc (location_t loc, tree arg)
3190 if (TREE_CODE (arg) == ERROR_MARK)
3193 tem = fold_truth_not_expr (loc, arg);
3195 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3200 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3201 operands are another bit-wise operation with a common input. If so,
3202 distribute the bit operations to save an operation and possibly two if
3203 constants are involved. For example, convert
3204 (A | B) & (A | C) into A | (B & C)
3205 Further simplification will occur if B and C are constants.
3207 If this optimization cannot be done, 0 will be returned. */
3210 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3211 tree arg0, tree arg1)
3216 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3217 || TREE_CODE (arg0) == code
3218 || (TREE_CODE (arg0) != BIT_AND_EXPR
3219 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3222 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3224 common = TREE_OPERAND (arg0, 0);
3225 left = TREE_OPERAND (arg0, 1);
3226 right = TREE_OPERAND (arg1, 1);
3228 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3230 common = TREE_OPERAND (arg0, 0);
3231 left = TREE_OPERAND (arg0, 1);
3232 right = TREE_OPERAND (arg1, 0);
3234 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3236 common = TREE_OPERAND (arg0, 1);
3237 left = TREE_OPERAND (arg0, 0);
3238 right = TREE_OPERAND (arg1, 1);
3240 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3242 common = TREE_OPERAND (arg0, 1);
3243 left = TREE_OPERAND (arg0, 0);
3244 right = TREE_OPERAND (arg1, 0);
3249 common = fold_convert_loc (loc, type, common);
3250 left = fold_convert_loc (loc, type, left);
3251 right = fold_convert_loc (loc, type, right);
3252 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3253 fold_build2_loc (loc, code, type, left, right));
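/* Exhaustive host-side check of the distributive identities used
   above over a few small values: (A | B) & (A | C) == A | (B & C)
   and its dual.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 8; a++)
    for (unsigned b = 0; b < 8; b++)
      for (unsigned c = 0; c < 8; c++)
	{
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
  return 0;
}
#endif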
3256 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3257 with code CODE. This optimization is unsafe. */
3259 distribute_real_division (location_t loc, enum tree_code code, tree type,
3260 tree arg0, tree arg1)
3262 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3263 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3265 /* (A / C) +- (B / C) -> (A +- B) / C. */
3267 && operand_equal_p (TREE_OPERAND (arg0, 1),
3268 TREE_OPERAND (arg1, 1), 0))
3269 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3270 fold_build2_loc (loc, code, type,
3271 TREE_OPERAND (arg0, 0),
3272 TREE_OPERAND (arg1, 0)),
3273 TREE_OPERAND (arg0, 1));
3275 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3276 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3277 TREE_OPERAND (arg1, 0), 0)
3278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3279 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3281 REAL_VALUE_TYPE r0, r1;
3282 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3283 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3285 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3287 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3288 real_arithmetic (&r0, code, &r0, &r1);
3289 return fold_build2_loc (loc, MULT_EXPR, type,
3290 TREE_OPERAND (arg0, 0),
3291 build_real (type, r0));
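/* Why the comment above calls this unsafe: in floating point the
   rewrite (A / C1) + (A / C2) -> A * (1/C1 + 1/C2) can round
   differently.  Rather than hard-coding a counterexample, the sketch
   below simply searches a small range for one; it may print nothing
   if none exists there.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const double c1 = 3.0, c2 = 7.0;
  for (double a = 1.0; a < 100.0; a += 1.0)
    {
      double lhs = a / c1 + a / c2;
      double rhs = a * (1.0 / c1 + 1.0 / c2);
      if (lhs != rhs)
	{
	  printf ("a=%g: %.17g != %.17g\n", a, lhs, rhs);
	  return 0;
	}
    }
  return 0;
}
#endif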
3297 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3298 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3301 make_bit_field_ref (location_t loc, tree inner, tree type,
3302 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3304 tree result, bftype;
3308 tree size = TYPE_SIZE (TREE_TYPE (inner));
3309 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3310 || POINTER_TYPE_P (TREE_TYPE (inner)))
3311 && host_integerp (size, 0)
3312 && tree_low_cst (size, 0) == bitsize)
3313 return fold_convert_loc (loc, type, inner);
3317 if (TYPE_PRECISION (bftype) != bitsize
3318 || TYPE_UNSIGNED (bftype) == !unsignedp)
3319 bftype = build_nonstandard_integer_type (bitsize, 0);
3321 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3322 size_int (bitsize), bitsize_int (bitpos));
3325 result = fold_convert_loc (loc, type, result);
3330 /* Optimize a bit-field compare.
3332 There are two cases: First is a compare against a constant and the
3333 second is a comparison of two items where the fields are at the same
3334 bit position relative to the start of a chunk (byte, halfword, word)
3335 large enough to contain it. In these cases we can avoid the shift
3336 implicit in bitfield extractions.
3338 For constants, we emit a compare of the shifted constant with the
3339 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3340 compared. For two fields at the same position, we do the ANDs with the
3341 similar mask and compare the result of the ANDs.
3343 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3344 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3345 are the left and right operands of the comparison, respectively.
3347 If the optimization described above can be done, we return the resulting
3348 tree. Otherwise we return zero. */
3351 optimize_bit_field_compare (location_t loc, enum tree_code code,
3352 tree compare_type, tree lhs, tree rhs)
3354 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3355 tree type = TREE_TYPE (lhs);
3356 tree signed_type, unsigned_type;
3357 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3358 enum machine_mode lmode, rmode, nmode;
3359 int lunsignedp, runsignedp;
3360 int lvolatilep = 0, rvolatilep = 0;
3361 tree linner, rinner = NULL_TREE;
3365 /* Get all the information about the extractions being done. If the bit size
3366 is the same as the size of the underlying object, we aren't doing an
3367 extraction at all and so can do nothing. We also don't want to
3368 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3369 then will no longer be able to replace it. */
3370 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3371 &lunsignedp, &lvolatilep, false);
3372 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3373 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3378 /* If this is not a constant, we can only do something if bit positions,
3379 sizes, and signedness are the same. */
3380 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3381 &runsignedp, &rvolatilep, false);
3383 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3384 || lunsignedp != runsignedp || offset != 0
3385 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3389 /* See if we can find a mode to refer to this field. We should be able to,
3390 but fail if we can't. */
3392 && GET_MODE_BITSIZE (lmode) > 0
3393 && flag_strict_volatile_bitfields > 0)
3396 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3397 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3398 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3399 TYPE_ALIGN (TREE_TYPE (rinner))),
3400 word_mode, lvolatilep || rvolatilep);
3401 if (nmode == VOIDmode)
3404 /* Set signed and unsigned types of the precision of this mode for the
3406 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3407 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3409 /* Compute the bit position and size for the new reference and our offset
3410 within it. If the new reference is the same size as the original, we
3411 won't optimize anything, so return zero. */
3412 nbitsize = GET_MODE_BITSIZE (nmode);
3413 nbitpos = lbitpos & ~ (nbitsize - 1);
3415 if (nbitsize == lbitsize)
3418 if (BYTES_BIG_ENDIAN)
3419 lbitpos = nbitsize - lbitsize - lbitpos;
3421 /* Make the mask to be used against the extracted field. */
3422 mask = build_int_cst_type (unsigned_type, -1);
3423 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3424 mask = const_binop (RSHIFT_EXPR, mask,
3425 size_int (nbitsize - lbitsize - lbitpos));
3428 /* If not comparing with constant, just rework the comparison
3430 return fold_build2_loc (loc, code, compare_type,
3431 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3432 make_bit_field_ref (loc, linner,
3437 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3438 make_bit_field_ref (loc, rinner,
3444 /* Otherwise, we are handling the constant case. See if the constant is too
3445 big for the field. Warn and return a tree for 0 (false) if so. We do
3446 this not only for its own sake, but to avoid having to test for this
3447 error case below. If we didn't, we might generate wrong code.
3449 For unsigned fields, the constant shifted right by the field length should
3450 be all zero. For signed fields, the high-order bits should agree with
3455 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3456 fold_convert_loc (loc,
3457 unsigned_type, rhs),
3458 size_int (lbitsize))))
3460 warning (0, "comparison is always %d due to width of bit-field",
3462 return constant_boolean_node (code == NE_EXPR, compare_type);
3467 tree tem = const_binop (RSHIFT_EXPR,
3468 fold_convert_loc (loc, signed_type, rhs),
3469 size_int (lbitsize - 1));
3470 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3472 warning (0, "comparison is always %d due to width of bit-field",
3474 return constant_boolean_node (code == NE_EXPR, compare_type);
3478 /* Single-bit compares should always be against zero. */
3479 if (lbitsize == 1 && ! integer_zerop (rhs))
3481 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3482 rhs = build_int_cst (type, 0);
3485 /* Make a new bitfield reference, shift the constant over the
3486 appropriate number of bits and mask it with the computed mask
3487 (in case this was a signed field). If we changed it, make a new one. */
3488 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3491 TREE_SIDE_EFFECTS (lhs) = 1;
3492 TREE_THIS_VOLATILE (lhs) = 1;
3495 rhs = const_binop (BIT_AND_EXPR,
3496 const_binop (LSHIFT_EXPR,
3497 fold_convert_loc (loc, unsigned_type, rhs),
3498 size_int (lbitpos)),
3501 lhs = build2_loc (loc, code, compare_type,
3502 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
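/* Source-level picture of the constant case above: comparing a
   bit-field against a constant becomes a mask-and-compare on the
   containing unit, avoiding the extraction shift.  The bit position
   and mask below assume a little-endian layout that packs F at bit 3;
   bit-field layout is target-defined, so this is illustration only.  */
#if 0
#include <assert.h>
#include <string.h>

struct s { unsigned pad : 3; unsigned f : 4; unsigned rest : 25; };

int
main (void)
{
  struct s x = { 5, 9, 0 };
  unsigned word;
  memcpy (&word, &x, sizeof word);
  /* On the assumed layout, x.f == 9 <=> (word & 0x78) == 9 << 3.  */
  assert ((x.f == 9) == ((word & (0xfu << 3)) == (9u << 3)));
  return 0;
}
#endif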
3506 /* Subroutine for fold_truthop: decode a field reference.
3508 If EXP is a comparison reference, we return the innermost reference.
3510 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3511 set to the starting bit number.
3513 If the innermost field can be completely contained in a mode-sized
3514 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3516 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3517 otherwise it is not changed.
3519 *PUNSIGNEDP is set to the signedness of the field.
3521 *PMASK is set to the mask used. This is either contained in a
3522 BIT_AND_EXPR or derived from the width of the field.
3524 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3526 Return 0 if this is not a component reference or is one that we can't
3527 do anything with. */
3530 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3531 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3532 int *punsignedp, int *pvolatilep,
3533 tree *pmask, tree *pand_mask)
3535 tree outer_type = 0;
3537 tree mask, inner, offset;
3539 unsigned int precision;
3541 /* All the optimizations using this function assume integer fields.
3542 There are problems with FP fields since the type_for_size call
3543 below can fail for, e.g., XFmode. */
3544 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3547 /* We are interested in the bare arrangement of bits, so strip everything
3548 that doesn't affect the machine mode. However, record the type of the
3549 outermost expression if it may matter below. */
3550 if (CONVERT_EXPR_P (exp)
3551 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3552 outer_type = TREE_TYPE (exp);
3555 if (TREE_CODE (exp) == BIT_AND_EXPR)
3557 and_mask = TREE_OPERAND (exp, 1);
3558 exp = TREE_OPERAND (exp, 0);
3559 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3560 if (TREE_CODE (and_mask) != INTEGER_CST)
3564 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3565 punsignedp, pvolatilep, false);
3566 if ((inner == exp && and_mask == 0)
3567 || *pbitsize < 0 || offset != 0
3568 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3571 /* If the number of bits in the reference is the same as the bitsize of
3572 the outer type, then the outer type gives the signedness. Otherwise
3573 (in case of a small bitfield) the signedness is unchanged. */
3574 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3575 *punsignedp = TYPE_UNSIGNED (outer_type);
3577 /* Compute the mask to access the bitfield. */
3578 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3579 precision = TYPE_PRECISION (unsigned_type);
3581 mask = build_int_cst_type (unsigned_type, -1);
3583 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3584 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3586 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3588 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3589 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3592 *pand_mask = and_mask;
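/* The shift pair above, carried out in UNSIGNED_TYPE, yields a mask
   of *PBITSIZE low-order ones; for bitsize < precision that is just
   (1 << bitsize) - 1.  A host-side check, with 32-bit unsigned
   arithmetic standing in for the tree arithmetic:  */
#if 0
#include <assert.h>

int
main (void)
{
  const unsigned precision = 32;
  for (unsigned bitsize = 1; bitsize < precision; bitsize++)
    {
      unsigned mask = ~0u;
      mask <<= precision - bitsize;
      mask >>= precision - bitsize;
      assert (mask == (1u << bitsize) - 1);
    }
  return 0;
}
#endif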
3596 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3600 all_ones_mask_p (const_tree mask, int size)
3602 tree type = TREE_TYPE (mask);
3603 unsigned int precision = TYPE_PRECISION (type);
3606 tmask = build_int_cst_type (signed_type_for (type), -1);
3609 tree_int_cst_equal (mask,
3610 const_binop (RSHIFT_EXPR,
3611 const_binop (LSHIFT_EXPR, tmask,
3612 size_int (precision - size)),
3613 size_int (precision - size)));
3616 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3617 represents the sign bit of EXP's type. If EXP represents a sign
3618 or zero extension, also test VAL against the unextended type.
3619 The return value is the (sub)expression whose sign bit is VAL,
3620 or NULL_TREE otherwise. */
3623 sign_bit_p (tree exp, const_tree val)
3625 unsigned HOST_WIDE_INT mask_lo, lo;
3626 HOST_WIDE_INT mask_hi, hi;
3630 /* Tree EXP must have an integral type. */
3631 t = TREE_TYPE (exp);
3632 if (! INTEGRAL_TYPE_P (t))
3635 /* Tree VAL must be an integer constant. */
3636 if (TREE_CODE (val) != INTEGER_CST
3637 || TREE_OVERFLOW (val))
3640 width = TYPE_PRECISION (t);
3641 if (width > HOST_BITS_PER_WIDE_INT)
3643 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3646 mask_hi = ((unsigned HOST_WIDE_INT) -1
3647 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3653 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3656 mask_lo = ((unsigned HOST_WIDE_INT) -1
3657 >> (HOST_BITS_PER_WIDE_INT - width));
3660 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3661 treat VAL as if it were unsigned. */
3662 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3663 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3666 /* Handle extension from a narrower type. */
3667 if (TREE_CODE (exp) == NOP_EXPR
3668 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3669 return sign_bit_p (TREE_OPERAND (exp, 0), val);
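/* Host-side picture of the test above, for the single-word case: the
   sign bit of a width-W type is 1 << (W - 1), and masking off the
   bits beyond W lets a sign-extended constant be compared as if it
   were unsigned.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const unsigned width = 16;
  uint64_t lo = (uint64_t) 1 << (width - 1);		/* 0x8000 */
  uint64_t mask_lo = ~(uint64_t) 0 >> (64 - width);	/* 0xffff */
  int16_t v = INT16_MIN;	/* sign bit set, sign-extended on the host */
  assert (((uint64_t) (int64_t) v & mask_lo) == lo);
  return 0;
}
#endif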
3674 /* Subroutine for fold_truthop: determine if an operand is simple enough
3675 to be evaluated unconditionally. */
3678 simple_operand_p (const_tree exp)
3680 /* Strip any conversions that don't change the machine mode. */
3683 return (CONSTANT_CLASS_P (exp)
3684 || TREE_CODE (exp) == SSA_NAME
3686 && ! TREE_ADDRESSABLE (exp)
3687 && ! TREE_THIS_VOLATILE (exp)
3688 && ! DECL_NONLOCAL (exp)
3689 /* Don't regard global variables as simple. They may be
3690 allocated in ways unknown to the compiler (shared memory,
3691 #pragma weak, etc). */
3692 && ! TREE_PUBLIC (exp)
3693 && ! DECL_EXTERNAL (exp)
3694 /* Loading a static variable is unduly expensive, but global
3695 registers aren't expensive. */
3696 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3699 /* The following functions are subroutines to fold_range_test and allow it to
3700 try to change a logical combination of comparisons into a range test.
3703 X == 2 || X == 3 || X == 4 || X == 5
3707 (unsigned) (X - 2) <= 3
3709 We describe each set of comparisons as being either inside or outside
3710 a range, using a variable named like IN_P, and then describe the
3711 range with a lower and upper bound. If one of the bounds is omitted,
3712 it represents either the highest or lowest value of the type.
3714 In the comments below, we represent a range by two numbers in brackets
3715 preceded by a "+" to designate being inside that range, or a "-" to
3716 designate being outside that range, so the condition can be inverted by
3717 flipping the prefix. An omitted bound is represented by a "-". For
3718 example, "- [-, 10]" means being outside the range starting at the lowest
3719 possible value and ending at 10, in other words, being greater than 10.
3720 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3723 We set up things so that the missing bounds are handled in a consistent
3724 manner so neither a missing bound nor "true" and "false" need to be
3725 handled using a special case. */
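/* An exhaustive host-side check of the example above: for every
   16-bit X, the chain of equality tests and the single unsigned
   range test agree.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -32768; x <= 32767; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = ((unsigned) (x - 2) <= 3);
      assert (chain == range);
    }
  return 0;
}
#endif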
3727 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3728 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3729 and UPPER1_P are nonzero if the respective argument is an upper bound
3730 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3731 must be specified for a comparison. ARG1 will be converted to ARG0's
3732 type if both are specified. */
3735 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3736 tree arg1, int upper1_p)
3742 /* If neither arg represents infinity, do the normal operation.
3743 Else, if not a comparison, return infinity. Else handle the special
3744 comparison rules. Note that most of the cases below won't occur, but
3745 are handled for consistency. */
3747 if (arg0 != 0 && arg1 != 0)
3749 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3750 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3752 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3755 if (TREE_CODE_CLASS (code) != tcc_comparison)
3758 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3759 for neither. In real maths, we cannot assume open ended ranges are
3760 the same. But, this is computer arithmetic, where numbers are finite.
3761 We can therefore make the transformation of any unbounded range with
3762 the value Z, Z being greater than any representable number. This permits
3763 us to treat unbounded ranges as equal. */
3764 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3765 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3769 result = sgn0 == sgn1;
3772 result = sgn0 != sgn1;
3775 result = sgn0 < sgn1;
3778 result = sgn0 <= sgn1;
3781 result = sgn0 > sgn1;
3784 result = sgn0 >= sgn1;
3790 return constant_boolean_node (result, type);
3793 /* Given EXP, a logical expression, set the range it is testing into
3794 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3795 actually being tested. *PLOW and *PHIGH will be made of the same
3796 type as the returned expression. If EXP is not a comparison, we
3797 will most likely not be returning a useful value and range. Set
3798 *STRICT_OVERFLOW_P to true if the return value is only valid
3799 because signed overflow is undefined; otherwise, do not change
3800 *STRICT_OVERFLOW_P. */
3803 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3804 bool *strict_overflow_p)
3806 enum tree_code code;
3807 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3808 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3810 tree low, high, n_low, n_high;
3811 location_t loc = EXPR_LOCATION (exp);
3813 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3814 and see if we can refine the range. Some of the cases below may not
3815 happen, but it doesn't seem worth worrying about this. We "continue"
3816 the outer loop when we've changed something; otherwise we "break"
3817 the switch, which will "break" the while. */
3820 low = high = build_int_cst (TREE_TYPE (exp), 0);
3824 code = TREE_CODE (exp);
3825 exp_type = TREE_TYPE (exp);
3827 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3829 if (TREE_OPERAND_LENGTH (exp) > 0)
3830 arg0 = TREE_OPERAND (exp, 0);
3831 if (TREE_CODE_CLASS (code) == tcc_comparison
3832 || TREE_CODE_CLASS (code) == tcc_unary
3833 || TREE_CODE_CLASS (code) == tcc_binary)
3834 arg0_type = TREE_TYPE (arg0);
3835 if (TREE_CODE_CLASS (code) == tcc_binary
3836 || TREE_CODE_CLASS (code) == tcc_comparison
3837 || (TREE_CODE_CLASS (code) == tcc_expression
3838 && TREE_OPERAND_LENGTH (exp) > 1))
3839 arg1 = TREE_OPERAND (exp, 1);
3844 case TRUTH_NOT_EXPR:
3845 in_p = ! in_p, exp = arg0;
3848 case EQ_EXPR: case NE_EXPR:
3849 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3850 /* We can only do something if the range is testing for zero
3851 and if the second operand is an integer constant. Note that
3852 saying something is "in" the range we make is done by
3853 complementing IN_P since it will be set in the initial case of
3854 being not equal to zero; "out" is leaving it alone. */
3855 if (low == 0 || high == 0
3856 || ! integer_zerop (low) || ! integer_zerop (high)
3857 || TREE_CODE (arg1) != INTEGER_CST)
3862 case NE_EXPR: /* - [c, c] */
3865 case EQ_EXPR: /* + [c, c] */
3866 in_p = ! in_p, low = high = arg1;
3868 case GT_EXPR: /* - [-, c] */
3869 low = 0, high = arg1;
3871 case GE_EXPR: /* + [c, -] */
3872 in_p = ! in_p, low = arg1, high = 0;
3874 case LT_EXPR: /* - [c, -] */
3875 low = arg1, high = 0;
3877 case LE_EXPR: /* + [-, c] */
3878 in_p = ! in_p, low = 0, high = arg1;
3884 /* If this is an unsigned comparison, we also know that EXP is
3885 greater than or equal to zero. We base the range tests we make
3886 on that fact, so we record it here so we can parse existing
3887 range tests. We test arg0_type since often the return type
3888 of, e.g. EQ_EXPR, is boolean. */
3889 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3891 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3893 build_int_cst (arg0_type, 0),
3897 in_p = n_in_p, low = n_low, high = n_high;
3899 /* If the high bound is missing, but we have a nonzero low
3900 bound, reverse the range so it goes from zero to the low bound
3902 if (high == 0 && low && ! integer_zerop (low))
3905 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3906 integer_one_node, 0);
3907 low = build_int_cst (arg0_type, 0);
3915 /* (-x) IN [a,b] -> x in [-b, -a] */
3916 n_low = range_binop (MINUS_EXPR, exp_type,
3917 build_int_cst (exp_type, 0),
3919 n_high = range_binop (MINUS_EXPR, exp_type,
3920 build_int_cst (exp_type, 0),
3922 if (n_high != 0 && TREE_OVERFLOW (n_high))
3928 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3929 build_int_cst (exp_type, 1));
3932 case PLUS_EXPR: case MINUS_EXPR:
3933 if (TREE_CODE (arg1) != INTEGER_CST)
3936 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3937 move a constant to the other side. */
3938 if (!TYPE_UNSIGNED (arg0_type)
3939 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3942 /* If EXP is signed, any overflow in the computation is undefined,
3943 so we don't worry about it so long as our computations on
3944 the bounds don't overflow. For unsigned, overflow is defined
3945 and this is exactly the right thing. */
3946 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3947 arg0_type, low, 0, arg1, 0);
3948 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3949 arg0_type, high, 1, arg1, 0);
3950 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3951 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3954 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3955 *strict_overflow_p = true;
3958 /* Check for an unsigned range which has wrapped around the maximum
3959 value thus making n_high < n_low, and normalize it. */
3960 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3962 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3963 integer_one_node, 0);
3964 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3965 integer_one_node, 0);
3967 /* If the range is of the form +/- [ x+1, x ], we won't
3968 be able to normalize it. But then, it represents the
3969 whole range or the empty set, so make it
3971 if (tree_int_cst_equal (n_low, low)
3972 && tree_int_cst_equal (n_high, high))
3978 low = n_low, high = n_high;
3983 CASE_CONVERT: case NON_LVALUE_EXPR:
3984 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3987 if (! INTEGRAL_TYPE_P (arg0_type)
3988 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3989 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3992 n_low = low, n_high = high;
3995 n_low = fold_convert_loc (loc, arg0_type, n_low);
3998 n_high = fold_convert_loc (loc, arg0_type, n_high);
4001 /* If we're converting arg0 from an unsigned type to exp's
4002 signed type, we will be doing the comparison as unsigned.
4003 The tests above have already verified that LOW and HIGH
4006 So we have to ensure that we will handle large unsigned
4007 values the same way that the current signed bounds treat
4010 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4014 /* For fixed-point modes, we need to pass the saturating flag
4015 as the 2nd parameter. */
4016 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4017 equiv_type = lang_hooks.types.type_for_mode
4018 (TYPE_MODE (arg0_type),
4019 TYPE_SATURATING (arg0_type));
4021 equiv_type = lang_hooks.types.type_for_mode
4022 (TYPE_MODE (arg0_type), 1);
4024 /* A range without an upper bound is, naturally, unbounded.
4025 Since convert would have cropped a very large value, use
4026 the max value for the destination type. */
4028 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4029 : TYPE_MAX_VALUE (arg0_type);
4031 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4032 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4033 fold_convert_loc (loc, arg0_type,
4035 build_int_cst (arg0_type, 1));
4037 /* If the low bound is specified, "and" the range with the
4038 range for which the original unsigned value will be
4042 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4043 1, n_low, n_high, 1,
4044 fold_convert_loc (loc, arg0_type,
4049 in_p = (n_in_p == in_p);
4053 /* Otherwise, "or" the range with the range of the input
4054 that will be interpreted as negative. */
4055 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4056 0, n_low, n_high, 1,
4057 fold_convert_loc (loc, arg0_type,
4062 in_p = (in_p != n_in_p);
4067 low = n_low, high = n_high;
4077 /* If EXP is a constant, we can evaluate whether this is true or false. */
4078 if (TREE_CODE (exp) == INTEGER_CST)
4080 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4082 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4088 *pin_p = in_p, *plow = low, *phigh = high;
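/* A concrete instance of the PLUS_EXPR bookkeeping above: on an
   unsigned 8-bit X the test "(x + 10) <= 20" wraps, giving the range
   [246, 10] with n_high < n_low; normalizing it as above produces
   "outside [11, 245]".  Checked exhaustively on the host:  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned i = 0; i < 256; i++)
    {
      uint8_t x = (uint8_t) i;
      int direct = (uint8_t) (x + 10) <= 20;
      int normalized = ! (x >= 11 && x <= 245);
      assert (direct == normalized);
    }
  return 0;
}
#endif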
4092 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4093 type, TYPE, return an expression to test if EXP is in (or out of, depending
4094 on IN_P) the range. Return 0 if the test couldn't be created. */
4097 build_range_check (location_t loc, tree type, tree exp, int in_p,
4098 tree low, tree high)
4100 tree etype = TREE_TYPE (exp), value;
4102 #ifdef HAVE_canonicalize_funcptr_for_compare
4103 /* Disable this optimization for function pointer expressions
4104 on targets that require function pointer canonicalization. */
4105 if (HAVE_canonicalize_funcptr_for_compare
4106 && TREE_CODE (etype) == POINTER_TYPE
4107 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4113 value = build_range_check (loc, type, exp, 1, low, high);
4115 return invert_truthvalue_loc (loc, value);
4120 if (low == 0 && high == 0)
4121 return build_int_cst (type, 1);
4124 return fold_build2_loc (loc, LE_EXPR, type, exp,
4125 fold_convert_loc (loc, etype, high));
4128 return fold_build2_loc (loc, GE_EXPR, type, exp,
4129 fold_convert_loc (loc, etype, low));
4131 if (operand_equal_p (low, high, 0))
4132 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4133 fold_convert_loc (loc, etype, low));
4135 if (integer_zerop (low))
4137 if (! TYPE_UNSIGNED (etype))
4139 etype = unsigned_type_for (etype);
4140 high = fold_convert_loc (loc, etype, high);
4141 exp = fold_convert_loc (loc, etype, exp);
4143 return build_range_check (loc, type, exp, 1, 0, high);
4146 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4147 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4149 unsigned HOST_WIDE_INT lo;
4153 prec = TYPE_PRECISION (etype);
4154 if (prec <= HOST_BITS_PER_WIDE_INT)
4157 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4161 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4162 lo = (unsigned HOST_WIDE_INT) -1;
4165 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4167 if (TYPE_UNSIGNED (etype))
4169 tree signed_etype = signed_type_for (etype);
4170 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4172 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4174 etype = signed_etype;
4175 exp = fold_convert_loc (loc, etype, exp);
4177 return fold_build2_loc (loc, GT_EXPR, type, exp,
4178 build_int_cst (etype, 0));
4182 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4183 This requires wrap-around arithmetic for the type of the expression.
4184 First make sure that arithmetic in this type is valid, then make sure
4185 that it wraps around. */
4186 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4187 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4188 TYPE_UNSIGNED (etype));
4190 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4192 tree utype, minv, maxv;
4194 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4195 for the type in question, as we rely on this here. */
4196 utype = unsigned_type_for (etype);
4197 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4198 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4199 integer_one_node, 1);
4200 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4202 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4209 high = fold_convert_loc (loc, etype, high);
4210 low = fold_convert_loc (loc, etype, low);
4211 exp = fold_convert_loc (loc, etype, exp);
4213 value = const_binop (MINUS_EXPR, high, low);
4216 if (POINTER_TYPE_P (etype))
4218 if (value != 0 && !TREE_OVERFLOW (value))
4220 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4221 return build_range_check (loc, type,
4222 fold_build_pointer_plus_loc (loc, exp, low),
4223 1, build_int_cst (etype, 0), value);
4228 if (value != 0 && !TREE_OVERFLOW (value))
4229 return build_range_check (loc, type,
4230 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4231 1, build_int_cst (etype, 0), value);
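/* The "(c >= 1) && (c <= 127)" special case above, checked
   exhaustively for 8-bit C (assuming the usual wrapping conversion
   from unsigned to signed char): the pair of comparisons and the
   single signed test agree.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned i = 0; i < 256; i++)
    {
      uint8_t c = (uint8_t) i;
      int pair = (c >= 1 && c <= 127);
      int single = ((int8_t) c > 0);
      assert (pair == single);
    }
  return 0;
}
#endif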
4236 /* Return the predecessor of VAL in its type, handling the infinite case. */
4239 range_predecessor (tree val)
4241 tree type = TREE_TYPE (val);
4243 if (INTEGRAL_TYPE_P (type)
4244 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4247 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4250 /* Return the successor of VAL in its type, handling the infinite case. */
4253 range_successor (tree val)
4255 tree type = TREE_TYPE (val);
4257 if (INTEGRAL_TYPE_P (type)
4258 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4261 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4264 /* Given two ranges, see if we can merge them into one. Return 1 if we
4265 can, 0 if we can't. Set the output range into the specified parameters. */
4268 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4269 tree high0, int in1_p, tree low1, tree high1)
4277 int lowequal = ((low0 == 0 && low1 == 0)
4278 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4279 low0, 0, low1, 0)));
4280 int highequal = ((high0 == 0 && high1 == 0)
4281 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4282 high0, 1, high1, 1)));
4284 /* Make range 0 be the range that starts first, or ends last if they
4285 start at the same value. Swap them if it isn't. */
4286 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4289 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4290 high1, 1, high0, 1))))
4292 temp = in0_p, in0_p = in1_p, in1_p = temp;
4293 tem = low0, low0 = low1, low1 = tem;
4294 tem = high0, high0 = high1, high1 = tem;
4297 /* Now flag two cases, whether the ranges are disjoint or whether the
4298 second range is totally subsumed in the first. Note that the tests
4299 below are simplified by the ones above. */
4300 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4301 high0, 1, low1, 0));
4302 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4303 high1, 1, high0, 1));
4305 /* We now have four cases, depending on whether we are including or
4306 excluding the two ranges. */
4309 /* If they don't overlap, the result is false. If the second range
4310 is a subset it is the result. Otherwise, the range is from the start
4311 of the second to the end of the first. */
4313 in_p = 0, low = high = 0;
4315 in_p = 1, low = low1, high = high1;
4317 in_p = 1, low = low1, high = high0;
4320 else if (in0_p && ! in1_p)
4322 /* If they don't overlap, the result is the first range. If they are
4323 equal, the result is false. If the second range is a subset of the
4324 first, and the ranges begin at the same place, we go from just after
4325 the end of the second range to the end of the first. If the second
4326 range is not a subset of the first, or if it is a subset and both
4327 ranges end at the same place, the range starts at the start of the
4328 first range and ends just before the second range.
4329 Otherwise, we can't describe this as a single range. */
4331 in_p = 1, low = low0, high = high0;
4332 else if (lowequal && highequal)
4333 in_p = 0, low = high = 0;
4334 else if (subset && lowequal)
4336 low = range_successor (high1);
4341 /* We are in the weird situation where high0 > high1 but
4342 high1 has no successor. Punt. */
4346 else if (! subset || highequal)
4349 high = range_predecessor (low1);
4353 /* low0 < low1 but low1 has no predecessor. Punt. */
4361 else if (! in0_p && in1_p)
4363 /* If they don't overlap, the result is the second range. If the second
4364 is a subset of the first, the result is false. Otherwise,
4365 the range starts just after the first range and ends at the
4366 end of the second. */
4368 in_p = 1, low = low1, high = high1;
4369 else if (subset || highequal)
4370 in_p = 0, low = high = 0;
4373 low = range_successor (high0);
4378 /* high1 > high0 but high0 has no successor. Punt. */
4386 /* The case where we are excluding both ranges. Here the complex case
4387 is if they don't overlap. In that case, the only time we have a
4388 range is if they are adjacent. If the second is a subset of the
4389 first, the result is the first. Otherwise, the range to exclude
4390 starts at the beginning of the first range and ends at the end of the
4394 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4395 range_successor (high0),
4397 in_p = 0, low = low0, high = high1;
4400 /* Canonicalize - [min, x] into - [-, x]. */
4401 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4402 switch (TREE_CODE (TREE_TYPE (low0)))
4405 if (TYPE_PRECISION (TREE_TYPE (low0))
4406 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4410 if (tree_int_cst_equal (low0,
4411 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4415 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4416 && integer_zerop (low0))
4423 /* Canonicalize - [x, max] into - [x, -]. */
4424 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4425 switch (TREE_CODE (TREE_TYPE (high1)))
4428 if (TYPE_PRECISION (TREE_TYPE (high1))
4429 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4433 if (tree_int_cst_equal (high1,
4434 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4438 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4439 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4441 integer_one_node, 1)))
4448 /* The ranges might also be adjacent between the maximum and
4449 minimum values of the given type. For
4450 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4451 return + [x + 1, y - 1]. */
4452 if (low0 == 0 && high1 == 0)
4454 low = range_successor (high0);
4455 high = range_predecessor (low1);
4456 if (low == 0 || high == 0)
4466 in_p = 0, low = low0, high = high0;
4468 in_p = 0, low = low0, high = high1;
4471 *pin_p = in_p, *plow = low, *phigh = high;
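/* Editorial aside -- a sketch (not GCC code) of the kind of merge
   performed above.  Two adjacent inclusive ranges combine into one, so
   a test such as (c >= 0 && c <= 4) || (c >= 5 && c <= 9) becomes a
   single range test.  The function name is hypothetical.  */
static int
example_merged_ranges (int c)
{
  /* [0, 4] and [5, 9] are adjacent, so the merged range is [0, 9].  */
  return c >= 0 && c <= 9;
}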
4476 /* Subroutine of fold, looking inside expressions of the form
4477 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4478 of the COND_EXPR. This function is being used also to optimize
4479 A op B ? C : A, by reversing the comparison first.
4481 Return a folded expression whose code is not a COND_EXPR
4482 anymore, or NULL_TREE if no folding opportunity is found. */
4485 fold_cond_expr_with_comparison (location_t loc, tree type,
4486 tree arg0, tree arg1, tree arg2)
4488 enum tree_code comp_code = TREE_CODE (arg0);
4489 tree arg00 = TREE_OPERAND (arg0, 0);
4490 tree arg01 = TREE_OPERAND (arg0, 1);
4491 tree arg1_type = TREE_TYPE (arg1);
4497 /* If we have A op 0 ? A : -A, consider applying the following
4500 A == 0? A : -A same as -A
4501 A != 0? A : -A same as A
4502 A >= 0? A : -A same as abs (A)
4503 A > 0? A : -A same as abs (A)
4504 A <= 0? A : -A same as -abs (A)
4505 A < 0? A : -A same as -abs (A)
4507 None of these transformations work for modes with signed
4508 zeros. If A is +/-0, the first two transformations will
4509 change the sign of the result (from +0 to -0, or vice
4510 versa). The last four will fix the sign of the result,
4511 even though the original expressions could be positive or
4512 negative, depending on the sign of A.
4514 Note that all these transformations are correct if A is
4515 NaN, since the two alternatives (A and -A) are also NaNs. */
4516 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4517 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4518 ? real_zerop (arg01)
4519 : integer_zerop (arg01))
4520 && ((TREE_CODE (arg2) == NEGATE_EXPR
4521 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4522 /* In the case that A is of the form X-Y, '-A' (arg2) may
4523 have already been folded to Y-X, check for that. */
4524 || (TREE_CODE (arg1) == MINUS_EXPR
4525 && TREE_CODE (arg2) == MINUS_EXPR
4526 && operand_equal_p (TREE_OPERAND (arg1, 0),
4527 TREE_OPERAND (arg2, 1), 0)
4528 && operand_equal_p (TREE_OPERAND (arg1, 1),
4529 TREE_OPERAND (arg2, 0), 0))))
4534 tem = fold_convert_loc (loc, arg1_type, arg1);
4535 return pedantic_non_lvalue_loc (loc,
4536 fold_convert_loc (loc, type,
4537 negate_expr (tem)));
4540 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4543 if (flag_trapping_math)
4548 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4549 arg1 = fold_convert_loc (loc, signed_type_for
4550 (TREE_TYPE (arg1)), arg1);
4551 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4552 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4555 if (flag_trapping_math)
4559 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4560 arg1 = fold_convert_loc (loc, signed_type_for
4561 (TREE_TYPE (arg1)), arg1);
4562 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4563 return negate_expr (fold_convert_loc (loc, type, tem));
4565 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4569 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4570 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4571 both transformations are correct when A is NaN: A != 0
4572 is then true, and A == 0 is false. */
4574 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4575 && integer_zerop (arg01) && integer_zerop (arg2))
4577 if (comp_code == NE_EXPR)
4578 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4579 else if (comp_code == EQ_EXPR)
4580 return build_int_cst (type, 0);
4583 /* Try some transformations of A op B ? A : B.
4585 A == B? A : B same as B
4586 A != B? A : B same as A
4587 A >= B? A : B same as max (A, B)
4588 A > B? A : B same as max (B, A)
4589 A <= B? A : B same as min (A, B)
4590 A < B? A : B same as min (B, A)
4592 As above, these transformations don't work in the presence
4593 of signed zeros. For example, if A and B are zeros of
4594 opposite sign, the first two transformations will change
4595 the sign of the result. In the last four, the original
4596 expressions give different results for (A=+0, B=-0) and
4597 (A=-0, B=+0), but the transformed expressions do not.
4599 The first two transformations are correct if either A or B
4600 is a NaN. In the first transformation, the condition will
4601 be false, and B will indeed be chosen. In the case of the
4602 second transformation, the condition A != B will be true,
4603 and A will be chosen.
4605 The conversions to max() and min() are not correct if B is
4606 a number and A is not. The conditions in the original
4607 expressions will be false, so all four give B. The min()
4608 and max() versions would give a NaN instead. */
4609 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4610 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4611 /* Avoid these transformations if the COND_EXPR may be used
4612 as an lvalue in the C++ front-end. PR c++/19199. */
4614 || (strcmp (lang_hooks.name, "GNU C++") != 0
4615 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4616 || ! maybe_lvalue_p (arg1)
4617 || ! maybe_lvalue_p (arg2)))
4619 tree comp_op0 = arg00;
4620 tree comp_op1 = arg01;
4621 tree comp_type = TREE_TYPE (comp_op0);
4623 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4624 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4634 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4636 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4641 /* In C++ a ?: expression can be an lvalue, so put the
4642 operand which will be used if they are equal first
4643 so that we can convert this back to the
4644 corresponding COND_EXPR. */
4645 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4647 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4648 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4649 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4650 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4651 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4652 comp_op1, comp_op0);
4653 return pedantic_non_lvalue_loc (loc,
4654 fold_convert_loc (loc, type, tem));
4661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4663 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4664 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4665 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4666 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4667 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4668 comp_op1, comp_op0);
4669 return pedantic_non_lvalue_loc (loc,
4670 fold_convert_loc (loc, type, tem));
4674 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4675 return pedantic_non_lvalue_loc (loc,
4676 fold_convert_loc (loc, type, arg2));
4679 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 return pedantic_non_lvalue_loc (loc,
4681 fold_convert_loc (loc, type, arg1));
4684 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4689 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4690 we might still be able to simplify this. For example,
4691 if C1 is one less or one more than C2, this might have started
4692 out as a MIN or MAX and been transformed by this function.
4693 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4695 if (INTEGRAL_TYPE_P (type)
4696 && TREE_CODE (arg01) == INTEGER_CST
4697 && TREE_CODE (arg2) == INTEGER_CST)
4701 if (TREE_CODE (arg1) == INTEGER_CST)
4703 /* We can replace A with C1 in this case. */
4704 arg1 = fold_convert_loc (loc, type, arg01);
4705 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4708 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4709 MIN_EXPR, to preserve the signedness of the comparison. */
4710 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4712 && operand_equal_p (arg01,
4713 const_binop (PLUS_EXPR, arg2,
4714 build_int_cst (type, 1)),
4717 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4718 fold_convert_loc (loc, TREE_TYPE (arg00),
4720 return pedantic_non_lvalue_loc (loc,
4721 fold_convert_loc (loc, type, tem));
4726 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4728 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4730 && operand_equal_p (arg01,
4731 const_binop (MINUS_EXPR, arg2,
4732 build_int_cst (type, 1)),
4735 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4736 fold_convert_loc (loc, TREE_TYPE (arg00),
4738 return pedantic_non_lvalue_loc (loc,
4739 fold_convert_loc (loc, type, tem));
4744 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4745 MAX_EXPR, to preserve the signedness of the comparison. */
4746 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4748 && operand_equal_p (arg01,
4749 const_binop (MINUS_EXPR, arg2,
4750 build_int_cst (type, 1)),
4753 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4754 fold_convert_loc (loc, TREE_TYPE (arg00),
4756 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4761 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4762 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4764 && operand_equal_p (arg01,
4765 const_binop (PLUS_EXPR, arg2,
4766 build_int_cst (type, 1)),
4769 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4770 fold_convert_loc (loc, TREE_TYPE (arg00),
4772 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
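/* Editorial aside -- a hypothetical source-level view (not GCC code) of
   the C1 == C2 + 1 case handled just above.  Since x < 5 tests the same
   thing as x <= 4, the conditional is really MIN (x, 4) in disguise.  */
static int
example_hidden_min (int x)
{
  /* x < 5 ? x : 4 yields x when x <= 4 and 4 otherwise, i.e. MIN (x, 4).  */
  return x < 5 ? x : 4;
}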
4786 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4787 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4788 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4792 /* EXP is some logical combination of boolean tests. See if we can
4793 merge it into some range test. Return the new tree if so. */
4796 fold_range_test (location_t loc, enum tree_code code, tree type,
4799 int or_op = (code == TRUTH_ORIF_EXPR
4800 || code == TRUTH_OR_EXPR);
4801 int in0_p, in1_p, in_p;
4802 tree low0, low1, low, high0, high1, high;
4803 bool strict_overflow_p = false;
4804 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4805 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4807 const char * const warnmsg = G_("assuming signed overflow does not occur "
4808 "when simplifying range test");
4810 /* If this is an OR operation, invert both sides; we will invert
4811 again at the end. */
4813 in0_p = ! in0_p, in1_p = ! in1_p;
4815 /* If both expressions are the same, if we can merge the ranges, and we
4816 can build the range test, return it or it inverted. If one of the
4817 ranges is always true or always false, consider it to be the same
4818 expression as the other. */
4819 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4820 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4822 && 0 != (tem = (build_range_check (loc, type,
4824 : rhs != 0 ? rhs : integer_zero_node,
4827 if (strict_overflow_p)
4828 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4829 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4832 /* On machines where branches are expensive, if this is a
4833 short-circuited branch and the underlying object on both sides
4834 is the same, make a non-short-circuit operation. */
4835 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4836 && lhs != 0 && rhs != 0
4837 && (code == TRUTH_ANDIF_EXPR
4838 || code == TRUTH_ORIF_EXPR)
4839 && operand_equal_p (lhs, rhs, 0))
4841 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4842 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4843 which cases we can't do this. */
4844 if (simple_operand_p (lhs))
4845 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4846 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4849 else if (!lang_hooks.decls.global_bindings_p ()
4850 && !CONTAINS_PLACEHOLDER_P (lhs))
4852 tree common = save_expr (lhs);
4854 if (0 != (lhs = build_range_check (loc, type, common,
4855 or_op ? ! in0_p : in0_p,
4857 && (0 != (rhs = build_range_check (loc, type, common,
4858 or_op ? ! in1_p : in1_p,
4861 if (strict_overflow_p)
4862 fold_overflow_warning (warnmsg,
4863 WARN_STRICT_OVERFLOW_COMPARISON);
4864 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4865 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
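/* Editorial aside -- a concrete instance (not GCC code) of the range
   tests merged by fold_range_test above.  The function name is
   hypothetical; the folded form relies on the same unsigned-subtraction
   trick as build_range_check.  */
static int
example_is_digit (unsigned char ch)
{
  /* ch >= '0' && ch <= '9' folds to a single unsigned comparison.  */
  return (unsigned int) ch - '0' <= 9u;
}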
4874 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4875 bit value. Arrange things so the extra bits will be set to zero if and
4876 only if C is sign-extended to its full width. If MASK is nonzero,
4877 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4880 unextend (tree c, int p, int unsignedp, tree mask)
4882 tree type = TREE_TYPE (c);
4883 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4886 if (p == modesize || unsignedp)
4889 /* We work by getting just the sign bit into the low-order bit, then
4890 into the high-order bit, then sign-extend. We then XOR that value
4892 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4893 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4895 /* We must use a signed type in order to get an arithmetic right shift.
4896 However, we must also avoid introducing accidental overflows, so that
4897 a subsequent call to integer_zerop will work. Hence we must
4898 do the type conversion here. At this point, the constant is either
4899 zero or one, and the conversion to a signed type can never overflow.
4900 We could get an overflow if this conversion is done anywhere else. */
4901 if (TYPE_UNSIGNED (type))
4902 temp = fold_convert (signed_type_for (type), temp);
4904 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4905 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4907 temp = const_binop (BIT_AND_EXPR, temp,
4908 fold_convert (TREE_TYPE (c), mask));
4909 /* If necessary, convert the type back to match the type of C. */
4910 if (TYPE_UNSIGNED (type))
4911 temp = fold_convert (type, temp);
4913 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
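/* Editorial aside -- a portable sketch (not GCC code) of the
   sign-extension idiom that unextend builds at tree level.  The helper
   is hypothetical and assumes 1 <= P <= 31 with C fitting in P bits;
   note the XOR flavor, which parallels the BIT_XOR_EXPR above.  */
static int
example_sign_extend (unsigned int c, int p)
{
  unsigned int sign = 1u << (p - 1);	/* sign bit of the P-bit field */
  /* Flipping the sign bit and subtracting it back propagates that bit
     through all higher-order positions.  */
  return (int) (c ^ sign) - (int) sign;
}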
4916 /* For an expression that has the form
4920 we can drop one of the inner expressions and simplify to
4924 LOC is the location of the resulting expression. OP is the inner
4925 logical operation; the left-hand side in the examples above, while CMPOP
4926 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4927 removing a condition that guards another, as in
4928 (A != NULL && A->...) || A == NULL
4929 which we must not transform. If RHS_ONLY is true, only eliminate the
4930 right-most operand of the inner logical operation. */
4933 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4936 tree type = TREE_TYPE (cmpop);
4937 enum tree_code code = TREE_CODE (cmpop);
4938 enum tree_code truthop_code = TREE_CODE (op);
4939 tree lhs = TREE_OPERAND (op, 0);
4940 tree rhs = TREE_OPERAND (op, 1);
4941 tree orig_lhs = lhs, orig_rhs = rhs;
4942 enum tree_code rhs_code = TREE_CODE (rhs);
4943 enum tree_code lhs_code = TREE_CODE (lhs);
4944 enum tree_code inv_code;
4946 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4949 if (TREE_CODE_CLASS (code) != tcc_comparison)
4952 if (rhs_code == truthop_code)
4954 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4955 if (newrhs != NULL_TREE)
4958 rhs_code = TREE_CODE (rhs);
4961 if (lhs_code == truthop_code && !rhs_only)
4963 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4964 if (newlhs != NULL_TREE)
4967 lhs_code = TREE_CODE (lhs);
4971 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4972 if (inv_code == rhs_code
4973 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4974 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4976 if (!rhs_only && inv_code == lhs_code
4977 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4978 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4980 if (rhs != orig_rhs || lhs != orig_lhs)
4981 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
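/* Editorial aside -- the simplification above on a concrete example
   (not GCC code; the function name is hypothetical).  In
   (x > 0 || y > 0) && x <= 0, the x > 0 arm of the inner || is the
   exact opposite of the outer comparison, so it can never be the
   reason the && succeeds and may be dropped.  */
static int
example_opposite_arm (int x, int y)
{
  /* (x > 0 || y > 0) && x <= 0 simplifies to (y > 0) && x <= 0.  */
  return y > 0 && x <= 0;
}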
4986 /* Find ways of folding logical expressions of LHS and RHS:
4987 Try to merge two comparisons to the same innermost item.
4988 Look for range tests like "ch >= '0' && ch <= '9'".
4989 Look for combinations of simple terms on machines with expensive branches
4990 and evaluate the RHS unconditionally.
4992 For example, if we have p->a == 2 && p->b == 4 and we can make an
4993 object large enough to span both A and B, we can do this with a comparison
4994 against the object ANDed with a mask.
4996 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4997 operations to do this with one comparison.
4999 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5000 function and the one above.
5002 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5003 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5005 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5008 We return the simplified tree or 0 if no optimization is possible. */
5011 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5014 /* If this is the "or" of two comparisons, we can do something if
5015 the comparisons are NE_EXPR. If this is the "and", we can do something
5016 if the comparisons are EQ_EXPR. I.e.,
5017 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5019 WANTED_CODE is this operation code. For single bit fields, we can
5020 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5021 comparison for one-bit fields. */
5023 enum tree_code wanted_code;
5024 enum tree_code lcode, rcode;
5025 tree ll_arg, lr_arg, rl_arg, rr_arg;
5026 tree ll_inner, lr_inner, rl_inner, rr_inner;
5027 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5028 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5029 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5030 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5031 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5032 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5033 enum machine_mode lnmode, rnmode;
5034 tree ll_mask, lr_mask, rl_mask, rr_mask;
5035 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5036 tree l_const, r_const;
5037 tree lntype, rntype, result;
5038 HOST_WIDE_INT first_bit, end_bit;
5040 tree orig_lhs = lhs, orig_rhs = rhs;
5041 enum tree_code orig_code = code;
5043 /* Start by getting the comparison codes. Fail if anything is volatile.
5044 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5045 it were surrounded with a NE_EXPR. */
5047 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5050 lcode = TREE_CODE (lhs);
5051 rcode = TREE_CODE (rhs);
5053 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5055 lhs = build2 (NE_EXPR, truth_type, lhs,
5056 build_int_cst (TREE_TYPE (lhs), 0));
5060 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5062 rhs = build2 (NE_EXPR, truth_type, rhs,
5063 build_int_cst (TREE_TYPE (rhs), 0));
5067 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5068 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5071 ll_arg = TREE_OPERAND (lhs, 0);
5072 lr_arg = TREE_OPERAND (lhs, 1);
5073 rl_arg = TREE_OPERAND (rhs, 0);
5074 rr_arg = TREE_OPERAND (rhs, 1);
5076 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5077 if (simple_operand_p (ll_arg)
5078 && simple_operand_p (lr_arg))
5080 if (operand_equal_p (ll_arg, rl_arg, 0)
5081 && operand_equal_p (lr_arg, rr_arg, 0))
5083 result = combine_comparisons (loc, code, lcode, rcode,
5084 truth_type, ll_arg, lr_arg);
5088 else if (operand_equal_p (ll_arg, rr_arg, 0)
5089 && operand_equal_p (lr_arg, rl_arg, 0))
5091 result = combine_comparisons (loc, code, lcode,
5092 swap_tree_comparison (rcode),
5093 truth_type, ll_arg, lr_arg);
5099 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5100 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5102 /* If the RHS can be evaluated unconditionally and its operands are
5103 simple, it wins to evaluate the RHS unconditionally on machines
5104 with expensive branches. In this case, this isn't a comparison
5105 that can be merged. Avoid doing this if the RHS is a floating-point
5106 comparison since those can trap. */
5108 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5110 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5111 && simple_operand_p (rl_arg)
5112 && simple_operand_p (rr_arg))
5114 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5115 if (code == TRUTH_OR_EXPR
5116 && lcode == NE_EXPR && integer_zerop (lr_arg)
5117 && rcode == NE_EXPR && integer_zerop (rr_arg)
5118 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5119 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5120 return build2_loc (loc, NE_EXPR, truth_type,
5121 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5123 build_int_cst (TREE_TYPE (ll_arg), 0));
5125 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5126 if (code == TRUTH_AND_EXPR
5127 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5128 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5129 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5130 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5131 return build2_loc (loc, EQ_EXPR, truth_type,
5132 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5134 build_int_cst (TREE_TYPE (ll_arg), 0));
5136 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5138 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5139 return build2_loc (loc, code, truth_type, lhs, rhs);
5144 /* See if the comparisons can be merged. Then get all the parameters for
5147 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5148 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5152 ll_inner = decode_field_reference (loc, ll_arg,
5153 &ll_bitsize, &ll_bitpos, &ll_mode,
5154 &ll_unsignedp, &volatilep, &ll_mask,
5156 lr_inner = decode_field_reference (loc, lr_arg,
5157 &lr_bitsize, &lr_bitpos, &lr_mode,
5158 &lr_unsignedp, &volatilep, &lr_mask,
5160 rl_inner = decode_field_reference (loc, rl_arg,
5161 &rl_bitsize, &rl_bitpos, &rl_mode,
5162 &rl_unsignedp, &volatilep, &rl_mask,
5164 rr_inner = decode_field_reference (loc, rr_arg,
5165 &rr_bitsize, &rr_bitpos, &rr_mode,
5166 &rr_unsignedp, &volatilep, &rr_mask,
5169 /* It must be true that the inner operation on the lhs of each
5170 comparison is the same if we are to be able to do anything.
5171 Then see if we have constants. If not, the same must be true for
5173 if (volatilep || ll_inner == 0 || rl_inner == 0
5174 || ! operand_equal_p (ll_inner, rl_inner, 0))
5177 if (TREE_CODE (lr_arg) == INTEGER_CST
5178 && TREE_CODE (rr_arg) == INTEGER_CST)
5179 l_const = lr_arg, r_const = rr_arg;
5180 else if (lr_inner == 0 || rr_inner == 0
5181 || ! operand_equal_p (lr_inner, rr_inner, 0))
5184 l_const = r_const = 0;
5186 /* If either comparison code is not correct for our logical operation,
5187 fail. However, we can convert a one-bit comparison against zero into
5188 the opposite comparison against that bit being set in the field. */
5190 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5191 if (lcode != wanted_code)
5193 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5195 /* Make the left operand unsigned, since we are only interested
5196 in the value of one bit. Otherwise we are doing the wrong
5205 /* This is analogous to the code for l_const above. */
5206 if (rcode != wanted_code)
5208 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5217 /* See if we can find a mode that contains both fields being compared on
5218 the left. If we can't, fail. Otherwise, update all constants and masks
5219 to be relative to a field of that size. */
5220 first_bit = MIN (ll_bitpos, rl_bitpos);
5221 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5222 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5223 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5225 if (lnmode == VOIDmode)
5228 lnbitsize = GET_MODE_BITSIZE (lnmode);
5229 lnbitpos = first_bit & ~ (lnbitsize - 1);
5230 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5231 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5233 if (BYTES_BIG_ENDIAN)
5235 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5236 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5239 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5240 size_int (xll_bitpos));
5241 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5242 size_int (xrl_bitpos));
5246 l_const = fold_convert_loc (loc, lntype, l_const);
5247 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5248 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5249 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5250 fold_build1_loc (loc, BIT_NOT_EXPR,
5253 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5255 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5260 r_const = fold_convert_loc (loc, lntype, r_const);
5261 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5262 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5263 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5264 fold_build1_loc (loc, BIT_NOT_EXPR,
5267 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5269 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5273 /* If the right sides are not constant, do the same for them. Also,
5274 disallow this optimization if a size or signedness mismatch occurs
5275 between the left and right sides. */
5278 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5279 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5280 /* Make sure the two fields on the right
5281 correspond to the left without being swapped. */
5282 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5285 first_bit = MIN (lr_bitpos, rr_bitpos);
5286 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5287 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5288 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5290 if (rnmode == VOIDmode)
5293 rnbitsize = GET_MODE_BITSIZE (rnmode);
5294 rnbitpos = first_bit & ~ (rnbitsize - 1);
5295 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5296 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5298 if (BYTES_BIG_ENDIAN)
5300 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5301 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5304 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5306 size_int (xlr_bitpos));
5307 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5309 size_int (xrr_bitpos));
5311 /* Make a mask that corresponds to both fields being compared.
5312 Do this for both items being compared. If the operands are the
5313 same size and the bits being compared are in the same position
5314 then we can do this by masking both and comparing the masked
5316 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5317 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5318 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5320 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5321 ll_unsignedp || rl_unsignedp);
5322 if (! all_ones_mask_p (ll_mask, lnbitsize))
5323 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5325 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5326 lr_unsignedp || rr_unsignedp);
5327 if (! all_ones_mask_p (lr_mask, rnbitsize))
5328 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5330 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5333 /* There is still another way we can do something: If both pairs of
5334 fields being compared are adjacent, we may be able to make a wider
5335 field containing them both.
5337 Note that we still must mask the lhs/rhs expressions. Furthermore,
5338 the mask must be shifted to account for the shift done by
5339 make_bit_field_ref. */
5340 if ((ll_bitsize + ll_bitpos == rl_bitpos
5341 && lr_bitsize + lr_bitpos == rr_bitpos)
5342 || (ll_bitpos == rl_bitpos + rl_bitsize
5343 && lr_bitpos == rr_bitpos + rr_bitsize))
5347 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5348 ll_bitsize + rl_bitsize,
5349 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5350 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5351 lr_bitsize + rr_bitsize,
5352 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5354 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5355 size_int (MIN (xll_bitpos, xrl_bitpos)));
5356 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5357 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5359 /* Convert to the smaller type before masking out unwanted bits. */
5361 if (lntype != rntype)
5363 if (lnbitsize > rnbitsize)
5365 lhs = fold_convert_loc (loc, rntype, lhs);
5366 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5369 else if (lnbitsize < rnbitsize)
5371 rhs = fold_convert_loc (loc, lntype, rhs);
5372 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5377 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5378 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5380 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5381 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5383 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5389 /* Handle the case of comparisons with constants. If there is something in
5390 common between the masks, those bits of the constants must be the same.
5391 If not, the condition is always false. Test for this to avoid generating
5392 incorrect code below. */
5393 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5394 if (! integer_zerop (result)
5395 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5396 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5398 if (wanted_code == NE_EXPR)
5400 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5401 return constant_boolean_node (true, truth_type);
5405 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5406 return constant_boolean_node (false, truth_type);
5410 /* Construct the expression we will return. First get the component
5411 reference we will make. Unless the mask is all ones the width of
5412 that field, perform the mask operation. Then compare with the
5414 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5415 ll_unsignedp || rl_unsignedp);
5417 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5418 if (! all_ones_mask_p (ll_mask, lnbitsize))
5419 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5421 return build2_loc (loc, wanted_code, truth_type, result,
5422 const_binop (BIT_IOR_EXPR, l_const, r_const));
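/* Editorial aside -- a source-level instance (not GCC code) of one
   rewrite fold_truthop performs above.  The function name is
   hypothetical.  */
static int
example_or_of_ne (int a, int b)
{
  /* (a != 0) || (b != 0) becomes a single test of the bitwise OR.  */
  return (a | b) != 0;
}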
5425 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5429 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5433 enum tree_code op_code;
5436 int consts_equal, consts_lt;
5439 STRIP_SIGN_NOPS (arg0);
5441 op_code = TREE_CODE (arg0);
5442 minmax_const = TREE_OPERAND (arg0, 1);
5443 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5444 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5445 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5446 inner = TREE_OPERAND (arg0, 0);
5448 /* If something does not permit us to optimize, return the original tree. */
5449 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5450 || TREE_CODE (comp_const) != INTEGER_CST
5451 || TREE_OVERFLOW (comp_const)
5452 || TREE_CODE (minmax_const) != INTEGER_CST
5453 || TREE_OVERFLOW (minmax_const))
5456 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5457 and GT_EXPR, doing the rest with recursive calls using logical
5461 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5464 = optimize_minmax_comparison (loc,
5465 invert_tree_comparison (code, false),
5468 return invert_truthvalue_loc (loc, tem);
5474 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5475 optimize_minmax_comparison
5476 (loc, EQ_EXPR, type, arg0, comp_const),
5477 optimize_minmax_comparison
5478 (loc, GT_EXPR, type, arg0, comp_const));
5481 if (op_code == MAX_EXPR && consts_equal)
5482 /* MAX (X, 0) == 0 -> X <= 0 */
5483 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5485 else if (op_code == MAX_EXPR && consts_lt)
5486 /* MAX (X, 0) == 5 -> X == 5 */
5487 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5489 else if (op_code == MAX_EXPR)
5490 /* MAX (X, 0) == -1 -> false */
5491 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5493 else if (consts_equal)
5494 /* MIN (X, 0) == 0 -> X >= 0 */
5495 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5498 /* MIN (X, 0) == 5 -> false */
5499 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5502 /* MIN (X, 0) == -1 -> X == -1 */
5503 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5506 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5507 /* MAX (X, 0) > 0 -> X > 0
5508 MAX (X, 0) > 5 -> X > 5 */
5509 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5511 else if (op_code == MAX_EXPR)
5512 /* MAX (X, 0) > -1 -> true */
5513 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5515 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5516 /* MIN (X, 0) > 0 -> false
5517 MIN (X, 0) > 5 -> false */
5518 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5521 /* MIN (X, 0) > -1 -> X > -1 */
5522 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
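/* Editorial aside -- a source-level instance (not GCC code) of the
   MAX comparisons reduced above.  MAX (x, 0) > 5 can only hold when x
   itself exceeds 5, so the MAX drops out.  Hypothetical name.  */
static int
example_max_compare (int x)
{
  /* (x > 0 ? x : 0) > 5, i.e. MAX (x, 0) > 5, reduces to x > 5.  */
  return x > 5;
}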
5529 /* T is an integer expression that is being multiplied or divided by, or
5530 taken modulo, a constant C (CODE says which operation, and what kind of
5531 divide or modulus). See if we can eliminate that operation by folding it with
5532 other operations already in T. WIDE_TYPE, if non-null, is a type that
5533 should be used for the computation if wider than our type.
5535 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5536 (X * 2) + (Y * 4). We must, however, be assured that either the original
5537 expression would not overflow or that overflow is undefined for the type
5538 in the language in question.
5540 If we return a non-null expression, it is an equivalent form of the
5541 original computation, but need not be in the original type.
5543 We set *STRICT_OVERFLOW_P to true if the return value depends on
5544 signed overflow being undefined. Otherwise we do not change
5545 *STRICT_OVERFLOW_P. */
5548 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5549 bool *strict_overflow_p)
5551 /* To avoid exponential search depth, refuse to allow recursion past
5552 three levels. Beyond that (1) it's highly unlikely that we'll find
5553 something interesting and (2) we've probably processed it before
5554 when we built the inner expression. */
5563 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5570 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5571 bool *strict_overflow_p)
5573 tree type = TREE_TYPE (t);
5574 enum tree_code tcode = TREE_CODE (t);
5575 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5576 > GET_MODE_SIZE (TYPE_MODE (type)))
5577 ? wide_type : type);
5579 int same_p = tcode == code;
5580 tree op0 = NULL_TREE, op1 = NULL_TREE;
5581 bool sub_strict_overflow_p;
5583 /* Don't deal with constants of zero here; they confuse the code below. */
5584 if (integer_zerop (c))
5587 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5588 op0 = TREE_OPERAND (t, 0);
5590 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5591 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5593 /* Note that we need not handle conditional operations here since fold
5594 already handles those cases. So just do arithmetic here. */
5598 /* For a constant, we can always simplify if we are a multiply
5599 or (for divide and modulus) if it is a multiple of our constant. */
5600 if (code == MULT_EXPR
5601 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5602 return const_binop (code, fold_convert (ctype, t),
5603 fold_convert (ctype, c));
5606 CASE_CONVERT: case NON_LVALUE_EXPR:
5607 /* If op0 is an expression ... */
5608 if ((COMPARISON_CLASS_P (op0)
5609 || UNARY_CLASS_P (op0)
5610 || BINARY_CLASS_P (op0)
5611 || VL_EXP_CLASS_P (op0)
5612 || EXPRESSION_CLASS_P (op0))
5613 /* ... and has wrapping overflow, and its type is smaller
5614 than ctype, then we cannot pass through as widening. */
5615 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5616 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5617 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5618 && (TYPE_PRECISION (ctype)
5619 > TYPE_PRECISION (TREE_TYPE (op0))))
5620 /* ... or this is a truncation (t is narrower than op0),
5621 then we cannot pass through this narrowing. */
5622 || (TYPE_PRECISION (type)
5623 < TYPE_PRECISION (TREE_TYPE (op0)))
5624 /* ... or signedness changes for division or modulus,
5625 then we cannot pass through this conversion. */
5626 || (code != MULT_EXPR
5627 && (TYPE_UNSIGNED (ctype)
5628 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5629 /* ... or has undefined overflow while the type we convert
5630 to has not, we cannot do the operation in the inner type
5631 as that would introduce undefined overflow. */
5632 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5633 && !TYPE_OVERFLOW_UNDEFINED (type))))
5636 /* Pass the constant down and see if we can make a simplification. If
5637 we can, replace this expression with the inner simplification for
5638 possible later conversion to our or some other type. */
5639 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5640 && TREE_CODE (t2) == INTEGER_CST
5641 && !TREE_OVERFLOW (t2)
5642 && (0 != (t1 = extract_muldiv (op0, t2, code,
5644 ? ctype : NULL_TREE,
5645 strict_overflow_p))))
5650 /* If widening the type changes it from signed to unsigned, then we
5651 must avoid building ABS_EXPR itself as unsigned. */
5652 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654 tree cstype = (*signed_type_for) (ctype);
5655 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5658 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5659 return fold_convert (ctype, t1);
5663 /* If the constant is negative, we cannot simplify this. */
5664 if (tree_int_cst_sgn (c) == -1)
5668 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5670 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5673 case MIN_EXPR: case MAX_EXPR:
5674 /* If widening the type changes the signedness, then we can't perform
5675 this optimization as that changes the result. */
5676 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5679 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5680 sub_strict_overflow_p = false;
5681 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5682 &sub_strict_overflow_p)) != 0
5683 && (t2 = extract_muldiv (op1, c, code, wide_type,
5684 &sub_strict_overflow_p)) != 0)
5686 if (tree_int_cst_sgn (c) < 0)
5687 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5688 if (sub_strict_overflow_p)
5689 *strict_overflow_p = true;
5690 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5691 fold_convert (ctype, t2));
5695 case LSHIFT_EXPR: case RSHIFT_EXPR:
5696 /* If the second operand is constant, this is a multiplication
5697 or floor division, by a power of two, so we can treat it that
5698 way unless the multiplier or divisor overflows. Signed
5699 left-shift overflow is implementation-defined rather than
5700 undefined in C90, so do not convert signed left shift into
5702 if (TREE_CODE (op1) == INTEGER_CST
5703 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5704 /* const_binop may not detect overflow correctly,
5705 so check for it explicitly here. */
5706 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5707 && TREE_INT_CST_HIGH (op1) == 0
5708 && 0 != (t1 = fold_convert (ctype,
5709 const_binop (LSHIFT_EXPR,
5712 && !TREE_OVERFLOW (t1))
5713 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5714 ? MULT_EXPR : FLOOR_DIV_EXPR,
5716 fold_convert (ctype, op0),
5718 c, code, wide_type, strict_overflow_p);
5721 case PLUS_EXPR: case MINUS_EXPR:
5722 /* See if we can eliminate the operation on both sides. If we can, we
5723 can return a new PLUS or MINUS. If we can't, the only remaining
5724 cases where we can do anything are if the second operand is a
5726 sub_strict_overflow_p = false;
5727 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5728 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5729 if (t1 != 0 && t2 != 0
5730 && (code == MULT_EXPR
5731 /* If not multiplication, we can only do this if both operands
5732 are divisible by c. */
5733 || (multiple_of_p (ctype, op0, c)
5734 && multiple_of_p (ctype, op1, c))))
5736 if (sub_strict_overflow_p)
5737 *strict_overflow_p = true;
5738 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5739 fold_convert (ctype, t2));
5742 /* If this was a subtraction, negate OP1 and set it to be an addition.
5743 This simplifies the logic below. */
5744 if (tcode == MINUS_EXPR)
5746 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5747 /* If OP1 was not easily negatable, the constant may be OP0. */
5748 if (TREE_CODE (op0) == INTEGER_CST)
5759 if (TREE_CODE (op1) != INTEGER_CST)
5762 /* If either OP1 or C is negative, this optimization is not safe for
5763 some of the division and remainder types while for others we need
5764 to change the code. */
5765 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5767 if (code == CEIL_DIV_EXPR)
5768 code = FLOOR_DIV_EXPR;
5769 else if (code == FLOOR_DIV_EXPR)
5770 code = CEIL_DIV_EXPR;
5771 else if (code != MULT_EXPR
5772 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5776 /* If it's a multiply or a division/modulus operation of a multiple
5777 of our constant, do the operation and verify it doesn't overflow. */
5778 if (code == MULT_EXPR
5779 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5781 op1 = const_binop (code, fold_convert (ctype, op1),
5782 fold_convert (ctype, c));
5783 /* We allow the constant to overflow with wrapping semantics. */
5785 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5791 /* If we have an unsigned type that is not a sizetype, we cannot widen
5792 the operation since it will change the result if the original
5793 computation overflowed. */
5794 if (TYPE_UNSIGNED (ctype)
5795 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5799 /* If we were able to eliminate our operation from the first side,
5800 apply our operation to the second side and reform the PLUS. */
5801 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5804 /* The last case is if we are a multiply. In that case, we can
5805 apply the distributive law to commute the multiply and addition
5806 if the multiplication of the constants doesn't overflow. */
5807 if (code == MULT_EXPR)
5808 return fold_build2 (tcode, ctype,
5809 fold_build2 (code, ctype,
5810 fold_convert (ctype, op0),
5811 fold_convert (ctype, c)),
5817 /* We have a special case here if we are doing something like
5818 (C * 8) % 4 since we know that's zero. */
5819 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5820 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5821 /* If the multiplication can overflow we cannot optimize this.
5822 ??? Until we can properly mark individual operations as
5823 not overflowing we need to treat sizetype specially here as
5824 stor-layout relies on this optimization to make
5825 DECL_FIELD_BIT_OFFSET always a constant. */
5826 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5827 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5828 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5829 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5830 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5832 *strict_overflow_p = true;
5833 return omit_one_operand (type, integer_zero_node, op0);
5836 /* ... fall through ... */
5838 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5839 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5840 /* If we can extract our operation from the LHS, do so and return a
5841 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5842 do something only if the second operand is a constant. */
5844 && (t1 = extract_muldiv (op0, c, code, wide_type,
5845 strict_overflow_p)) != 0)
5846 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5847 fold_convert (ctype, op1));
5848 else if (tcode == MULT_EXPR && code == MULT_EXPR
5849 && (t1 = extract_muldiv (op1, c, code, wide_type,
5850 strict_overflow_p)) != 0)
5851 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5852 fold_convert (ctype, t1));
5853 else if (TREE_CODE (op1) != INTEGER_CST)
5856 /* If these are the same operation types, we can associate them
5857 assuming no overflow. */
5862 mul = double_int_mul_with_sign
5864 (tree_to_double_int (op1),
5865 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5867 (tree_to_double_int (c),
5868 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5869 false, &overflow_p);
5870 overflow_p = (((!TYPE_UNSIGNED (ctype)
5871 || (TREE_CODE (ctype) == INTEGER_TYPE
5872 && TYPE_IS_SIZETYPE (ctype)))
5874 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5875 if (!double_int_fits_to_tree_p (ctype, mul)
5876 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5877 || !TYPE_UNSIGNED (ctype)
5878 || (TREE_CODE (ctype) == INTEGER_TYPE
5879 && TYPE_IS_SIZETYPE (ctype))))
5882 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5883 double_int_to_tree (ctype, mul));
5886 /* If these operations "cancel" each other, we have the main
5887 optimizations of this pass, which occur when either constant is a
5888 multiple of the other, in which case we replace this with either an
5889 operation of CODE or TCODE.
5891 If we have an unsigned type that is not a sizetype, we cannot do
5892 this since it will change the result if the original computation
5894 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5895 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5896 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5897 || (tcode == MULT_EXPR
5898 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5899 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5900 && code != MULT_EXPR)))
5902 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5904 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5905 *strict_overflow_p = true;
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5907 fold_convert (ctype,
5908 const_binop (TRUNC_DIV_EXPR,
5911 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5913 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5914 *strict_overflow_p = true;
5915 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5916 fold_convert (ctype,
5917 const_binop (TRUNC_DIV_EXPR,
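/* Editorial aside -- the running example from the extract_muldiv
   comment, as a sketch (not GCC code; hypothetical name).  Dividing
   (x * 8) + (y * 16) by 4 distributes through the sum, which is valid
   when signed overflow is undefined or provably absent.  */
static long
example_extract_muldiv (long x, long y)
{
  /* (x * 8 + y * 16) / 4 folds to: */
  return x * 2 + y * 4;
}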
5930 /* Return a node which has the indicated constant VALUE (either 0 or
5931 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5932 and is of the indicated TYPE. */
5935 constant_boolean_node (bool value, tree type)
5937 if (type == integer_type_node)
5938 return value ? integer_one_node : integer_zero_node;
5939 else if (type == boolean_type_node)
5940 return value ? boolean_true_node : boolean_false_node;
5941 else if (TREE_CODE (type) == VECTOR_TYPE)
5942 return build_vector_from_val (type,
5943 build_int_cst (TREE_TYPE (type),
5946 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5950 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5951 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5952 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5953 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5954 COND is the first argument to CODE; otherwise (as in the example
5955 given here), it is the second argument. TYPE is the type of the
5956 original expression. Return NULL_TREE if no simplification is
5960 fold_binary_op_with_conditional_arg (location_t loc,
5961 enum tree_code code,
5962 tree type, tree op0, tree op1,
5963 tree cond, tree arg, int cond_first_p)
5965 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5966 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5967 tree test, true_value, false_value;
5968 tree lhs = NULL_TREE;
5969 tree rhs = NULL_TREE;
5971 if (TREE_CODE (cond) == COND_EXPR)
5973 test = TREE_OPERAND (cond, 0);
5974 true_value = TREE_OPERAND (cond, 1);
5975 false_value = TREE_OPERAND (cond, 2);
5976 /* If this operand is an expression that throws, it does not make
5977 sense to try to perform a logical or arithmetic operation
5979 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5981 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5986 tree testtype = TREE_TYPE (cond);
5988 true_value = constant_boolean_node (true, testtype);
5989 false_value = constant_boolean_node (false, testtype);
5992 /* This transformation is only worthwhile if we don't have to wrap ARG
5993 in a SAVE_EXPR and the operation can be simplified on at least one
5994 of the branches once it is pushed inside the COND_EXPR. */
5995 if (!TREE_CONSTANT (arg)
5996 && (TREE_SIDE_EFFECTS (arg)
5997 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6000 arg = fold_convert_loc (loc, arg_type, arg);
6003 true_value = fold_convert_loc (loc, cond_type, true_value);
6005 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6007 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6011 false_value = fold_convert_loc (loc, cond_type, false_value);
6013 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6015 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6018 /* Check that we have simplified at least one of the branches. */
6019 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6022 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
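/* Editorial aside -- the transformation documented above in source
   form (not GCC code; hypothetical name).  */
static int
example_distribute_over_cond (int a, int b, int x, int y)
{
  /* a + (b ? x : y) becomes b ? (a + x) : (a + y); worthwhile when at
     least one branch then simplifies, e.g. when x or y is constant.  */
  return b ? a + x : a + y;
}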
6026 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6028 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6029 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6030 ADDEND is the same as X.
6032 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6033 and finite. The problematic cases are when X is zero, and its mode
6034 has signed zeros. In the case of rounding towards -infinity,
6035 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6036 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6039 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6041 if (!real_zerop (addend))
6044 /* Don't allow the fold with -fsignaling-nans. */
6045 if (HONOR_SNANS (TYPE_MODE (type)))
6048 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6049 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6052 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6053 if (TREE_CODE (addend) == REAL_CST
6054 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6057 /* The mode has signed zeros, and we have to honor their sign.
6058 In this situation, there is only one case we can return true for.
6059 X - 0 is the same as X unless rounding towards -infinity is
6061 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
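/* Editorial aside -- a sketch (not GCC code; hypothetical name) of why
   only X - 0 survives when signed zeros matter.  Under the default
   round-to-nearest mode, -0.0 + 0.0 is +0.0, so X + 0 flips the sign
   of a negative zero, while X - 0 returns X for every X.  */
static double
example_zero_addition (double x)
{
  /* Foldable to plain x unless rounding towards -infinity is in
     effect, where +0.0 - 0.0 yields -0.0.  */
  return x - 0.0;
}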
6064 /* Subroutine of fold() that checks comparisons of built-in math
6065 functions against real constants.
6067 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6068 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6069 is the type of the result and ARG0 and ARG1 are the operands of the
6070 comparison. ARG1 must be a TREE_REAL_CST.
6072 The function returns the constant folded tree if a simplification
6073 can be made, and NULL_TREE otherwise. */
6076 fold_mathfn_compare (location_t loc,
6077 enum built_in_function fcode, enum tree_code code,
6078 tree type, tree arg0, tree arg1)
6082 if (BUILTIN_SQRT_P (fcode))
6084 tree arg = CALL_EXPR_ARG (arg0, 0);
6085 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6087 c = TREE_REAL_CST (arg1);
6088 if (REAL_VALUE_NEGATIVE (c))
6090 /* sqrt(x) < y is always false, if y is negative. */
6091 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6092 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6094 /* sqrt(x) > y is always true, if y is negative and we
6095 don't care about NaNs, i.e. negative values of x. */
6096 if (code == NE_EXPR || !HONOR_NANS (mode))
6097 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6099 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6100 return fold_build2_loc (loc, GE_EXPR, type, arg,
6101 build_real (TREE_TYPE (arg), dconst0));
6103 else if (code == GT_EXPR || code == GE_EXPR)
6107 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6108 real_convert (&c2, mode, &c2);
6110 if (REAL_VALUE_ISINF (c2))
6112 /* sqrt(x) > y is x == +Inf, when y is very large. */
6113 if (HONOR_INFINITIES (mode))
6114 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6115 build_real (TREE_TYPE (arg), c2));
6117 /* sqrt(x) > y is always false, when y is very large
6118 and we don't care about infinities. */
6119 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6122 /* sqrt(x) > c is the same as x > c*c. */
6123 return fold_build2_loc (loc, code, type, arg,
6124 build_real (TREE_TYPE (arg), c2));
6126 else if (code == LT_EXPR || code == LE_EXPR)
6130 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6131 real_convert (&c2, mode, &c2);
6133 if (REAL_VALUE_ISINF (c2))
6135 /* sqrt(x) < y is always true, when y is a very large
6136 value and we don't care about NaNs or Infinities. */
6137 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6138 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6140 /* sqrt(x) < y is x != +Inf when y is very large and we
6141 don't care about NaNs. */
6142 if (! HONOR_NANS (mode))
6143 return fold_build2_loc (loc, NE_EXPR, type, arg,
6144 build_real (TREE_TYPE (arg), c2));
6146 /* sqrt(x) < y is x >= 0 when y is very large and we
6147 don't care about Infinities. */
6148 if (! HONOR_INFINITIES (mode))
6149 return fold_build2_loc (loc, GE_EXPR, type, arg,
6150 build_real (TREE_TYPE (arg), dconst0));
6152 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6153 arg = save_expr (arg);
6154 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6155 fold_build2_loc (loc, GE_EXPR, type, arg,
6156 build_real (TREE_TYPE (arg),
6158 fold_build2_loc (loc, NE_EXPR, type, arg,
6159 build_real (TREE_TYPE (arg),
6163 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6164 if (! HONOR_NANS (mode))
6165 return fold_build2_loc (loc, code, type, arg,
6166 build_real (TREE_TYPE (arg), c2));
6168 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6169 arg = save_expr (arg);
6170 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6171 fold_build2_loc (loc, GE_EXPR, type, arg,
6172 build_real (TREE_TYPE (arg),
6174 fold_build2_loc (loc, code, type, arg,
6175 build_real (TREE_TYPE (arg),
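/* Illustrative instances of the sqrt folds above, assuming a double
   argument and default NaN/infinity handling:

       sqrt (x) > -1.0   ->  x >= 0.0            (NaNs honored)
       sqrt (x) > 2.0    ->  x > 4.0             (c*c is finite)
       sqrt (x) > 1e308  ->  x == +Inf           (c*c overflows)
       sqrt (x) < 2.0    ->  x >= 0.0 && x < 4.0 (NaNs honored)  */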
6183 /* Subroutine of fold() that optimizes comparisons against Infinities,
6184 either +Inf or -Inf.
6186 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6187 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6188 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6190 The function returns the constant folded tree if a simplification
6191 can be made, and NULL_TREE otherwise. */
6194 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6195 tree arg0, tree arg1)
6197 enum machine_mode mode;
6198 REAL_VALUE_TYPE max;
6202 mode = TYPE_MODE (TREE_TYPE (arg0));
6204 /* For negative infinity swap the sense of the comparison. */
6205 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6207 code = swap_tree_comparison (code);
6212 /* x > +Inf is always false, if we ignore sNaNs. */
6213 if (HONOR_SNANS (mode))
6215 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6218 /* x <= +Inf is always true, if we don't care about NaNs. */
6219 if (! HONOR_NANS (mode))
6220 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6222 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6223 arg0 = save_expr (arg0);
6224 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6228 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6229 real_maxval (&max, neg, mode);
6230 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6231 arg0, build_real (TREE_TYPE (arg0), max));
6234 /* x < +Inf is always equal to x <= DBL_MAX. */
6235 real_maxval (&max, neg, mode);
6236 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6237 arg0, build_real (TREE_TYPE (arg0), max));
6240 /* x != +Inf is always equal to !(x > DBL_MAX). */
6241 real_maxval (&max, neg, mode);
6242 if (! HONOR_NANS (mode))
6243 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6244 arg0, build_real (TREE_TYPE (arg0), max));
6246 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6247 arg0, build_real (TREE_TYPE (arg0), max));
6248 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
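/* Summary of the folds above for +Inf (the -Inf cases follow by
   swapping the comparison sense); MAX stands for the largest finite
   value of the mode, as produced by real_maxval:

       x >  +Inf             ->  false        (no sNaNs)
       x <= +Inf             ->  x == x       (NaNs honored)
       x == +Inf, x >= +Inf  ->  x > MAX
       x <  +Inf             ->  x <= MAX
       x != +Inf             ->  !(x > MAX)   (NaNs honored)  */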
6257 /* Subroutine of fold() that optimizes comparisons of a division by
6258 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
6261 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6262 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6263 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6265 The function returns the constant folded tree if a simplification
6266 can be made, and NULL_TREE otherwise. */
6269 fold_div_compare (location_t loc,
6270 enum tree_code code, tree type, tree arg0, tree arg1)
6272 tree prod, tmp, hi, lo;
6273 tree arg00 = TREE_OPERAND (arg0, 0);
6274 tree arg01 = TREE_OPERAND (arg0, 1);
6276 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6280 /* We have to do this the hard way to detect unsigned overflow.
6281 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6282 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6283 TREE_INT_CST_HIGH (arg01),
6284 TREE_INT_CST_LOW (arg1),
6285 TREE_INT_CST_HIGH (arg1),
6286 &val.low, &val.high, unsigned_p);
6287 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6288 neg_overflow = false;
6292 tmp = int_const_binop (MINUS_EXPR, arg01,
6293 build_int_cst (TREE_TYPE (arg01), 1));
6296 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6297 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6298 TREE_INT_CST_HIGH (prod),
6299 TREE_INT_CST_LOW (tmp),
6300 TREE_INT_CST_HIGH (tmp),
6301 &val.low, &val.high, unsigned_p);
6302 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6303 -1, overflow | TREE_OVERFLOW (prod));
6305 else if (tree_int_cst_sgn (arg01) >= 0)
6307 tmp = int_const_binop (MINUS_EXPR, arg01,
6308 build_int_cst (TREE_TYPE (arg01), 1));
6309 switch (tree_int_cst_sgn (arg1))
6312 neg_overflow = true;
6313 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6318 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6323 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6333 /* A negative divisor reverses the relational operators. */
6334 code = swap_tree_comparison (code);
6336 tmp = int_const_binop (PLUS_EXPR, arg01,
6337 build_int_cst (TREE_TYPE (arg01), 1));
6338 switch (tree_int_cst_sgn (arg1))
6341 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6346 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6351 neg_overflow = true;
6352 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6364 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6365 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6366 if (TREE_OVERFLOW (hi))
6367 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6368 if (TREE_OVERFLOW (lo))
6369 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6370 return build_range_check (loc, type, arg00, 1, lo, hi);
6373 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6374 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6375 if (TREE_OVERFLOW (hi))
6376 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6377 if (TREE_OVERFLOW (lo))
6378 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6379 return build_range_check (loc, type, arg00, 0, lo, hi);
6382 if (TREE_OVERFLOW (lo))
6384 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6385 return omit_one_operand_loc (loc, type, tmp, arg00);
6387 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6390 if (TREE_OVERFLOW (hi))
6392 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6393 return omit_one_operand_loc (loc, type, tmp, arg00);
6395 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6398 if (TREE_OVERFLOW (hi))
6400 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6401 return omit_one_operand_loc (loc, type, tmp, arg00);
6403 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6406 if (TREE_OVERFLOW (lo))
6408 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6409 return omit_one_operand_loc (loc, type, tmp, arg00);
6411 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
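/* A worked example, assuming signed int and truncating division:
   x / 3 == 2 holds exactly for x in [6, 8], and indeed with
   arg01 == 3 and arg1 == 2 we get prod == 6, tmp == 2, lo == 6 and
   hi == 8, so the EQ_EXPR case emits the range check
   6 <= x && x <= 8. Likewise x / 3 == 0 uses lo == -2, hi == 2. */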
6421 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6422 equality/inequality test, then return a simplified form of the test
6423 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6427 fold_single_bit_test_into_sign_test (location_t loc,
6428 enum tree_code code, tree arg0, tree arg1,
6431 /* If this is testing a single bit, we can optimize the test. */
6432 if ((code == NE_EXPR || code == EQ_EXPR)
6433 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6434 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6436 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6437 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6438 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6440 if (arg00 != NULL_TREE
6441 /* This is only a win if casting to a signed type is cheap,
6442 i.e. when arg00's type is not a partial mode. */
6443 && TYPE_PRECISION (TREE_TYPE (arg00))
6444 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6446 tree stype = signed_type_for (TREE_TYPE (arg00));
6447 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6449 fold_convert_loc (loc, stype, arg00),
6450 build_int_cst (stype, 0));
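/* Illustrative example, assuming a 32-bit signed int x:

       (x & 0x80000000) != 0  ->  x < 0
       (x & 0x80000000) == 0  ->  x >= 0

   since the mask is exactly the sign bit of x's type. */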
6457 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6458 equality/inequality test, then return a simplified form of
6459 the test using shifts and logical operations. Otherwise return
6460 NULL. TYPE is the desired result type. */
6463 fold_single_bit_test (location_t loc, enum tree_code code,
6464 tree arg0, tree arg1, tree result_type)
6466 /* If this is testing a single bit, we can optimize the test. */
6467 if ((code == NE_EXPR || code == EQ_EXPR)
6468 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6469 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6471 tree inner = TREE_OPERAND (arg0, 0);
6472 tree type = TREE_TYPE (arg0);
6473 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6474 enum machine_mode operand_mode = TYPE_MODE (type);
6476 tree signed_type, unsigned_type, intermediate_type;
6479 /* First, see if we can fold the single bit test into a sign-bit test. */
6481 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6486 /* Otherwise we have (A & C) != 0 where C is a single bit,
6487 convert that into ((A >> C2) & 1), where C2 = log2(C).
6488 Similarly for (A & C) == 0. */
6490 /* If INNER is a right shift of a constant and it plus BITNUM does
6491 not overflow, adjust BITNUM and INNER. */
6492 if (TREE_CODE (inner) == RSHIFT_EXPR
6493 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6494 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6495 && bitnum < TYPE_PRECISION (type)
6496 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6497 bitnum - TYPE_PRECISION (type)))
6499 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6500 inner = TREE_OPERAND (inner, 0);
6503 /* If we are going to be able to omit the AND below, we must do our
6504 operations as unsigned. If we must use the AND, we have a choice.
6505 Normally unsigned is faster, but for some machines signed is. */
6506 #ifdef LOAD_EXTEND_OP
6507 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6508 && !flag_syntax_only) ? 0 : 1;
6513 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6514 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6515 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6516 inner = fold_convert_loc (loc, intermediate_type, inner);
6519 inner = build2 (RSHIFT_EXPR, intermediate_type,
6520 inner, size_int (bitnum));
6522 one = build_int_cst (intermediate_type, 1);
6524 if (code == EQ_EXPR)
6525 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6527 /* Put the AND last so it can combine with more things. */
6528 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6530 /* Make sure to return the proper type. */
6531 inner = fold_convert_loc (loc, result_type, inner);
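/* Illustrative example: with the mask selecting bit 3,

       (x & 8) != 0  ->  (x >> 3) & 1
       (x & 8) == 0  ->  ((x >> 3) ^ 1) & 1

   computed in the signed or unsigned copy of x's type chosen above
   and then converted to RESULT_TYPE. */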
6538 /* Check whether we are allowed to reorder operands arg0 and arg1,
6539 such that the evaluation of arg1 occurs before arg0. */
6542 reorder_operands_p (const_tree arg0, const_tree arg1)
6544 if (! flag_evaluation_order)
6546 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6548 return ! TREE_SIDE_EFFECTS (arg0)
6549 && ! TREE_SIDE_EFFECTS (arg1);
6552 /* Test whether it is preferable to swap two operands, ARG0 and
6553 ARG1, for example because ARG0 is an integer constant and ARG1
6554 isn't. If REORDER is true, only recommend swapping if we can
6555 evaluate the operands in reverse order. */
6558 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6560 STRIP_SIGN_NOPS (arg0);
6561 STRIP_SIGN_NOPS (arg1);
6563 if (TREE_CODE (arg1) == INTEGER_CST)
6565 if (TREE_CODE (arg0) == INTEGER_CST)
6568 if (TREE_CODE (arg1) == REAL_CST)
6570 if (TREE_CODE (arg0) == REAL_CST)
6573 if (TREE_CODE (arg1) == FIXED_CST)
6575 if (TREE_CODE (arg0) == FIXED_CST)
6578 if (TREE_CODE (arg1) == COMPLEX_CST)
6580 if (TREE_CODE (arg0) == COMPLEX_CST)
6583 if (TREE_CONSTANT (arg1))
6585 if (TREE_CONSTANT (arg0))
6588 if (optimize_function_for_size_p (cfun))
6591 if (reorder && flag_evaluation_order
6592 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6595 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6596 for commutative and comparison operators. Ensuring a canonical
6597 form allows the optimizers to find additional redundancies without
6598 having to explicitly check for both orderings. */
6599 if (TREE_CODE (arg0) == SSA_NAME
6600 && TREE_CODE (arg1) == SSA_NAME
6601 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6604 /* Put SSA_NAMEs last. */
6605 if (TREE_CODE (arg1) == SSA_NAME)
6607 if (TREE_CODE (arg0) == SSA_NAME)
6610 /* Put variables last. */
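/* Illustrative effect in the callers: constants and other simple
   operands end up second, so e.g. 5 < x is canonicalized to x > 5,
   and a pair of SSA_NAMEs is ordered by SSA_NAME_VERSION, e.g.
   b_1 + a_2 rather than a_2 + b_1. */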
6619 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6620 ARG0 is extended to a wider type. */
6623 fold_widened_comparison (location_t loc, enum tree_code code,
6624 tree type, tree arg0, tree arg1)
6626 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6628 tree shorter_type, outer_type;
6632 if (arg0_unw == arg0)
6634 shorter_type = TREE_TYPE (arg0_unw);
6636 #ifdef HAVE_canonicalize_funcptr_for_compare
6637 /* Disable this optimization if we're casting a function pointer
6638 type on targets that require function pointer canonicalization. */
6639 if (HAVE_canonicalize_funcptr_for_compare
6640 && TREE_CODE (shorter_type) == POINTER_TYPE
6641 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6645 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6648 arg1_unw = get_unwidened (arg1, NULL_TREE);
6650 /* If possible, express the comparison in the shorter mode. */
6651 if ((code == EQ_EXPR || code == NE_EXPR
6652 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6653 && (TREE_TYPE (arg1_unw) == shorter_type
6654 || ((TYPE_PRECISION (shorter_type)
6655 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6656 && (TYPE_UNSIGNED (shorter_type)
6657 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6658 || (TREE_CODE (arg1_unw) == INTEGER_CST
6659 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6660 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6661 && int_fits_type_p (arg1_unw, shorter_type))))
6662 return fold_build2_loc (loc, code, type, arg0_unw,
6663 fold_convert_loc (loc, shorter_type, arg1_unw));
6665 if (TREE_CODE (arg1_unw) != INTEGER_CST
6666 || TREE_CODE (shorter_type) != INTEGER_TYPE
6667 || !int_fits_type_p (arg1_unw, shorter_type))
6670 /* If we are comparing with an integer that does not fit into the range
6671 of the shorter type, the result is known. */
6672 outer_type = TREE_TYPE (arg1_unw);
6673 min = lower_bound_in_type (outer_type, shorter_type);
6674 max = upper_bound_in_type (outer_type, shorter_type);
6676 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6678 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6690 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6696 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6698 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6703 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6705 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
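/* Illustrative examples, assuming 16-bit short s and 32-bit int:

       (int) s == 7       ->  s == (short) 7  (constant fits)
       (int) s <  100000  ->  true            (above SHRT_MAX)
       (int) s == 100000  ->  false

   the last two by the known-result paths above. */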
6714 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6715 ARG0 just the signedness is changed. */
6718 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6719 tree arg0, tree arg1)
6722 tree inner_type, outer_type;
6724 if (!CONVERT_EXPR_P (arg0))
6727 outer_type = TREE_TYPE (arg0);
6728 arg0_inner = TREE_OPERAND (arg0, 0);
6729 inner_type = TREE_TYPE (arg0_inner);
6731 #ifdef HAVE_canonicalize_funcptr_for_compare
6732 /* Disable this optimization if we're casting a function pointer
6733 type on targets that require function pointer canonicalization. */
6734 if (HAVE_canonicalize_funcptr_for_compare
6735 && TREE_CODE (inner_type) == POINTER_TYPE
6736 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6740 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6743 if (TREE_CODE (arg1) != INTEGER_CST
6744 && !(CONVERT_EXPR_P (arg1)
6745 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6748 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6749 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6754 if (TREE_CODE (arg1) == INTEGER_CST)
6755 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6756 0, TREE_OVERFLOW (arg1));
6758 arg1 = fold_convert_loc (loc, inner_type, arg1);
6760 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
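/* Illustrative example, for a signed int i:

       (unsigned int) i == 5u  ->  i == 5

   valid because the precision is unchanged and EQ_EXPR/NE_EXPR do
   not depend on the signedness of the operands. */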
6763 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6764 the step of the array. Reconstructs s and delta in the case of s *
6765 delta being an integer constant (and thus already folded). ADDR is
6766 the address. OP1 is the multiplicative expression. If the
6767 function succeeds, the new address expression is returned.
6768 Otherwise NULL_TREE is returned. LOC is the location of the
6769 resulting expression. */
6772 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6774 tree s, delta, step;
6775 tree ref = TREE_OPERAND (addr, 0), pref;
6780 /* Strip the nops that might be added when converting op1 to sizetype. */
6783 /* Canonicalize op1 into a possibly non-constant delta
6784 and an INTEGER_CST s. */
6785 if (TREE_CODE (op1) == MULT_EXPR)
6787 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6792 if (TREE_CODE (arg0) == INTEGER_CST)
6797 else if (TREE_CODE (arg1) == INTEGER_CST)
6805 else if (TREE_CODE (op1) == INTEGER_CST)
6812 /* Treat op1 as delta * 1. */
6814 s = integer_one_node;
6817 for (;; ref = TREE_OPERAND (ref, 0))
6819 if (TREE_CODE (ref) == ARRAY_REF)
6823 /* Remember if this was a multi-dimensional array. */
6824 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6827 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6830 itype = TREE_TYPE (domain);
6832 step = array_ref_element_size (ref);
6833 if (TREE_CODE (step) != INTEGER_CST)
6838 if (! tree_int_cst_equal (step, s))
6843 /* Check whether delta is a multiple of step. */
6844 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6850 /* Only fold here if we can verify we do not overflow one
6851 dimension of a multi-dimensional array. */
6856 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6857 || !TYPE_MAX_VALUE (domain)
6858 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6861 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6862 fold_convert_loc (loc, itype,
6863 TREE_OPERAND (ref, 1)),
6864 fold_convert_loc (loc, itype, delta));
6866 || TREE_CODE (tmp) != INTEGER_CST
6867 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6873 else if (TREE_CODE (ref) == COMPONENT_REF
6874 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6878 /* Remember if this was a multi-dimensional array. */
6879 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6882 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6885 itype = TREE_TYPE (domain);
6887 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6888 if (TREE_CODE (step) != INTEGER_CST)
6893 if (! tree_int_cst_equal (step, s))
6898 /* Check whether delta is a multiple of step. */
6899 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6905 /* Only fold here if we can verify we do not overflow one
6906 dimension of a multi-dimensional array. */
6911 if (!TYPE_MIN_VALUE (domain)
6912 || !TYPE_MAX_VALUE (domain)
6913 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6916 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6917 fold_convert_loc (loc, itype,
6918 TYPE_MIN_VALUE (domain)),
6919 fold_convert_loc (loc, itype, delta));
6920 if (TREE_CODE (tmp) != INTEGER_CST
6921 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6930 if (!handled_component_p (ref))
6934 /* We found a suitable array reference. So copy everything up to it,
6935 and replace the index. */
6937 pref = TREE_OPERAND (addr, 0);
6938 ret = copy_node (pref);
6939 SET_EXPR_LOCATION (ret, loc);
6944 pref = TREE_OPERAND (pref, 0);
6945 TREE_OPERAND (pos, 0) = copy_node (pref);
6946 pos = TREE_OPERAND (pos, 0);
6949 if (TREE_CODE (ref) == ARRAY_REF)
6951 TREE_OPERAND (pos, 1)
6952 = fold_build2_loc (loc, PLUS_EXPR, itype,
6953 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6954 fold_convert_loc (loc, itype, delta));
6955 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6957 else if (TREE_CODE (ref) == COMPONENT_REF)
6959 gcc_assert (ret == pos);
6960 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6962 (loc, PLUS_EXPR, itype,
6963 fold_convert_loc (loc, itype,
6965 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6966 fold_convert_loc (loc, itype, delta)),
6967 NULL_TREE, NULL_TREE);
6968 return build_fold_addr_expr_loc (loc, ret);
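/* Illustrative example, for int a[10] with 4-byte elements:

       &a[1] p+ 4 * i  ->  &a[1 + i]
       &a[1] p+ 8      ->  &a[3]

   the latter via the constant-delta path (8 is divided by the step
   4); for multi-dimensional references the fold is only done when
   the new index provably stays within the dimension's domain. */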
6975 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6976 means A >= Y && A != MAX, but in this case we know that
6977 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6980 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6982 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6984 if (TREE_CODE (bound) == LT_EXPR)
6985 a = TREE_OPERAND (bound, 0);
6986 else if (TREE_CODE (bound) == GT_EXPR)
6987 a = TREE_OPERAND (bound, 1);
6991 typea = TREE_TYPE (a);
6992 if (!INTEGRAL_TYPE_P (typea)
6993 && !POINTER_TYPE_P (typea))
6996 if (TREE_CODE (ineq) == LT_EXPR)
6998 a1 = TREE_OPERAND (ineq, 1);
6999 y = TREE_OPERAND (ineq, 0);
7001 else if (TREE_CODE (ineq) == GT_EXPR)
7003 a1 = TREE_OPERAND (ineq, 0);
7004 y = TREE_OPERAND (ineq, 1);
7009 if (TREE_TYPE (a1) != typea)
7012 if (POINTER_TYPE_P (typea))
7014 /* Convert the pointers to integers before taking the difference. */
7015 tree ta = fold_convert_loc (loc, ssizetype, a);
7016 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7017 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7020 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7022 if (!diff || !integer_onep (diff))
7025 return fold_build2_loc (loc, GE_EXPR, type, a, y);
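/* Illustrative example: with BOUND a < x and INEQ a + 1 > y, the
   difference (a + 1) - a folds to 1, so the result built here is
   a >= y, giving overall a < x && a >= y. This is safe because
   a < x already rules out a == MAX. */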
7028 /* Fold a sum or difference of at least one multiplication.
7029 Returns the folded tree or NULL if no simplification could be made. */
7032 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7033 tree arg0, tree arg1)
7035 tree arg00, arg01, arg10, arg11;
7036 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7038 /* (A * C) +- (B * C) -> (A+-B) * C.
7039 (A * C) +- A -> A * (C+-1).
7040 We are most concerned about the case where C is a constant,
7041 but other combinations show up during loop reduction. Since
7042 it is not difficult, try all four possibilities. */
7044 if (TREE_CODE (arg0) == MULT_EXPR)
7046 arg00 = TREE_OPERAND (arg0, 0);
7047 arg01 = TREE_OPERAND (arg0, 1);
7049 else if (TREE_CODE (arg0) == INTEGER_CST)
7051 arg00 = build_one_cst (type);
7056 /* We cannot generate constant 1 for fract. */
7057 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7060 arg01 = build_one_cst (type);
7062 if (TREE_CODE (arg1) == MULT_EXPR)
7064 arg10 = TREE_OPERAND (arg1, 0);
7065 arg11 = TREE_OPERAND (arg1, 1);
7067 else if (TREE_CODE (arg1) == INTEGER_CST)
7069 arg10 = build_one_cst (type);
7070 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7071 the purpose of this canonicalization. */
7072 if (TREE_INT_CST_HIGH (arg1) == -1
7073 && negate_expr_p (arg1)
7074 && code == PLUS_EXPR)
7076 arg11 = negate_expr (arg1);
7084 /* We cannot generate constant 1 for fract. */
7085 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7088 arg11 = build_one_cst (type);
7092 if (operand_equal_p (arg01, arg11, 0))
7093 same = arg01, alt0 = arg00, alt1 = arg10;
7094 else if (operand_equal_p (arg00, arg10, 0))
7095 same = arg00, alt0 = arg01, alt1 = arg11;
7096 else if (operand_equal_p (arg00, arg11, 0))
7097 same = arg00, alt0 = arg01, alt1 = arg10;
7098 else if (operand_equal_p (arg01, arg10, 0))
7099 same = arg01, alt0 = arg00, alt1 = arg11;
7101 /* No identical multiplicands; see if we can find a common
7102 power-of-two factor in non-power-of-two multiplies. This
7103 can help in multi-dimensional array access. */
7104 else if (host_integerp (arg01, 0)
7105 && host_integerp (arg11, 0))
7107 HOST_WIDE_INT int01, int11, tmp;
7110 int01 = TREE_INT_CST_LOW (arg01);
7111 int11 = TREE_INT_CST_LOW (arg11);
7113 /* Move min of absolute values to int11. */
7114 if (absu_hwi (int01) < absu_hwi (int11))
7116 tmp = int01, int01 = int11, int11 = tmp;
7117 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7124 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7125 /* The remainder should not be a constant, otherwise we
7126 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7127 increase the number of multiplications necessary. */
7128 && TREE_CODE (arg10) != INTEGER_CST)
7130 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7131 build_int_cst (TREE_TYPE (arg00),
7136 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7141 return fold_build2_loc (loc, MULT_EXPR, type,
7142 fold_build2_loc (loc, code, type,
7143 fold_convert_loc (loc, type, alt0),
7144 fold_convert_loc (loc, type, alt1)),
7145 fold_convert_loc (loc, type, same));
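/* Illustrative instances of the reassociations above:

       a * c + b * c   ->  (a + b) * c
       a * c + a       ->  a * (c + 1)
       i * 12 + j * 4  ->  (i * 3 + j) * 4

   the last via the common power-of-two factor path, which leaves
   the number of multiplications unchanged. */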
7150 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7151 specified by EXPR into the buffer PTR of length LEN bytes.
7152 Return the number of bytes placed in the buffer, or zero upon failure. */
7156 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7158 tree type = TREE_TYPE (expr);
7159 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7160 int byte, offset, word, words;
7161 unsigned char value;
7163 if (total_bytes > len)
7165 words = total_bytes / UNITS_PER_WORD;
7167 for (byte = 0; byte < total_bytes; byte++)
7169 int bitpos = byte * BITS_PER_UNIT;
7170 if (bitpos < HOST_BITS_PER_WIDE_INT)
7171 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7173 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7174 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7176 if (total_bytes > UNITS_PER_WORD)
7178 word = byte / UNITS_PER_WORD;
7179 if (WORDS_BIG_ENDIAN)
7180 word = (words - 1) - word;
7181 offset = word * UNITS_PER_WORD;
7182 if (BYTES_BIG_ENDIAN)
7183 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7185 offset += byte % UNITS_PER_WORD;
7188 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7189 ptr[offset] = value;
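/* Illustrative example, assuming 8-bit bytes and a 32-bit type:
   encoding the INTEGER_CST 0x01020304 stores the bytes

       04 03 02 01  on a little-endian target
       01 02 03 04  on a big-endian target

   with the word shuffle above only coming into play when the value
   is wider than UNITS_PER_WORD. */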
7195 /* Subroutine of native_encode_expr. Encode the REAL_CST
7196 specified by EXPR into the buffer PTR of length LEN bytes.
7197 Return the number of bytes placed in the buffer, or zero upon failure. */
7201 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7203 tree type = TREE_TYPE (expr);
7204 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7205 int byte, offset, word, words, bitpos;
7206 unsigned char value;
7208 /* There are always 32 bits in each long, no matter the size of
7209 the host's long. We handle floating point representations with up to 192 bits. */
7213 if (total_bytes > len)
7215 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7217 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7219 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7220 bitpos += BITS_PER_UNIT)
7222 byte = (bitpos / BITS_PER_UNIT) & 3;
7223 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7225 if (UNITS_PER_WORD < 4)
7227 word = byte / UNITS_PER_WORD;
7228 if (WORDS_BIG_ENDIAN)
7229 word = (words - 1) - word;
7230 offset = word * UNITS_PER_WORD;
7231 if (BYTES_BIG_ENDIAN)
7232 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7234 offset += byte % UNITS_PER_WORD;
7237 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7238 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7243 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7244 specified by EXPR into the buffer PTR of length LEN bytes.
7245 Return the number of bytes placed in the buffer, or zero upon failure. */
7249 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7254 part = TREE_REALPART (expr);
7255 rsize = native_encode_expr (part, ptr, len);
7258 part = TREE_IMAGPART (expr);
7259 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7262 return rsize + isize;
7266 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7267 specified by EXPR into the buffer PTR of length LEN bytes.
7268 Return the number of bytes placed in the buffer, or zero upon failure. */
7272 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7274 int i, size, offset, count;
7275 tree itype, elem, elements;
7278 elements = TREE_VECTOR_CST_ELTS (expr);
7279 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7280 itype = TREE_TYPE (TREE_TYPE (expr));
7281 size = GET_MODE_SIZE (TYPE_MODE (itype));
7282 for (i = 0; i < count; i++)
7286 elem = TREE_VALUE (elements);
7287 elements = TREE_CHAIN (elements);
7294 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7299 if (offset + size > len)
7301 memset (ptr+offset, 0, size);
7309 /* Subroutine of native_encode_expr. Encode the STRING_CST
7310 specified by EXPR into the buffer PTR of length LEN bytes.
7311 Return the number of bytes placed in the buffer, or zero upon failure. */
7315 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7317 tree type = TREE_TYPE (expr);
7318 HOST_WIDE_INT total_bytes;
7320 if (TREE_CODE (type) != ARRAY_TYPE
7321 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7322 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7323 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7325 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7326 if (total_bytes > len)
7328 if (TREE_STRING_LENGTH (expr) < total_bytes)
7330 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7331 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7332 total_bytes - TREE_STRING_LENGTH (expr));
7335 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7340 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7341 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7342 buffer PTR of length LEN bytes. Return the number of bytes
7343 placed in the buffer, or zero upon failure. */
7346 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7348 switch (TREE_CODE (expr))
7351 return native_encode_int (expr, ptr, len);
7354 return native_encode_real (expr, ptr, len);
7357 return native_encode_complex (expr, ptr, len);
7360 return native_encode_vector (expr, ptr, len);
7363 return native_encode_string (expr, ptr, len);
7371 /* Subroutine of native_interpret_expr. Interpret the contents of
7372 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7373 If the buffer cannot be interpreted, return NULL_TREE. */
7376 native_interpret_int (tree type, const unsigned char *ptr, int len)
7378 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7379 int byte, offset, word, words;
7380 unsigned char value;
7383 if (total_bytes > len)
7385 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7388 result = double_int_zero;
7389 words = total_bytes / UNITS_PER_WORD;
7391 for (byte = 0; byte < total_bytes; byte++)
7393 int bitpos = byte * BITS_PER_UNIT;
7394 if (total_bytes > UNITS_PER_WORD)
7396 word = byte / UNITS_PER_WORD;
7397 if (WORDS_BIG_ENDIAN)
7398 word = (words - 1) - word;
7399 offset = word * UNITS_PER_WORD;
7400 if (BYTES_BIG_ENDIAN)
7401 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7403 offset += byte % UNITS_PER_WORD;
7406 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7407 value = ptr[offset];
7409 if (bitpos < HOST_BITS_PER_WIDE_INT)
7410 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7412 result.high |= (unsigned HOST_WIDE_INT) value
7413 << (bitpos - HOST_BITS_PER_WIDE_INT);
7416 return double_int_to_tree (type, result);
7420 /* Subroutine of native_interpret_expr. Interpret the contents of
7421 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7422 If the buffer cannot be interpreted, return NULL_TREE. */
7425 native_interpret_real (tree type, const unsigned char *ptr, int len)
7427 enum machine_mode mode = TYPE_MODE (type);
7428 int total_bytes = GET_MODE_SIZE (mode);
7429 int byte, offset, word, words, bitpos;
7430 unsigned char value;
7431 /* There are always 32 bits in each long, no matter the size of
7432 the host's long. We handle floating point representations with up to 192 bits. */
7437 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7438 if (total_bytes > len || total_bytes > 24)
7440 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7442 memset (tmp, 0, sizeof (tmp));
7443 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7444 bitpos += BITS_PER_UNIT)
7446 byte = (bitpos / BITS_PER_UNIT) & 3;
7447 if (UNITS_PER_WORD < 4)
7449 word = byte / UNITS_PER_WORD;
7450 if (WORDS_BIG_ENDIAN)
7451 word = (words - 1) - word;
7452 offset = word * UNITS_PER_WORD;
7453 if (BYTES_BIG_ENDIAN)
7454 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7456 offset += byte % UNITS_PER_WORD;
7459 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7460 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7462 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7465 real_from_target (&r, tmp, mode);
7466 return build_real (type, r);
7470 /* Subroutine of native_interpret_expr. Interpret the contents of
7471 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7472 If the buffer cannot be interpreted, return NULL_TREE. */
7475 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7477 tree etype, rpart, ipart;
7480 etype = TREE_TYPE (type);
7481 size = GET_MODE_SIZE (TYPE_MODE (etype));
7484 rpart = native_interpret_expr (etype, ptr, size);
7487 ipart = native_interpret_expr (etype, ptr+size, size);
7490 return build_complex (type, rpart, ipart);
7494 /* Subroutine of native_interpret_expr. Interpret the contents of
7495 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7496 If the buffer cannot be interpreted, return NULL_TREE. */
7499 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7501 tree etype, elem, elements;
7504 etype = TREE_TYPE (type);
7505 size = GET_MODE_SIZE (TYPE_MODE (etype));
7506 count = TYPE_VECTOR_SUBPARTS (type);
7507 if (size * count > len)
7510 elements = NULL_TREE;
7511 for (i = count - 1; i >= 0; i--)
7513 elem = native_interpret_expr (etype, ptr+(i*size), size);
7516 elements = tree_cons (NULL_TREE, elem, elements);
7518 return build_vector (type, elements);
7522 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7523 the buffer PTR of length LEN as a constant of type TYPE. For
7524 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7525 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7526 return NULL_TREE. */
7529 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7531 switch (TREE_CODE (type))
7536 return native_interpret_int (type, ptr, len);
7539 return native_interpret_real (type, ptr, len);
7542 return native_interpret_complex (type, ptr, len);
7545 return native_interpret_vector (type, ptr, len);
7553 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7554 TYPE at compile-time. If we're unable to perform the conversion
7555 return NULL_TREE. */
7558 fold_view_convert_expr (tree type, tree expr)
7560 /* We support up to 512-bit values (for V8DFmode). */
7561 unsigned char buffer[64];
7564 /* Check that the host and target are sane. */
7565 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7568 len = native_encode_expr (expr, buffer, sizeof (buffer));
7572 return native_interpret_expr (type, buffer, len);
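/* Illustrative example: VIEW_CONVERT_EXPR<int>(1.0f) is folded by
   encoding the REAL_CST 1.0f into its four target bytes and
   reinterpreting them, yielding the INTEGER_CST 0x3f800000 for IEEE
   single precision. */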
7575 /* Build an expression for the address of T. Folds away INDIRECT_REF
7576 to avoid confusing the gimplify process. */
7579 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7581 /* The size of the object is not relevant when talking about its address. */
7582 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7583 t = TREE_OPERAND (t, 0);
7585 if (TREE_CODE (t) == INDIRECT_REF)
7587 t = TREE_OPERAND (t, 0);
7589 if (TREE_TYPE (t) != ptrtype)
7590 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7592 else if (TREE_CODE (t) == MEM_REF
7593 && integer_zerop (TREE_OPERAND (t, 1)))
7594 return TREE_OPERAND (t, 0);
7595 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7597 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7599 if (TREE_TYPE (t) != ptrtype)
7600 t = fold_convert_loc (loc, ptrtype, t);
7603 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7608 /* Build an expression for the address of T. */
7611 build_fold_addr_expr_loc (location_t loc, tree t)
7613 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7615 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7618 /* Fold a unary expression of code CODE and type TYPE with operand
7619 OP0. Return the folded expression if folding is successful.
7620 Otherwise, return NULL_TREE. */
7623 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7627 enum tree_code_class kind = TREE_CODE_CLASS (code);
7629 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7630 && TREE_CODE_LENGTH (code) == 1);
7635 if (CONVERT_EXPR_CODE_P (code)
7636 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7638 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7640 STRIP_SIGN_NOPS (arg0);
7644 /* Strip any conversions that don't change the mode. This
7645 is safe for every expression, except for a comparison
7646 expression because its signedness is derived from its operands.
7649 Note that this is done as an internal manipulation within
7650 the constant folder, in order to find the simplest
7651 representation of the arguments so that their form can be
7652 studied. In any case, the appropriate type conversions
7653 should be put back in the tree that will get out of the constant folder. */
7659 if (TREE_CODE_CLASS (code) == tcc_unary)
7661 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7662 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7663 fold_build1_loc (loc, code, type,
7664 fold_convert_loc (loc, TREE_TYPE (op0),
7665 TREE_OPERAND (arg0, 1))));
7666 else if (TREE_CODE (arg0) == COND_EXPR)
7668 tree arg01 = TREE_OPERAND (arg0, 1);
7669 tree arg02 = TREE_OPERAND (arg0, 2);
7670 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7671 arg01 = fold_build1_loc (loc, code, type,
7672 fold_convert_loc (loc,
7673 TREE_TYPE (op0), arg01));
7674 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7675 arg02 = fold_build1_loc (loc, code, type,
7676 fold_convert_loc (loc,
7677 TREE_TYPE (op0), arg02));
7678 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7681 /* If this was a conversion, and all we did was to move it
7682 inside the COND_EXPR, bring it back out. But leave it if
7683 it is a conversion from integer to integer and the
7684 result precision is no wider than a word since such a
7685 conversion is cheap and may be optimized away by combine,
7686 while it couldn't if it were outside the COND_EXPR. Then return
7687 so we don't get into an infinite recursion loop taking the
7688 conversion out and then back in. */
7690 if ((CONVERT_EXPR_CODE_P (code)
7691 || code == NON_LVALUE_EXPR)
7692 && TREE_CODE (tem) == COND_EXPR
7693 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7694 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7695 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7696 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7697 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7698 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7699 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7701 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7702 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7703 || flag_syntax_only))
7704 tem = build1_loc (loc, code, type,
7706 TREE_TYPE (TREE_OPERAND
7707 (TREE_OPERAND (tem, 1), 0)),
7708 TREE_OPERAND (tem, 0),
7709 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7710 TREE_OPERAND (TREE_OPERAND (tem, 2),
7719 /* Re-association barriers around constants and other re-association
7720 barriers can be removed. */
7721 if (CONSTANT_CLASS_P (op0)
7722 || TREE_CODE (op0) == PAREN_EXPR)
7723 return fold_convert_loc (loc, type, op0);
7728 case FIX_TRUNC_EXPR:
7729 if (TREE_TYPE (op0) == type)
7732 if (COMPARISON_CLASS_P (op0))
7734 /* If we have (type) (a CMP b) and type is an integral type, return
7735 new expression involving the new type. Canonicalize
7736 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for non-integral type.
7738 Do not fold the result as that would not simplify further, also
7739 folding again results in recursions. */
7740 if (TREE_CODE (type) == BOOLEAN_TYPE)
7741 return build2_loc (loc, TREE_CODE (op0), type,
7742 TREE_OPERAND (op0, 0),
7743 TREE_OPERAND (op0, 1));
7744 else if (!INTEGRAL_TYPE_P (type))
7745 return build3_loc (loc, COND_EXPR, type, op0,
7746 constant_boolean_node (true, type),
7747 constant_boolean_node (false, type));
7750 /* Handle cases of two conversions in a row. */
7751 if (CONVERT_EXPR_P (op0))
7753 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7754 tree inter_type = TREE_TYPE (op0);
7755 int inside_int = INTEGRAL_TYPE_P (inside_type);
7756 int inside_ptr = POINTER_TYPE_P (inside_type);
7757 int inside_float = FLOAT_TYPE_P (inside_type);
7758 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7759 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7760 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7761 int inter_int = INTEGRAL_TYPE_P (inter_type);
7762 int inter_ptr = POINTER_TYPE_P (inter_type);
7763 int inter_float = FLOAT_TYPE_P (inter_type);
7764 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7765 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7766 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7767 int final_int = INTEGRAL_TYPE_P (type);
7768 int final_ptr = POINTER_TYPE_P (type);
7769 int final_float = FLOAT_TYPE_P (type);
7770 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7771 unsigned int final_prec = TYPE_PRECISION (type);
7772 int final_unsignedp = TYPE_UNSIGNED (type);
7774 /* In addition to the cases of two conversions in a row
7775 handled below, if we are converting something to its own
7776 type via an object of identical or wider precision, neither
7777 conversion is needed. */
7778 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7779 && (((inter_int || inter_ptr) && final_int)
7780 || (inter_float && final_float))
7781 && inter_prec >= final_prec)
7782 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7784 /* Likewise, if the intermediate and initial types are either both
7785 float or both integer, we don't need the middle conversion if the
7786 former is wider than the latter and doesn't change the signedness
7787 (for integers). Avoid this if the final type is a pointer since
7788 then we sometimes need the middle conversion. Likewise if the
7789 final type has a precision not equal to the size of its mode. */
7790 if (((inter_int && inside_int)
7791 || (inter_float && inside_float)
7792 || (inter_vec && inside_vec))
7793 && inter_prec >= inside_prec
7794 && (inter_float || inter_vec
7795 || inter_unsignedp == inside_unsignedp)
7796 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7797 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7799 && (! final_vec || inter_prec == inside_prec))
7800 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7802 /* If we have a sign-extension of a zero-extended value, we can
7803 replace that by a single zero-extension. */
7804 if (inside_int && inter_int && final_int
7805 && inside_prec < inter_prec && inter_prec < final_prec
7806 && inside_unsignedp && !inter_unsignedp)
7807 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7809 /* Two conversions in a row are not needed unless:
7810 - some conversion is floating-point (overstrict for now), or
7811 - some conversion is a vector (overstrict for now), or
7812 - the intermediate type is narrower than both initial and final types, or
7814 - the intermediate type and innermost type differ in signedness,
7815 and the outermost type is wider than the intermediate, or
7816 - the initial type is a pointer type and the precisions of the
7817 intermediate and final types differ, or
7818 - the final type is a pointer type and the precisions of the
7819 initial and intermediate types differ. */
7820 if (! inside_float && ! inter_float && ! final_float
7821 && ! inside_vec && ! inter_vec && ! final_vec
7822 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7823 && ! (inside_int && inter_int
7824 && inter_unsignedp != inside_unsignedp
7825 && inter_prec < final_prec)
7826 && ((inter_unsignedp && inter_prec > inside_prec)
7827 == (final_unsignedp && final_prec > inter_prec))
7828 && ! (inside_ptr && inter_prec != final_prec)
7829 && ! (final_ptr && inside_prec != inter_prec)
7830 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7831 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7832 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
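/* Illustrative instances of the rules above, assuming 8-bit char,
   16-bit short and 32-bit int:

       (int) (unsigned int) x           ->  x  (same precision)
       (int) (short) (unsigned char) c  ->  (int) (unsigned char) c
       (int) (char) x                   is kept: the narrowing
                                        matters. */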
7835 /* Handle (T *)&A.B.C for A being of type T and B and C
7836 living at offset zero. This occurs frequently in
7837 C++ upcasting and then accessing the base. */
7838 if (TREE_CODE (op0) == ADDR_EXPR
7839 && POINTER_TYPE_P (type)
7840 && handled_component_p (TREE_OPERAND (op0, 0)))
7842 HOST_WIDE_INT bitsize, bitpos;
7844 enum machine_mode mode;
7845 int unsignedp, volatilep;
7846 tree base = TREE_OPERAND (op0, 0);
7847 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7848 &mode, &unsignedp, &volatilep, false);
7849 /* If the reference was to a (constant) zero offset, we can use
7850 the address of the base if it has the same base type
7851 as the result type and the pointer type is unqualified. */
7852 if (! offset && bitpos == 0
7853 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7854 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7855 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7856 return fold_convert_loc (loc, type,
7857 build_fold_addr_expr_loc (loc, base));
7860 if (TREE_CODE (op0) == MODIFY_EXPR
7861 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7862 /* Detect assigning a bitfield. */
7863 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7865 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7867 /* Don't leave an assignment inside a conversion
7868 unless assigning a bitfield. */
7869 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7870 /* First do the assignment, then return converted constant. */
7871 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7872 TREE_NO_WARNING (tem) = 1;
7873 TREE_USED (tem) = 1;
7877 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7878 constant (if x has signed type, the sign bit cannot be set
7879 in c). This folds extension into the BIT_AND_EXPR.
7880 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7881 very likely don't have maximal range for their precision and this
7882 transformation effectively doesn't preserve non-maximal ranges. */
7883 if (TREE_CODE (type) == INTEGER_TYPE
7884 && TREE_CODE (op0) == BIT_AND_EXPR
7885 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7887 tree and_expr = op0;
7888 tree and0 = TREE_OPERAND (and_expr, 0);
7889 tree and1 = TREE_OPERAND (and_expr, 1);
7892 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7893 || (TYPE_PRECISION (type)
7894 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7896 else if (TYPE_PRECISION (TREE_TYPE (and1))
7897 <= HOST_BITS_PER_WIDE_INT
7898 && host_integerp (and1, 1))
7900 unsigned HOST_WIDE_INT cst;
7902 cst = tree_low_cst (and1, 1);
7903 cst &= (HOST_WIDE_INT) -1
7904 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7905 change = (cst == 0);
7906 #ifdef LOAD_EXTEND_OP
7908 && !flag_syntax_only
7909 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7912 tree uns = unsigned_type_for (TREE_TYPE (and0));
7913 and0 = fold_convert_loc (loc, uns, and0);
7914 and1 = fold_convert_loc (loc, uns, and1);
7920 tem = force_fit_type_double (type, tree_to_double_int (and1),
7921 0, TREE_OVERFLOW (and1));
7922 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7923 fold_convert_loc (loc, type, and0), tem);
7927 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7928 when one of the new casts will fold away. Conservatively we assume
7929 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7930 if (POINTER_TYPE_P (type)
7931 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7932 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7933 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7934 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7936 tree arg00 = TREE_OPERAND (arg0, 0);
7937 tree arg01 = TREE_OPERAND (arg0, 1);
7939 return fold_build_pointer_plus_loc
7940 (loc, fold_convert_loc (loc, type, arg00), arg01);
7943 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7944 of the same precision, and X is an integer type not narrower than
7945 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7946 if (INTEGRAL_TYPE_P (type)
7947 && TREE_CODE (op0) == BIT_NOT_EXPR
7948 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7949 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7950 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7952 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7953 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7954 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7955 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7956 fold_convert_loc (loc, type, tem));
7959 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7960 type of X and Y (integer types only). */
7961 if (INTEGRAL_TYPE_P (type)
7962 && TREE_CODE (op0) == MULT_EXPR
7963 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7964 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7966 /* Be careful not to introduce new overflows. */
7968 if (TYPE_OVERFLOW_WRAPS (type))
7971 mult_type = unsigned_type_for (type);
7973 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7975 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7976 fold_convert_loc (loc, mult_type,
7977 TREE_OPERAND (op0, 0)),
7978 fold_convert_loc (loc, mult_type,
7979 TREE_OPERAND (op0, 1)));
7980 return fold_convert_loc (loc, type, tem);
7984 tem = fold_convert_const (code, type, op0);
7985 return tem ? tem : NULL_TREE;
7987 case ADDR_SPACE_CONVERT_EXPR:
7988 if (integer_zerop (arg0))
7989 return fold_convert_const (code, type, arg0);
7992 case FIXED_CONVERT_EXPR:
7993 tem = fold_convert_const (code, type, arg0);
7994 return tem ? tem : NULL_TREE;
7996 case VIEW_CONVERT_EXPR:
7997 if (TREE_TYPE (op0) == type)
7999 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8000 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8001 type, TREE_OPERAND (op0, 0));
8002 if (TREE_CODE (op0) == MEM_REF)
8003 return fold_build2_loc (loc, MEM_REF, type,
8004 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8006 /* For integral conversions with the same precision or pointer
8007 conversions use a NOP_EXPR instead. */
8008 if ((INTEGRAL_TYPE_P (type)
8009 || POINTER_TYPE_P (type))
8010 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8011 || POINTER_TYPE_P (TREE_TYPE (op0)))
8012 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8013 return fold_convert_loc (loc, type, op0);
8015 /* Strip inner integral conversions that do not change the precision. */
8016 if (CONVERT_EXPR_P (op0)
8017 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8018 || POINTER_TYPE_P (TREE_TYPE (op0)))
8019 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8020 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8021 && (TYPE_PRECISION (TREE_TYPE (op0))
8022 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8023 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8024 type, TREE_OPERAND (op0, 0));
8026 return fold_view_convert_expr (type, op0);
8029 tem = fold_negate_expr (loc, arg0);
8031 return fold_convert_loc (loc, type, tem);
8035 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8036 return fold_abs_const (arg0, type);
8037 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8038 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8039 /* Convert fabs((double)float) into (double)fabsf(float). */
8040 else if (TREE_CODE (arg0) == NOP_EXPR
8041 && TREE_CODE (type) == REAL_TYPE)
8043 tree targ0 = strip_float_extensions (arg0);
8045 return fold_convert_loc (loc, type,
8046 fold_build1_loc (loc, ABS_EXPR,
8050 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8051 else if (TREE_CODE (arg0) == ABS_EXPR)
8053 else if (tree_expr_nonnegative_p (arg0))
8056 /* Strip sign ops from argument. */
8057 if (TREE_CODE (type) == REAL_TYPE)
8059 tem = fold_strip_sign_ops (arg0);
8061 return fold_build1_loc (loc, ABS_EXPR, type,
8062 fold_convert_loc (loc, type, tem));
8067 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8068 return fold_convert_loc (loc, type, arg0);
8069 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8071 tree itype = TREE_TYPE (type);
8072 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8073 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8074 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8075 negate_expr (ipart));
8077 if (TREE_CODE (arg0) == COMPLEX_CST)
8079 tree itype = TREE_TYPE (type);
8080 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8081 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8082 return build_complex (type, rpart, negate_expr (ipart));
8084 if (TREE_CODE (arg0) == CONJ_EXPR)
8085 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8089 if (TREE_CODE (arg0) == INTEGER_CST)
8090 return fold_not_const (arg0, type);
8091 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8092 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8093 /* Convert ~ (-A) to A - 1. */
8094 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8095 return fold_build2_loc (loc, MINUS_EXPR, type,
8096 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8097 build_int_cst (type, 1));
8098 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8099 else if (INTEGRAL_TYPE_P (type)
8100 && ((TREE_CODE (arg0) == MINUS_EXPR
8101 && integer_onep (TREE_OPERAND (arg0, 1)))
8102 || (TREE_CODE (arg0) == PLUS_EXPR
8103 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8104 return fold_build1_loc (loc, NEGATE_EXPR, type,
8105 fold_convert_loc (loc, type,
8106 TREE_OPERAND (arg0, 0)));
8107 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8108 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8109 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8110 fold_convert_loc (loc, type,
8111 TREE_OPERAND (arg0, 0)))))
8112 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8113 fold_convert_loc (loc, type,
8114 TREE_OPERAND (arg0, 1)));
8115 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8116 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8117 fold_convert_loc (loc, type,
8118 TREE_OPERAND (arg0, 1)))))
8119 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8120 fold_convert_loc (loc, type,
8121 TREE_OPERAND (arg0, 0)), tem);
8122 /* Perform BIT_NOT_EXPR on each element individually. */
8123 else if (TREE_CODE (arg0) == VECTOR_CST)
8125 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8126 int count = TYPE_VECTOR_SUBPARTS (type), i;
8128 for (i = 0; i < count; i++)
8132 elem = TREE_VALUE (elements);
8133 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8134 if (elem == NULL_TREE)
8136 elements = TREE_CHAIN (elements);
8139 elem = build_int_cst (TREE_TYPE (type), -1);
8140 list = tree_cons (NULL_TREE, elem, list);
8143 return build_vector (type, nreverse (list));
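/* Illustrative instances of the BIT_NOT_EXPR folds above, all
   following from the identity ~A == -A - 1 on integral types:

       ~(-a)     ->  a - 1
       ~(a - 1)  ->  -a
       ~(a ^ b)  ->  ~a ^ b   (when ~a simplifies)  */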
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
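    /* The CALL_EXPR handling above and in the IMAGPART_EXPR case below
       exploits cexpi (x) == cos (x) + i*sin (x): __real__ cexpi (x)
       folds to cos (x) and __imag__ cexpi (x) to sin (x).  */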

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;
  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }
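  /* For example, for (a || b) && (a || c) we have a00 == a10 == a, so
     the first operand_equal_p case above rewrites it to a || (b && c),
     provided b has no side effects.  */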
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truthop (loc, code, type,
				   TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
    return tem;

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
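/* For example, 5 <= x (code0 == INTEGER_CST, sgn0 == 1) is first reduced
   to 4 < x and then swapped into the canonical form x > 4.  */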
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
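/* For &p->x, for instance, OFFSET is NULL and BITPOS is the bit offset
   of field x; BITPOS / BITS_PER_UNIT is no larger than the size of *p,
   so the sum cannot wrap and no warning is issued.  */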
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should go through fold_binary instead of calling this
   function directly.  Fold a comparison with tree code CODE and type
   TYPE with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 +- C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
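  /* For example, x + 20 > 10 becomes x > -10 here.  If the new constant
     overflows, as in x - 1 > INT_MAX, the comparison is decided outright;
     that one is always false.  */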
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
	   && SSA_NAME_IS_DEFAULT_DEF (base1))
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
	      && SSA_NAME_IS_DEFAULT_DEF (base0)))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
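	  /* For instance, &a[1] < &a[2] has equal bases and no variable
	     offsets, so it folds to 1 via the bitpos comparison above.  */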
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	  /* For non-equal bases we can simplify if they are addresses
	     of local binding decls or constants.  */
	  else if (indirect_base0 && indirect_base1
		   /* We know that !operand_equal_p (base0, base1, 0)
		      because the if condition was false.  But make
		      sure two decls are not the same.  */
		   && base0 != base1
		   && TREE_CODE (arg0) == ADDR_EXPR
		   && TREE_CODE (arg1) == ADDR_EXPR
		   && (((TREE_CODE (base0) == VAR_DECL
			 || TREE_CODE (base0) == PARM_DECL)
			&& (targetm.binds_local_p (base0)
			    || CONSTANT_CLASS_P (base1)))
		       || CONSTANT_CLASS_P (base0))
		   && (((TREE_CODE (base1) == VAR_DECL
			 || TREE_CODE (base1) == PARM_DECL)
			&& (targetm.binds_local_p (base1)
			    || CONSTANT_CLASS_P (base0)))
		       || CONSTANT_CLASS_P (base1)))
	    {
	      if (code == EQ_EXPR)
		return omit_two_operands_loc (loc, type, boolean_false_node,
					      arg0, arg1);
	      else if (code == NE_EXPR)
		return omit_two_operands_loc (loc, type, boolean_true_node,
					      arg0, arg1);
	    }
	  /* For equal offsets we can simplify to a comparison of the
	     base addresses.  */
	  else if (bitpos0 == bitpos1
		   && (indirect_base0
		       ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
		   && (indirect_base1
		       ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
		   && ((offset0 == offset1)
		       || (offset0 && offset1
			   && operand_equal_p (offset0, offset1, 0))))
	    {
	      if (indirect_base0)
		base0 = build_fold_addr_expr_loc (loc, base0);
	      if (indirect_base1)
		base1 = build_fold_addr_expr_loc (loc, base1);
	      return fold_build2_loc (loc, code, type, base0, base1);
	    }
	}
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
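  /* For example, x * 4 > 0 becomes x > 0, while x * -4 > 0 becomes
     x < 0; both rely on signed overflow being undefined.  */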
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
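/* For z == a + b*i, z * conj (z) == (a*a + b*b) + 0*i, which is exactly
   the COMPLEX_EXPR built above.  */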
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
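/* For example, for EXPR == q + 4*n, where q is the address of a
   16-byte-aligned object, the ADDR_EXPR gives modulus 16 and residue 0,
   the MULT_EXPR clamps the modulus to MIN (16, 4) == 4, and the result
   says the pointer value is 0 mod 4.  */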
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this kind of
	 expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc, sizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
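      /* The X + (X / CST) * -CST case above folds e.g.
	 x + (x / 16) * -16 to x % 16: cst0 == 16, cst1 == -16, and
	 their sum is zero.  */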
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure the type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
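	  /* E.g. (x & 0xf0) + (y & 0x0f) takes this path: the masks share
	     no bits, so the addition can never carry and is equivalent to
	     bitwise OR.  */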
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

    bit_rotate:
9935 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9936 is a rotate of A by C1 bits. */
9937 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9938 is a rotate of A by B bits. */
9940 enum tree_code code0, code1;
9942 code0 = TREE_CODE (arg0);
9943 code1 = TREE_CODE (arg1);
9944 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9945 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9946 && operand_equal_p (TREE_OPERAND (arg0, 0),
9947 TREE_OPERAND (arg1, 0), 0)
9948 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9949 TYPE_UNSIGNED (rtype))
9950 /* Only create rotates in complete modes. Other cases are not
9951 expanded properly. */
9952 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9954 tree tree01, tree11;
9955 enum tree_code code01, code11;
9957 tree01 = TREE_OPERAND (arg0, 1);
9958 tree11 = TREE_OPERAND (arg1, 1);
9959 STRIP_NOPS (tree01);
9960 STRIP_NOPS (tree11);
9961 code01 = TREE_CODE (tree01);
9962 code11 = TREE_CODE (tree11);
9963 if (code01 == INTEGER_CST
9964 && code11 == INTEGER_CST
9965 && TREE_INT_CST_HIGH (tree01) == 0
9966 && TREE_INT_CST_HIGH (tree11) == 0
9967 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9968 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9970 tem = build2_loc (loc, LROTATE_EXPR,
9971 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9972 TREE_OPERAND (arg0, 0),
9973 code0 == LSHIFT_EXPR ? tree01 : tree11);
9974 return fold_convert_loc (loc, type, tem);
9976 else if (code11 == MINUS_EXPR)
9978 tree tree110, tree111;
9979 tree110 = TREE_OPERAND (tree11, 0);
9980 tree111 = TREE_OPERAND (tree11, 1);
9981 STRIP_NOPS (tree110);
9982 STRIP_NOPS (tree111);
9983 if (TREE_CODE (tree110) == INTEGER_CST
9984 && 0 == compare_tree_int (tree110,
9986 (TREE_TYPE (TREE_OPERAND
9988 && operand_equal_p (tree01, tree111, 0))
9990 fold_convert_loc (loc, type,
9991 build2 ((code0 == LSHIFT_EXPR
9994 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9995 TREE_OPERAND (arg0, 0), tree01));
9997 else if (code01 == MINUS_EXPR)
9999 tree tree010, tree011;
10000 tree010 = TREE_OPERAND (tree01, 0);
10001 tree011 = TREE_OPERAND (tree01, 1);
10002 STRIP_NOPS (tree010);
10003 STRIP_NOPS (tree011);
10004 if (TREE_CODE (tree010) == INTEGER_CST
10005 && 0 == compare_tree_int (tree010,
10006 TYPE_PRECISION
10007 (TREE_TYPE (TREE_OPERAND
10008 (arg0, 0))))
10009 && operand_equal_p (tree11, tree011, 0))
10010 return fold_convert_loc
10011 (loc, type,
10012 build2 ((code0 != LSHIFT_EXPR
10013 ? LROTATE_EXPR
10014 : RROTATE_EXPR),
10015 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10016 TREE_OPERAND (arg0, 0), tree11));
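/* A sketch of the patterns recognized above, assuming a 32-bit
   unsigned int x and a shift count b with 0 < b < 32 (hypothetical
   names):
     (x << 3) + (x >> 29)        folds to x rotated left by 3;
     (x << b) + (x >> (32 - b))  folds to x rotated left by b.
   Both become a single LROTATE_EXPR, provided the type's precision
   matches the precision of its mode.  */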
10022 /* In most languages, we can't associate operations on floats through
10023 parentheses. Rather than remember where the parentheses were, we
10024 don't associate floats at all, unless the user has specified
10025 -fassociative-math.
10026 And, we need to make sure type is not saturating. */
10028 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10029 && !TYPE_SATURATING (type))
10031 tree var0, con0, lit0, minus_lit0;
10032 tree var1, con1, lit1, minus_lit1;
10033 bool ok = true;
10035 /* Split both trees into variables, constants, and literals. Then
10036 associate each group together, the constants with literals,
10037 then the result with variables. This increases the chances of
10038 literals being recombined later and of generating relocatable
10039 expressions for the sum of a constant and literal. */
10040 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10041 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10042 code == MINUS_EXPR);
10044 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10045 if (code == MINUS_EXPR)
10046 code = PLUS_EXPR;
10048 /* With undefined overflow we can only associate constants with one
10049 variable, and constants whose association doesn't overflow. */
10050 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10051 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10053 if (var0 && var1)
10055 tree tmp0 = var0;
10056 tree tmp1 = var1;
10058 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10059 tmp0 = TREE_OPERAND (tmp0, 0);
10060 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10061 tmp1 = TREE_OPERAND (tmp1, 0);
10062 /* The only case we can still associate with two variables
10063 is if they are the same, modulo negation. */
10064 if (!operand_equal_p (tmp0, tmp1, 0))
10065 ok = false;
10068 if (ok && lit0 && lit1)
10070 tree tmp0 = fold_convert (type, lit0);
10071 tree tmp1 = fold_convert (type, lit1);
10073 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10074 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10075 ok = false;
10079 /* Only do something if we found more than two objects. Otherwise,
10080 nothing has changed and we risk infinite recursion. */
10081 if (ok
10082 && (2 < ((var0 != 0) + (var1 != 0)
10083 + (con0 != 0) + (con1 != 0)
10084 + (lit0 != 0) + (lit1 != 0)
10085 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10087 var0 = associate_trees (loc, var0, var1, code, type);
10088 con0 = associate_trees (loc, con0, con1, code, type);
10089 lit0 = associate_trees (loc, lit0, lit1, code, type);
10090 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10092 /* Preserve the MINUS_EXPR if the negative part of the literal is
10093 greater than the positive part. Otherwise, the multiplicative
10094 folding code (i.e extract_muldiv) may be fooled in case
10095 unsigned constants are subtracted, like in the following
10096 example: ((X*2 + 4) - 8U)/2. */
10097 if (minus_lit0 && lit0)
10099 if (TREE_CODE (lit0) == INTEGER_CST
10100 && TREE_CODE (minus_lit0) == INTEGER_CST
10101 && tree_int_cst_lt (lit0, minus_lit0))
10103 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10104 MINUS_EXPR, type);
10105 lit0 = 0;
10107 else
10109 lit0 = associate_trees (loc, lit0, minus_lit0,
10110 MINUS_EXPR, type);
10111 minus_lit0 = 0;
10114 if (minus_lit0)
10116 if (con0 == 0)
10117 return
10118 fold_convert_loc (loc, type,
10119 associate_trees (loc, var0, minus_lit0,
10120 MINUS_EXPR, type));
10121 else
10123 con0 = associate_trees (loc, con0, minus_lit0,
10124 MINUS_EXPR, type);
10125 return
10126 fold_convert_loc (loc, type,
10127 associate_trees (loc, var0, con0,
10128 code, type));
10132 con0 = associate_trees (loc, con0, lit0, code, type);
10133 return
10134 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10135 code, type));
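/* A small worked instance of the splitting above, with hypothetical
   signed int operands x and y and no overflow concerns: for
   (x + 1) + (y + 2), split_tree yields variables x and y and
   literals 1 and 2, and the groups are recombined as (x + y) + 3,
   exposing the folded constant.  */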
10142 /* Pointer simplifications for subtraction, simple reassociations. */
10143 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10145 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10146 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10147 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10149 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10150 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10151 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10152 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10153 return fold_build2_loc (loc, PLUS_EXPR, type,
10154 fold_build2_loc (loc, MINUS_EXPR, type,
10156 fold_build2_loc (loc, MINUS_EXPR, type,
10159 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10160 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10162 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10163 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10164 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10165 fold_convert_loc (loc, type, arg1));
10166 if (tmp)
10167 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10170 /* A - (-B) -> A + B */
10171 if (TREE_CODE (arg1) == NEGATE_EXPR)
10172 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10173 fold_convert_loc (loc, type,
10174 TREE_OPERAND (arg1, 0)));
10175 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10176 if (TREE_CODE (arg0) == NEGATE_EXPR
10177 && (FLOAT_TYPE_P (type)
10178 || INTEGRAL_TYPE_P (type))
10179 && negate_expr_p (arg1)
10180 && reorder_operands_p (arg0, arg1))
10181 return fold_build2_loc (loc, MINUS_EXPR, type,
10182 fold_convert_loc (loc, type,
10183 negate_expr (arg1)),
10184 fold_convert_loc (loc, type,
10185 TREE_OPERAND (arg0, 0)));
10186 /* Convert -A - 1 to ~A. */
10187 if (INTEGRAL_TYPE_P (type)
10188 && TREE_CODE (arg0) == NEGATE_EXPR
10189 && integer_onep (arg1)
10190 && !TYPE_OVERFLOW_TRAPS (type))
10191 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10192 fold_convert_loc (loc, type,
10193 TREE_OPERAND (arg0, 0)));
10195 /* Convert -1 - A to ~A. */
10196 if (INTEGRAL_TYPE_P (type)
10197 && integer_all_onesp (arg0))
10198 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
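/* For example, with a hypothetical int a, both -a - 1 and -1 - a fold
   to ~a here, using the two's complement identity ~a == -a - 1; the
   -a - 1 form is skipped when overflow in the type traps.  */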
10201 /* X - (X / CST) * CST is X % CST. */
10202 if (INTEGRAL_TYPE_P (type)
10203 && TREE_CODE (arg1) == MULT_EXPR
10204 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10205 && operand_equal_p (arg0,
10206 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10207 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10208 TREE_OPERAND (arg1, 1), 0))
10209 return
10210 fold_convert_loc (loc, type,
10211 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10212 arg0, TREE_OPERAND (arg1, 1)));
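/* Example (hypothetical int x): x - (x / 7) * 7 matches the pattern
   above and folds to x % 7, which is exact for C's truncating
   division and remainder pair.  */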
10214 if (! FLOAT_TYPE_P (type))
10216 if (integer_zerop (arg0))
10217 return negate_expr (fold_convert_loc (loc, type, arg1));
10218 if (integer_zerop (arg1))
10219 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10221 /* Fold A - (A & B) into ~B & A. */
10222 if (!TREE_SIDE_EFFECTS (arg0)
10223 && TREE_CODE (arg1) == BIT_AND_EXPR)
10225 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10227 tree arg10 = fold_convert_loc (loc, type,
10228 TREE_OPERAND (arg1, 0));
10229 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10230 fold_build1_loc (loc, BIT_NOT_EXPR,
10231 type, arg10),
10232 fold_convert_loc (loc, type, arg0));
10234 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10236 tree arg11 = fold_convert_loc (loc,
10237 type, TREE_OPERAND (arg1, 1));
10238 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10239 fold_build1_loc (loc, BIT_NOT_EXPR,
10240 type, arg11),
10241 fold_convert_loc (loc, type, arg0));
10245 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10246 any power of 2 minus 1. */
10247 if (TREE_CODE (arg0) == BIT_AND_EXPR
10248 && TREE_CODE (arg1) == BIT_AND_EXPR
10249 && operand_equal_p (TREE_OPERAND (arg0, 0),
10250 TREE_OPERAND (arg1, 0), 0))
10252 tree mask0 = TREE_OPERAND (arg0, 1);
10253 tree mask1 = TREE_OPERAND (arg1, 1);
10254 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10256 if (operand_equal_p (tem, mask1, 0))
10258 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10259 TREE_OPERAND (arg0, 0), mask1);
10260 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10265 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10266 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10267 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10269 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10270 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10271 (-ARG1 + ARG0) reduces to -ARG1. */
10272 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10273 return negate_expr (fold_convert_loc (loc, type, arg1));
10275 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10276 __complex__ ( x, -y ). This is not the same for SNaNs or if
10277 signed zeros are involved. */
10278 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10279 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10280 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10282 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10283 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10284 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10285 bool arg0rz = false, arg0iz = false;
10286 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10287 || (arg0i && (arg0iz = real_zerop (arg0i))))
10289 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10290 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10291 if (arg0rz && arg1i && real_zerop (arg1i))
10293 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10294 arg1r ? arg1r
10295 : build1 (REALPART_EXPR, rtype, arg1));
10296 tree ip = arg0i ? arg0i
10297 : build1 (IMAGPART_EXPR, rtype, arg0);
10298 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10300 else if (arg0iz && arg1r && real_zerop (arg1r))
10302 tree rp = arg0r ? arg0r
10303 : build1 (REALPART_EXPR, rtype, arg0);
10304 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10305 arg1i ? arg1i
10306 : build1 (IMAGPART_EXPR, rtype, arg1));
10307 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10312 /* Fold &x - &x. This can happen from &x.foo - &x.
10313 This is unsafe for certain floats even in non-IEEE formats.
10314 In IEEE, it is unsafe because it does wrong for NaNs.
10315 Also note that operand_equal_p is always false if an operand
10316 is volatile. */
10318 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10319 && operand_equal_p (arg0, arg1, 0))
10320 return build_zero_cst (type);
10322 /* A - B -> A + (-B) if B is easily negatable. */
10323 if (negate_expr_p (arg1)
10324 && ((FLOAT_TYPE_P (type)
10325 /* Avoid this transformation if B is a positive REAL_CST. */
10326 && (TREE_CODE (arg1) != REAL_CST
10327 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10328 || INTEGRAL_TYPE_P (type)))
10329 return fold_build2_loc (loc, PLUS_EXPR, type,
10330 fold_convert_loc (loc, type, arg0),
10331 fold_convert_loc (loc, type,
10332 negate_expr (arg1)));
10334 /* Try folding difference of addresses. */
10336 HOST_WIDE_INT diff;
10338 if ((TREE_CODE (arg0) == ADDR_EXPR
10339 || TREE_CODE (arg1) == ADDR_EXPR)
10340 && ptr_difference_const (arg0, arg1, &diff))
10341 return build_int_cst_type (type, diff);
10344 /* Fold &a[i] - &a[j] to i-j. */
10345 if (TREE_CODE (arg0) == ADDR_EXPR
10346 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10347 && TREE_CODE (arg1) == ADDR_EXPR
10348 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10350 tree aref0 = TREE_OPERAND (arg0, 0);
10351 tree aref1 = TREE_OPERAND (arg1, 0);
10352 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10353 TREE_OPERAND (aref1, 0), 0))
10355 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10356 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10357 tree esz = array_ref_element_size (aref0);
10358 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10359 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10360 fold_convert_loc (loc, type, esz));
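/* Illustration, assuming a hypothetical array int a[N]: the tree-level
   difference of the two ADDR_EXPRs &a[i] and &a[j] (before the front
   end divides by the element size) folds to (i - j) * sizeof (int)
   here.  */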
10365 if (FLOAT_TYPE_P (type)
10366 && flag_unsafe_math_optimizations
10367 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10368 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10369 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10370 return tem;
10372 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10373 same or one. Make sure type is not saturating.
10374 fold_plusminus_mult_expr will re-associate. */
10375 if ((TREE_CODE (arg0) == MULT_EXPR
10376 || TREE_CODE (arg1) == MULT_EXPR)
10377 && !TYPE_SATURATING (type)
10378 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10380 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10381 if (tem)
10382 return tem;
10385 goto associate;
10387 case MULT_EXPR:
10388 /* (-A) * (-B) -> A * B */
10389 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10390 return fold_build2_loc (loc, MULT_EXPR, type,
10391 fold_convert_loc (loc, type,
10392 TREE_OPERAND (arg0, 0)),
10393 fold_convert_loc (loc, type,
10394 negate_expr (arg1)));
10395 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10396 return fold_build2_loc (loc, MULT_EXPR, type,
10397 fold_convert_loc (loc, type,
10398 negate_expr (arg0)),
10399 fold_convert_loc (loc, type,
10400 TREE_OPERAND (arg1, 0)));
10402 if (! FLOAT_TYPE_P (type))
10404 if (integer_zerop (arg1))
10405 return omit_one_operand_loc (loc, type, arg1, arg0);
10406 if (integer_onep (arg1))
10407 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10408 /* Transform x * -1 into -x. Make sure to do the negation
10409 on the original operand with conversions not stripped
10410 because we can only strip non-sign-changing conversions. */
10411 if (integer_all_onesp (arg1))
10412 return fold_convert_loc (loc, type, negate_expr (op0));
10413 /* Transform x * -C into -x * C if x is easily negatable. */
10414 if (TREE_CODE (arg1) == INTEGER_CST
10415 && tree_int_cst_sgn (arg1) == -1
10416 && negate_expr_p (arg0)
10417 && (tem = negate_expr (arg1)) != arg1
10418 && !TREE_OVERFLOW (tem))
10419 return fold_build2_loc (loc, MULT_EXPR, type,
10420 fold_convert_loc (loc, type,
10421 negate_expr (arg0)),
10422 tem);
10424 /* (a * (1 << b)) is (a << b) */
10425 if (TREE_CODE (arg1) == LSHIFT_EXPR
10426 && integer_onep (TREE_OPERAND (arg1, 0)))
10427 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10428 TREE_OPERAND (arg1, 1));
10429 if (TREE_CODE (arg0) == LSHIFT_EXPR
10430 && integer_onep (TREE_OPERAND (arg0, 0)))
10431 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10432 TREE_OPERAND (arg0, 1));
10434 /* (A + A) * C -> A * 2 * C */
10435 if (TREE_CODE (arg0) == PLUS_EXPR
10436 && TREE_CODE (arg1) == INTEGER_CST
10437 && operand_equal_p (TREE_OPERAND (arg0, 0),
10438 TREE_OPERAND (arg0, 1), 0))
10439 return fold_build2_loc (loc, MULT_EXPR, type,
10440 omit_one_operand_loc (loc, type,
10441 TREE_OPERAND (arg0, 0),
10442 TREE_OPERAND (arg0, 1)),
10443 fold_build2_loc (loc, MULT_EXPR, type,
10444 build_int_cst (type, 2) , arg1));
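/* E.g. with a hypothetical integer a, (a + a) * 15 is rewritten by
   the fold above as a * (2 * 15), i.e. a * 30 once the constant
   multiplication is folded.  */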
10446 strict_overflow_p = false;
10447 if (TREE_CODE (arg1) == INTEGER_CST
10448 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10449 &strict_overflow_p)))
10451 if (strict_overflow_p)
10452 fold_overflow_warning (("assuming signed overflow does not "
10453 "occur when simplifying "
10455 WARN_STRICT_OVERFLOW_MISC);
10456 return fold_convert_loc (loc, type, tem);
10459 /* Optimize z * conj(z) for integer complex numbers. */
10460 if (TREE_CODE (arg0) == CONJ_EXPR
10461 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10462 return fold_mult_zconjz (loc, type, arg1);
10463 if (TREE_CODE (arg1) == CONJ_EXPR
10464 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10465 return fold_mult_zconjz (loc, type, arg0);
10467 else
10469 /* Maybe fold x * 0 to 0. The expressions aren't the same
10470 when x is NaN, since x * 0 is also NaN. Nor are they the
10471 same in modes with signed zeros, since multiplying a
10472 negative value by 0 gives -0, not +0. */
10473 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10474 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10475 && real_zerop (arg1))
10476 return omit_one_operand_loc (loc, type, arg1, arg0);
10477 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10478 Likewise for complex arithmetic with signed zeros. */
10479 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10480 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10481 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10482 && real_onep (arg1))
10483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10485 /* Transform x * -1.0 into -x. */
10486 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10487 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10488 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10489 && real_minus_onep (arg1))
10490 return fold_convert_loc (loc, type, negate_expr (arg0));
10492 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10493 the result for floating point types due to rounding so it is applied
10494 only if -fassociative-math was specified. */
10495 if (flag_associative_math
10496 && TREE_CODE (arg0) == RDIV_EXPR
10497 && TREE_CODE (arg1) == REAL_CST
10498 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10500 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10501 arg1);
10502 if (tem)
10503 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10504 TREE_OPERAND (arg0, 1));
10507 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10508 if (operand_equal_p (arg0, arg1, 0))
10510 tree tem = fold_strip_sign_ops (arg0);
10511 if (tem != NULL_TREE)
10513 tem = fold_convert_loc (loc, type, tem);
10514 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10518 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10519 This is not the same for NaNs or if signed zeros are
10520 involved. */
10521 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10522 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10523 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10524 && TREE_CODE (arg1) == COMPLEX_CST
10525 && real_zerop (TREE_REALPART (arg1)))
10527 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10528 if (real_onep (TREE_IMAGPART (arg1)))
10529 return
10530 fold_build2_loc (loc, COMPLEX_EXPR, type,
10531 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10532 rtype, arg0)),
10533 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10534 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10535 return
10536 fold_build2_loc (loc, COMPLEX_EXPR, type,
10537 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10538 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10539 rtype, arg0)));
10542 /* Optimize z * conj(z) for floating point complex numbers.
10543 Guarded by flag_unsafe_math_optimizations as non-finite
10544 imaginary components don't produce scalar results. */
10545 if (flag_unsafe_math_optimizations
10546 && TREE_CODE (arg0) == CONJ_EXPR
10547 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10548 return fold_mult_zconjz (loc, type, arg1);
10549 if (flag_unsafe_math_optimizations
10550 && TREE_CODE (arg1) == CONJ_EXPR
10551 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10552 return fold_mult_zconjz (loc, type, arg0);
10554 if (flag_unsafe_math_optimizations)
10556 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10557 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10559 /* Optimizations of root(...)*root(...). */
10560 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10562 tree rootfn, arg;
10563 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10564 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10566 /* Optimize sqrt(x)*sqrt(x) as x. */
10567 if (BUILTIN_SQRT_P (fcode0)
10568 && operand_equal_p (arg00, arg10, 0)
10569 && ! HONOR_SNANS (TYPE_MODE (type)))
10570 return arg00;
10572 /* Optimize root(x)*root(y) as root(x*y). */
10573 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10574 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10575 return build_call_expr_loc (loc, rootfn, 1, arg);
10578 /* Optimize expN(x)*expN(y) as expN(x+y). */
10579 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10581 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10582 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10583 CALL_EXPR_ARG (arg0, 0),
10584 CALL_EXPR_ARG (arg1, 0));
10585 return build_call_expr_loc (loc, expfn, 1, arg);
10588 /* Optimizations of pow(...)*pow(...). */
10589 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10590 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10591 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10593 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10594 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10595 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10596 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10598 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10599 if (operand_equal_p (arg01, arg11, 0))
10601 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10602 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10603 arg00, arg10);
10604 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10607 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10608 if (operand_equal_p (arg00, arg10, 0))
10610 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10611 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10612 arg01, arg11);
10613 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10617 /* Optimize tan(x)*cos(x) as sin(x). */
10618 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10619 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10620 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10621 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10622 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10623 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10624 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10625 CALL_EXPR_ARG (arg1, 0), 0))
10627 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10629 if (sinfn != NULL_TREE)
10630 return build_call_expr_loc (loc, sinfn, 1,
10631 CALL_EXPR_ARG (arg0, 0));
10634 /* Optimize x*pow(x,c) as pow(x,c+1). */
10635 if (fcode1 == BUILT_IN_POW
10636 || fcode1 == BUILT_IN_POWF
10637 || fcode1 == BUILT_IN_POWL)
10639 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10640 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10641 if (TREE_CODE (arg11) == REAL_CST
10642 && !TREE_OVERFLOW (arg11)
10643 && operand_equal_p (arg0, arg10, 0))
10645 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10646 REAL_VALUE_TYPE c;
10647 tree arg;
10649 c = TREE_REAL_CST (arg11);
10650 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10651 arg = build_real (type, c);
10652 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10656 /* Optimize pow(x,c)*x as pow(x,c+1). */
10657 if (fcode0 == BUILT_IN_POW
10658 || fcode0 == BUILT_IN_POWF
10659 || fcode0 == BUILT_IN_POWL)
10661 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10662 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10663 if (TREE_CODE (arg01) == REAL_CST
10664 && !TREE_OVERFLOW (arg01)
10665 && operand_equal_p (arg1, arg00, 0))
10667 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10668 REAL_VALUE_TYPE c;
10669 tree arg;
10671 c = TREE_REAL_CST (arg01);
10672 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10673 arg = build_real (type, c);
10674 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10678 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10679 if (!in_gimple_form
10680 && optimize_function_for_speed_p (cfun)
10681 && operand_equal_p (arg0, arg1, 0))
10683 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10685 if (powfn)
10687 tree arg = build_real (type, dconst2);
10688 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
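/* A sketch of the effect, assuming -funsafe-math-optimizations and
   optimization for speed: for a hypothetical double x, x * x is
   canonicalized to pow (x, 2.0), which later expansion turns back
   into x * x; the canonical form lets the pow-combining folds above
   fire first.  */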
10692 goto associate;
10694 case BIT_IOR_EXPR:
10695 bit_ior:
10697 if (integer_all_onesp (arg1))
10698 return omit_one_operand_loc (loc, type, arg1, arg0);
10699 if (integer_zerop (arg1))
10700 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10701 if (operand_equal_p (arg0, arg1, 0))
10702 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10704 /* ~X | X is -1. */
10705 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10706 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10708 t1 = build_zero_cst (type);
10709 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10710 return omit_one_operand_loc (loc, type, t1, arg1);
10713 /* X | ~X is -1. */
10714 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10717 t1 = build_zero_cst (type);
10718 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10719 return omit_one_operand_loc (loc, type, t1, arg0);
10722 /* Canonicalize (X & C1) | C2. */
10723 if (TREE_CODE (arg0) == BIT_AND_EXPR
10724 && TREE_CODE (arg1) == INTEGER_CST
10725 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10727 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10728 int width = TYPE_PRECISION (type), w;
10729 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10730 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10731 hi2 = TREE_INT_CST_HIGH (arg1);
10732 lo2 = TREE_INT_CST_LOW (arg1);
10734 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10735 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10736 return omit_one_operand_loc (loc, type, arg1,
10737 TREE_OPERAND (arg0, 0));
10739 if (width > HOST_BITS_PER_WIDE_INT)
10741 mhi = (unsigned HOST_WIDE_INT) -1
10742 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10743 mlo = -1;
10745 else
10747 mhi = 0;
10748 mlo = (unsigned HOST_WIDE_INT) -1
10749 >> (HOST_BITS_PER_WIDE_INT - width);
10752 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10753 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10754 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10755 TREE_OPERAND (arg0, 0), arg1);
10757 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10758 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10759 mode which allows further optimizations. */
10760 hi1 &= mhi;
10761 lo1 &= mlo;
10762 hi2 &= mhi;
10763 lo2 &= mlo;
10764 hi3 = hi1 & ~hi2;
10765 lo3 = lo1 & ~lo2;
10766 for (w = BITS_PER_UNIT;
10767 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10768 w <<= 1)
10770 unsigned HOST_WIDE_INT mask
10771 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10772 if (((lo1 | lo2) & mask) == mask
10773 && (lo1 & ~mask) == 0 && hi1 == 0)
10775 hi3 = 0;
10776 lo3 = mask;
10777 break;
10780 if (hi3 != hi1 || lo3 != lo1)
10781 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10782 fold_build2_loc (loc, BIT_AND_EXPR, type,
10783 TREE_OPERAND (arg0, 0),
10784 build_int_cst_wide (type,
10785 lo3, hi3)),
10786 arg1);
10789 /* (X & Y) | Y is (X, Y). */
10790 if (TREE_CODE (arg0) == BIT_AND_EXPR
10791 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10792 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10793 /* (X & Y) | X is (Y, X). */
10794 if (TREE_CODE (arg0) == BIT_AND_EXPR
10795 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10796 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10797 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10798 /* X | (X & Y) is (Y, X). */
10799 if (TREE_CODE (arg1) == BIT_AND_EXPR
10800 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10801 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10802 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10803 /* X | (Y & X) is (Y, X). */
10804 if (TREE_CODE (arg1) == BIT_AND_EXPR
10805 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10806 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10807 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10809 /* (X & ~Y) | (~X & Y) is X ^ Y */
10810 if (TREE_CODE (arg0) == BIT_AND_EXPR
10811 && TREE_CODE (arg1) == BIT_AND_EXPR)
10813 tree a0, a1, l0, l1, n0, n1;
10815 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10816 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10818 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10819 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10821 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10822 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10824 if ((operand_equal_p (n0, a0, 0)
10825 && operand_equal_p (n1, a1, 0))
10826 || (operand_equal_p (n0, a1, 0)
10827 && operand_equal_p (n1, a0, 0)))
10828 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
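/* For example, with hypothetical integers a and b,
   (a & ~b) | (~a & b) matches the pattern above (in either operand
   order) and folds to a ^ b.  */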
10831 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10832 if (t1 != NULL_TREE)
10833 return t1;
10835 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10837 This results in more efficient code for machines without a NAND
10838 instruction. Combine will canonicalize to the first form
10839 which will allow use of NAND instructions provided by the
10840 backend if they exist. */
10841 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10842 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10843 return
10845 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10846 build2 (BIT_AND_EXPR, type,
10847 fold_convert_loc (loc, type,
10848 TREE_OPERAND (arg0, 0)),
10849 fold_convert_loc (loc, type,
10850 TREE_OPERAND (arg1, 0))));
10853 /* See if this can be simplified into a rotate first. If that
10854 is unsuccessful continue in the association code. */
10855 goto bit_rotate;
10857 case BIT_XOR_EXPR:
10858 if (integer_zerop (arg1))
10859 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10860 if (integer_all_onesp (arg1))
10861 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10862 if (operand_equal_p (arg0, arg1, 0))
10863 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10865 /* ~X ^ X is -1. */
10866 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10867 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10869 t1 = build_zero_cst (type);
10870 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10871 return omit_one_operand_loc (loc, type, t1, arg1);
10874 /* X ^ ~X is -1. */
10875 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10876 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10878 t1 = build_zero_cst (type);
10879 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10880 return omit_one_operand_loc (loc, type, t1, arg0);
10883 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10884 with a constant, and the two constants have no bits in common,
10885 we should treat this as a BIT_IOR_EXPR since this may produce more
10886 simplifications. */
10887 if (TREE_CODE (arg0) == BIT_AND_EXPR
10888 && TREE_CODE (arg1) == BIT_AND_EXPR
10889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10890 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10891 && integer_zerop (const_binop (BIT_AND_EXPR,
10892 TREE_OPERAND (arg0, 1),
10893 TREE_OPERAND (arg1, 1))))
10895 code = BIT_IOR_EXPR;
10896 goto bit_ior;
10899 /* (X | Y) ^ X -> Y & ~ X*/
10900 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10901 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10903 tree t2 = TREE_OPERAND (arg0, 1);
10904 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10905 arg1);
10906 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10907 fold_convert_loc (loc, type, t2),
10908 fold_convert_loc (loc, type, t1));
10909 return t1;
10912 /* (Y | X) ^ X -> Y & ~ X*/
10913 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10914 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10916 tree t2 = TREE_OPERAND (arg0, 0);
10917 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10918 arg1);
10919 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10920 fold_convert_loc (loc, type, t2),
10921 fold_convert_loc (loc, type, t1));
10922 return t1;
10925 /* X ^ (X | Y) -> Y & ~ X*/
10926 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10927 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10929 tree t2 = TREE_OPERAND (arg1, 1);
10930 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10931 arg0);
10932 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10933 fold_convert_loc (loc, type, t2),
10934 fold_convert_loc (loc, type, t1));
10935 return t1;
10938 /* X ^ (Y | X) -> Y & ~ X*/
10939 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10940 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10942 tree t2 = TREE_OPERAND (arg1, 0);
10943 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10944 arg0);
10945 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10946 fold_convert_loc (loc, type, t2),
10947 fold_convert_loc (loc, type, t1));
10948 return t1;
10951 /* Convert ~X ^ ~Y to X ^ Y. */
10952 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10953 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10954 return fold_build2_loc (loc, code, type,
10955 fold_convert_loc (loc, type,
10956 TREE_OPERAND (arg0, 0)),
10957 fold_convert_loc (loc, type,
10958 TREE_OPERAND (arg1, 0)));
10960 /* Convert ~X ^ C to X ^ ~C. */
10961 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10962 && TREE_CODE (arg1) == INTEGER_CST)
10963 return fold_build2_loc (loc, code, type,
10964 fold_convert_loc (loc, type,
10965 TREE_OPERAND (arg0, 0)),
10966 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10968 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10969 if (TREE_CODE (arg0) == BIT_AND_EXPR
10970 && integer_onep (TREE_OPERAND (arg0, 1))
10971 && integer_onep (arg1))
10972 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10973 build_int_cst (TREE_TYPE (arg0), 0));
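/* E.g. (x & 1) ^ 1, for a hypothetical integer x, becomes
   (x & 1) == 0 here; both forms only ever yield 0 or 1, so the
   rewrite is exact.  */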
10975 /* Fold (X & Y) ^ Y as ~X & Y. */
10976 if (TREE_CODE (arg0) == BIT_AND_EXPR
10977 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10981 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10982 fold_convert_loc (loc, type, arg1));
10984 /* Fold (X & Y) ^ X as ~Y & X. */
10985 if (TREE_CODE (arg0) == BIT_AND_EXPR
10986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10987 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10989 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10990 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10991 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10992 fold_convert_loc (loc, type, arg1));
10994 /* Fold X ^ (X & Y) as X & ~Y. */
10995 if (TREE_CODE (arg1) == BIT_AND_EXPR
10996 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10998 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10999 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11000 fold_convert_loc (loc, type, arg0),
11001 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11003 /* Fold X ^ (Y & X) as ~Y & X. */
11004 if (TREE_CODE (arg1) == BIT_AND_EXPR
11005 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11006 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11008 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11009 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11010 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11011 fold_convert_loc (loc, type, arg0));
11014 /* See if this can be simplified into a rotate first. If that
11015 is unsuccessful continue in the association code. */
11016 goto bit_rotate;
11018 case BIT_AND_EXPR:
11019 if (integer_all_onesp (arg1))
11020 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11021 if (integer_zerop (arg1))
11022 return omit_one_operand_loc (loc, type, arg1, arg0);
11023 if (operand_equal_p (arg0, arg1, 0))
11024 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11026 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11027 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11028 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11029 || (TREE_CODE (arg0) == EQ_EXPR
11030 && integer_zerop (TREE_OPERAND (arg0, 1))))
11031 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11032 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11034 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11035 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11036 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11037 || (TREE_CODE (arg1) == EQ_EXPR
11038 && integer_zerop (TREE_OPERAND (arg1, 1))))
11039 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11040 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11042 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11043 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11044 && TREE_CODE (arg1) == INTEGER_CST
11045 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11047 tree tmp1 = fold_convert_loc (loc, type, arg1);
11048 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11049 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11050 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11051 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11052 return
11053 fold_convert_loc (loc, type,
11054 fold_build2_loc (loc, BIT_IOR_EXPR,
11055 type, tmp2, tmp3));
11058 /* (X | Y) & Y is (X, Y). */
11059 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11060 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11061 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11062 /* (X | Y) & X is (Y, X). */
11063 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11065 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11066 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11067 /* X & (X | Y) is (Y, X). */
11068 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11069 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11070 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11071 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11072 /* X & (Y | X) is (Y, X). */
11073 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11074 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11075 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11076 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11078 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11079 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11080 && integer_onep (TREE_OPERAND (arg0, 1))
11081 && integer_onep (arg1))
11083 tem = TREE_OPERAND (arg0, 0);
11084 return fold_build2_loc (loc, EQ_EXPR, type,
11085 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11086 build_int_cst (TREE_TYPE (tem), 1)),
11087 build_int_cst (TREE_TYPE (tem), 0));
11089 /* Fold ~X & 1 as (X & 1) == 0. */
11090 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11091 && integer_onep (arg1))
11093 tem = TREE_OPERAND (arg0, 0);
11094 return fold_build2_loc (loc, EQ_EXPR, type,
11095 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11096 build_int_cst (TREE_TYPE (tem), 1)),
11097 build_int_cst (TREE_TYPE (tem), 0));
11099 /* Fold !X & 1 as X == 0. */
11100 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11101 && integer_onep (arg1))
11103 tem = TREE_OPERAND (arg0, 0);
11104 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11105 build_int_cst (TREE_TYPE (tem), 0));
11108 /* Fold (X ^ Y) & Y as ~X & Y. */
11109 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11110 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11112 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11113 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11114 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11115 fold_convert_loc (loc, type, arg1));
11117 /* Fold (X ^ Y) & X as ~Y & X. */
11118 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11120 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11122 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11123 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11124 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11125 fold_convert_loc (loc, type, arg1));
11127 /* Fold X & (X ^ Y) as X & ~Y. */
11128 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11129 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11131 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11132 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11133 fold_convert_loc (loc, type, arg0),
11134 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11136 /* Fold X & (Y ^ X) as ~Y & X. */
11137 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11139 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11141 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11142 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11143 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11144 fold_convert_loc (loc, type, arg0));
11147 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11148 ((A & N) + B) & M -> (A + B) & M
11149 Similarly if (N & M) == 0,
11150 ((A | N) + B) & M -> (A + B) & M
11151 and for - instead of + (or unary - instead of +)
11152 and/or ^ instead of |.
11153 If B is constant and (B & M) == 0, fold into A & M. */
11154 if (host_integerp (arg1, 1))
11156 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11157 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11158 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11159 && (TREE_CODE (arg0) == PLUS_EXPR
11160 || TREE_CODE (arg0) == MINUS_EXPR
11161 || TREE_CODE (arg0) == NEGATE_EXPR)
11162 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11163 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11165 tree pmop[2];
11166 int which = 0;
11167 unsigned HOST_WIDE_INT cst0;
11169 /* Now we know that arg0 is (C + D) or (C - D) or
11170 -C and arg1 (M) is == (1LL << cst) - 1.
11171 Store C into PMOP[0] and D into PMOP[1]. */
11172 pmop[0] = TREE_OPERAND (arg0, 0);
11173 pmop[1] = NULL;
11174 if (TREE_CODE (arg0) != NEGATE_EXPR)
11176 pmop[1] = TREE_OPERAND (arg0, 1);
11177 which = 1;
11180 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11181 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11182 & cst1) != cst1)
11183 which = -1;
11185 for (; which >= 0; which--)
11186 switch (TREE_CODE (pmop[which]))
11188 case BIT_AND_EXPR:
11189 case BIT_IOR_EXPR:
11190 case BIT_XOR_EXPR:
11191 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11192 != INTEGER_CST)
11193 break;
11194 /* tree_low_cst not used, because we don't care about
11195 the upper bits. */
11196 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11197 cst0 &= cst1;
11198 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11200 if (cst0 != cst1)
11201 break;
11203 else if (cst0 != 0)
11204 break;
11205 /* If C or D is of the form (A & N) where
11206 (N & M) == M, or of the form (A | N) or
11207 (A ^ N) where (N & M) == 0, replace it with A. */
11208 pmop[which] = TREE_OPERAND (pmop[which], 0);
11209 break;
11210 case INTEGER_CST:
11211 /* If C or D is a N where (N & M) == 0, it can be
11212 omitted (assumed 0). */
11213 if ((TREE_CODE (arg0) == PLUS_EXPR
11214 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11215 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11216 pmop[which] = NULL;
11217 break;
11218 default:
11219 break;
11222 /* Only build anything new if we optimized one or both arguments
11223 above. */
11224 if (pmop[0] != TREE_OPERAND (arg0, 0)
11225 || (TREE_CODE (arg0) != NEGATE_EXPR
11226 && pmop[1] != TREE_OPERAND (arg0, 1)))
11228 tree utype = TREE_TYPE (arg0);
11229 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11231 /* Perform the operations in a type that has defined
11232 overflow behavior. */
11233 utype = unsigned_type_for (TREE_TYPE (arg0));
11234 if (pmop[0] != NULL)
11235 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11236 if (pmop[1] != NULL)
11237 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11240 if (TREE_CODE (arg0) == NEGATE_EXPR)
11241 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11242 else if (TREE_CODE (arg0) == PLUS_EXPR)
11244 if (pmop[0] != NULL && pmop[1] != NULL)
11245 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11246 pmop[0], pmop[1]);
11247 else if (pmop[0] != NULL)
11248 tem = pmop[0];
11249 else if (pmop[1] != NULL)
11250 tem = pmop[1];
11251 else
11252 return build_int_cst (type, 0);
11254 else if (pmop[0] == NULL)
11255 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11256 else
11257 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11258 pmop[0], pmop[1]);
11259 /* TEM is now the new binary +, - or unary - replacement. */
11260 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11261 fold_convert_loc (loc, utype, arg1));
11262 return fold_convert_loc (loc, type, tem);
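/* A worked instance of the mask propagation above, with hypothetical
   integers x and y and M == 7 (so M + 1 is a power of two):
     ((x | 8) + y) & 7  folds to  (x + y) & 7, since 8 & 7 == 0;
     ((x & 7) + y) & 7  folds to  (x + y) & 7, since 7 & 7 == 7.
   The replacement arithmetic is done in an unsigned type when the
   original type's overflow is not wrapping.  */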
11267 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11268 if (t1 != NULL_TREE)
11269 return t1;
11270 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11271 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11272 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11274 unsigned int prec
11275 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11277 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11278 && (~TREE_INT_CST_LOW (arg1)
11279 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11280 return
11281 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11284 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11286 This results in more efficient code for machines without a NOR
11287 instruction. Combine will canonicalize to the first form
11288 which will allow use of NOR instructions provided by the
11289 backend if they exist. */
11290 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11291 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11293 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11294 build2 (BIT_IOR_EXPR, type,
11295 fold_convert_loc (loc, type,
11296 TREE_OPERAND (arg0, 0)),
11297 fold_convert_loc (loc, type,
11298 TREE_OPERAND (arg1, 0))));
11301 /* If arg0 is derived from the address of an object or function, we may
11302 be able to fold this expression using the object or function's
11303 alignment. */
11304 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11306 unsigned HOST_WIDE_INT modulus, residue;
11307 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11309 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11310 integer_onep (arg1));
11312 /* This works because modulus is a power of 2. If this weren't the
11313 case, we'd have to replace it by its greatest power-of-2
11314 divisor: modulus & -modulus. */
11315 if (low < modulus)
11316 return build_int_cst (type, residue & low);
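/* Sketch: if get_pointer_modulus_and_residue determines that a
   hypothetical pointer p is 16-byte aligned (modulus 16, residue 0),
   then a BIT_AND_EXPR of p with the constant 15 folds to 0 here, for
   any mask below the modulus.  */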
11319 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11320 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11321 if the new mask might be further optimized. */
11322 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11323 || TREE_CODE (arg0) == RSHIFT_EXPR)
11324 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11325 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11326 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11327 < TYPE_PRECISION (TREE_TYPE (arg0))
11328 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11329 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11331 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11332 unsigned HOST_WIDE_INT mask
11333 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11334 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11335 tree shift_type = TREE_TYPE (arg0);
11337 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11338 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11339 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11340 && TYPE_PRECISION (TREE_TYPE (arg0))
11341 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11343 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11344 tree arg00 = TREE_OPERAND (arg0, 0);
11345 /* See if more bits can be proven as zero because of
11346 zero extension. */
11347 if (TREE_CODE (arg00) == NOP_EXPR
11348 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11350 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11351 if (TYPE_PRECISION (inner_type)
11352 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11353 && TYPE_PRECISION (inner_type) < prec)
11355 prec = TYPE_PRECISION (inner_type);
11356 /* See if we can shorten the right shift. */
11357 if (shiftc < prec)
11358 shift_type = inner_type;
11361 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11362 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11363 zerobits <<= prec - shiftc;
11364 /* For an arithmetic shift, if the sign bit could be set, zerobits
11365 may actually contain sign bits, so no transformation is
11366 possible, unless MASK masks them all away. In that
11367 case the shift needs to be converted into a logical shift. */
11368 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11369 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11371 if ((mask & zerobits) == 0)
11372 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11373 else
11374 zerobits = 0;
11378 /* ((X << 16) & 0xff00) is (X, 0). */
11379 if ((mask & zerobits) == mask)
11380 return omit_one_operand_loc (loc, type,
11381 build_int_cst (type, 0), arg0);
11383 newmask = mask | zerobits;
11384 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11386 unsigned int prec;
11388 /* Only do the transformation if NEWMASK is some integer
11389 mode's mask. */
11390 for (prec = BITS_PER_UNIT;
11391 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11392 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11393 break;
11394 if (prec < HOST_BITS_PER_WIDE_INT
11395 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11397 tree newmaskt;
11399 if (shift_type != TREE_TYPE (arg0))
11401 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11402 fold_convert_loc (loc, shift_type,
11403 TREE_OPERAND (arg0, 0)),
11404 TREE_OPERAND (arg0, 1));
11405 tem = fold_convert_loc (loc, type, tem);
11407 else
11408 tem = op0;
11409 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11410 if (!tree_int_cst_equal (newmaskt, arg1))
11411 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11416 goto associate;
11418 case RDIV_EXPR:
11419 /* Don't touch a floating-point divide by zero unless the mode
11420 of the constant can represent infinity. */
11421 if (TREE_CODE (arg1) == REAL_CST
11422 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11423 && real_zerop (arg1))
11424 return NULL_TREE;
11426 /* Optimize A / A to 1.0 if we don't care about
11427 NaNs or Infinities. Skip the transformation
11428 for non-real operands. */
11429 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11430 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11431 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11432 && operand_equal_p (arg0, arg1, 0))
11434 tree r = build_real (TREE_TYPE (arg0), dconst1);
11436 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11439 /* The complex version of the above A / A optimization. */
11440 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11441 && operand_equal_p (arg0, arg1, 0))
11443 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11444 if (! HONOR_NANS (TYPE_MODE (elem_type))
11445 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11447 tree r = build_real (elem_type, dconst1);
11448 /* omit_two_operands will call fold_convert for us. */
11449 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11453 /* (-A) / (-B) -> A / B */
11454 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11455 return fold_build2_loc (loc, RDIV_EXPR, type,
11456 TREE_OPERAND (arg0, 0),
11457 negate_expr (arg1));
11458 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11459 return fold_build2_loc (loc, RDIV_EXPR, type,
11460 negate_expr (arg0),
11461 TREE_OPERAND (arg1, 0));
11463 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11464 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11465 && real_onep (arg1))
11466 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11468 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11469 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11470 && real_minus_onep (arg1))
11471 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11472 negate_expr (arg0)));
11474 /* If ARG1 is a constant, we can convert this to a multiply by the
11475 reciprocal. This does not have the same rounding properties,
11476 so only do this if -freciprocal-math. We can actually
11477 always safely do it if ARG1 is a power of two, but it's hard to
11478 tell if it is or not in a portable manner. */
11479 if (TREE_CODE (arg1) == REAL_CST)
11481 if (flag_reciprocal_math
11482 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11483 arg1)))
11484 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11485 /* Find the reciprocal if optimizing and the result is exact. */
11486 if (optimize)
11488 REAL_VALUE_TYPE r;
11489 r = TREE_REAL_CST (arg1);
11490 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11492 tem = build_real (type, r);
11493 return fold_build2_loc (loc, MULT_EXPR, type,
11494 fold_convert_loc (loc, type, arg0), tem);
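/* Two examples of the constant-divisor rewrites above, with a
   hypothetical double x: x / 2.0 becomes x * 0.5 whenever we are
   optimizing, since the reciprocal of a power of two is exact;
   x / 5.0 becomes x * 0.2 only under -freciprocal-math, since 0.2 is
   not exactly representable in binary.  */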
11498 /* Convert A/B/C to A/(B*C). */
11499 if (flag_reciprocal_math
11500 && TREE_CODE (arg0) == RDIV_EXPR)
11501 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11502 fold_build2_loc (loc, MULT_EXPR, type,
11503 TREE_OPERAND (arg0, 1), arg1));
11505 /* Convert A/(B/C) to (A/B)*C. */
11506 if (flag_reciprocal_math
11507 && TREE_CODE (arg1) == RDIV_EXPR)
11508 return fold_build2_loc (loc, MULT_EXPR, type,
11509 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11510 TREE_OPERAND (arg1, 0)),
11511 TREE_OPERAND (arg1, 1));
11513 /* Convert C1/(X*C2) into (C1/C2)/X. */
11514 if (flag_reciprocal_math
11515 && TREE_CODE (arg1) == MULT_EXPR
11516 && TREE_CODE (arg0) == REAL_CST
11517 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11519 tree tem = const_binop (RDIV_EXPR, arg0,
11520 TREE_OPERAND (arg1, 1));
11521 if (tem)
11522 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11523 TREE_OPERAND (arg1, 0));
11526 if (flag_unsafe_math_optimizations)
11528 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11529 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11531 /* Optimize sin(x)/cos(x) as tan(x). */
11532 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11533 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11534 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11535 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11536 CALL_EXPR_ARG (arg1, 0), 0))
11538 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11540 if (tanfn != NULL_TREE)
11541 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
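/* E.g. with -funsafe-math-optimizations, sin (x) / cos (x) for a
   common (hypothetical) argument x is contracted to tan (x) by the
   fold above; the rounding of the result can differ from the
   two-call form, which is why the flag is required.  */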
11544 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11545 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11546 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11547 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11548 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11549 CALL_EXPR_ARG (arg1, 0), 0))
11551 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11553 if (tanfn != NULL_TREE)
11555 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11556 CALL_EXPR_ARG (arg0, 0));
11557 return fold_build2_loc (loc, RDIV_EXPR, type,
11558 build_real (type, dconst1), tmp);
11562 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11563 NaNs or Infinities. */
11564 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11565 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11566 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11568 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11569 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11571 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11572 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11573 && operand_equal_p (arg00, arg01, 0))
11575 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11577 if (cosfn != NULL_TREE)
11578 return build_call_expr_loc (loc, cosfn, 1, arg00);
11582 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11583 NaNs or Infinities. */
11584 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11585 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11586 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11588 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11589 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11591 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11592 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11593 && operand_equal_p (arg00, arg01, 0))
11595 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11597 if (cosfn != NULL_TREE)
11599 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11600 return fold_build2_loc (loc, RDIV_EXPR, type,
11601 build_real (type, dconst1),
11602 tmp);
11607 /* Optimize pow(x,c)/x as pow(x,c-1). */
11608 if (fcode0 == BUILT_IN_POW
11609 || fcode0 == BUILT_IN_POWF
11610 || fcode0 == BUILT_IN_POWL)
11612 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11613 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11614 if (TREE_CODE (arg01) == REAL_CST
11615 && !TREE_OVERFLOW (arg01)
11616 && operand_equal_p (arg1, arg00, 0))
11618 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11619 REAL_VALUE_TYPE c;
11620 tree arg;
11622 c = TREE_REAL_CST (arg01);
11623 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11624 arg = build_real (type, c);
11625 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11629 /* Optimize a/root(b/c) into a*root(c/b). */
11630 if (BUILTIN_ROOT_P (fcode1))
11632 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11634 if (TREE_CODE (rootarg) == RDIV_EXPR)
11636 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11637 tree b = TREE_OPERAND (rootarg, 0);
11638 tree c = TREE_OPERAND (rootarg, 1);
11640 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11642 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11643 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11647 /* Optimize x/expN(y) into x*expN(-y). */
11648 if (BUILTIN_EXPONENT_P (fcode1))
11650 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11651 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11652 arg1 = build_call_expr_loc (loc,
11653 expfn, 1,
11654 fold_convert_loc (loc, type, arg));
11655 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11658 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11659 if (fcode1 == BUILT_IN_POW
11660 || fcode1 == BUILT_IN_POWF
11661 || fcode1 == BUILT_IN_POWL)
11663 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11664 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11665 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11666 tree neg11 = fold_convert_loc (loc, type,
11667 negate_expr (arg11));
11668 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11669 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11674 case TRUNC_DIV_EXPR:
11675 /* Optimize (X & (-A)) / A where A is a power of 2,
11676 to X >> log2(A) */
11677 if (TREE_CODE (arg0) == BIT_AND_EXPR
11678 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11679 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11681 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11682 arg1, TREE_OPERAND (arg0, 1));
11683 if (sum && integer_zerop (sum)) {
11684 unsigned long pow2;
11686 if (TREE_INT_CST_LOW (arg1))
11687 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11688 else
11689 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11690 + HOST_BITS_PER_WIDE_INT;
11692 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11693 TREE_OPERAND (arg0, 0),
11694 build_int_cst (integer_type_node, pow2));
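/* For instance, with a hypothetical signed int x, (x & -16) / 16
   matches the pattern above (16 is a power of two and -16 clears the
   low four bits, so the masked value is an exact multiple of 16) and
   folds to x >> 4.  */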
11698 /* Fall thru */
11700 case FLOOR_DIV_EXPR:
11701 /* Simplify A / (B << N) where A and B are positive and B is
11702 a power of 2, to A >> (N + log2(B)). */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (TREE_TYPE (sh_cnt),
                                                       pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, but it's not clear if
         they do after the last round of changes to the DIV code in
         expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
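      /* E.g. for unsigned or known-nonnegative x, x % 16 can become
         x & 15 (illustrative example).  */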
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */
    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
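      /* E.g. (a << 3) << 5 can become a << 8, provided the combined
         count is still within the precision (illustrative example).  */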
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type,
                                             build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (type, low));
        }
      /* Transform (x >> c) << c into x & (-1 << c), or transform
         (x << c) >> c into x & ((unsigned) -1 >> c) for unsigned types.  */
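      /* E.g. for a 32-bit unsigned x, (x << 8) >> 8 can become
         x & 0x00ffffff (illustrative example).  */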
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
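      /* E.g. a 32-bit rotate left by 3 can become a rotate right by 29
         (illustrative example).  */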
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
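      /* E.g. (p && !q) || (!p && q) can become p ^ q, removing a branch
         (illustrative example).  */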
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                   TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2).  */
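      /* E.g. (x ^ 5) == 3 can become x == 6, since 5 ^ 3 is 6
         (illustrative example).  */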
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
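      /* E.g. x + y == x can become y == 0, with x still evaluated for
         its side effects (illustrative example).  */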
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg0, 1), arg1);
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then
                 ((X >> C1) & C2) != 0 can be rewritten as
                 (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
                                         arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                         arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type, arg000,
                                        build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR
                                             ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                       arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
                 == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
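      /* E.g. for a 32-bit signed x, (x >> 31) != 0 can become x < 0
         (illustrative example).  */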
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_int_cst (itype, 0));
            }
        }
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_int_cst (TREE_TYPE (arg0), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_int_cst (TREE_TYPE (arg0), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1),
                                                 arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, arg00,
                                  build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg10),
                                                     arg01),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg11),
                                                     arg01),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg10),
                                                     arg00),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg11),
                                                     arg00),
                                    build_int_cst (itype, 0));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));
          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
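          /* E.g. (x ^ 5) == (y ^ 3) can become (x ^ 6) == y
             (illustrative example).  */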
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                            code == LE_EXPR
                                            ? GE_EXPR : LT_EXPR,
                                            type,
                                            fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
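      /* E.g. for unsigned x, x < (1u << y) can become (x >> y) == 0
         (illustrative example).  */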
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_int_cst (TREE_TYPE (arg0), 0));
        }

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }
      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
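      /* E.g. with floats f1 and f2, (double) f1 > (double) f2 can become
         f1 > f2 computed in float (illustrative example).  */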
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
          && TREE_CODE (arg1) == IMAGPART_EXPR
          && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
          && operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree_without_duplicates (&st, contains_label_1, NULL)
          != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);
  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
                                    field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an
         lvalue, so all simple results must be passed through
         pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away the operand that contains a label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }
13438 /* If the second operand is simpler than the third, swap them
13439 since that produces better jump optimization results. */
13440 if (truth_value_p (TREE_CODE (arg0))
13441 && tree_swap_operands_p (op1, op2, false))
13443 location_t loc0 = expr_location_or (arg0, loc);
13444 /* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do anything. */
13447 tem = fold_truth_not_expr (loc0, arg0);
13449 return fold_build3_loc (loc, code, type, tem, op2, op1);
13452 /* Convert A ? 1 : 0 to simply A. */
13453 if (integer_onep (op1)
13454 && integer_zerop (op2)
13455 /* If we try to convert OP0 to our type, the
13456 call to fold will try to move the conversion inside
13457 a COND, which will recurse. In that case, the COND_EXPR
13458 is probably the best choice, so leave it alone. */
13459 && type == TREE_TYPE (arg0))
13460 return pedantic_non_lvalue_loc (loc, arg0);
13462 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13463 over COND_EXPR in cases such as floating point comparisons. */
13464 if (integer_zerop (op1)
13465 && integer_onep (op2)
13466 && truth_value_p (TREE_CODE (arg0)))
13467 return pedantic_non_lvalue_loc (loc,
13468 fold_convert_loc (loc, type,
13469 invert_truthvalue_loc (loc,
13472 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13473 if (TREE_CODE (arg0) == LT_EXPR
13474 && integer_zerop (TREE_OPERAND (arg0, 1))
13475 && integer_zerop (op2)
13476 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13478 /* sign_bit_p only checks ARG1 bits within A's precision.
13479 If <sign bit of A> has wider type than A, bits outside
13480 of A's precision in <sign bit of A> need to be checked.
13481 If they are all 0, this optimization needs to be done
in unsigned A's type; if they are all 1, in signed A's type;
otherwise this can't be done. */
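/* For example, with a 32-bit A the sign-bit constant is 0x80000000; if
   it appears zero-extended in a 64-bit type (0x0000000080000000), the
   bits beyond A's precision are all 0 and the AND is done in unsigned
   A's type, while the sign-extended form (0xffffffff80000000) has all
   1s there and uses signed A's type. (Illustrative widths.) */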
13484 if (TYPE_PRECISION (TREE_TYPE (tem))
13485 < TYPE_PRECISION (TREE_TYPE (arg1))
13486 && TYPE_PRECISION (TREE_TYPE (tem))
13487 < TYPE_PRECISION (type))
13489 unsigned HOST_WIDE_INT mask_lo;
13490 HOST_WIDE_INT mask_hi;
13491 int inner_width, outer_width;
13494 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13495 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13496 if (outer_width > TYPE_PRECISION (type))
13497 outer_width = TYPE_PRECISION (type);
13499 if (outer_width > HOST_BITS_PER_WIDE_INT)
13501 mask_hi = ((unsigned HOST_WIDE_INT) -1
13502 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13508 mask_lo = ((unsigned HOST_WIDE_INT) -1
13509 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13511 if (inner_width > HOST_BITS_PER_WIDE_INT)
13513 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13514 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13518 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13519 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13521 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13522 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13524 tem_type = signed_type_for (TREE_TYPE (tem));
13525 tem = fold_convert_loc (loc, tem_type, tem);
13527 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13528 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13530 tem_type = unsigned_type_for (TREE_TYPE (tem));
13531 tem = fold_convert_loc (loc, tem_type, tem);
13539 fold_convert_loc (loc, type,
13540 fold_build2_loc (loc, BIT_AND_EXPR,
13541 TREE_TYPE (tem), tem,
13542 fold_convert_loc (loc,
13547 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13548 already handled above. */
13549 if (TREE_CODE (arg0) == BIT_AND_EXPR
13550 && integer_onep (TREE_OPERAND (arg0, 1))
13551 && integer_zerop (op2)
13552 && integer_pow2p (arg1))
13554 tree tem = TREE_OPERAND (arg0, 0);
13556 if (TREE_CODE (tem) == RSHIFT_EXPR
13557 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13558 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13559 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13560 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13561 TREE_OPERAND (tem, 0), arg1);
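/* E.g. "((a >> 3) & 1) ? 8 : 0" becomes "a & 8", since
   tree_log2 (8) == 3 matches the shift count. */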
13564 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13565 is probably obsolete because the first operand should be a
13566 truth value (that's why we have the two cases above), but let's
13567 leave it in until we can confirm this for all front-ends. */
13568 if (integer_zerop (op2)
13569 && TREE_CODE (arg0) == NE_EXPR
13570 && integer_zerop (TREE_OPERAND (arg0, 1))
13571 && integer_pow2p (arg1)
13572 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13573 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13574 arg1, OEP_ONLY_CONST))
13575 return pedantic_non_lvalue_loc (loc,
13576 fold_convert_loc (loc, type,
13577 TREE_OPERAND (arg0, 0)));
13579 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13580 if (integer_zerop (op2)
13581 && truth_value_p (TREE_CODE (arg0))
13582 && truth_value_p (TREE_CODE (arg1)))
13583 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13584 fold_convert_loc (loc, type, arg0),
13587 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13588 if (integer_onep (op2)
13589 && truth_value_p (TREE_CODE (arg0))
13590 && truth_value_p (TREE_CODE (arg1)))
13592 location_t loc0 = expr_location_or (arg0, loc);
/* Only perform the transformation if ARG0 is easily inverted. */
13594 tem = fold_truth_not_expr (loc0, arg0);
13596 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13597 fold_convert_loc (loc, type, tem),
13601 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13602 if (integer_zerop (arg1)
13603 && truth_value_p (TREE_CODE (arg0))
13604 && truth_value_p (TREE_CODE (op2)))
13606 location_t loc0 = expr_location_or (arg0, loc);
/* Only perform the transformation if ARG0 is easily inverted. */
13608 tem = fold_truth_not_expr (loc0, arg0);
13610 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13611 fold_convert_loc (loc, type, tem),
13615 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13616 if (integer_onep (arg1)
13617 && truth_value_p (TREE_CODE (arg0))
13618 && truth_value_p (TREE_CODE (op2)))
13619 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13620 fold_convert_loc (loc, type, arg0),
13626 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13627 of fold_ternary on them. */
13628 gcc_unreachable ();
13630 case BIT_FIELD_REF:
13631 if ((TREE_CODE (arg0) == VECTOR_CST
13632 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13633 && type == TREE_TYPE (TREE_TYPE (arg0)))
13635 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13636 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13639 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13640 && (idx % width) == 0
13641 && (idx = idx / width)
13642 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13644 tree elements = NULL_TREE;
13646 if (TREE_CODE (arg0) == VECTOR_CST)
13647 elements = TREE_VECTOR_CST_ELTS (arg0);
13650 unsigned HOST_WIDE_INT idx;
13653 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13654 elements = tree_cons (NULL_TREE, value, elements);
13656 while (idx-- > 0 && elements)
13657 elements = TREE_CHAIN (elements);
13659 return TREE_VALUE (elements);
13661 return build_zero_cst (type);
13665 /* A bit-field-ref that referenced the full argument can be stripped. */
13666 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13667 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13668 && integer_zerop (op2))
13669 return fold_convert_loc (loc, type, arg0);
13674 /* For integers we can decompose the FMA if possible. */
13675 if (TREE_CODE (arg0) == INTEGER_CST
13676 && TREE_CODE (arg1) == INTEGER_CST)
13677 return fold_build2_loc (loc, PLUS_EXPR, type,
13678 const_binop (MULT_EXPR, arg0, arg1), arg2);
13679 if (integer_zerop (arg2))
13680 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13682 return fold_fma (loc, type, arg0, arg1, arg2);
13686 } /* switch (code) */
13689 /* Perform constant folding and related simplification of EXPR.
13690 The related simplifications include x*1 => x, x*0 => 0, etc.,
13691 and application of the associative law.
13692 NOP_EXPR conversions may be removed freely (as long as we
13693 are careful not to change the type of the overall expression).
13694 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13695 but we can constant-fold them if they have constant operands. */
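/* A minimal, hypothetical use from a front end:

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 2),
                              build_int_cst (integer_type_node, 3)));

   which yields the INTEGER_CST 5 rather than a PLUS_EXPR node. */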
13697 #ifdef ENABLE_FOLD_CHECKING
13698 # define fold(x) fold_1 (x)
13699 static tree fold_1 (tree);
13705 const tree t = expr;
13706 enum tree_code code = TREE_CODE (t);
13707 enum tree_code_class kind = TREE_CODE_CLASS (code);
13709 location_t loc = EXPR_LOCATION (expr);
13711 /* Return right away if a constant. */
13712 if (kind == tcc_constant)
13715 /* CALL_EXPR-like objects with variable numbers of operands are
13716 treated specially. */
13717 if (kind == tcc_vl_exp)
13719 if (code == CALL_EXPR)
13721 tem = fold_call_expr (loc, expr, false);
13722 return tem ? tem : expr;
13727 if (IS_EXPR_CODE_CLASS (kind))
13729 tree type = TREE_TYPE (t);
13730 tree op0, op1, op2;
13732 switch (TREE_CODE_LENGTH (code))
13735 op0 = TREE_OPERAND (t, 0);
13736 tem = fold_unary_loc (loc, code, type, op0);
13737 return tem ? tem : expr;
13739 op0 = TREE_OPERAND (t, 0);
13740 op1 = TREE_OPERAND (t, 1);
13741 tem = fold_binary_loc (loc, code, type, op0, op1);
13742 return tem ? tem : expr;
13744 op0 = TREE_OPERAND (t, 0);
13745 op1 = TREE_OPERAND (t, 1);
13746 op2 = TREE_OPERAND (t, 2);
13747 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13748 return tem ? tem : expr;
13758 tree op0 = TREE_OPERAND (t, 0);
13759 tree op1 = TREE_OPERAND (t, 1);
13761 if (TREE_CODE (op1) == INTEGER_CST
13762 && TREE_CODE (op0) == CONSTRUCTOR
13763 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13765 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13766 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13767 unsigned HOST_WIDE_INT begin = 0;
13769 /* Find a matching index by means of a binary search. */
13770 while (begin != end)
13772 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13773 tree index = VEC_index (constructor_elt, elts, middle)->index;
13775 if (TREE_CODE (index) == INTEGER_CST
13776 && tree_int_cst_lt (index, op1))
13777 begin = middle + 1;
13778 else if (TREE_CODE (index) == INTEGER_CST
13779 && tree_int_cst_lt (op1, index))
13781 else if (TREE_CODE (index) == RANGE_EXPR
13782 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13783 begin = middle + 1;
13784 else if (TREE_CODE (index) == RANGE_EXPR
13785 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13788 return VEC_index (constructor_elt, elts, middle)->value;
13796 return fold (DECL_INITIAL (t));
13800 } /* switch (code) */
13803 #ifdef ENABLE_FOLD_CHECKING
13806 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13807 static void fold_check_failed (const_tree, const_tree);
13808 void print_fold_checksum (const_tree);
13810 /* When --enable-checking=fold, compute a digest of expr before
and after the actual fold call, to verify that fold did not accidentally
change the original expr. */
13818 struct md5_ctx ctx;
13819 unsigned char checksum_before[16], checksum_after[16];
13822 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13823 md5_init_ctx (&ctx);
13824 fold_checksum_tree (expr, &ctx, ht);
13825 md5_finish_ctx (&ctx, checksum_before);
13828 ret = fold_1 (expr);
13830 md5_init_ctx (&ctx);
13831 fold_checksum_tree (expr, &ctx, ht);
13832 md5_finish_ctx (&ctx, checksum_after);
13835 if (memcmp (checksum_before, checksum_after, 16))
13836 fold_check_failed (expr, ret);
13842 print_fold_checksum (const_tree expr)
13844 struct md5_ctx ctx;
13845 unsigned char checksum[16], cnt;
13848 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13849 md5_init_ctx (&ctx);
13850 fold_checksum_tree (expr, &ctx, ht);
13851 md5_finish_ctx (&ctx, checksum);
13853 for (cnt = 0; cnt < 16; ++cnt)
13854 fprintf (stderr, "%02x", checksum[cnt]);
13855 putc ('\n', stderr);
13859 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13861 internal_error ("fold check: original tree changed by fold");
13865 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13868 enum tree_code code;
13869 union tree_node buf;
13874 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13875 <= sizeof (struct tree_function_decl))
13876 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13879 slot = (void **) htab_find_slot (ht, expr, INSERT);
13882 *slot = CONST_CAST_TREE (expr);
13883 code = TREE_CODE (expr);
13884 if (TREE_CODE_CLASS (code) == tcc_declaration
13885 && DECL_ASSEMBLER_NAME_SET_P (expr))
13887 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13888 memcpy ((char *) &buf, expr, tree_size (expr));
13889 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13890 expr = (tree) &buf;
13892 else if (TREE_CODE_CLASS (code) == tcc_type
13893 && (TYPE_POINTER_TO (expr)
13894 || TYPE_REFERENCE_TO (expr)
13895 || TYPE_CACHED_VALUES_P (expr)
13896 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13897 || TYPE_NEXT_VARIANT (expr)))
13899 /* Allow these fields to be modified. */
13901 memcpy ((char *) &buf, expr, tree_size (expr));
13902 expr = tmp = (tree) &buf;
13903 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13904 TYPE_POINTER_TO (tmp) = NULL;
13905 TYPE_REFERENCE_TO (tmp) = NULL;
13906 TYPE_NEXT_VARIANT (tmp) = NULL;
13907 if (TYPE_CACHED_VALUES_P (tmp))
13909 TYPE_CACHED_VALUES_P (tmp) = 0;
13910 TYPE_CACHED_VALUES (tmp) = NULL;
13913 md5_process_bytes (expr, tree_size (expr), ctx);
13914 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13915 if (TREE_CODE_CLASS (code) != tcc_type
13916 && TREE_CODE_CLASS (code) != tcc_declaration
13917 && code != TREE_LIST
13918 && code != SSA_NAME
13919 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13920 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13921 switch (TREE_CODE_CLASS (code))
13927 md5_process_bytes (TREE_STRING_POINTER (expr),
13928 TREE_STRING_LENGTH (expr), ctx);
13931 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13932 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13935 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13941 case tcc_exceptional:
13945 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13946 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13947 expr = TREE_CHAIN (expr);
13948 goto recursive_label;
13951 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13952 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13958 case tcc_expression:
13959 case tcc_reference:
13960 case tcc_comparison:
13963 case tcc_statement:
13965 len = TREE_OPERAND_LENGTH (expr);
13966 for (i = 0; i < len; ++i)
13967 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13969 case tcc_declaration:
13970 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13971 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13972 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13974 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13975 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13976 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13977 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13978 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13980 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13981 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13983 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13985 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13986 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13987 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13991 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13992 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13993 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13994 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13995 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13996 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13997 if (INTEGRAL_TYPE_P (expr)
13998 || SCALAR_FLOAT_TYPE_P (expr))
14000 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14001 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14003 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14004 if (TREE_CODE (expr) == RECORD_TYPE
14005 || TREE_CODE (expr) == UNION_TYPE
14006 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14007 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14008 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14015 /* Helper function for outputting the checksum of a tree T. When
14016 debugging with gdb, you can "define mynext" to be "next" followed
14017 by "call debug_fold_checksum (op0)", then just trace down till the
14020 DEBUG_FUNCTION void
14021 debug_fold_checksum (const_tree t)
14024 unsigned char checksum[16];
14025 struct md5_ctx ctx;
14026 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14028 md5_init_ctx (&ctx);
14029 fold_checksum_tree (t, &ctx, ht);
14030 md5_finish_ctx (&ctx, checksum);
14033 for (i = 0; i < 16; i++)
14034 fprintf (stderr, "%d ", checksum[i]);
14036 fprintf (stderr, "\n");
14041 /* Fold a unary tree expression with code CODE of type TYPE with an
14042 operand OP0. LOC is the location of the resulting expression.
14043 Return a folded expression if successful. Otherwise, return a tree
14044 expression with code CODE of type TYPE with an operand OP0. */
14047 fold_build1_stat_loc (location_t loc,
14048 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14051 #ifdef ENABLE_FOLD_CHECKING
14052 unsigned char checksum_before[16], checksum_after[16];
14053 struct md5_ctx ctx;
14056 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14057 md5_init_ctx (&ctx);
14058 fold_checksum_tree (op0, &ctx, ht);
14059 md5_finish_ctx (&ctx, checksum_before);
14063 tem = fold_unary_loc (loc, code, type, op0);
14065 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14067 #ifdef ENABLE_FOLD_CHECKING
14068 md5_init_ctx (&ctx);
14069 fold_checksum_tree (op0, &ctx, ht);
14070 md5_finish_ctx (&ctx, checksum_after);
14073 if (memcmp (checksum_before, checksum_after, 16))
14074 fold_check_failed (op0, tem);
14079 /* Fold a binary tree expression with code CODE of type TYPE with
14080 operands OP0 and OP1. LOC is the location of the resulting
14081 expression. Return a folded expression if successful. Otherwise,
14082 return a tree expression with code CODE of type TYPE with operands
14086 fold_build2_stat_loc (location_t loc,
14087 enum tree_code code, tree type, tree op0, tree op1
14091 #ifdef ENABLE_FOLD_CHECKING
14092 unsigned char checksum_before_op0[16],
14093 checksum_before_op1[16],
14094 checksum_after_op0[16],
14095 checksum_after_op1[16];
14096 struct md5_ctx ctx;
14099 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14100 md5_init_ctx (&ctx);
14101 fold_checksum_tree (op0, &ctx, ht);
14102 md5_finish_ctx (&ctx, checksum_before_op0);
14105 md5_init_ctx (&ctx);
14106 fold_checksum_tree (op1, &ctx, ht);
14107 md5_finish_ctx (&ctx, checksum_before_op1);
14111 tem = fold_binary_loc (loc, code, type, op0, op1);
14113 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14115 #ifdef ENABLE_FOLD_CHECKING
14116 md5_init_ctx (&ctx);
14117 fold_checksum_tree (op0, &ctx, ht);
14118 md5_finish_ctx (&ctx, checksum_after_op0);
14121 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14122 fold_check_failed (op0, tem);
14124 md5_init_ctx (&ctx);
14125 fold_checksum_tree (op1, &ctx, ht);
14126 md5_finish_ctx (&ctx, checksum_after_op1);
14129 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14130 fold_check_failed (op1, tem);
14135 /* Fold a ternary tree expression with code CODE of type TYPE with
14136 operands OP0, OP1, and OP2. Return a folded expression if
14137 successful. Otherwise, return a tree expression with code CODE of
14138 type TYPE with operands OP0, OP1, and OP2. */
14141 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14142 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14145 #ifdef ENABLE_FOLD_CHECKING
14146 unsigned char checksum_before_op0[16],
14147 checksum_before_op1[16],
14148 checksum_before_op2[16],
14149 checksum_after_op0[16],
14150 checksum_after_op1[16],
14151 checksum_after_op2[16];
14152 struct md5_ctx ctx;
14155 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14156 md5_init_ctx (&ctx);
14157 fold_checksum_tree (op0, &ctx, ht);
14158 md5_finish_ctx (&ctx, checksum_before_op0);
14161 md5_init_ctx (&ctx);
14162 fold_checksum_tree (op1, &ctx, ht);
14163 md5_finish_ctx (&ctx, checksum_before_op1);
14166 md5_init_ctx (&ctx);
14167 fold_checksum_tree (op2, &ctx, ht);
14168 md5_finish_ctx (&ctx, checksum_before_op2);
14172 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14173 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14175 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14177 #ifdef ENABLE_FOLD_CHECKING
14178 md5_init_ctx (&ctx);
14179 fold_checksum_tree (op0, &ctx, ht);
14180 md5_finish_ctx (&ctx, checksum_after_op0);
14183 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14184 fold_check_failed (op0, tem);
14186 md5_init_ctx (&ctx);
14187 fold_checksum_tree (op1, &ctx, ht);
14188 md5_finish_ctx (&ctx, checksum_after_op1);
14191 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14192 fold_check_failed (op1, tem);
14194 md5_init_ctx (&ctx);
14195 fold_checksum_tree (op2, &ctx, ht);
14196 md5_finish_ctx (&ctx, checksum_after_op2);
14199 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14200 fold_check_failed (op2, tem);
/* Fold a CALL_EXPR expression of type TYPE with operand FN, the NARGS
arguments in ARGARRAY, and a null static chain.
14207 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14208 of type TYPE from the given operands as constructed by build_call_array. */
14211 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14212 int nargs, tree *argarray)
14215 #ifdef ENABLE_FOLD_CHECKING
14216 unsigned char checksum_before_fn[16],
14217 checksum_before_arglist[16],
14218 checksum_after_fn[16],
14219 checksum_after_arglist[16];
14220 struct md5_ctx ctx;
14224 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14225 md5_init_ctx (&ctx);
14226 fold_checksum_tree (fn, &ctx, ht);
14227 md5_finish_ctx (&ctx, checksum_before_fn);
14230 md5_init_ctx (&ctx);
14231 for (i = 0; i < nargs; i++)
14232 fold_checksum_tree (argarray[i], &ctx, ht);
14233 md5_finish_ctx (&ctx, checksum_before_arglist);
14237 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14239 #ifdef ENABLE_FOLD_CHECKING
14240 md5_init_ctx (&ctx);
14241 fold_checksum_tree (fn, &ctx, ht);
14242 md5_finish_ctx (&ctx, checksum_after_fn);
14245 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14246 fold_check_failed (fn, tem);
14248 md5_init_ctx (&ctx);
14249 for (i = 0; i < nargs; i++)
14250 fold_checksum_tree (argarray[i], &ctx, ht);
14251 md5_finish_ctx (&ctx, checksum_after_arglist);
14254 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14255 fold_check_failed (NULL_TREE, tem);
14260 /* Perform constant folding and related simplification of initializer
14261 expression EXPR. These behave identically to "fold_buildN" but ignore
14262 potential run-time traps and exceptions that fold must preserve. */
14264 #define START_FOLD_INIT \
14265 int saved_signaling_nans = flag_signaling_nans;\
14266 int saved_trapping_math = flag_trapping_math;\
14267 int saved_rounding_math = flag_rounding_math;\
14268 int saved_trapv = flag_trapv;\
14269 int saved_folding_initializer = folding_initializer;\
14270 flag_signaling_nans = 0;\
14271 flag_trapping_math = 0;\
14272 flag_rounding_math = 0;\
14274 folding_initializer = 1;
14276 #define END_FOLD_INIT \
14277 flag_signaling_nans = saved_signaling_nans;\
14278 flag_trapping_math = saved_trapping_math;\
14279 flag_rounding_math = saved_rounding_math;\
14280 flag_trapv = saved_trapv;\
14281 folding_initializer = saved_folding_initializer;
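/* Each fold_buildN_initializer_loc wrapper below expands to the pattern

     START_FOLD_INIT;
     result = fold_buildN_loc (...);
     END_FOLD_INIT;
     return result;

   so that folding runs as if -fno-signaling-nans, -fno-trapping-math,
   -fno-rounding-math and -fno-trapv were in effect for the duration of
   the call. */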
14284 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14285 tree type, tree op)
14290 result = fold_build1_loc (loc, code, type, op);
14297 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14298 tree type, tree op0, tree op1)
14303 result = fold_build2_loc (loc, code, type, op0, op1);
14310 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14311 tree type, tree op0, tree op1, tree op2)
14316 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14323 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14324 int nargs, tree *argarray)
14329 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14335 #undef START_FOLD_INIT
14336 #undef END_FOLD_INIT
/* Determine whether the first argument is a multiple of the second argument.
Return 0 if it is not, or if we cannot easily determine that it is.
14341 An example of the sort of thing we care about (at this point; this routine
14342 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14343 fold cases do now) is discovering that
14345 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
is a multiple of

     SAVE_EXPR (J * 8)

when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14353 This code also handles discovering that
14355 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14357 is a multiple of 8 so we don't have to worry about dealing with a
14358 possible remainder.
14360 Note that we *look* inside a SAVE_EXPR only to determine how it was
14361 calculated; it is not safe for fold to do much of anything else with the
14362 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14363 at run time. For example, the latter example above *cannot* be implemented
14364 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14365 evaluation time of the original SAVE_EXPR is not necessarily the same at
14366 the time the new expression is evaluated. The only optimization of this
14367 sort that would be valid is changing
14369 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14373 SAVE_EXPR (I) * SAVE_EXPR (J)
14375 (where the same SAVE_EXPR (J) is used in the original and the
14376 transformed version). */
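/* An illustrative call (I and the constants are hypothetical operands):

     multiple_of_p (sizetype,
                    size_binop (MULT_EXPR, i, size_int (8)),
                    size_int (4))

   returns 1: for a MULT_EXPR it is enough that one factor -- here the
   constant 8 -- is a multiple of BOTTOM. */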
14379 multiple_of_p (tree type, const_tree top, const_tree bottom)
14381 if (operand_equal_p (top, bottom, 0))
14384 if (TREE_CODE (type) != INTEGER_TYPE)
14387 switch (TREE_CODE (top))
14390 /* Bitwise and provides a power of two multiple. If the mask is
14391 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14392 if (!integer_pow2p (bottom))
14397 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14398 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14402 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14403 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14406 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14410 op1 = TREE_OPERAND (top, 1);
14411 /* const_binop may not detect overflow correctly,
14412 so check for it explicitly here. */
14413 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14414 > TREE_INT_CST_LOW (op1)
14415 && TREE_INT_CST_HIGH (op1) == 0
14416 && 0 != (t1 = fold_convert (type,
14417 const_binop (LSHIFT_EXPR,
14420 && !TREE_OVERFLOW (t1))
14421 return multiple_of_p (type, t1, bottom);
14426 /* Can't handle conversions from non-integral or wider integral type. */
14427 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14428 || (TYPE_PRECISION (type)
14429 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
/* ... fall through ... */
14435 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14438 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14439 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14442 if (TREE_CODE (bottom) != INTEGER_CST
14443 || integer_zerop (bottom)
14444 || (TYPE_UNSIGNED (type)
14445 && (tree_int_cst_sgn (top) < 0
14446 || tree_int_cst_sgn (bottom) < 0)))
14448 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14456 /* Return true if CODE or TYPE is known to be non-negative. */
14459 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14461 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14462 && truth_value_p (code))
14463 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
have a signed:1 type (whose values are -1 and 0). */
14469 /* Return true if (CODE OP0) is known to be non-negative. If the return
14470 value is based on the assumption that signed overflow is undefined,
14471 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14472 *STRICT_OVERFLOW_P. */
14475 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14476 bool *strict_overflow_p)
14478 if (TYPE_UNSIGNED (type))
14484 /* We can't return 1 if flag_wrapv is set because
14485 ABS_EXPR<INT_MIN> = INT_MIN. */
14486 if (!INTEGRAL_TYPE_P (type))
14488 if (TYPE_OVERFLOW_UNDEFINED (type))
14490 *strict_overflow_p = true;
14495 case NON_LVALUE_EXPR:
14497 case FIX_TRUNC_EXPR:
14498 return tree_expr_nonnegative_warnv_p (op0,
14499 strict_overflow_p);
14503 tree inner_type = TREE_TYPE (op0);
14504 tree outer_type = type;
14506 if (TREE_CODE (outer_type) == REAL_TYPE)
14508 if (TREE_CODE (inner_type) == REAL_TYPE)
14509 return tree_expr_nonnegative_warnv_p (op0,
14510 strict_overflow_p);
14511 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14513 if (TYPE_UNSIGNED (inner_type))
14515 return tree_expr_nonnegative_warnv_p (op0,
14516 strict_overflow_p);
14519 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14521 if (TREE_CODE (inner_type) == REAL_TYPE)
14522 return tree_expr_nonnegative_warnv_p (op0,
14523 strict_overflow_p);
14524 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14525 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14526 && TYPE_UNSIGNED (inner_type);
14532 return tree_simple_nonnegative_warnv_p (code, type);
/* We don't know the sign of `t', so be conservative and return false. */
14539 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14540 value is based on the assumption that signed overflow is undefined,
14541 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14542 *STRICT_OVERFLOW_P. */
14545 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14546 tree op1, bool *strict_overflow_p)
14548 if (TYPE_UNSIGNED (type))
14553 case POINTER_PLUS_EXPR:
14555 if (FLOAT_TYPE_P (type))
14556 return (tree_expr_nonnegative_warnv_p (op0,
14558 && tree_expr_nonnegative_warnv_p (op1,
14559 strict_overflow_p));
14561 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14562 both unsigned and at least 2 bits shorter than the result. */
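/* E.g. two zero-extended 8-bit values added in a 32-bit type take at
   most 9 bits (255 + 255 = 510 < 512), so the 32-bit sign bit stays
   clear. */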
14563 if (TREE_CODE (type) == INTEGER_TYPE
14564 && TREE_CODE (op0) == NOP_EXPR
14565 && TREE_CODE (op1) == NOP_EXPR)
14567 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14568 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14569 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14570 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14572 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14573 TYPE_PRECISION (inner2)) + 1;
14574 return prec < TYPE_PRECISION (type);
14580 if (FLOAT_TYPE_P (type))
14582 /* x * x for floating point x is always non-negative. */
14583 if (operand_equal_p (op0, op1, 0))
14585 return (tree_expr_nonnegative_warnv_p (op0,
14587 && tree_expr_nonnegative_warnv_p (op1,
14588 strict_overflow_p));
14591 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
both unsigned and the sum of their precisions is less than that of the result. */
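/* Likewise, e.g. an 8-bit value times a 16-bit value needs at most
   24 bits, so the product computed in a 32-bit type cannot set the
   sign bit. */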
14593 if (TREE_CODE (type) == INTEGER_TYPE
14594 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14595 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14597 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14598 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14600 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14601 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14604 bool unsigned0 = TYPE_UNSIGNED (inner0);
14605 bool unsigned1 = TYPE_UNSIGNED (inner1);
14607 if (TREE_CODE (op0) == INTEGER_CST)
14608 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14610 if (TREE_CODE (op1) == INTEGER_CST)
14611 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14613 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14614 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14616 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14617 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14618 : TYPE_PRECISION (inner0);
14620 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14621 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14622 : TYPE_PRECISION (inner1);
14624 return precision0 + precision1 < TYPE_PRECISION (type);
14631 return (tree_expr_nonnegative_warnv_p (op0,
14633 || tree_expr_nonnegative_warnv_p (op1,
14634 strict_overflow_p));
14640 case TRUNC_DIV_EXPR:
14641 case CEIL_DIV_EXPR:
14642 case FLOOR_DIV_EXPR:
14643 case ROUND_DIV_EXPR:
14644 return (tree_expr_nonnegative_warnv_p (op0,
14646 && tree_expr_nonnegative_warnv_p (op1,
14647 strict_overflow_p));
14649 case TRUNC_MOD_EXPR:
14650 case CEIL_MOD_EXPR:
14651 case FLOOR_MOD_EXPR:
14652 case ROUND_MOD_EXPR:
14653 return tree_expr_nonnegative_warnv_p (op0,
14654 strict_overflow_p);
14656 return tree_simple_nonnegative_warnv_p (code, type);
/* We don't know the sign of `t', so be conservative and return false. */
14663 /* Return true if T is known to be non-negative. If the return
14664 value is based on the assumption that signed overflow is undefined,
14665 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14666 *STRICT_OVERFLOW_P. */
14669 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14671 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14674 switch (TREE_CODE (t))
14677 return tree_int_cst_sgn (t) >= 0;
14680 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14683 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14686 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14688 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14689 strict_overflow_p));
14691 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
/* We don't know the sign of `t', so be conservative and return false. */
14698 /* Return true if T is known to be non-negative. If the return
14699 value is based on the assumption that signed overflow is undefined,
14700 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14701 *STRICT_OVERFLOW_P. */
14704 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14705 tree arg0, tree arg1, bool *strict_overflow_p)
14707 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14708 switch (DECL_FUNCTION_CODE (fndecl))
14710 CASE_FLT_FN (BUILT_IN_ACOS):
14711 CASE_FLT_FN (BUILT_IN_ACOSH):
14712 CASE_FLT_FN (BUILT_IN_CABS):
14713 CASE_FLT_FN (BUILT_IN_COSH):
14714 CASE_FLT_FN (BUILT_IN_ERFC):
14715 CASE_FLT_FN (BUILT_IN_EXP):
14716 CASE_FLT_FN (BUILT_IN_EXP10):
14717 CASE_FLT_FN (BUILT_IN_EXP2):
14718 CASE_FLT_FN (BUILT_IN_FABS):
14719 CASE_FLT_FN (BUILT_IN_FDIM):
14720 CASE_FLT_FN (BUILT_IN_HYPOT):
14721 CASE_FLT_FN (BUILT_IN_POW10):
14722 CASE_INT_FN (BUILT_IN_FFS):
14723 CASE_INT_FN (BUILT_IN_PARITY):
14724 CASE_INT_FN (BUILT_IN_POPCOUNT):
14725 case BUILT_IN_BSWAP32:
14726 case BUILT_IN_BSWAP64:
14730 CASE_FLT_FN (BUILT_IN_SQRT):
14731 /* sqrt(-0.0) is -0.0. */
14732 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14734 return tree_expr_nonnegative_warnv_p (arg0,
14735 strict_overflow_p);
14737 CASE_FLT_FN (BUILT_IN_ASINH):
14738 CASE_FLT_FN (BUILT_IN_ATAN):
14739 CASE_FLT_FN (BUILT_IN_ATANH):
14740 CASE_FLT_FN (BUILT_IN_CBRT):
14741 CASE_FLT_FN (BUILT_IN_CEIL):
14742 CASE_FLT_FN (BUILT_IN_ERF):
14743 CASE_FLT_FN (BUILT_IN_EXPM1):
14744 CASE_FLT_FN (BUILT_IN_FLOOR):
14745 CASE_FLT_FN (BUILT_IN_FMOD):
14746 CASE_FLT_FN (BUILT_IN_FREXP):
14747 CASE_FLT_FN (BUILT_IN_ICEIL):
14748 CASE_FLT_FN (BUILT_IN_IFLOOR):
14749 CASE_FLT_FN (BUILT_IN_IRINT):
14750 CASE_FLT_FN (BUILT_IN_IROUND):
14751 CASE_FLT_FN (BUILT_IN_LCEIL):
14752 CASE_FLT_FN (BUILT_IN_LDEXP):
14753 CASE_FLT_FN (BUILT_IN_LFLOOR):
14754 CASE_FLT_FN (BUILT_IN_LLCEIL):
14755 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14756 CASE_FLT_FN (BUILT_IN_LLRINT):
14757 CASE_FLT_FN (BUILT_IN_LLROUND):
14758 CASE_FLT_FN (BUILT_IN_LRINT):
14759 CASE_FLT_FN (BUILT_IN_LROUND):
14760 CASE_FLT_FN (BUILT_IN_MODF):
14761 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14762 CASE_FLT_FN (BUILT_IN_RINT):
14763 CASE_FLT_FN (BUILT_IN_ROUND):
14764 CASE_FLT_FN (BUILT_IN_SCALB):
14765 CASE_FLT_FN (BUILT_IN_SCALBLN):
14766 CASE_FLT_FN (BUILT_IN_SCALBN):
14767 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14768 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14769 CASE_FLT_FN (BUILT_IN_SINH):
14770 CASE_FLT_FN (BUILT_IN_TANH):
14771 CASE_FLT_FN (BUILT_IN_TRUNC):
14772 /* True if the 1st argument is nonnegative. */
14773 return tree_expr_nonnegative_warnv_p (arg0,
14774 strict_overflow_p);
14776 CASE_FLT_FN (BUILT_IN_FMAX):
14777 /* True if the 1st OR 2nd arguments are nonnegative. */
14778 return (tree_expr_nonnegative_warnv_p (arg0,
14780 || (tree_expr_nonnegative_warnv_p (arg1,
14781 strict_overflow_p)));
14783 CASE_FLT_FN (BUILT_IN_FMIN):
14784 /* True if the 1st AND 2nd arguments are nonnegative. */
14785 return (tree_expr_nonnegative_warnv_p (arg0,
14787 && (tree_expr_nonnegative_warnv_p (arg1,
14788 strict_overflow_p)));
14790 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14791 /* True if the 2nd argument is nonnegative. */
14792 return tree_expr_nonnegative_warnv_p (arg1,
14793 strict_overflow_p);
14795 CASE_FLT_FN (BUILT_IN_POWI):
14796 /* True if the 1st argument is nonnegative or the second
14797 argument is an even integer. */
14798 if (TREE_CODE (arg1) == INTEGER_CST
14799 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14801 return tree_expr_nonnegative_warnv_p (arg0,
14802 strict_overflow_p);
14804 CASE_FLT_FN (BUILT_IN_POW):
14805 /* True if the 1st argument is nonnegative or the second
argument is an even integer-valued real. */
14807 if (TREE_CODE (arg1) == REAL_CST)
14812 c = TREE_REAL_CST (arg1);
14813 n = real_to_integer (&c);
14816 REAL_VALUE_TYPE cint;
14817 real_from_integer (&cint, VOIDmode, n,
14818 n < 0 ? -1 : 0, 0);
14819 if (real_identical (&c, &cint))
14823 return tree_expr_nonnegative_warnv_p (arg0,
14824 strict_overflow_p);
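/* E.g. pow (x, 2.0) is nonnegative for every x, because the exponent
   is an even integer-valued real. */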
14829 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14833 /* Return true if T is known to be non-negative. If the return
14834 value is based on the assumption that signed overflow is undefined,
14835 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14836 *STRICT_OVERFLOW_P. */
14839 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14841 enum tree_code code = TREE_CODE (t);
14842 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14849 tree temp = TARGET_EXPR_SLOT (t);
14850 t = TARGET_EXPR_INITIAL (t);
14852 /* If the initializer is non-void, then it's a normal expression
14853 that will be assigned to the slot. */
14854 if (!VOID_TYPE_P (t))
14855 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14857 /* Otherwise, the initializer sets the slot in some way. One common
14858 way is an assignment statement at the end of the initializer. */
14861 if (TREE_CODE (t) == BIND_EXPR)
14862 t = expr_last (BIND_EXPR_BODY (t));
14863 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14864 || TREE_CODE (t) == TRY_CATCH_EXPR)
14865 t = expr_last (TREE_OPERAND (t, 0));
14866 else if (TREE_CODE (t) == STATEMENT_LIST)
14871 if (TREE_CODE (t) == MODIFY_EXPR
14872 && TREE_OPERAND (t, 0) == temp)
14873 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14874 strict_overflow_p);
14881 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14882 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14884 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14885 get_callee_fndecl (t),
14888 strict_overflow_p);
14890 case COMPOUND_EXPR:
14892 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14893 strict_overflow_p);
14895 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14896 strict_overflow_p);
14898 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14899 strict_overflow_p);
14902 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
/* We don't know the sign of `t', so be conservative and return false. */
14910 /* Return true if T is known to be non-negative. If the return
14911 value is based on the assumption that signed overflow is undefined,
14912 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14913 *STRICT_OVERFLOW_P. */
14916 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14918 enum tree_code code;
14919 if (t == error_mark_node)
14922 code = TREE_CODE (t);
14923 switch (TREE_CODE_CLASS (code))
14926 case tcc_comparison:
14927 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14929 TREE_OPERAND (t, 0),
14930 TREE_OPERAND (t, 1),
14931 strict_overflow_p);
14934 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14936 TREE_OPERAND (t, 0),
14937 strict_overflow_p);
14940 case tcc_declaration:
14941 case tcc_reference:
14942 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14950 case TRUTH_AND_EXPR:
14951 case TRUTH_OR_EXPR:
14952 case TRUTH_XOR_EXPR:
14953 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14955 TREE_OPERAND (t, 0),
14956 TREE_OPERAND (t, 1),
14957 strict_overflow_p);
14958 case TRUTH_NOT_EXPR:
14959 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14961 TREE_OPERAND (t, 0),
14962 strict_overflow_p);
14969 case WITH_SIZE_EXPR:
14971 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14974 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14978 /* Return true if `t' is known to be non-negative. Handle warnings
14979 about undefined signed overflow. */
14982 tree_expr_nonnegative_p (tree t)
14984 bool ret, strict_overflow_p;
14986 strict_overflow_p = false;
14987 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14988 if (strict_overflow_p)
14989 fold_overflow_warning (("assuming signed overflow does not occur when "
14990 "determining that expression is always "
14992 WARN_STRICT_OVERFLOW_MISC);
14997 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14998 For floating point we further ensure that T is not denormal.
14999 Similar logic is present in nonzero_address in rtlanal.h.
15001 If the return value is based on the assumption that signed overflow
15002 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15003 change *STRICT_OVERFLOW_P. */
15006 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15007 bool *strict_overflow_p)
15012 return tree_expr_nonzero_warnv_p (op0,
15013 strict_overflow_p);
15017 tree inner_type = TREE_TYPE (op0);
15018 tree outer_type = type;
15020 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15021 && tree_expr_nonzero_warnv_p (op0,
15022 strict_overflow_p));
15026 case NON_LVALUE_EXPR:
15027 return tree_expr_nonzero_warnv_p (op0,
15028 strict_overflow_p);
15037 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15038 For floating point we further ensure that T is not denormal.
15039 Similar logic is present in nonzero_address in rtlanal.h.
15041 If the return value is based on the assumption that signed overflow
15042 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15043 change *STRICT_OVERFLOW_P. */
15046 tree_binary_nonzero_warnv_p (enum tree_code code,
15049 tree op1, bool *strict_overflow_p)
15051 bool sub_strict_overflow_p;
15054 case POINTER_PLUS_EXPR:
15056 if (TYPE_OVERFLOW_UNDEFINED (type))
/* In the presence of negative values it is hard
to say anything definite. */
15060 sub_strict_overflow_p = false;
15061 if (!tree_expr_nonnegative_warnv_p (op0,
15062 &sub_strict_overflow_p)
15063 || !tree_expr_nonnegative_warnv_p (op1,
15064 &sub_strict_overflow_p))
/* One of the operands must be positive and the other non-negative. */
15067 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15068 overflows, on a twos-complement machine the sum of two
15069 nonnegative numbers can never be zero. */
15070 return (tree_expr_nonzero_warnv_p (op0,
15072 || tree_expr_nonzero_warnv_p (op1,
15073 strict_overflow_p));
15078 if (TYPE_OVERFLOW_UNDEFINED (type))
15080 if (tree_expr_nonzero_warnv_p (op0,
15082 && tree_expr_nonzero_warnv_p (op1,
15083 strict_overflow_p))
15085 *strict_overflow_p = true;
15092 sub_strict_overflow_p = false;
15093 if (tree_expr_nonzero_warnv_p (op0,
15094 &sub_strict_overflow_p)
15095 && tree_expr_nonzero_warnv_p (op1,
15096 &sub_strict_overflow_p))
15098 if (sub_strict_overflow_p)
15099 *strict_overflow_p = true;
15104 sub_strict_overflow_p = false;
15105 if (tree_expr_nonzero_warnv_p (op0,
15106 &sub_strict_overflow_p))
15108 if (sub_strict_overflow_p)
15109 *strict_overflow_p = true;
/* When both operands are nonzero, MAX must be too. */
15112 if (tree_expr_nonzero_warnv_p (op1,
15113 strict_overflow_p))
15116 /* MAX where operand 0 is positive is positive. */
15117 return tree_expr_nonnegative_warnv_p (op0,
15118 strict_overflow_p);
15120 /* MAX where operand 1 is positive is positive. */
15121 else if (tree_expr_nonzero_warnv_p (op1,
15122 &sub_strict_overflow_p)
15123 && tree_expr_nonnegative_warnv_p (op1,
15124 &sub_strict_overflow_p))
15126 if (sub_strict_overflow_p)
15127 *strict_overflow_p = true;
15133 return (tree_expr_nonzero_warnv_p (op1,
15135 || tree_expr_nonzero_warnv_p (op0,
15136 strict_overflow_p));
15145 /* Return true when T is an address and is known to be nonzero.
15146 For floating point we further ensure that T is not denormal.
15147 Similar logic is present in nonzero_address in rtlanal.h.
15149 If the return value is based on the assumption that signed overflow
15150 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15151 change *STRICT_OVERFLOW_P. */
15154 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15156 bool sub_strict_overflow_p;
15157 switch (TREE_CODE (t))
15160 return !integer_zerop (t);
15164 tree base = TREE_OPERAND (t, 0);
15165 if (!DECL_P (base))
15166 base = get_base_address (base);
15171 /* Weak declarations may link to NULL. Other things may also be NULL
15172 so protect with -fdelete-null-pointer-checks; but not variables
15173 allocated on the stack. */
15175 && (flag_delete_null_pointer_checks
15176 || (DECL_CONTEXT (base)
15177 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15178 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15179 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15181 /* Constants are never weak. */
15182 if (CONSTANT_CLASS_P (base))
15189 sub_strict_overflow_p = false;
15190 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15191 &sub_strict_overflow_p)
15192 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15193 &sub_strict_overflow_p))
15195 if (sub_strict_overflow_p)
15196 *strict_overflow_p = true;
15207 /* Return true when T is an address and is known to be nonzero.
15208 For floating point we further ensure that T is not denormal.
15209 Similar logic is present in nonzero_address in rtlanal.h.
15211 If the return value is based on the assumption that signed overflow
15212 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15213 change *STRICT_OVERFLOW_P. */
15216 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15218 tree type = TREE_TYPE (t);
15219 enum tree_code code;
15221 /* Doing something useful for floating point would need more work. */
15222 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15225 code = TREE_CODE (t);
15226 switch (TREE_CODE_CLASS (code))
15229 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15230 strict_overflow_p);
15232 case tcc_comparison:
15233 return tree_binary_nonzero_warnv_p (code, type,
15234 TREE_OPERAND (t, 0),
15235 TREE_OPERAND (t, 1),
15236 strict_overflow_p);
15238 case tcc_declaration:
15239 case tcc_reference:
15240 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15248 case TRUTH_NOT_EXPR:
15249 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15250 strict_overflow_p);
15252 case TRUTH_AND_EXPR:
15253 case TRUTH_OR_EXPR:
15254 case TRUTH_XOR_EXPR:
15255 return tree_binary_nonzero_warnv_p (code, type,
15256 TREE_OPERAND (t, 0),
15257 TREE_OPERAND (t, 1),
15258 strict_overflow_p);
15265 case WITH_SIZE_EXPR:
15267 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15269 case COMPOUND_EXPR:
15272 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15273 strict_overflow_p);
15276 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15277 strict_overflow_p);
15280 return alloca_call_p (t);
15288 /* Return true when T is an address and is known to be nonzero.
15289 Handle warnings about undefined signed overflow. */
15292 tree_expr_nonzero_p (tree t)
15294 bool ret, strict_overflow_p;
15296 strict_overflow_p = false;
15297 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15298 if (strict_overflow_p)
15299 fold_overflow_warning (("assuming signed overflow does not occur when "
15300 "determining that expression is always "
15302 WARN_STRICT_OVERFLOW_MISC);
15306 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1.
15310 If the expression could be simplified to a constant, then return
15311 the constant. If the expression would not be simplified to a
15312 constant, then return NULL_TREE. */
15315 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15317 tree tem = fold_binary (code, type, op0, op1);
15318 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15321 /* Given the components of a unary expression CODE, TYPE and OP0,
attempt to fold the expression to a constant without modifying TYPE or OP0.
15325 If the expression could be simplified to a constant, then return
15326 the constant. If the expression would not be simplified to a
15327 constant, then return NULL_TREE. */
15330 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15332 tree tem = fold_unary (code, type, op0);
15333 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15336 /* If EXP represents referencing an element in a constant string
15337 (either via pointer arithmetic or array indexing), return the
15338 tree representing the value accessed, otherwise return NULL. */
15341 fold_read_from_constant_string (tree exp)
15343 if ((TREE_CODE (exp) == INDIRECT_REF
15344 || TREE_CODE (exp) == ARRAY_REF)
15345 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15347 tree exp1 = TREE_OPERAND (exp, 0);
15350 location_t loc = EXPR_LOCATION (exp);
15352 if (TREE_CODE (exp) == INDIRECT_REF)
15353 string = string_constant (exp1, &index);
15356 tree low_bound = array_ref_low_bound (exp);
15357 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
/* Optimize the special case of a zero lower bound.
15361 We convert the low_bound to sizetype to avoid some problems
15362 with constant folding. (E.g. suppose the lower bound is 1,
and its mode is QI. Without the conversion, (ARRAY
15364 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15365 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15366 if (! integer_zerop (low_bound))
15367 index = size_diffop_loc (loc, index,
15368 fold_convert_loc (loc, sizetype, low_bound));
15374 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15375 && TREE_CODE (string) == STRING_CST
15376 && TREE_CODE (index) == INTEGER_CST
15377 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15378 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15380 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15381 return build_int_cst_type (TREE_TYPE (exp),
15382 (TREE_STRING_POINTER (string)
15383 [TREE_INT_CST_LOW (index)]));
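/* E.g. a read of "abc"[1] is folded to the character constant 'b',
   converted to EXP's type. */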
15388 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15389 an integer constant, real, or fixed-point constant.
15391 TYPE is the type of the result. */
15394 fold_negate_const (tree arg0, tree type)
15396 tree t = NULL_TREE;
15398 switch (TREE_CODE (arg0))
15402 double_int val = tree_to_double_int (arg0);
15403 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15405 t = force_fit_type_double (type, val, 1,
15406 (overflow | TREE_OVERFLOW (arg0))
15407 && !TYPE_UNSIGNED (type));
15412 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15417 FIXED_VALUE_TYPE f;
15418 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15419 &(TREE_FIXED_CST (arg0)), NULL,
15420 TYPE_SATURATING (type));
15421 t = build_fixed (type, f);
15422 /* Propagate overflow flags. */
15423 if (overflow_p | TREE_OVERFLOW (arg0))
15424 TREE_OVERFLOW (t) = 1;
15429 gcc_unreachable ();
15435 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15436 an integer constant or real constant.
15438 TYPE is the type of the result. */
15441 fold_abs_const (tree arg0, tree type)
15443 tree t = NULL_TREE;
15445 switch (TREE_CODE (arg0))
15449 double_int val = tree_to_double_int (arg0);
15451 /* If the value is unsigned or non-negative, then the absolute value
15452 is the same as the ordinary value. */
15453 if (TYPE_UNSIGNED (type)
15454 || !double_int_negative_p (val))
/* If the value is negative, then the absolute value is its negation. */
15463 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15464 t = force_fit_type_double (type, val, -1,
15465 overflow | TREE_OVERFLOW (arg0));
15471 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15472 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15478 gcc_unreachable ();
15484 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15485 constant. TYPE is the type of the result. */
15488 fold_not_const (const_tree arg0, tree type)
15492 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15494 val = double_int_not (tree_to_double_int (arg0));
15495 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15498 /* Given CODE, a relational operator, the target type, TYPE and two
15499 constant operands OP0 and OP1, return the result of the
15500 relational operation. If the result is not a compile time
15501 constant, then return NULL_TREE. */
15504 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15506 int result, invert;
15508 /* From here on, the only cases we handle are when the result is
15509 known to be a constant. */
15511 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15513 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15514 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15516 /* Handle the cases where either operand is a NaN. */
15517 if (real_isnan (c0) || real_isnan (c1))
15527 case UNORDERED_EXPR:
15541 if (flag_trapping_math)
15547 gcc_unreachable ();
15550 return constant_boolean_node (result, type);
15553 return constant_boolean_node (real_compare (code, c0, c1), type);
15556 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15558 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15559 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15560 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15563 /* Handle equality/inequality of complex constants. */
15564 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15566 tree rcond = fold_relational_const (code, type,
15567 TREE_REALPART (op0),
15568 TREE_REALPART (op1));
15569 tree icond = fold_relational_const (code, type,
15570 TREE_IMAGPART (op0),
15571 TREE_IMAGPART (op1));
15572 if (code == EQ_EXPR)
15573 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15574 else if (code == NE_EXPR)
15575 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15580 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15582 To compute GT, swap the arguments and do LT.
15583 To compute GE, do LT and invert the result.
15584 To compute LE, swap the arguments, do LT and invert the result.
15585 To compute NE, do EQ and invert the result.
15587 Therefore, the code below must handle only EQ and LT. */
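/* E.g. 5 > 3 is computed as 3 < 5 (true), and 5 >= 3 as !(5 < 3)
   (also true). */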
  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either does not, we don't need to wrap
     the expression in a cleanup point expression.  Note that we don't check
     the left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
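/* For example, "return 0;" comes back unwrapped here because the
   right-hand side of the modify expression inside the return has no
   side effects, whereas a full expression that creates a temporary
   needing cleanup keeps its CLEANUP_POINT_EXPR so the cleanup runs at
   the end of the full expression.  */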
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc, BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
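          /* E.g. for a vector of four 32-bit floats, the access
             ((float *)&v)[2] arrives here with op01 == 8, so indexi
             is 64 and part_width is 32 bits, producing
             BIT_FIELD_REF <v, 32, 64>.  */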
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
/* Build an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
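/* For example, with its result ignored, "x + f ()" folds to just
   "f ()" here, and an expression with no side effects at all collapses
   to integer_zero_node.  */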
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
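/* For example, rounding the constant 13 up to a multiple of 8 takes the
   power-of-two path: (13 & ~7) + 8 == 16.  A non-constant VALUE gets the
   equivalent (value + 7) & -8, and a divisor such as 12 falls back to
   CEIL_DIV_EXPR followed by MULT_EXPR.  */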
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
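/* For example, rounding 13 down to a multiple of 8 emits 13 & -8 == 8,
   while a non-power-of-two divisor becomes FLOOR_DIV_EXPR followed by
   MULT_EXPR.  */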
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
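/* For example, for EXP == &s.f where field f starts 4 bytes into s, the
   core is &s, *PBITPOS is 32 and *POFFSET is NULL_TREE; a pointer that
   is not an ADDR_EXPR is returned unchanged with a zero offset.  */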
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
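/* For example, for e1 == &a[3] and e2 == &a[1] with 4-byte elements,
   both addresses share the core &a and *DIFF is set to 8; if exactly
   one side has a variable offset, no constant difference exists and
   false is returned.  */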
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
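                /* For example, since sine is odd, sin (-x) differs from
                   sin (x) only in sign, so when the caller does not care
                   about the sign the inner negation is stripped and the
                   call is rebuilt as sin (x).  */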