/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
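/* For illustration (an editorial sketch, not part of the original
   sources): a typical use of these entry points elsewhere in the
   compiler looks like

       tree four  = size_int (4);
       tree eight = size_int (8);
       tree sum   = size_binop (PLUS_EXPR, four, eight);

   Both operands carry type `sizetype', so size_binop folds the
   addition down to the constant 12 at compile time.  */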
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
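/* For illustration (an editorial note, not in the original sources,
   assuming the LT/EQ/GT bit scheme this file uses): each primitive
   outcome gets its own bit, so compound operators are unions of bits,
   e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.  ANDing or ORing two
   comparisons of the same operands then reduces to ANDing or ORing
   their bit patterns, which is what combine_comparisons below
   exploits.  */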
enum comparison_code {
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

expr_location_or (tree t, location_t loc)
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare it.  */

protected_set_expr_location_unshare (tree x, location_t loc)
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
      SET_EXPR_LOCATION (x, loc);
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */
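/* For illustration (an editorial sketch, not in the original sources),
   writing <N> for an INTEGER_CST of value N:

       div_if_zero_remainder (EXACT_DIV_EXPR, <12>, <4>)  ==> <3>
       div_if_zero_remainder (EXACT_DIV_EXPR, <13>, <4>)  ==> NULL_TREE

   The second call fails because 13 % 4 != 0.  */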
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)

  /* The sign of the division is taken from operand two; that does the
     correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),

  return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

fold_defer_overflow_warnings (void)
  ++fold_deferring_overflow_warnings;
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
      if (fold_deferred_overflow_warning != NULL
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)

  if (gimple_no_warning_p (stmt))

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))

  locus = input_location;

  locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

fold_undefer_and_ignore_overflow_warnings (void)
  fold_undefer_overflow_warnings (false, NULL, 0);

/* Whether we are deferring overflow warnings.  */

fold_deferring_overflow_warnings_p (void)
  return fold_deferring_overflow_warnings > 0;
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
  if (fold_deferring_overflow_warnings > 0)
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */
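/* For illustration (an editorial note, not in the original sources):
   sin is odd, so the folder may rewrite -sin (x) as sin (-x); an even
   function such as cos does not qualify, since -cos (x) != cos (-x)
   in general.  The rint family below additionally requires
   !flag_rounding_math, because under a directed rounding mode
   rounding is not symmetric about zero.  */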
negate_mathfn_p (enum built_in_function code)
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;
/* Check whether we may negate an integer constant T without causing
   overflow.  */

may_negate_without_overflow_p (const_tree t)
  unsigned HOST_WIDE_INT val;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
      if (TREE_INT_CST_LOW (t) != 0)

      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);

    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
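/* For illustration (an editorial note, not in the original sources):
   in a 32-bit two's complement int, INT_MIN is -2147483648 while
   INT_MAX is only 2147483647, so -INT_MIN is not representable.  The
   comparison above returns false for exactly that one value, whose
   bit pattern is 1 << (prec - 1).  */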
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

negate_expr_p (tree t)

  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      if (TYPE_OVERFLOW_WRAPS (type))

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

      return negate_expr_p (TREE_OPERAND (t, 0));

      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))

      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))

      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

      if (TYPE_UNSIGNED (TREE_TYPE (t)))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));

      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))

          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
      return negate_expr_p (TREE_OPERAND (t, 1));

      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
          tree tem = strip_float_extensions (t);
            return negate_expr_p (tem);

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

fold_negate_expr (location_t loc, tree t)
  tree type = TREE_TYPE (t);

  switch (TREE_CODE (t))
    /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));

      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))

      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)

      tem = fold_negate_const (t, type);

        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);

      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));

      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));

      return TREE_OPERAND (t, 0);

      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));

      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));

      if (TYPE_UNSIGNED (type))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));

      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));

          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));

      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);

  tem = fold_negate_expr (loc, t);
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
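/* For illustration (an editorial sketch, not in the original sources):
   splitting IN = A + CST + 5 with CODE == PLUS_EXPR, where A is a
   variable and CST is TREE_CONSTANT but not a literal, yields

       return value:  A      (the variable part)
       *CONP:         CST    (the TREE_CONSTANT part)
       *LITP:         5      (the literal part)
       *MINUS_LITP:   NULL

   and associate_trees below can recombine the pieces.  */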
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)

        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
        *minus_litp = *litp, *litp = 0;
        *conp = negate_expr (*conp);
        var = negate_expr (var);
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
  else if (TREE_CONSTANT (in))

        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */
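/* For illustration (an editorial note, not in the original sources):
   when one input is itself a PLUS_EXPR or MINUS_EXPR of the kind
   produced by split_tree, the code below deliberately uses build2_loc
   rather than fold_build2_loc, since re-folding such a tree could
   recurse forever; along the way it rewrites T1 + (-T2) into
   T1 - T2.  */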
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
      if (code == PLUS_EXPR)
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
      else if (code == MINUS_EXPR)
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */
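/* For illustration (an editorial sketch, not in the original sources):

       tree two   = build_int_cst (integer_type_node, 2);
       tree three = build_int_cst (integer_type_node, 3);
       tree five  = int_const_binop (PLUS_EXPR, two, three);

   yields an INTEGER_CST of value 5.  The OVERFLOWABLE argument is
   forwarded to force_fit_type_double and controls how results that
   do not fit the type are flagged.  */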
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)

  double_int op1, op2, res, tmp;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);

      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);

      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));

      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));

      res = op1.add_with_sign (op2, false, &overflow);

      res = op1.sub_with_overflow (op2, &overflow);

      res = op1.mul_with_sign (op2, false, &overflow);

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)

          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op1 == op2 && !op1.is_zero ())
          res = double_int_one;
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);

      res = op1.min (op2, uns);

      res = op1.max (op2, uns);

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
  return int_const_binop_1 (code, arg1, arg2, 1);
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */
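/* For illustration (an editorial note, not in the original sources):
   const_binop (PLUS_EXPR, <1.5>, <2.5>) folds to the REAL_CST 4.0,
   but the guards below may refuse to fold at all: e.g. a division by
   the constant 0.0 is left for run time when flag_trapping_math is
   set, so that the expected floating-point exception is preserved.  */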
const_binop (enum tree_code code, tree arg1, tree arg2)
  /* Sanity check for the recursive cases.  */

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
      enum machine_mode mode;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;

      /* The following codes are handled by real_arithmetic.  */

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      else if (REAL_VALUE_ISNAN (d2))

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
  if (TREE_CODE (arg1) == FIXED_CST)
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;

      /* The following codes are handled by fixed_arithmetic.  */

        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);

          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
  if (TREE_CODE (arg1) == COMPLEX_CST)
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);

          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);

          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));

          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,

        case TRUNC_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 with t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);

              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);

                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);

      return build_complex (type, real, imag);
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)

      return build_vector (type, elts);
  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
          if (!host_integerp (arg2, 1))

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)

          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler always emits VEC_RSHIFT_EXPR;
             for !BYTES_BIG_ENDIAN it picks the first vector element,
             but for BYTES_BIG_ENDIAN the last element of the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;

          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);

      return build_vector (type, elts);
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
  return build_int_cst (sizetype_tab[(int) kind], number);
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
      else if (code == MINUS_EXPR)
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
      else if (code == MULT_EXPR)
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);

  return fold_build2_loc (loc, code, type, arg0, arg1);
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
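/* For illustration (an editorial note, not in the original sources):
   for sizetype operands the result type is ssizetype, so that
   size_diffop (<4>, <8>) can represent the negative result -4, which
   plain unsigned sizetype arithmetic could not.  */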
size_diffop_loc (location_t loc, tree arg0, tree arg1)
  tree type = TREE_TYPE (arg0);

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
  else if (type == bitsizetype)
    ctype = sbitsizetype;

    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));

    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

fold_convert_const_int_from_int (tree type, const_tree arg1)

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards, which simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
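  /* For illustration (an editorial sketch, not in the original
     sources), assuming a 32-bit int target:

         (int) NaN    ==> 0          (overflow flag set)
         (int) 1e30   ==> INT_MAX    (saturates high, overflow flag set)
         (int) -1e30  ==> INT_MIN    (saturates low, overflow flag set)
         (int) 3.75   ==> 3          (FIX_TRUNC_EXPR truncates)  */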
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
      val = double_int_zero;

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
          val = tree_to_double_int (lt);

      tree ut = TYPE_MAX_VALUE (type);

          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
              val = tree_to_double_int (ut);

    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

fold_convert_const_int_from_fixed (tree type, const_tree arg1)
  double_int temp, temp_trunc;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      temp = double_int_zero;
      temp_trunc = double_int_zero;

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not all zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

fold_convert_const_real_from_real (tree type, const_tree arg1)
  REAL_VALUE_TYPE value;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;

    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

fold_convert_const_real_from_fixed (tree type, const_tree arg1)
  REAL_VALUE_TYPE value;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
  FIXED_VALUE_TYPE value;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

fold_convert_const_fixed_from_int (tree type, const_tree arg1)
  FIXED_VALUE_TYPE value;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

fold_convert_const_fixed_from_real (tree type, const_tree arg1)
  FIXED_VALUE_TYPE value;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

fold_convert_const (enum tree_code code, tree type, tree arg1)
  if (TREE_TYPE (arg1) == type)

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
  else if (TREE_CODE (type) == REAL_TYPE)
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
/* Construct a vector of zero elements of vector type TYPE.  */

build_zero_vector (tree type)
  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

fold_convertible_p (const_tree type, const_tree arg)
  tree orig = TREE_TYPE (arg);

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))

  switch (TREE_CODE (type))
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case FIXED_POINT_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

fold_convert_loc (location_t loc, tree type, tree arg)
  tree orig = TREE_TYPE (arg);

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

      if (TREE_CODE (arg) == INTEGER_CST)
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
      else if (TREE_CODE (arg) == REAL_CST)
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
      else if (TREE_CODE (arg) == FIXED_CST)
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)

      switch (TREE_CODE (orig))
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;

      switch (TREE_CODE (orig))
        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

      switch (TREE_CODE (orig))
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));

          if (TREE_CODE (arg) == COMPLEX_EXPR)
              rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                        TREE_OPERAND (arg, 0));
              ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                        TREE_OPERAND (arg, 1));
              return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);

          arg = save_expr (arg);
          rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
          rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
          ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);

      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);

  protected_set_expr_location_unshare (tem, loc);
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

maybe_lvalue_p (const_tree x)
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    case ARRAY_RANGE_REF:

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:

      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)

/* Return an expr equal to X but certainly not valid as an lvalue.  */

non_lvalue_loc (location_t loc, tree x)
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */

  if (! maybe_lvalue_p (x))
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

pedantic_non_lvalue_loc (location_t loc, tree x)
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */
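/* For illustration (an editorial note, not in the original sources):
   without NaNs the inverse of x > y is x <= y (LE_EXPR); when NaNs
   must be honored the inverse is UNLE_EXPR ("unordered or less than
   or equal"), because x > y is already false when either operand is
   NaN.  */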
invert_tree_comparison (enum tree_code code, bool honor_nans)
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)

      return honor_nans ? UNLE_EXPR : LE_EXPR;

      return honor_nans ? UNLT_EXPR : LT_EXPR;

      return honor_nans ? UNGE_EXPR : GE_EXPR;

      return honor_nans ? UNGT_EXPR : GT_EXPR;

      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

swap_tree_comparison (enum tree_code code)

    case UNORDERED_EXPR:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;

      return COMPCODE_UNLT;

      return COMPCODE_UNEQ;

      return COMPCODE_UNLE;

      return COMPCODE_UNGT;

      return COMPCODE_LTGT;

      return COMPCODE_UNGE;
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */
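/* For illustration (an editorial sketch, not in the original sources,
   assuming the bit scheme described above): combining
   (x < y) && (x == y) ANDs the encodings of LT and EQ, which share no
   bits, so the result is COMPCODE_FALSE and the whole expression
   folds to constant false; (x < y) || (x == y) ORs the bits and comes
   back as x <= y.  */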
2310 combine_comparisons (location_t loc,
2311 enum tree_code code, enum tree_code lcode,
2312 enum tree_code rcode, tree truth_type,
2313 tree ll_arg, tree lr_arg)
2315 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2316 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2317 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2322 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2323 compcode = lcompcode & rcompcode;
2326 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2327 compcode = lcompcode | rcompcode;
2336 /* Eliminate unordered comparisons, as well as LTGT and ORD
2337 which are not used unless the mode has NaNs. */
2338 compcode &= ~COMPCODE_UNORD;
2339 if (compcode == COMPCODE_LTGT)
2340 compcode = COMPCODE_NE;
2341 else if (compcode == COMPCODE_ORD)
2342 compcode = COMPCODE_TRUE;
2344 else if (flag_trapping_math)
2346 /* Check that the original operation and the optimized ones will trap
2347 under the same condition. */
2348 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2349 && (lcompcode != COMPCODE_EQ)
2350 && (lcompcode != COMPCODE_ORD);
2351 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2352 && (rcompcode != COMPCODE_EQ)
2353 && (rcompcode != COMPCODE_ORD);
2354 bool trap = (compcode & COMPCODE_UNORD) == 0
2355 && (compcode != COMPCODE_EQ)
2356 && (compcode != COMPCODE_ORD);
2358 /* In a short-circuited boolean expression the LHS might be
2359 such that the RHS, if evaluated, will never trap. For
2360 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2361 if neither x nor y is NaN. (This is a mixed blessing: for
2362 example, the expression above will never trap, hence
2363 optimizing it to x < y would be invalid). */
2364 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2365 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2368 /* If the comparison was short-circuited, and only the RHS
2369 trapped, we may now generate a spurious trap. */
2371 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2374 /* If we changed the conditions that cause a trap, we lose. */
2375 if ((ltrap || rtrap) != trap)
2379 if (compcode == COMPCODE_TRUE)
2380 return constant_boolean_node (true, truth_type);
2381 else if (compcode == COMPCODE_FALSE)
2382 return constant_boolean_node (false, truth_type);
2385 enum tree_code tcode;
2387 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2388 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
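/* Illustrative examples (not from the original source), assuming the
   trap conditions checked above permit the transformation:

     (x < y) && (x == y)  ->  1 & 2 == COMPCODE_FALSE  ->  false
     (x < y) || (x == y)  ->  1 | 2 == COMPCODE_LE     ->  x <= y  */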
2392 /* Return nonzero if two operands (typically of the same tree node)
2393 are necessarily equal. If either argument has side-effects this
2394 function returns zero. FLAGS modifies behavior as follows:
2396 If OEP_ONLY_CONST is set, only return nonzero for constants.
2397 This function tests whether the operands are indistinguishable;
2398 it does not test whether they are equal using C's == operation.
2399 The distinction is important for IEEE floating point, because
2400 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2401 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2403 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2404 even though it may hold multiple values during a function.
2405 This is because a GCC tree node guarantees that nothing else is
2406 executed between the evaluation of its "operands" (which may often
2407 be evaluated in arbitrary order). Hence if the operands themselves
2408 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2409 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2410 unset means assuming isochronic (or instantaneous) tree equivalence.
2411 Unless comparing arbitrary expression trees, such as from different
2412 statements, this flag can usually be left unset.
2414 If OEP_PURE_SAME is set, then pure functions with identical arguments
2415 are considered the same. It is used when the caller has other ways
2416 to ensure that global memory is unchanged in between. */
2419 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2421 /* If either is ERROR_MARK, they aren't equal. */
2422 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2423 || TREE_TYPE (arg0) == error_mark_node
2424 || TREE_TYPE (arg1) == error_mark_node)
2427 /* Similar, if either does not have a type (like a released SSA name),
2428 they aren't equal. */
2429 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2432 /* Check equality of integer constants before bailing out due to
2433 precision differences. */
2434 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2435 return tree_int_cst_equal (arg0, arg1);
2437 /* If both types don't have the same signedness, then we can't consider
2438 them equal. We must check this before the STRIP_NOPS calls
2439 because they may change the signedness of the arguments. As pointers
2440 strictly don't have a signedness, require either two pointers or
2441 two non-pointers as well. */
2442 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2443 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2446 /* We cannot consider pointers to different address spaces equal. */
2447 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2448 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2449 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2452 /* If both types don't have the same precision, then it is not safe
2454 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2460 /* In case both args are comparisons but with different comparison
2461 code, try to swap the comparison operands of one arg to produce
2462 a match and compare that variant. */
2463 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2464 && COMPARISON_CLASS_P (arg0)
2465 && COMPARISON_CLASS_P (arg1))
2467 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2469 if (TREE_CODE (arg0) == swap_code)
2470 return operand_equal_p (TREE_OPERAND (arg0, 0),
2471 TREE_OPERAND (arg1, 1), flags)
2472 && operand_equal_p (TREE_OPERAND (arg0, 1),
2473 TREE_OPERAND (arg1, 0), flags);
2476 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2477 /* This is needed for conversions and for COMPONENT_REF.
2478 Might as well play it safe and always test this. */
2479 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2480 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2481 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2484 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2485 We don't care about side effects in that case because the SAVE_EXPR
2486 takes care of that for us. In all other cases, two expressions are
2487 equal if they have no side effects. If we have two identical
2488 expressions with side effects that should be treated the same due
2489 to the only side effects being identical SAVE_EXPR's, that will
2490 be detected in the recursive calls below.
2491 If we are taking an invariant address of two identical objects
2492 they are necessarily equal as well. */
2493 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2494 && (TREE_CODE (arg0) == SAVE_EXPR
2495 || (flags & OEP_CONSTANT_ADDRESS_OF)
2496 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2499 /* Next handle constant cases, those for which we can return 1 even
2500 if ONLY_CONST is set. */
2501 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2502 switch (TREE_CODE (arg0))
2505 return tree_int_cst_equal (arg0, arg1);
2508 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2509 TREE_FIXED_CST (arg1));
2512 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2513 TREE_REAL_CST (arg1)))
2517 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2519 /* If we do not distinguish between signed and unsigned zero,
2520 consider them equal. */
2521 if (real_zerop (arg0) && real_zerop (arg1))
2530 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2533 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2535 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2536 VECTOR_CST_ELT (arg1, i), flags))
2543 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2545 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2549 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2550 && ! memcmp (TREE_STRING_POINTER (arg0),
2551 TREE_STRING_POINTER (arg1),
2552 TREE_STRING_LENGTH (arg0)));
2555 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2556 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2557 ? OEP_CONSTANT_ADDRESS_OF : 0);
2562 if (flags & OEP_ONLY_CONST)
2565 /* Define macros to test an operand from arg0 and arg1 for equality and a
2566 variant that allows null and views null as being different from any
2567 non-null value. In the latter case, if either is null, both
2568 must be; otherwise, do the normal comparison. */
2569 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2570 TREE_OPERAND (arg1, N), flags)
2572 #define OP_SAME_WITH_NULL(N) \
2573 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2574 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2576 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2579 /* Two conversions are equal only if signedness and modes match. */
2580 switch (TREE_CODE (arg0))
2583 case FIX_TRUNC_EXPR:
2584 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2585 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2595 case tcc_comparison:
2597 if (OP_SAME (0) && OP_SAME (1))
2600 /* For commutative ops, allow the other order. */
2601 return (commutative_tree_code (TREE_CODE (arg0))
2602 && operand_equal_p (TREE_OPERAND (arg0, 0),
2603 TREE_OPERAND (arg1, 1), flags)
2604 && operand_equal_p (TREE_OPERAND (arg0, 1),
2605 TREE_OPERAND (arg1, 0), flags));
2608 /* If either of the pointer (or reference) expressions we are
2609 dereferencing contains a side effect, these cannot be equal,
2610 but their addresses can be. */
2611 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2612 && (TREE_SIDE_EFFECTS (arg0)
2613 || TREE_SIDE_EFFECTS (arg1)))
2616 switch (TREE_CODE (arg0))
2619 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2626 case TARGET_MEM_REF:
2627 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2628 /* Require equal extra operands and then fall through to MEM_REF
2629 handling of the two common operands. */
2630 if (!OP_SAME_WITH_NULL (2)
2631 || !OP_SAME_WITH_NULL (3)
2632 || !OP_SAME_WITH_NULL (4))
2636 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2637 /* Require equal access sizes, and similar pointer types.
2638 We can have incomplete types for array references of
2639 variable-sized arrays from the Fortran frontend
2641 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2642 || (TYPE_SIZE (TREE_TYPE (arg0))
2643 && TYPE_SIZE (TREE_TYPE (arg1))
2644 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2645 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2646 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2647 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2648 && OP_SAME (0) && OP_SAME (1));
2651 case ARRAY_RANGE_REF:
2652 /* Operands 2 and 3 may be null.
2653 Compare the array index by value if it is constant first as we
2654 may have different types but same value here. */
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2659 TREE_OPERAND (arg1, 1))
2661 && OP_SAME_WITH_NULL (2)
2662 && OP_SAME_WITH_NULL (3));
2665 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2666 may be NULL when we're called to compare MEM_EXPRs. */
2667 if (!OP_SAME_WITH_NULL (0))
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2675 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2676 return OP_SAME (1) && OP_SAME (2);
2682 case tcc_expression:
2683 switch (TREE_CODE (arg0))
2686 case TRUTH_NOT_EXPR:
2689 case TRUTH_ANDIF_EXPR:
2690 case TRUTH_ORIF_EXPR:
2691 return OP_SAME (0) && OP_SAME (1);
2694 case WIDEN_MULT_PLUS_EXPR:
2695 case WIDEN_MULT_MINUS_EXPR:
2698 /* The multiplication operands are commutative. */
2701 case TRUTH_AND_EXPR:
2703 case TRUTH_XOR_EXPR:
2704 if (OP_SAME (0) && OP_SAME (1))
2707 /* Otherwise take into account this is a commutative operation. */
2708 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2709 TREE_OPERAND (arg1, 1), flags)
2710 && operand_equal_p (TREE_OPERAND (arg0, 1),
2711 TREE_OPERAND (arg1, 0), flags));
2716 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2723 switch (TREE_CODE (arg0))
2726 /* If the CALL_EXPRs call different functions, then they
2727 clearly cannot be equal. */
2728 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2733 unsigned int cef = call_expr_flags (arg0);
2734 if (flags & OEP_PURE_SAME)
2735 cef &= ECF_CONST | ECF_PURE;
2742 /* Now see if all the arguments are the same. */
2744 const_call_expr_arg_iterator iter0, iter1;
2746 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2747 a1 = first_const_call_expr_arg (arg1, &iter1);
2749 a0 = next_const_call_expr_arg (&iter0),
2750 a1 = next_const_call_expr_arg (&iter1))
2751 if (! operand_equal_p (a0, a1, flags))
2754 /* If we get here and both argument lists are exhausted
2755 then the CALL_EXPRs are equal. */
2756 return ! (a0 || a1);
2762 case tcc_declaration:
2763 /* Consider __builtin_sqrt equal to sqrt. */
2764 return (TREE_CODE (arg0) == FUNCTION_DECL
2765 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2766 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2767 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2774 #undef OP_SAME_WITH_NULL
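/* Usage sketch (illustrative only):

     operand_equal_p (a, b, 0)               structural equality; any
                                             side effect makes it fail
     operand_equal_p (a, b, OEP_ONLY_CONST)  nonzero only for constants
     operand_equal_p (a, b, OEP_PURE_SAME)   also equates two calls to
                                             the same pure function with
                                             identical arguments

   For example, two distinct ARRAY_REF trees for x[i] compare equal,
   while any tree containing i++ never does.  */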
2777 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2778 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2780 When in doubt, return 0. */
2783 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2785 int unsignedp1, unsignedpo;
2786 tree primarg0, primarg1, primother;
2787 unsigned int correct_width;
2789 if (operand_equal_p (arg0, arg1, 0))
2792 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2793 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2796 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2797 and see if the inner values are the same. This removes any
2798 signedness comparison, which doesn't matter here. */
2799 primarg0 = arg0, primarg1 = arg1;
2800 STRIP_NOPS (primarg0);
2801 STRIP_NOPS (primarg1);
2802 if (operand_equal_p (primarg0, primarg1, 0))
2805 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2806 actual comparison operand, ARG0.
2808 First throw away any conversions to wider types
2809 already present in the operands. */
2811 primarg1 = get_narrower (arg1, &unsignedp1);
2812 primother = get_narrower (other, &unsignedpo);
2814 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2815 if (unsignedp1 == unsignedpo
2816 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2817 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2819 tree type = TREE_TYPE (arg0);
2821 /* Make sure shorter operand is extended the right way
2822 to match the longer operand. */
2823 primarg1 = fold_convert (signed_or_unsigned_type_for
2824 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2826 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2833 /* See if ARG is an expression that is either a comparison or is performing
2834 arithmetic on comparisons. The comparisons must only be comparing
2835 two different values, which will be stored in *CVAL1 and *CVAL2; if
2836 they are nonzero it means that some operands have already been found.
2837 No variables may be used anywhere else in the expression except in the
2838 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2839 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2841 If this is true, return 1. Otherwise, return zero. */
2844 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2846 enum tree_code code = TREE_CODE (arg);
2847 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2849 /* We can handle some of the tcc_expression cases here. */
2850 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2852 else if (tclass == tcc_expression
2853 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2854 || code == COMPOUND_EXPR))
2855 tclass = tcc_binary;
2857 else if (tclass == tcc_expression && code == SAVE_EXPR
2858 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2860 /* If we've already found a CVAL1 or CVAL2, this expression is
2861 too complex to handle. */
2862 if (*cval1 || *cval2)
2872 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2875 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2876 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2877 cval1, cval2, save_p));
2882 case tcc_expression:
2883 if (code == COND_EXPR)
2884 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2885 cval1, cval2, save_p)
2886 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2887 cval1, cval2, save_p)
2888 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2889 cval1, cval2, save_p));
2892 case tcc_comparison:
2893 /* First see if we can handle the first operand, then the second. For
2894 the second operand, we know *CVAL1 can't be zero. It must be that
2895 one side of the comparison is each of the values; test for the
2896 case where this isn't true by failing if the two operands
2899 if (operand_equal_p (TREE_OPERAND (arg, 0),
2900 TREE_OPERAND (arg, 1), 0))
2904 *cval1 = TREE_OPERAND (arg, 0);
2905 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2907 else if (*cval2 == 0)
2908 *cval2 = TREE_OPERAND (arg, 0);
2909 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2914 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2916 else if (*cval2 == 0)
2917 *cval2 = TREE_OPERAND (arg, 1);
2918 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
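/* Illustrative example (a sketch): for ARG of the form
   (a < b) || (a == b), twoval_comparison_p returns 1 with *CVAL1 == a
   and *CVAL2 == b; for (a < b) || (c == d) it returns 0, since more
   than two distinct values appear in the comparisons.  */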
2930 /* ARG is a tree that is known to contain just arithmetic operations and
2931 comparisons. Evaluate the operations in the tree substituting NEW0 for
2932 any occurrence of OLD0 as an operand of a comparison and likewise for
2936 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2937 tree old1, tree new1)
2939 tree type = TREE_TYPE (arg);
2940 enum tree_code code = TREE_CODE (arg);
2941 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2943 /* We can handle some of the tcc_expression cases here. */
2944 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2946 else if (tclass == tcc_expression
2947 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2948 tclass = tcc_binary;
2953 return fold_build1_loc (loc, code, type,
2954 eval_subst (loc, TREE_OPERAND (arg, 0),
2955 old0, new0, old1, new1));
2958 return fold_build2_loc (loc, code, type,
2959 eval_subst (loc, TREE_OPERAND (arg, 0),
2960 old0, new0, old1, new1),
2961 eval_subst (loc, TREE_OPERAND (arg, 1),
2962 old0, new0, old1, new1));
2964 case tcc_expression:
2968 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2972 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2976 return fold_build3_loc (loc, code, type,
2977 eval_subst (loc, TREE_OPERAND (arg, 0),
2978 old0, new0, old1, new1),
2979 eval_subst (loc, TREE_OPERAND (arg, 1),
2980 old0, new0, old1, new1),
2981 eval_subst (loc, TREE_OPERAND (arg, 2),
2982 old0, new0, old1, new1));
2986 /* Fall through - ??? */
2988 case tcc_comparison:
2990 tree arg0 = TREE_OPERAND (arg, 0);
2991 tree arg1 = TREE_OPERAND (arg, 1);
2993 /* We need to check both for exact equality and tree equality. The
2994 former will be true if the operand has a side-effect. In that
2995 case, we know the operand occurred exactly once. */
2997 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2999 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3002 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3004 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3007 return fold_build2_loc (loc, code, type, arg0, arg1);
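/* Illustrative example (a sketch): with OLD0 = a, NEW0 = 0, OLD1 = b
   and NEW1 = 1, eval_subst rewrites the tree (a < b) && (b <= a) into
   (0 < 1) && (1 <= 0), which subsequent folding reduces to a
   constant.  */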
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED was previously an operand of the expression
3017 but is now not needed (e.g., we folded OMITTED * 0).
3019 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3020 the conversion of RESULT to TYPE. */
3023 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3025 tree t = fold_convert_loc (loc, type, result);
3027 /* If the resulting operand is an empty statement, just return the omitted
3028 statement cast to void. */
3029 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3030 return build1_loc (loc, NOP_EXPR, void_type_node,
3031 fold_ignored_result (omitted));
3033 if (TREE_SIDE_EFFECTS (omitted))
3034 return build2_loc (loc, COMPOUND_EXPR, type,
3035 fold_ignored_result (omitted), t);
3037 return non_lvalue_loc (loc, t);
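/* Illustrative example (a sketch): folding f () * 0, where f has side
   effects, can use omit_one_operand_loc (loc, type, zero, call) and
   yields the COMPOUND_EXPR (f (), 0), so the call is still evaluated;
   for a side-effect-free OMITTED such as a plain variable, the result
   is just the converted RESULT.  */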
3040 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3043 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3046 tree t = fold_convert_loc (loc, type, result);
3048 /* If the resulting operand is an empty statement, just return the omitted
3049 statement cast to void. */
3050 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3051 return build1_loc (loc, NOP_EXPR, void_type_node,
3052 fold_ignored_result (omitted));
3054 if (TREE_SIDE_EFFECTS (omitted))
3055 return build2_loc (loc, COMPOUND_EXPR, type,
3056 fold_ignored_result (omitted), t);
3058 return pedantic_non_lvalue_loc (loc, t);
3061 /* Return a tree for the case when the result of an expression is RESULT
3062 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3063 of the expression but are now not needed.
3065 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3066 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3067 evaluated before OMITTED2. Otherwise, if neither has side effects,
3068 just do the conversion of RESULT to TYPE. */
3071 omit_two_operands_loc (location_t loc, tree type, tree result,
3072 tree omitted1, tree omitted2)
3074 tree t = fold_convert_loc (loc, type, result);
3076 if (TREE_SIDE_EFFECTS (omitted2))
3077 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3078 if (TREE_SIDE_EFFECTS (omitted1))
3079 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3081 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3085 /* Return a simplified tree node for the truth-negation of ARG. This
3086 never alters ARG itself. We assume that ARG is an operation that
3087 returns a truth value (0 or 1).
3089 FIXME: one would think we would fold the result, but it causes
3090 problems with the dominator optimizer. */
3093 fold_truth_not_expr (location_t loc, tree arg)
3095 tree type = TREE_TYPE (arg);
3096 enum tree_code code = TREE_CODE (arg);
3097 location_t loc1, loc2;
3099 /* If this is a comparison, we can simply invert it, except for
3100 floating-point non-equality comparisons, in which case we just
3101 enclose a TRUTH_NOT_EXPR around what we have. */
3103 if (TREE_CODE_CLASS (code) == tcc_comparison)
3105 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3106 if (FLOAT_TYPE_P (op_type)
3107 && flag_trapping_math
3108 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3109 && code != NE_EXPR && code != EQ_EXPR)
3112 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3113 if (code == ERROR_MARK)
3116 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3117 TREE_OPERAND (arg, 1));
3123 return constant_boolean_node (integer_zerop (arg), type);
3125 case TRUTH_AND_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 return build2_loc (loc, TRUTH_OR_EXPR, type,
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3130 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3133 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3134 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3135 return build2_loc (loc, TRUTH_AND_EXPR, type,
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3139 case TRUTH_XOR_EXPR:
3140 /* Here we can invert either operand. We invert the first operand
3141 unless the second operand is a TRUTH_NOT_EXPR in which case our
3142 result is the XOR of the first operand with the inside of the
3143 negation of the second operand. */
3145 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3146 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3147 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3149 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3150 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3151 TREE_OPERAND (arg, 1));
3153 case TRUTH_ANDIF_EXPR:
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3155 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3156 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3158 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3160 case TRUTH_ORIF_EXPR:
3161 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3162 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3163 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3164 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3165 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3167 case TRUTH_NOT_EXPR:
3168 return TREE_OPERAND (arg, 0);
3172 tree arg1 = TREE_OPERAND (arg, 1);
3173 tree arg2 = TREE_OPERAND (arg, 2);
3175 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3176 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3178 /* A COND_EXPR may have a throw as one operand, which
3179 then has void type. Just leave void operands
3181 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3182 VOID_TYPE_P (TREE_TYPE (arg1))
3183 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3184 VOID_TYPE_P (TREE_TYPE (arg2))
3185 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3189 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3190 return build2_loc (loc, COMPOUND_EXPR, type,
3191 TREE_OPERAND (arg, 0),
3192 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3194 case NON_LVALUE_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3199 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3200 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3202 /* ... fall through ... */
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 return build1_loc (loc, TREE_CODE (arg), type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3210 if (!integer_onep (TREE_OPERAND (arg, 1)))
3212 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3215 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3217 case CLEANUP_POINT_EXPR:
3218 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3219 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
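/* Illustrative examples (not from the original source) of the rewrites
   above:

     !(a && b)     ->  !a || !b          (De Morgan)
     !(a ? b : c)  ->  a ? !b : !c
     !(x < y)      ->  x >= y            (no NaNs)
     !(x < y)      ->  TRUTH_NOT_EXPR    (trapping FP comparison)  */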
3227 /* Return a simplified tree node for the truth-negation of ARG. This
3228 never alters ARG itself. We assume that ARG is an operation that
3229 returns a truth value (0 or 1).
3231 FIXME: one would think we would fold the result, but it causes
3232 problems with the dominator optimizer. */
3235 invert_truthvalue_loc (location_t loc, tree arg)
3239 if (TREE_CODE (arg) == ERROR_MARK)
3242 tem = fold_truth_not_expr (loc, arg);
3244 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3249 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3250 operands are another bit-wise operation with a common input. If so,
3251 distribute the bit operations to save an operation and possibly two if
3252 constants are involved. For example, convert
3253 (A | B) & (A | C) into A | (B & C)
3254 Further simplification will occur if B and C are constants.
3256 If this optimization cannot be done, 0 will be returned. */
3259 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3260 tree arg0, tree arg1)
3265 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3266 || TREE_CODE (arg0) == code
3267 || (TREE_CODE (arg0) != BIT_AND_EXPR
3268 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3271 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3273 common = TREE_OPERAND (arg0, 0);
3274 left = TREE_OPERAND (arg0, 1);
3275 right = TREE_OPERAND (arg1, 1);
3277 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3279 common = TREE_OPERAND (arg0, 0);
3280 left = TREE_OPERAND (arg0, 1);
3281 right = TREE_OPERAND (arg1, 0);
3283 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3285 common = TREE_OPERAND (arg0, 1);
3286 left = TREE_OPERAND (arg0, 0);
3287 right = TREE_OPERAND (arg1, 1);
3289 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3291 common = TREE_OPERAND (arg0, 1);
3292 left = TREE_OPERAND (arg0, 0);
3293 right = TREE_OPERAND (arg1, 0);
3298 common = fold_convert_loc (loc, type, common);
3299 left = fold_convert_loc (loc, type, left);
3300 right = fold_convert_loc (loc, type, right);
3301 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3302 fold_build2_loc (loc, code, type, left, right));
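/* Worked example (a sketch): for (x | 1) & (x | 2) the common operand
   is x, LEFT is 1 and RIGHT is 2, so the result is rebuilt as
   x | (1 & 2); constant folding then reduces it to x | 0 and finally
   to x.  */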
3305 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3306 with code CODE. This optimization is unsafe. */
3308 distribute_real_division (location_t loc, enum tree_code code, tree type,
3309 tree arg0, tree arg1)
3311 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3312 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3314 /* (A / C) +- (B / C) -> (A +- B) / C. */
3316 && operand_equal_p (TREE_OPERAND (arg0, 1),
3317 TREE_OPERAND (arg1, 1), 0))
3318 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3319 fold_build2_loc (loc, code, type,
3320 TREE_OPERAND (arg0, 0),
3321 TREE_OPERAND (arg1, 0)),
3322 TREE_OPERAND (arg0, 1));
3324 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3325 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3326 TREE_OPERAND (arg1, 0), 0)
3327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3328 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3330 REAL_VALUE_TYPE r0, r1;
3331 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3332 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3334 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3336 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3337 real_arithmetic (&r0, code, &r0, &r1);
3338 return fold_build2_loc (loc, MULT_EXPR, type,
3339 TREE_OPERAND (arg0, 0),
3340 build_real (type, r0));
3346 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3347 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3350 make_bit_field_ref (location_t loc, tree inner, tree type,
3351 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3353 tree result, bftype;
3357 tree size = TYPE_SIZE (TREE_TYPE (inner));
3358 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3359 || POINTER_TYPE_P (TREE_TYPE (inner)))
3360 && host_integerp (size, 0)
3361 && tree_low_cst (size, 0) == bitsize)
3362 return fold_convert_loc (loc, type, inner);
3366 if (TYPE_PRECISION (bftype) != bitsize
3367 || TYPE_UNSIGNED (bftype) == !unsignedp)
3368 bftype = build_nonstandard_integer_type (bitsize, 0);
3370 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3371 size_int (bitsize), bitsize_int (bitpos));
3374 result = fold_convert_loc (loc, type, result);
3379 /* Optimize a bit-field compare.
3381 There are two cases: First is a compare against a constant and the
3382 second is a comparison of two items where the fields are at the same
3383 bit position relative to the start of a chunk (byte, halfword, word)
3384 large enough to contain it. In these cases we can avoid the shift
3385 implicit in bitfield extractions.
3387 For constants, we emit a compare of the shifted constant with the
3388 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3389 compared. For two fields at the same position, we do the ANDs with the
3390 similar mask and compare the result of the ANDs.
3392 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3393 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3394 are the left and right operands of the comparison, respectively.
3396 If the optimization described above can be done, we return the resulting
3397 tree. Otherwise we return zero. */
3400 optimize_bit_field_compare (location_t loc, enum tree_code code,
3401 tree compare_type, tree lhs, tree rhs)
3403 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3404 tree type = TREE_TYPE (lhs);
3405 tree signed_type, unsigned_type;
3406 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3407 enum machine_mode lmode, rmode, nmode;
3408 int lunsignedp, runsignedp;
3409 int lvolatilep = 0, rvolatilep = 0;
3410 tree linner, rinner = NULL_TREE;
3414 /* In the strict volatile bitfields case, doing code changes here may prevent
3415 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3416 if (flag_strict_volatile_bitfields > 0)
3419 /* Get all the information about the extractions being done. If the bit size
3420 is the same as the size of the underlying object, we aren't doing an
3421 extraction at all and so can do nothing. We also don't want to
3422 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3423 then will no longer be able to replace it. */
3424 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3425 &lunsignedp, &lvolatilep, false);
3426 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3427 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3432 /* If this is not a constant, we can only do something if bit positions,
3433 sizes, and signedness are the same. */
3434 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3435 &runsignedp, &rvolatilep, false);
3437 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3438 || lunsignedp != runsignedp || offset != 0
3439 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3443 /* See if we can find a mode to refer to this field. We should be able to,
3444 but fail if we can't. */
3446 && GET_MODE_BITSIZE (lmode) > 0
3447 && flag_strict_volatile_bitfields > 0)
3450 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3451 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3452 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3453 TYPE_ALIGN (TREE_TYPE (rinner))),
3454 word_mode, lvolatilep || rvolatilep);
3455 if (nmode == VOIDmode)
3458 /* Set signed and unsigned types of the precision of this mode for the
3460 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3461 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3463 /* Compute the bit position and size for the new reference and our offset
3464 within it. If the new reference is the same size as the original, we
3465 won't optimize anything, so return zero. */
3466 nbitsize = GET_MODE_BITSIZE (nmode);
3467 nbitpos = lbitpos & ~ (nbitsize - 1);
3469 if (nbitsize == lbitsize)
3472 if (BYTES_BIG_ENDIAN)
3473 lbitpos = nbitsize - lbitsize - lbitpos;
3475 /* Make the mask to be used against the extracted field. */
3476 mask = build_int_cst_type (unsigned_type, -1);
3477 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3478 mask = const_binop (RSHIFT_EXPR, mask,
3479 size_int (nbitsize - lbitsize - lbitpos));
3482 /* If not comparing with constant, just rework the comparison
3484 return fold_build2_loc (loc, code, compare_type,
3485 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3486 make_bit_field_ref (loc, linner,
3491 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3492 make_bit_field_ref (loc, rinner,
3498 /* Otherwise, we are handling the constant case. See if the constant is too
3499 big for the field. Warn and return a tree for 0 (false) if so. We do
3500 this not only for its own sake, but to avoid having to test for this
3501 error case below. If we didn't, we might generate wrong code.
3503 For unsigned fields, the constant shifted right by the field length should
3504 be all zero. For signed fields, the high-order bits should agree with
3509 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3510 fold_convert_loc (loc,
3511 unsigned_type, rhs),
3512 size_int (lbitsize))))
3514 warning (0, "comparison is always %d due to width of bit-field",
3516 return constant_boolean_node (code == NE_EXPR, compare_type);
3521 tree tem = const_binop (RSHIFT_EXPR,
3522 fold_convert_loc (loc, signed_type, rhs),
3523 size_int (lbitsize - 1));
3524 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3526 warning (0, "comparison is always %d due to width of bit-field",
3528 return constant_boolean_node (code == NE_EXPR, compare_type);
3532 /* Single-bit compares should always be against zero. */
3533 if (lbitsize == 1 && ! integer_zerop (rhs))
3535 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3536 rhs = build_int_cst (type, 0);
3539 /* Make a new bitfield reference, shift the constant over the
3540 appropriate number of bits and mask it with the computed mask
3541 (in case this was a signed field). If we changed it, make a new one. */
3542 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3545 TREE_SIDE_EFFECTS (lhs) = 1;
3546 TREE_THIS_VOLATILE (lhs) = 1;
3549 rhs = const_binop (BIT_AND_EXPR,
3550 const_binop (LSHIFT_EXPR,
3551 fold_convert_loc (loc, unsigned_type, rhs),
3552 size_int (lbitpos)),
3555 lhs = build2_loc (loc, code, compare_type,
3556 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
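/* Illustrative example (a sketch, using a hypothetical bit-field F of
   width 3 at bit offset 2 within a 32-bit word W): the comparison
   s.f == 3 is rewritten into roughly

     (W & (7 << 2)) == (3 << 2)

   i.e. a mask-and-compare on the containing word, avoiding the shift
   implicit in a bit-field extraction.  */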
3560 /* Subroutine for fold_truth_andor_1: decode a field reference.
3562 If EXP is a comparison reference, we return the innermost reference.
3564 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3565 set to the starting bit number.
3567 If the innermost field can be completely contained in a mode-sized
3568 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3570 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3571 otherwise it is not changed.
3573 *PUNSIGNEDP is set to the signedness of the field.
3575 *PMASK is set to the mask used. This is either contained in a
3576 BIT_AND_EXPR or derived from the width of the field.
3578 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3580 Return 0 if this is not a component reference or is one that we can't
3581 do anything with. */
3584 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3585 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3586 int *punsignedp, int *pvolatilep,
3587 tree *pmask, tree *pand_mask)
3589 tree outer_type = 0;
3591 tree mask, inner, offset;
3593 unsigned int precision;
3595 /* All the optimizations using this function assume integer fields.
3596 There are problems with FP fields since the type_for_size call
3597 below can fail for, e.g., XFmode. */
3598 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3601 /* We are interested in the bare arrangement of bits, so strip everything
3602 that doesn't affect the machine mode. However, record the type of the
3603 outermost expression if it may matter below. */
3604 if (CONVERT_EXPR_P (exp)
3605 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3606 outer_type = TREE_TYPE (exp);
3609 if (TREE_CODE (exp) == BIT_AND_EXPR)
3611 and_mask = TREE_OPERAND (exp, 1);
3612 exp = TREE_OPERAND (exp, 0);
3613 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3614 if (TREE_CODE (and_mask) != INTEGER_CST)
3618 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3619 punsignedp, pvolatilep, false);
3620 if ((inner == exp && and_mask == 0)
3621 || *pbitsize < 0 || offset != 0
3622 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3625 /* If the number of bits in the reference is the same as the bitsize of
3626 the outer type, then the outer type gives the signedness. Otherwise
3627 (in case of a small bitfield) the signedness is unchanged. */
3628 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3629 *punsignedp = TYPE_UNSIGNED (outer_type);
3631 /* Compute the mask to access the bitfield. */
3632 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3633 precision = TYPE_PRECISION (unsigned_type);
3635 mask = build_int_cst_type (unsigned_type, -1);
3637 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3638 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3640 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3642 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3643 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3646 *pand_mask = and_mask;
3650 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3654 all_ones_mask_p (const_tree mask, int size)
3656 tree type = TREE_TYPE (mask);
3657 unsigned int precision = TYPE_PRECISION (type);
3660 tmask = build_int_cst_type (signed_type_for (type), -1);
3663 tree_int_cst_equal (mask,
3664 const_binop (RSHIFT_EXPR,
3665 const_binop (LSHIFT_EXPR, tmask,
3666 size_int (precision - size)),
3667 size_int (precision - size)));
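/* Illustrative example (a sketch): for an unsigned 32-bit MASK,
   all_ones_mask_p (mask, 8) is nonzero exactly when MASK is 0xff,
   i.e. the low 8 bits set and all higher bits clear.  */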
3670 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3671 represents the sign bit of EXP's type. If EXP represents a sign
3672 or zero extension, also test VAL against the unextended type.
3673 The return value is the (sub)expression whose sign bit is VAL,
3674 or NULL_TREE otherwise. */
3677 sign_bit_p (tree exp, const_tree val)
3679 unsigned HOST_WIDE_INT mask_lo, lo;
3680 HOST_WIDE_INT mask_hi, hi;
3684 /* Tree EXP must have an integral type. */
3685 t = TREE_TYPE (exp);
3686 if (! INTEGRAL_TYPE_P (t))
3689 /* Tree VAL must be an integer constant. */
3690 if (TREE_CODE (val) != INTEGER_CST
3691 || TREE_OVERFLOW (val))
3694 width = TYPE_PRECISION (t);
3695 if (width > HOST_BITS_PER_WIDE_INT)
3697 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3700 mask_hi = ((unsigned HOST_WIDE_INT) -1
3701 >> (HOST_BITS_PER_DOUBLE_INT - width));
3707 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3710 mask_lo = ((unsigned HOST_WIDE_INT) -1
3711 >> (HOST_BITS_PER_WIDE_INT - width));
3714 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3715 treat VAL as if it were unsigned. */
3716 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3717 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3720 /* Handle extension from a narrower type. */
3721 if (TREE_CODE (exp) == NOP_EXPR
3722 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3723 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3728 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3729 to be evaluated unconditionally. */
3732 simple_operand_p (const_tree exp)
3734 /* Strip any conversions that don't change the machine mode. */
3737 return (CONSTANT_CLASS_P (exp)
3738 || TREE_CODE (exp) == SSA_NAME
3740 && ! TREE_ADDRESSABLE (exp)
3741 && ! TREE_THIS_VOLATILE (exp)
3742 && ! DECL_NONLOCAL (exp)
3743 /* Don't regard global variables as simple. They may be
3744 allocated in ways unknown to the compiler (shared memory,
3745 #pragma weak, etc). */
3746 && ! TREE_PUBLIC (exp)
3747 && ! DECL_EXTERNAL (exp)
3748 /* Loading a static variable is unduly expensive, but global
3749 registers aren't expensive. */
3750 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3753 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3754 to be evaluated unconditionally.
3755 In addition to simple_operand_p, we assume that comparisons, conversions,
3756 and logic-not operations are simple, if their operands are simple, too. */
3759 simple_operand_p_2 (tree exp)
3761 enum tree_code code;
3763 if (TREE_SIDE_EFFECTS (exp)
3764 || tree_could_trap_p (exp))
3767 while (CONVERT_EXPR_P (exp))
3768 exp = TREE_OPERAND (exp, 0);
3770 code = TREE_CODE (exp);
3772 if (TREE_CODE_CLASS (code) == tcc_comparison)
3773 return (simple_operand_p (TREE_OPERAND (exp, 0))
3774 && simple_operand_p (TREE_OPERAND (exp, 1)));
3776 if (code == TRUTH_NOT_EXPR)
3777 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3779 return simple_operand_p (exp);
3783 /* The following functions are subroutines to fold_range_test and allow it to
3784 try to change a logical combination of comparisons into a range test.
3787 X == 2 || X == 3 || X == 4 || X == 5
3791 (unsigned) (X - 2) <= 3
3793 We describe each set of comparisons as being either inside or outside
3794 a range, using a variable named like IN_P, and then describe the
3795 range with a lower and upper bound. If one of the bounds is omitted,
3796 it represents either the highest or lowest value of the type.
3798 In the comments below, we represent a range by two numbers in brackets
3799 preceded by a "+" to designate being inside that range, or a "-" to
3800 designate being outside that range, so the condition can be inverted by
3801 flipping the prefix. An omitted bound is represented by a "-". For
3802 example, "- [-, 10]" means being outside the range starting at the lowest
3803 possible value and ending at 10, in other words, being greater than 10.
3804 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3807 We set up things so that the missing bounds are handled in a consistent
3808 manner so neither a missing bound nor "true" and "false" need to be
3809 handled using a special case. */
3811 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3812 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3813 and UPPER1_P are nonzero if the respective argument is an upper bound
3814 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3815 must be specified for a comparison. ARG1 will be converted to ARG0's
3816 type if both are specified. */
3819 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3820 tree arg1, int upper1_p)
3826 /* If neither arg represents infinity, do the normal operation.
3827 Else, if not a comparison, return infinity. Else handle the special
3828 comparison rules. Note that most of the cases below won't occur, but
3829 are handled for consistency. */
3831 if (arg0 != 0 && arg1 != 0)
3833 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3834 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3836 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3839 if (TREE_CODE_CLASS (code) != tcc_comparison)
3842 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3843 for neither. In real maths, we cannot assume open-ended ranges are
3844 the same. But, this is computer arithmetic, where numbers are finite.
3845 We can therefore make the transformation of any unbounded range with
3846 the value Z, Z being greater than any representable number. This permits
3847 us to treat unbounded ranges as equal. */
3848 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3849 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3853 result = sgn0 == sgn1;
3856 result = sgn0 != sgn1;
3859 result = sgn0 < sgn1;
3862 result = sgn0 <= sgn1;
3865 result = sgn0 > sgn1;
3868 result = sgn0 >= sgn1;
3874 return constant_boolean_node (result, type);
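/* Illustrative example (a sketch): a missing bound stands for an
   infinity of the appropriate sign, so comparing a missing upper bound
   (ARG0 == 0, UPPER0_P nonzero) against any finite ARG1 gives

     range_binop (GT_EXPR, type, 0, 1, val, 1)  ->  true

   because SGN0 == 1 (upper infinity) exceeds SGN1 == 0 (finite).  */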
3877 /* Helper routine for make_range. Perform one step for it, return
3878 new expression if the loop should continue or NULL_TREE if it should
3882 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3883 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3884 bool *strict_overflow_p)
3886 tree arg0_type = TREE_TYPE (arg0);
3887 tree n_low, n_high, low = *p_low, high = *p_high;
3888 int in_p = *p_in_p, n_in_p;
3892 case TRUTH_NOT_EXPR:
3893 /* We can only do something if the range is testing for zero. */
3894 if (low == NULL_TREE || high == NULL_TREE
3895 || ! integer_zerop (low) || ! integer_zerop (high))
3900 case EQ_EXPR: case NE_EXPR:
3901 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3902 /* We can only do something if the range is testing for zero
3903 and if the second operand is an integer constant. Note that
3904 saying something is "in" the range we make is done by
3905 complementing IN_P, since it will be set in the initial case of
3906 being not equal to zero; "out" is leaving it alone. */
3907 if (low == NULL_TREE || high == NULL_TREE
3908 || ! integer_zerop (low) || ! integer_zerop (high)
3909 || TREE_CODE (arg1) != INTEGER_CST)
3914 case NE_EXPR: /* - [c, c] */
3917 case EQ_EXPR: /* + [c, c] */
3918 in_p = ! in_p, low = high = arg1;
3920 case GT_EXPR: /* - [-, c] */
3921 low = 0, high = arg1;
3923 case GE_EXPR: /* + [c, -] */
3924 in_p = ! in_p, low = arg1, high = 0;
3926 case LT_EXPR: /* - [c, -] */
3927 low = arg1, high = 0;
3929 case LE_EXPR: /* + [-, c] */
3930 in_p = ! in_p, low = 0, high = arg1;
3936 /* If this is an unsigned comparison, we also know that EXP is
3937 greater than or equal to zero. We base the range tests we make
3938 on that fact, so we record it here so we can parse existing
3939 range tests. We test arg0_type since often the return type
3940 of, e.g. EQ_EXPR, is boolean. */
3941 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3943 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3945 build_int_cst (arg0_type, 0),
3949 in_p = n_in_p, low = n_low, high = n_high;
3951 /* If the high bound is missing, but we have a nonzero low
3952 bound, reverse the range so it goes from zero to the low bound
3954 if (high == 0 && low && ! integer_zerop (low))
3957 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3958 integer_one_node, 0);
3959 low = build_int_cst (arg0_type, 0);
3969 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3970 low and high are non-NULL, then normalize will DTRT. */
3971 if (!TYPE_UNSIGNED (arg0_type)
3972 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3974 if (low == NULL_TREE)
3975 low = TYPE_MIN_VALUE (arg0_type);
3976 if (high == NULL_TREE)
3977 high = TYPE_MAX_VALUE (arg0_type);
3980 /* (-x) IN [a,b] -> x in [-b, -a] */
3981 n_low = range_binop (MINUS_EXPR, exp_type,
3982 build_int_cst (exp_type, 0),
3984 n_high = range_binop (MINUS_EXPR, exp_type,
3985 build_int_cst (exp_type, 0),
3987 if (n_high != 0 && TREE_OVERFLOW (n_high))
3993 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3994 build_int_cst (exp_type, 1));
3998 if (TREE_CODE (arg1) != INTEGER_CST)
4001 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4002 move a constant to the other side. */
4003 if (!TYPE_UNSIGNED (arg0_type)
4004 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4007 /* If EXP is signed, any overflow in the computation is undefined,
4008 so we don't worry about it so long as our computations on
4009 the bounds don't overflow. For unsigned, overflow is defined
4010 and this is exactly the right thing. */
4011 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4012 arg0_type, low, 0, arg1, 0);
4013 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4014 arg0_type, high, 1, arg1, 0);
4015 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4016 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4019 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4020 *strict_overflow_p = true;
4023 /* Check for an unsigned range which has wrapped around the maximum
4024 value thus making n_high < n_low, and normalize it. */
4025 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4027 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4028 integer_one_node, 0);
4029 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4030 integer_one_node, 0);
4032 /* If the range is of the form +/- [ x+1, x ], we won't
4033 be able to normalize it. But then, it represents the
4034 whole range or the empty set, so make it
4036 if (tree_int_cst_equal (n_low, low)
4037 && tree_int_cst_equal (n_high, high))
4043 low = n_low, high = n_high;
4051 case NON_LVALUE_EXPR:
4052 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4055 if (! INTEGRAL_TYPE_P (arg0_type)
4056 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4057 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4060 n_low = low, n_high = high;
4063 n_low = fold_convert_loc (loc, arg0_type, n_low);
4066 n_high = fold_convert_loc (loc, arg0_type, n_high);
4068 /* If we're converting arg0 from an unsigned type, to exp,
4069 a signed type, we will be doing the comparison as unsigned.
4070 The tests above have already verified that LOW and HIGH
4073 So we have to ensure that we will handle large unsigned
4074 values the same way that the current signed bounds treat
4077 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4081 /* For fixed-point modes, we need to pass the saturating flag
4082 as the 2nd parameter. */
4083 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4085 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4086 TYPE_SATURATING (arg0_type));
4089 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4091 /* A range without an upper bound is, naturally, unbounded.
4092 Since convert would have cropped a very large value, use
4093 the max value for the destination type. */
4095 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4096 : TYPE_MAX_VALUE (arg0_type);
4098 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4099 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4100 fold_convert_loc (loc, arg0_type,
4102 build_int_cst (arg0_type, 1));
4104 /* If the low bound is specified, "and" the range with the
4105 range for which the original unsigned value will be
4109 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4110 1, fold_convert_loc (loc, arg0_type,
4115 in_p = (n_in_p == in_p);
4119 /* Otherwise, "or" the range with the range of the input
4120 that will be interpreted as negative. */
4121 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4122 1, fold_convert_loc (loc, arg0_type,
4127 in_p = (in_p != n_in_p);
4141 /* Given EXP, a logical expression, set the range it is testing into
4142 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4143 actually being tested. *PLOW and *PHIGH will be made of the same
4144 type as the returned expression. If EXP is not a comparison, we
4145 will most likely not be returning a useful value and range. Set
4146 *STRICT_OVERFLOW_P to true if the return value is only valid
4147 because signed overflow is undefined; otherwise, do not change
4148 *STRICT_OVERFLOW_P. */
4151 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4152 bool *strict_overflow_p)
4154 enum tree_code code;
4155 tree arg0, arg1 = NULL_TREE;
4156 tree exp_type, nexp;
4159 location_t loc = EXPR_LOCATION (exp);
4161 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4162 and see if we can refine the range. Some of the cases below may not
4163 happen, but it doesn't seem worth worrying about this. We "continue"
4164 the outer loop when we've changed something; otherwise we "break"
4165 the switch, which will "break" the while. */
4168 low = high = build_int_cst (TREE_TYPE (exp), 0);
4172 code = TREE_CODE (exp);
4173 exp_type = TREE_TYPE (exp);
4176 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4178 if (TREE_OPERAND_LENGTH (exp) > 0)
4179 arg0 = TREE_OPERAND (exp, 0);
4180 if (TREE_CODE_CLASS (code) == tcc_binary
4181 || TREE_CODE_CLASS (code) == tcc_comparison
4182 || (TREE_CODE_CLASS (code) == tcc_expression
4183 && TREE_OPERAND_LENGTH (exp) > 1))
4184 arg1 = TREE_OPERAND (exp, 1);
4186 if (arg0 == NULL_TREE)
4189 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4190 &high, &in_p, strict_overflow_p);
4191 if (nexp == NULL_TREE)
4196 /* If EXP is a constant, we can evaluate whether this is true or false. */
4197 if (TREE_CODE (exp) == INTEGER_CST)
4199 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4201 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4207 *pin_p = in_p, *plow = low, *phigh = high;
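/* Illustrative example (a sketch): for EXP = (x > 10), make_range
   returns x with *PIN_P == 0, *PLOW == NULL_TREE and *PHIGH == 10;
   being greater than 10 is being outside the range written
   "- [-, 10]" in the notation introduced above.  */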
4211 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4212 type, TYPE, return an expression to test if EXP is in (or out of, depending
4213 on IN_P) the range. Return 0 if the test couldn't be created. */
4216 build_range_check (location_t loc, tree type, tree exp, int in_p,
4217 tree low, tree high)
4219 tree etype = TREE_TYPE (exp), value;
4221 #ifdef HAVE_canonicalize_funcptr_for_compare
4222 /* Disable this optimization for function pointer expressions
4223 on targets that require function pointer canonicalization. */
4224 if (HAVE_canonicalize_funcptr_for_compare
4225 && TREE_CODE (etype) == POINTER_TYPE
4226 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4232 value = build_range_check (loc, type, exp, 1, low, high);
4234 return invert_truthvalue_loc (loc, value);
4239 if (low == 0 && high == 0)
4240 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4243 return fold_build2_loc (loc, LE_EXPR, type, exp,
4244 fold_convert_loc (loc, etype, high));
4247 return fold_build2_loc (loc, GE_EXPR, type, exp,
4248 fold_convert_loc (loc, etype, low));
4250 if (operand_equal_p (low, high, 0))
4251 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4252 fold_convert_loc (loc, etype, low));
4254 if (integer_zerop (low))
4256 if (! TYPE_UNSIGNED (etype))
4258 etype = unsigned_type_for (etype);
4259 high = fold_convert_loc (loc, etype, high);
4260 exp = fold_convert_loc (loc, etype, exp);
4262 return build_range_check (loc, type, exp, 1, 0, high);
4265 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4266 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4268 unsigned HOST_WIDE_INT lo;
4272 prec = TYPE_PRECISION (etype);
4273 if (prec <= HOST_BITS_PER_WIDE_INT)
4276 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4280 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4281 lo = (unsigned HOST_WIDE_INT) -1;
4284 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4286 if (TYPE_UNSIGNED (etype))
4288 tree signed_etype = signed_type_for (etype);
4289 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4291 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4293 etype = signed_etype;
4294 exp = fold_convert_loc (loc, etype, exp);
4296 return fold_build2_loc (loc, GT_EXPR, type, exp,
4297 build_int_cst (etype, 0));
4301 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4302 This requires wrap-around arithmetic for the type of the expression.
4303 First make sure that arithmetic in this type is valid, then make sure
4304 that it wraps around. */
4305 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4306 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4307 TYPE_UNSIGNED (etype));
4309 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4311 tree utype, minv, maxv;
4313 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4314 for the type in question, as we rely on this here. */
4315 utype = unsigned_type_for (etype);
4316 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4317 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4318 integer_one_node, 1);
4319 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4321 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4328 high = fold_convert_loc (loc, etype, high);
4329 low = fold_convert_loc (loc, etype, low);
4330 exp = fold_convert_loc (loc, etype, exp);
4332 value = const_binop (MINUS_EXPR, high, low);
4335 if (POINTER_TYPE_P (etype))
4337 if (value != 0 && !TREE_OVERFLOW (value))
4339 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4340 return build_range_check (loc, type,
4341 fold_build_pointer_plus_loc (loc, exp, low),
4342 1, build_int_cst (etype, 0), value);
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 return build_range_check (loc, type,
4349 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4350 1, build_int_cst (etype, 0), value);
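/* For illustration only (not part of the original sources): a minimal
   standalone sketch, assuming 32-bit unsigned arithmetic and low <= high,
   of the wrap-around trick build_range_check relies on when it rewrites
   LOW <= EXP && EXP <= HIGH as a single unsigned comparison.  */
static int
in_range_u32_sketch (unsigned int x, unsigned int low, unsigned int high)
{
  /* If x < low, x - low wraps around to a huge value and the test
     fails, so one comparison implements low <= x && x <= high.  */
  return x - low <= high - low;
}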
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4358 range_predecessor (tree val)
4360 tree type = TREE_TYPE (val);
4362 if (INTEGRAL_TYPE_P (type)
4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4369 /* Return the successor of VAL in its type, handling the infinite case. */
4372 range_successor (tree val)
4374 tree type = TREE_TYPE (val);
4376 if (INTEGRAL_TYPE_P (type)
4377 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4380 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4383 /* Given two ranges, see if we can merge them into one. Return 1 if we
4384 can, 0 if we can't. Set the output range into the specified parameters. */
4387 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4388 tree high0, int in1_p, tree low1, tree high1)
4396 int lowequal = ((low0 == 0 && low1 == 0)
4397 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4398 low0, 0, low1, 0)));
4399 int highequal = ((high0 == 0 && high1 == 0)
4400 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4401 high0, 1, high1, 1)));
4403 /* Make range 0 be the range that starts first, or ends last if they
4404 start at the same value. Swap them if it isn't. */
4405 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4409 high1, 1, high0, 1))))
4411 temp = in0_p, in0_p = in1_p, in1_p = temp;
4412 tem = low0, low0 = low1, low1 = tem;
4413 tem = high0, high0 = high1, high1 = tem;
4416 /* Now flag two cases, whether the ranges are disjoint or whether the
4417 second range is totally subsumed in the first. Note that the tests
4418 below are simplified by the ones above. */
4419 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4420 high0, 1, low1, 0));
4421 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4422 high1, 1, high0, 1));
4424 /* We now have four cases, depending on whether we are including or
4425 excluding the two ranges. */
4428 /* If they don't overlap, the result is false. If the second range
4429 is a subset it is the result. Otherwise, the range is from the start
4430 of the second to the end of the first. */
4432 in_p = 0, low = high = 0;
4434 in_p = 1, low = low1, high = high1;
4436 in_p = 1, low = low1, high = high0;
4439 else if (in0_p && ! in1_p)
4441 /* If they don't overlap, the result is the first range. If they are
4442 equal, the result is false. If the second range is a subset of the
4443 first, and the ranges begin at the same place, we go from just after
4444 the end of the second range to the end of the first. If the second
4445 range is not a subset of the first, or if it is a subset and both
4446 ranges end at the same place, the range starts at the start of the
4447 first range and ends just before the second range.
4448 Otherwise, we can't describe this as a single range. */
4450 in_p = 1, low = low0, high = high0;
4451 else if (lowequal && highequal)
4452 in_p = 0, low = high = 0;
4453 else if (subset && lowequal)
4455 low = range_successor (high1);
4460 /* We are in the weird situation where high0 > high1 but
4461 high1 has no successor. Punt. */
4465 else if (! subset || highequal)
4468 high = range_predecessor (low1);
4472 /* low0 < low1 but low1 has no predecessor. Punt. */
4480 else if (! in0_p && in1_p)
4482 /* If they don't overlap, the result is the second range. If the second
4483 is a subset of the first, the result is false. Otherwise,
4484 the range starts just after the first range and ends at the
4485 end of the second. */
4487 in_p = 1, low = low1, high = high1;
4488 else if (subset || highequal)
4489 in_p = 0, low = high = 0;
4492 low = range_successor (high0);
4497 /* high1 > high0 but high0 has no successor. Punt. */
4505 /* The case where we are excluding both ranges. Here the complex case
4506 is if they don't overlap. In that case, the only time we have a
4507 range is if they are adjacent. If the second is a subset of the
4508 first, the result is the first. Otherwise, the range to exclude
4509 starts at the beginning of the first range and ends at the end of the
4510 second. */
4513 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4514 range_successor (high0),
4516 in_p = 0, low = low0, high = high1;
4519 /* Canonicalize - [min, x] into - [-, x]. */
4520 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4521 switch (TREE_CODE (TREE_TYPE (low0)))
4524 if (TYPE_PRECISION (TREE_TYPE (low0))
4525 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4529 if (tree_int_cst_equal (low0,
4530 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4534 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4535 && integer_zerop (low0))
4542 /* Canonicalize - [x, max] into - [x, -]. */
4543 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4544 switch (TREE_CODE (TREE_TYPE (high1)))
4547 if (TYPE_PRECISION (TREE_TYPE (high1))
4548 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4552 if (tree_int_cst_equal (high1,
4553 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4557 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4558 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4560 integer_one_node, 1)))
4567 /* The ranges might also be adjacent between the maximum and
4568 minimum values of the given type. For
4569 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4570 return + [x + 1, y - 1]. */
4571 if (low0 == 0 && high1 == 0)
4573 low = range_successor (high0);
4574 high = range_predecessor (low1);
4575 if (low == 0 || high == 0)
4585 in_p = 0, low = low0, high = high0;
4587 in_p = 0, low = low0, high = high1;
4590 *pin_p = in_p, *plow = low, *phigh = high;
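/* For illustration only (hypothetical helper on plain ints): the simplest
   merge_ranges case, ANDing two "in" ranges.  +[0,9] with +[5,20] gives
   +[5,9]; +[0,4] with +[10,20] gives no range (always false).  */
static int
merge_in_in_sketch (int low0, int high0, int low1, int high1,
                    int *plow, int *phigh)
{
  int low = low0 > low1 ? low0 : low1;       /* later start wins */
  int high = high0 < high1 ? high0 : high1;  /* earlier end wins */
  if (low > high)
    return 0;        /* disjoint: the combined test is always false */
  *plow = low, *phigh = high;
  return 1;
}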
4595 /* Subroutine of fold, looking inside expressions of the form
4596 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4597 of the COND_EXPR. This function is being used also to optimize
4598 A op B ? C : A, by reversing the comparison first.
4600 Return a folded expression whose code is not a COND_EXPR
4601 anymore, or NULL_TREE if no folding opportunity is found. */
4604 fold_cond_expr_with_comparison (location_t loc, tree type,
4605 tree arg0, tree arg1, tree arg2)
4607 enum tree_code comp_code = TREE_CODE (arg0);
4608 tree arg00 = TREE_OPERAND (arg0, 0);
4609 tree arg01 = TREE_OPERAND (arg0, 1);
4610 tree arg1_type = TREE_TYPE (arg1);
4616 /* If we have A op 0 ? A : -A, consider applying the following
4617 transformations:
4619 A == 0? A : -A same as -A
4620 A != 0? A : -A same as A
4621 A >= 0? A : -A same as abs (A)
4622 A > 0? A : -A same as abs (A)
4623 A <= 0? A : -A same as -abs (A)
4624 A < 0? A : -A same as -abs (A)
4626 None of these transformations work for modes with signed
4627 zeros. If A is +/-0, the first two transformations will
4628 change the sign of the result (from +0 to -0, or vice
4629 versa). The last four will fix the sign of the result,
4630 even though the original expressions could be positive or
4631 negative, depending on the sign of A.
4633 Note that all these transformations are correct if A is
4634 NaN, since the two alternatives (A and -A) are also NaNs. */
4635 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4636 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4637 ? real_zerop (arg01)
4638 : integer_zerop (arg01))
4639 && ((TREE_CODE (arg2) == NEGATE_EXPR
4640 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4641 /* In the case that A is of the form X-Y, '-A' (arg2) may
4642 have already been folded to Y-X, check for that. */
4643 || (TREE_CODE (arg1) == MINUS_EXPR
4644 && TREE_CODE (arg2) == MINUS_EXPR
4645 && operand_equal_p (TREE_OPERAND (arg1, 0),
4646 TREE_OPERAND (arg2, 1), 0)
4647 && operand_equal_p (TREE_OPERAND (arg1, 1),
4648 TREE_OPERAND (arg2, 0), 0))))
4653 tem = fold_convert_loc (loc, arg1_type, arg1);
4654 return pedantic_non_lvalue_loc (loc,
4655 fold_convert_loc (loc, type,
4656 negate_expr (tem)));
4659 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4662 if (flag_trapping_math)
4667 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4668 arg1 = fold_convert_loc (loc, signed_type_for
4669 (TREE_TYPE (arg1)), arg1);
4670 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4674 if (flag_trapping_math)
4678 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4679 arg1 = fold_convert_loc (loc, signed_type_for
4680 (TREE_TYPE (arg1)), arg1);
4681 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4682 return negate_expr (fold_convert_loc (loc, type, tem));
4684 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4688 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4689 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4690 both transformations are correct when A is NaN: A != 0
4691 is then true, and A == 0 is false. */
4693 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4694 && integer_zerop (arg01) && integer_zerop (arg2))
4696 if (comp_code == NE_EXPR)
4697 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4698 else if (comp_code == EQ_EXPR)
4699 return build_int_cst (type, 0);
4702 /* Try some transformations of A op B ? A : B.
4704 A == B? A : B same as B
4705 A != B? A : B same as A
4706 A >= B? A : B same as max (A, B)
4707 A > B? A : B same as max (B, A)
4708 A <= B? A : B same as min (A, B)
4709 A < B? A : B same as min (B, A)
4711 As above, these transformations don't work in the presence
4712 of signed zeros. For example, if A and B are zeros of
4713 opposite sign, the first two transformations will change
4714 the sign of the result. In the last four, the original
4715 expressions give different results for (A=+0, B=-0) and
4716 (A=-0, B=+0), but the transformed expressions do not.
4718 The first two transformations are correct if either A or B
4719 is a NaN. In the first transformation, the condition will
4720 be false, and B will indeed be chosen. In the case of the
4721 second transformation, the condition A != B will be true,
4722 and A will be chosen.
4724 The conversions to max() and min() are not correct if B is
4725 a number and A is not. The conditions in the original
4726 expressions will be false, so all four give B. The min()
4727 and max() versions would give a NaN instead. */
4728 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4729 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4730 /* Avoid these transformations if the COND_EXPR may be used
4731 as an lvalue in the C++ front-end. PR c++/19199. */
4732 && (in_gimple_form
4733 || (strcmp (lang_hooks.name, "GNU C++") != 0
4734 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4735 || ! maybe_lvalue_p (arg1)
4736 || ! maybe_lvalue_p (arg2)))
4738 tree comp_op0 = arg00;
4739 tree comp_op1 = arg01;
4740 tree comp_type = TREE_TYPE (comp_op0);
4742 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4743 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4753 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4755 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 /* In C++ a ?: expression can be an lvalue, so put the
4761 operand which will be used if they are equal first
4762 so that we can convert this back to the
4763 corresponding COND_EXPR. */
4764 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4766 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4767 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4768 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4769 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4770 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4771 comp_op1, comp_op0);
4772 return pedantic_non_lvalue_loc (loc,
4773 fold_convert_loc (loc, type, tem));
4780 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4782 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4783 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4784 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4785 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4786 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4787 comp_op1, comp_op0);
4788 return pedantic_non_lvalue_loc (loc,
4789 fold_convert_loc (loc, type, tem));
4793 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4794 return pedantic_non_lvalue_loc (loc,
4795 fold_convert_loc (loc, type, arg2));
4798 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4799 return pedantic_non_lvalue_loc (loc,
4800 fold_convert_loc (loc, type, arg1));
4803 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4808 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4809 we might still be able to simplify this. For example,
4810 if C1 is one less or one more than C2, this might have started
4811 out as a MIN or MAX and been transformed by this function.
4812 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4814 if (INTEGRAL_TYPE_P (type)
4815 && TREE_CODE (arg01) == INTEGER_CST
4816 && TREE_CODE (arg2) == INTEGER_CST)
4820 if (TREE_CODE (arg1) == INTEGER_CST)
4822 /* We can replace A with C1 in this case. */
4823 arg1 = fold_convert_loc (loc, type, arg01);
4824 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4827 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4828 MIN_EXPR, to preserve the signedness of the comparison. */
4829 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4831 && operand_equal_p (arg01,
4832 const_binop (PLUS_EXPR, arg2,
4833 build_int_cst (type, 1)),
4836 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4837 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 return pedantic_non_lvalue_loc (loc,
4840 fold_convert_loc (loc, type, tem));
4845 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4846 as above. */
4847 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4849 && operand_equal_p (arg01,
4850 const_binop (MINUS_EXPR, arg2,
4851 build_int_cst (type, 1)),
4854 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4855 fold_convert_loc (loc, TREE_TYPE (arg00),
4857 return pedantic_non_lvalue_loc (loc,
4858 fold_convert_loc (loc, type, tem));
4863 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4864 MAX_EXPR, to preserve the signedness of the comparison. */
4865 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4867 && operand_equal_p (arg01,
4868 const_binop (MINUS_EXPR, arg2,
4869 build_int_cst (type, 1)),
4872 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4873 fold_convert_loc (loc, TREE_TYPE (arg00),
4875 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4880 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4881 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4883 && operand_equal_p (arg01,
4884 const_binop (PLUS_EXPR, arg2,
4885 build_int_cst (type, 1)),
4888 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4889 fold_convert_loc (loc, TREE_TYPE (arg00),
4891 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
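/* For illustration only (hypothetical user code): the source-level shapes
   the transformations above target.  The first folds to ABS_EXPR <a>; the
   second folds to MIN_EXPR <x, y> when NaNs need not be honored (always
   the case for integers).  */
static int
abs_via_cond_sketch (int a)
{
  return a >= 0 ? a : -a;
}

static int
min_via_cond_sketch (int x, int y)
{
  return x < y ? x : y;
}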
4905 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4906 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4907 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4908 false) >= 2)
4909 #endif
4911 /* EXP is some logical combination of boolean tests. See if we can
4912 merge it into some range test. Return the new tree if so. */
4915 fold_range_test (location_t loc, enum tree_code code, tree type,
4918 int or_op = (code == TRUTH_ORIF_EXPR
4919 || code == TRUTH_OR_EXPR);
4920 int in0_p, in1_p, in_p;
4921 tree low0, low1, low, high0, high1, high;
4922 bool strict_overflow_p = false;
4923 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4924 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4926 const char * const warnmsg = G_("assuming signed overflow does not occur "
4927 "when simplifying range test");
4929 /* If this is an OR operation, invert both sides; we will invert
4930 again at the end. */
4931 if (or_op)
4932 in0_p = ! in0_p, in1_p = ! in1_p;
4934 /* If both expressions are the same, if we can merge the ranges, and we
4935 can build the range test, return it or it inverted. If one of the
4936 ranges is always true or always false, consider it to be the same
4937 expression as the other. */
4938 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4939 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4941 && 0 != (tem = (build_range_check (loc, type,
4943 : rhs != 0 ? rhs : integer_zero_node,
4946 if (strict_overflow_p)
4947 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4948 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4951 /* On machines where the branch cost is expensive, if this is a
4952 short-circuited branch and the underlying object on both sides
4953 is the same, make a non-short-circuit operation. */
4954 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4955 && lhs != 0 && rhs != 0
4956 && (code == TRUTH_ANDIF_EXPR
4957 || code == TRUTH_ORIF_EXPR)
4958 && operand_equal_p (lhs, rhs, 0))
4960 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4961 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4962 which cases we can't do this. */
4963 if (simple_operand_p (lhs))
4964 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4965 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4968 else if (!lang_hooks.decls.global_bindings_p ()
4969 && !CONTAINS_PLACEHOLDER_P (lhs))
4971 tree common = save_expr (lhs);
4973 if (0 != (lhs = build_range_check (loc, type, common,
4974 or_op ? ! in0_p : in0_p,
4976 && (0 != (rhs = build_range_check (loc, type, common,
4977 or_op ? ! in1_p : in1_p,
4980 if (strict_overflow_p)
4981 fold_overflow_warning (warnmsg,
4982 WARN_STRICT_OVERFLOW_COMPARISON);
4983 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4984 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
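/* For illustration only (hypothetical user code): the classic range test
   handled above.  The two comparisons merge into one range check,
   effectively (unsigned int) (ch - '0') <= 9.  */
static int
is_digit_sketch (int ch)
{
  return ch >= '0' && ch <= '9';
}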
4993 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4994 bit value. Arrange things so the extra bits will be set to zero if and
4995 only if C is sign-extended to its full width. If MASK is nonzero,
4996 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4999 unextend (tree c, int p, int unsignedp, tree mask)
5001 tree type = TREE_TYPE (c);
5002 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5005 if (p == modesize || unsignedp)
5008 /* We work by getting just the sign bit into the low-order bit, then
5009 into the high-order bit, then sign-extend. We then XOR that value
5010 with C. */
5011 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5012 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5014 /* We must use a signed type in order to get an arithmetic right shift.
5015 However, we must also avoid introducing accidental overflows, so that
5016 a subsequent call to integer_zerop will work. Hence we must
5017 do the type conversion here. At this point, the constant is either
5018 zero or one, and the conversion to a signed type can never overflow.
5019 We could get an overflow if this conversion is done anywhere else. */
5020 if (TYPE_UNSIGNED (type))
5021 temp = fold_convert (signed_type_for (type), temp);
5023 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5024 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5026 temp = const_binop (BIT_AND_EXPR, temp,
5027 fold_convert (TREE_TYPE (c), mask));
5028 /* If necessary, convert the type back to match the type of C. */
5029 if (TYPE_UNSIGNED (type))
5030 temp = fold_convert (type, temp);
5032 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
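/* For illustration only (not GCC code): a compact variant of the
   sign-extension that unextend performs on tree constants, here for a
   P-bit value held in an unsigned int, 0 < P <= 32; assumes the usual
   two's complement conversion when casting back to int.  */
static int
sign_extend_sketch (unsigned int v, int p)
{
  unsigned int sign = 1u << (p - 1);
  /* XORing the sign bit out and subtracting it back wraps negative
     P-bit values into their sign-extended int representation.  */
  return (int) ((v ^ sign) - sign);
}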
5035 /* For an expression that has the form
5036 (A && B) || ~B
5037 or
5038 (A || B) && ~B,
5039 we can drop one of the inner expressions and simplify to
5040 A || ~B
5041 or
5042 A && ~B.
5043 LOC is the location of the resulting expression. OP is the inner
5044 logical operation; the left-hand side in the examples above, while CMPOP
5045 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5046 removing a condition that guards another, as in
5047 (A != NULL && A->...) || A == NULL
5048 which we must not transform. If RHS_ONLY is true, only eliminate the
5049 right-most operand of the inner logical operation. */
5052 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5055 tree type = TREE_TYPE (cmpop);
5056 enum tree_code code = TREE_CODE (cmpop);
5057 enum tree_code truthop_code = TREE_CODE (op);
5058 tree lhs = TREE_OPERAND (op, 0);
5059 tree rhs = TREE_OPERAND (op, 1);
5060 tree orig_lhs = lhs, orig_rhs = rhs;
5061 enum tree_code rhs_code = TREE_CODE (rhs);
5062 enum tree_code lhs_code = TREE_CODE (lhs);
5063 enum tree_code inv_code;
5065 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5068 if (TREE_CODE_CLASS (code) != tcc_comparison)
5071 if (rhs_code == truthop_code)
5073 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5074 if (newrhs != NULL_TREE)
5077 rhs_code = TREE_CODE (rhs);
5080 if (lhs_code == truthop_code && !rhs_only)
5082 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5083 if (newlhs != NULL_TREE)
5086 lhs_code = TREE_CODE (lhs);
5090 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5091 if (inv_code == rhs_code
5092 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5093 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095 if (!rhs_only && inv_code == lhs_code
5096 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5097 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5099 if (rhs != orig_rhs || lhs != orig_lhs)
5100 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5105 /* Find ways of folding logical expressions of LHS and RHS:
5106 Try to merge two comparisons to the same innermost item.
5107 Look for range tests like "ch >= '0' && ch <= '9'".
5108 Look for combinations of simple terms on machines with expensive branches
5109 and evaluate the RHS unconditionally.
5111 For example, if we have p->a == 2 && p->b == 4 and we can make an
5112 object large enough to span both A and B, we can do this with a comparison
5113 against the object ANDed with a mask.
5115 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5116 operations to do this with one comparison.
5118 We check for both normal comparisons and the BIT_AND_EXPRs made by
5119 this function and the one above.
5121 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5122 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5124 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5125 two operands.
5127 We return the simplified tree or 0 if no optimization is possible. */
5130 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5133 /* If this is the "or" of two comparisons, we can do something if
5134 the comparisons are NE_EXPR. If this is the "and", we can do something
5135 if the comparisons are EQ_EXPR. I.e.,
5136 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5138 WANTED_CODE is this operation code. For single bit fields, we can
5139 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5140 comparison for one-bit fields. */
5142 enum tree_code wanted_code;
5143 enum tree_code lcode, rcode;
5144 tree ll_arg, lr_arg, rl_arg, rr_arg;
5145 tree ll_inner, lr_inner, rl_inner, rr_inner;
5146 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5147 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5148 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5149 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5150 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5151 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5152 enum machine_mode lnmode, rnmode;
5153 tree ll_mask, lr_mask, rl_mask, rr_mask;
5154 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5155 tree l_const, r_const;
5156 tree lntype, rntype, result;
5157 HOST_WIDE_INT first_bit, end_bit;
5160 /* Start by getting the comparison codes. Fail if anything is volatile.
5161 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5162 it were surrounded with a NE_EXPR. */
5164 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5167 lcode = TREE_CODE (lhs);
5168 rcode = TREE_CODE (rhs);
5170 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5172 lhs = build2 (NE_EXPR, truth_type, lhs,
5173 build_int_cst (TREE_TYPE (lhs), 0));
5177 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5179 rhs = build2 (NE_EXPR, truth_type, rhs,
5180 build_int_cst (TREE_TYPE (rhs), 0));
5184 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5185 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5188 ll_arg = TREE_OPERAND (lhs, 0);
5189 lr_arg = TREE_OPERAND (lhs, 1);
5190 rl_arg = TREE_OPERAND (rhs, 0);
5191 rr_arg = TREE_OPERAND (rhs, 1);
5193 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5194 if (simple_operand_p (ll_arg)
5195 && simple_operand_p (lr_arg))
5197 if (operand_equal_p (ll_arg, rl_arg, 0)
5198 && operand_equal_p (lr_arg, rr_arg, 0))
5200 result = combine_comparisons (loc, code, lcode, rcode,
5201 truth_type, ll_arg, lr_arg);
5205 else if (operand_equal_p (ll_arg, rr_arg, 0)
5206 && operand_equal_p (lr_arg, rl_arg, 0))
5208 result = combine_comparisons (loc, code, lcode,
5209 swap_tree_comparison (rcode),
5210 truth_type, ll_arg, lr_arg);
5216 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5217 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5219 /* If the RHS can be evaluated unconditionally and its operands are
5220 simple, it wins to evaluate the RHS unconditionally on machines
5221 with expensive branches. In this case, this isn't a comparison
5222 that can be merged. */
5224 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5226 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5227 && simple_operand_p (rl_arg)
5228 && simple_operand_p (rr_arg))
5230 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5231 if (code == TRUTH_OR_EXPR
5232 && lcode == NE_EXPR && integer_zerop (lr_arg)
5233 && rcode == NE_EXPR && integer_zerop (rr_arg)
5234 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5235 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5236 return build2_loc (loc, NE_EXPR, truth_type,
5237 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5239 build_int_cst (TREE_TYPE (ll_arg), 0));
5241 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5242 if (code == TRUTH_AND_EXPR
5243 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5244 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5245 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5246 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5247 return build2_loc (loc, EQ_EXPR, truth_type,
5248 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5250 build_int_cst (TREE_TYPE (ll_arg), 0));
5253 /* See if the comparisons can be merged. Then get all the parameters for
5254 each side. */
5256 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5257 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5261 ll_inner = decode_field_reference (loc, ll_arg,
5262 &ll_bitsize, &ll_bitpos, &ll_mode,
5263 &ll_unsignedp, &volatilep, &ll_mask,
5265 lr_inner = decode_field_reference (loc, lr_arg,
5266 &lr_bitsize, &lr_bitpos, &lr_mode,
5267 &lr_unsignedp, &volatilep, &lr_mask,
5269 rl_inner = decode_field_reference (loc, rl_arg,
5270 &rl_bitsize, &rl_bitpos, &rl_mode,
5271 &rl_unsignedp, &volatilep, &rl_mask,
5273 rr_inner = decode_field_reference (loc, rr_arg,
5274 &rr_bitsize, &rr_bitpos, &rr_mode,
5275 &rr_unsignedp, &volatilep, &rr_mask,
5278 /* It must be true that the inner operation on the lhs of each
5279 comparison must be the same if we are to be able to do anything.
5280 Then see if we have constants. If not, the same must be true for
5281 the rhs's. */
5282 if (volatilep || ll_inner == 0 || rl_inner == 0
5283 || ! operand_equal_p (ll_inner, rl_inner, 0))
5286 if (TREE_CODE (lr_arg) == INTEGER_CST
5287 && TREE_CODE (rr_arg) == INTEGER_CST)
5288 l_const = lr_arg, r_const = rr_arg;
5289 else if (lr_inner == 0 || rr_inner == 0
5290 || ! operand_equal_p (lr_inner, rr_inner, 0))
5293 l_const = r_const = 0;
5295 /* If either comparison code is not correct for our logical operation,
5296 fail. However, we can convert a one-bit comparison against zero into
5297 the opposite comparison against that bit being set in the field. */
5299 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5300 if (lcode != wanted_code)
5302 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5304 /* Make the left operand unsigned, since we are only interested
5305 in the value of one bit. Otherwise we are doing the wrong
5306 thing below. */
5314 /* This is analogous to the code for l_const above. */
5315 if (rcode != wanted_code)
5317 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 /* See if we can find a mode that contains both fields being compared on
5327 the left. If we can't, fail. Otherwise, update all constants and masks
5328 to be relative to a field of that size. */
5329 first_bit = MIN (ll_bitpos, rl_bitpos);
5330 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5331 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5332 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5334 if (lnmode == VOIDmode)
5337 lnbitsize = GET_MODE_BITSIZE (lnmode);
5338 lnbitpos = first_bit & ~ (lnbitsize - 1);
5339 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5340 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5342 if (BYTES_BIG_ENDIAN)
5344 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5345 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5348 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5349 size_int (xll_bitpos));
5350 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5351 size_int (xrl_bitpos));
5355 l_const = fold_convert_loc (loc, lntype, l_const);
5356 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5357 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5358 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5359 fold_build1_loc (loc, BIT_NOT_EXPR,
5362 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5364 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5369 r_const = fold_convert_loc (loc, lntype, r_const);
5370 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5371 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5372 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5373 fold_build1_loc (loc, BIT_NOT_EXPR,
5376 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5378 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5382 /* If the right sides are not constant, do the same for it. Also,
5383 disallow this optimization if a size or signedness mismatch occurs
5384 between the left and right sides. */
5387 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5388 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5389 /* Make sure the two fields on the right
5390 correspond to the left without being swapped. */
5391 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5394 first_bit = MIN (lr_bitpos, rr_bitpos);
5395 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5396 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5397 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5399 if (rnmode == VOIDmode)
5402 rnbitsize = GET_MODE_BITSIZE (rnmode);
5403 rnbitpos = first_bit & ~ (rnbitsize - 1);
5404 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5405 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5407 if (BYTES_BIG_ENDIAN)
5409 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5410 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5413 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5415 size_int (xlr_bitpos));
5416 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 size_int (xrr_bitpos));
5420 /* Make a mask that corresponds to both fields being compared.
5421 Do this for both items being compared. If the operands are the
5422 same size and the bits being compared are in the same position
5423 then we can do this by masking both and comparing the masked
5424 results. */
5425 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5426 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5427 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5429 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5430 ll_unsignedp || rl_unsignedp);
5431 if (! all_ones_mask_p (ll_mask, lnbitsize))
5432 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5434 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5435 lr_unsignedp || rr_unsignedp);
5436 if (! all_ones_mask_p (lr_mask, rnbitsize))
5437 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5439 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5442 /* There is still another way we can do something: If both pairs of
5443 fields being compared are adjacent, we may be able to make a wider
5444 field containing them both.
5446 Note that we still must mask the lhs/rhs expressions. Furthermore,
5447 the mask must be shifted to account for the shift done by
5448 make_bit_field_ref. */
5449 if ((ll_bitsize + ll_bitpos == rl_bitpos
5450 && lr_bitsize + lr_bitpos == rr_bitpos)
5451 || (ll_bitpos == rl_bitpos + rl_bitsize
5452 && lr_bitpos == rr_bitpos + rr_bitsize))
5456 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5457 ll_bitsize + rl_bitsize,
5458 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5459 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5460 lr_bitsize + rr_bitsize,
5461 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5463 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5464 size_int (MIN (xll_bitpos, xrl_bitpos)));
5465 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5466 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5468 /* Convert to the smaller type before masking out unwanted bits. */
5470 if (lntype != rntype)
5472 if (lnbitsize > rnbitsize)
5474 lhs = fold_convert_loc (loc, rntype, lhs);
5475 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5478 else if (lnbitsize < rnbitsize)
5480 rhs = fold_convert_loc (loc, lntype, rhs);
5481 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5486 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5487 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5489 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5490 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5492 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5498 /* Handle the case of comparisons with constants. If there is something in
5499 common between the masks, those bits of the constants must be the same.
5500 If not, the condition is always false. Test for this to avoid generating
5501 incorrect code below. */
5502 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5503 if (! integer_zerop (result)
5504 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5505 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5507 if (wanted_code == NE_EXPR)
5509 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5510 return constant_boolean_node (true, truth_type);
5514 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5515 return constant_boolean_node (false, truth_type);
5519 /* Construct the expression we will return. First get the component
5520 reference we will make. Unless the mask is all ones the width of
5521 that field, perform the mask operation. Then compare with the
5522 merged constant. */
5523 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5524 ll_unsignedp || rl_unsignedp);
5526 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5527 if (! all_ones_mask_p (ll_mask, lnbitsize))
5528 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5530 return build2_loc (loc, wanted_code, truth_type, result,
5531 const_binop (BIT_IOR_EXPR, l_const, r_const));
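/* For illustration only (hypothetical field layout): the payoff of the
   merge above.  Testing two adjacent 4-bit fields separately,
   (x & 0x0f) == 2 && ((x >> 4) & 0x0f) == 4, becomes one masked compare
   against the merged constant.  */
static int
both_fields_equal_sketch (unsigned int x)
{
  return (x & 0xffu) == ((4u << 4) | 2u);
}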
5534 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5535 constant. */
5538 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5542 enum tree_code op_code;
5545 int consts_equal, consts_lt;
5548 STRIP_SIGN_NOPS (arg0);
5550 op_code = TREE_CODE (arg0);
5551 minmax_const = TREE_OPERAND (arg0, 1);
5552 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5553 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5554 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5555 inner = TREE_OPERAND (arg0, 0);
5557 /* If something does not permit us to optimize, return the original tree. */
5558 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5559 || TREE_CODE (comp_const) != INTEGER_CST
5560 || TREE_OVERFLOW (comp_const)
5561 || TREE_CODE (minmax_const) != INTEGER_CST
5562 || TREE_OVERFLOW (minmax_const))
5565 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5566 and GT_EXPR, doing the rest with recursive calls using logical
5567 simplifications. */
5570 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5573 = optimize_minmax_comparison (loc,
5574 invert_tree_comparison (code, false),
5577 return invert_truthvalue_loc (loc, tem);
5583 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5584 optimize_minmax_comparison
5585 (loc, EQ_EXPR, type, arg0, comp_const),
5586 optimize_minmax_comparison
5587 (loc, GT_EXPR, type, arg0, comp_const));
5590 if (op_code == MAX_EXPR && consts_equal)
5591 /* MAX (X, 0) == 0 -> X <= 0 */
5592 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5594 else if (op_code == MAX_EXPR && consts_lt)
5595 /* MAX (X, 0) == 5 -> X == 5 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) == -1 -> false */
5600 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5602 else if (consts_equal)
5603 /* MIN (X, 0) == 0 -> X >= 0 */
5604 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5607 /* MIN (X, 0) == 5 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5611 /* MIN (X, 0) == -1 -> X == -1 */
5612 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5615 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5616 /* MAX (X, 0) > 0 -> X > 0
5617 MAX (X, 0) > 5 -> X > 5 */
5618 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5620 else if (op_code == MAX_EXPR)
5621 /* MAX (X, 0) > -1 -> true */
5622 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5624 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5625 /* MIN (X, 0) > 0 -> false
5626 MIN (X, 0) > 5 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5630 /* MIN (X, 0) > -1 -> X > -1 */
5631 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
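/* For illustration only (hypothetical user code): the MAX cases above in
   source form.  With m = MAX (x, 0), the test m == 0 folds to x <= 0,
   and m > -1 folds to the constant 1.  */
static int
max_test_sketch (int x)
{
  int m = x > 0 ? x : 0;   /* MAX (x, 0) */
  return m == 0;           /* folds to x <= 0 */
}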
5638 /* T is an integer expression that is being multiplied or divided by a
5639 constant C, or reduced modulo C (CODE says which operation and what
5640 kind of divide or modulus). See if we can eliminate that operation by folding it with
5641 other operations already in T. WIDE_TYPE, if non-null, is a type that
5642 should be used for the computation if wider than our type.
5644 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5645 (X * 2) + (Y * 4). We must, however, be assured that either the original
5646 expression would not overflow or that overflow is undefined for the type
5647 in the language in question.
5649 If we return a non-null expression, it is an equivalent form of the
5650 original computation, but need not be in the original type.
5652 We set *STRICT_OVERFLOW_P to true if the return value depends on
5653 signed overflow being undefined. Otherwise we do not change
5654 *STRICT_OVERFLOW_P. */
5657 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5660 /* To avoid exponential search depth, refuse to allow recursion past
5661 three levels. Beyond that (1) it's highly unlikely that we'll find
5662 something interesting and (2) we've probably processed it before
5663 when we built the inner expression. */
5672 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5679 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5680 bool *strict_overflow_p)
5682 tree type = TREE_TYPE (t);
5683 enum tree_code tcode = TREE_CODE (t);
5684 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5685 > GET_MODE_SIZE (TYPE_MODE (type)))
5686 ? wide_type : type);
5688 int same_p = tcode == code;
5689 tree op0 = NULL_TREE, op1 = NULL_TREE;
5690 bool sub_strict_overflow_p;
5692 /* Don't deal with constants of zero here; they confuse the code below. */
5693 if (integer_zerop (c))
5696 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5697 op0 = TREE_OPERAND (t, 0);
5699 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5700 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5702 /* Note that we need not handle conditional operations here since fold
5703 already handles those cases. So just do arithmetic here. */
5707 /* For a constant, we can always simplify if we are a multiply
5708 or (for divide and modulus) if it is a multiple of our constant. */
5709 if (code == MULT_EXPR
5710 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5711 return const_binop (code, fold_convert (ctype, t),
5712 fold_convert (ctype, c));
5715 CASE_CONVERT: case NON_LVALUE_EXPR:
5716 /* If op0 is an expression ... */
5717 if ((COMPARISON_CLASS_P (op0)
5718 || UNARY_CLASS_P (op0)
5719 || BINARY_CLASS_P (op0)
5720 || VL_EXP_CLASS_P (op0)
5721 || EXPRESSION_CLASS_P (op0))
5722 /* ... and has wrapping overflow, and its type is smaller
5723 than ctype, then we cannot pass through as widening. */
5724 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5725 && (TYPE_PRECISION (ctype)
5726 > TYPE_PRECISION (TREE_TYPE (op0))))
5727 /* ... or this is a truncation (t is narrower than op0),
5728 then we cannot pass through this narrowing. */
5729 || (TYPE_PRECISION (type)
5730 < TYPE_PRECISION (TREE_TYPE (op0)))
5731 /* ... or signedness changes for division or modulus,
5732 then we cannot pass through this conversion. */
5733 || (code != MULT_EXPR
5734 && (TYPE_UNSIGNED (ctype)
5735 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5736 /* ... or has undefined overflow while the converted to
5737 type has not, we cannot do the operation in the inner type
5738 as that would introduce undefined overflow. */
5739 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5740 && !TYPE_OVERFLOW_UNDEFINED (type))))
5743 /* Pass the constant down and see if we can make a simplification. If
5744 we can, replace this expression with the inner simplification for
5745 possible later conversion to our or some other type. */
5746 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5747 && TREE_CODE (t2) == INTEGER_CST
5748 && !TREE_OVERFLOW (t2)
5749 && (0 != (t1 = extract_muldiv (op0, t2, code,
5751 ? ctype : NULL_TREE,
5752 strict_overflow_p))))
5757 /* If widening the type changes it from signed to unsigned, then we
5758 must avoid building ABS_EXPR itself as unsigned. */
5759 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5761 tree cstype = (*signed_type_for) (ctype);
5762 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5765 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5766 return fold_convert (ctype, t1);
5770 /* If the constant is negative, we cannot simplify this. */
5771 if (tree_int_cst_sgn (c) == -1)
5775 /* For division and modulus, type can't be unsigned, as e.g.
5776 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5777 For signed types, even with wrapping overflow, this is fine. */
5778 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5780 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5782 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5785 case MIN_EXPR: case MAX_EXPR:
5786 /* If widening the type changes the signedness, then we can't perform
5787 this optimization as that changes the result. */
5788 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5791 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5792 sub_strict_overflow_p = false;
5793 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5794 &sub_strict_overflow_p)) != 0
5795 && (t2 = extract_muldiv (op1, c, code, wide_type,
5796 &sub_strict_overflow_p)) != 0)
5798 if (tree_int_cst_sgn (c) < 0)
5799 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5800 if (sub_strict_overflow_p)
5801 *strict_overflow_p = true;
5802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5803 fold_convert (ctype, t2));
5807 case LSHIFT_EXPR: case RSHIFT_EXPR:
5808 /* If the second operand is constant, this is a multiplication
5809 or floor division, by a power of two, so we can treat it that
5810 way unless the multiplier or divisor overflows. Signed
5811 left-shift overflow is implementation-defined rather than
5812 undefined in C90, so do not convert signed left shift into
5813 multiplication. */
5814 if (TREE_CODE (op1) == INTEGER_CST
5815 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5816 /* const_binop may not detect overflow correctly,
5817 so check for it explicitly here. */
5818 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5819 && TREE_INT_CST_HIGH (op1) == 0
5820 && 0 != (t1 = fold_convert (ctype,
5821 const_binop (LSHIFT_EXPR,
5824 && !TREE_OVERFLOW (t1))
5825 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5826 ? MULT_EXPR : FLOOR_DIV_EXPR,
5828 fold_convert (ctype, op0),
5830 c, code, wide_type, strict_overflow_p);
5833 case PLUS_EXPR: case MINUS_EXPR:
5834 /* See if we can eliminate the operation on both sides. If we can, we
5835 can return a new PLUS or MINUS. If we can't, the only remaining
5836 cases where we can do anything are if the second operand is a
5837 constant. */
5838 sub_strict_overflow_p = false;
5839 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5840 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5841 if (t1 != 0 && t2 != 0
5842 && (code == MULT_EXPR
5843 /* If not multiplication, we can only do this if both operands
5844 are divisible by c. */
5845 || (multiple_of_p (ctype, op0, c)
5846 && multiple_of_p (ctype, op1, c))))
5848 if (sub_strict_overflow_p)
5849 *strict_overflow_p = true;
5850 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5851 fold_convert (ctype, t2));
5854 /* If this was a subtraction, negate OP1 and set it to be an addition.
5855 This simplifies the logic below. */
5856 if (tcode == MINUS_EXPR)
5858 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5859 /* If OP1 was not easily negatable, the constant may be OP0. */
5860 if (TREE_CODE (op0) == INTEGER_CST)
5871 if (TREE_CODE (op1) != INTEGER_CST)
5874 /* If either OP1 or C is negative, this optimization is not safe for
5875 some of the division and remainder types, while for others we need
5876 to change the code. */
5877 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5879 if (code == CEIL_DIV_EXPR)
5880 code = FLOOR_DIV_EXPR;
5881 else if (code == FLOOR_DIV_EXPR)
5882 code = CEIL_DIV_EXPR;
5883 else if (code != MULT_EXPR
5884 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5888 /* If it's a multiply or a division/modulus operation of a multiple
5889 of our constant, do the operation and verify it doesn't overflow. */
5890 if (code == MULT_EXPR
5891 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5893 op1 = const_binop (code, fold_convert (ctype, op1),
5894 fold_convert (ctype, c));
5895 /* We allow the constant to overflow with wrapping semantics. */
5897 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5903 /* If we have an unsigned type, we cannot widen the operation since it
5904 will change the result if the original computation overflowed. */
5905 if (TYPE_UNSIGNED (ctype) && ctype != type)
5908 /* If we were able to eliminate our operation from the first side,
5909 apply our operation to the second side and reform the PLUS. */
5910 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5911 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5913 /* The last case is if we are a multiply. In that case, we can
5914 apply the distributive law to commute the multiply and addition
5915 if the multiplication of the constants doesn't overflow
5916 and overflow is defined. With undefined overflow
5917 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5918 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5919 return fold_build2 (tcode, ctype,
5920 fold_build2 (code, ctype,
5921 fold_convert (ctype, op0),
5922 fold_convert (ctype, c)),
5928 /* We have a special case here if we are doing something like
5929 (C * 8) % 4 since we know that's zero. */
5930 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5931 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5932 /* If the multiplication can overflow we cannot optimize this. */
5933 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5934 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5935 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5937 *strict_overflow_p = true;
5938 return omit_one_operand (type, integer_zero_node, op0);
5941 /* ... fall through ... */
5943 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5944 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5945 /* If we can extract our operation from the LHS, do so and return a
5946 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5947 do something only if the second operand is a constant. */
5949 && (t1 = extract_muldiv (op0, c, code, wide_type,
5950 strict_overflow_p)) != 0)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5952 fold_convert (ctype, op1));
5953 else if (tcode == MULT_EXPR && code == MULT_EXPR
5954 && (t1 = extract_muldiv (op1, c, code, wide_type,
5955 strict_overflow_p)) != 0)
5956 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5957 fold_convert (ctype, t1));
5958 else if (TREE_CODE (op1) != INTEGER_CST)
5961 /* If these are the same operation types, we can associate them
5962 assuming no overflow. */
5967 unsigned prec = TYPE_PRECISION (ctype);
5968 bool uns = TYPE_UNSIGNED (ctype);
5969 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5970 double_int dic = tree_to_double_int (c).ext (prec, uns);
5971 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5972 overflow_p = ((!uns && overflow_p)
5973 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5974 if (!double_int_fits_to_tree_p (ctype, mul)
5975 && ((uns && tcode != MULT_EXPR) || !uns))
5978 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5979 double_int_to_tree (ctype, mul));
5982 /* If these operations "cancel" each other, we have the main
5983 optimizations of this pass, which occur when either constant is a
5984 multiple of the other, in which case we replace this with an
5985 operation of either CODE or TCODE.
5987 If we have an unsigned type, we cannot do this since it will change
5988 the result if the original computation overflowed. */
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5990 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5991 || (tcode == MULT_EXPR
5992 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5993 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5994 && code != MULT_EXPR)))
5996 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5999 *strict_overflow_p = true;
6000 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6001 fold_convert (ctype,
6002 const_binop (TRUNC_DIV_EXPR,
6005 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6007 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6008 *strict_overflow_p = true;
6009 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6010 fold_convert (ctype,
6011 const_binop (TRUNC_DIV_EXPR,
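/* For illustration only (hypothetical user code): the headline case of
   extract_muldiv quoted in its comment above.  When signed overflow is
   undefined, the division distributes over the sum and this simplifies
   to x * 2 + y * 4.  */
static long
scaled_sum_sketch (long x, long y)
{
  return (x * 8 + y * 16) / 4;
}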
6024 /* Return a node which has the indicated constant VALUE (either 0 or
6025 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6026 and is of the indicated TYPE. */
6029 constant_boolean_node (bool value, tree type)
6031 if (type == integer_type_node)
6032 return value ? integer_one_node : integer_zero_node;
6033 else if (type == boolean_type_node)
6034 return value ? boolean_true_node : boolean_false_node;
6035 else if (TREE_CODE (type) == VECTOR_TYPE)
6036 return build_vector_from_val (type,
6037 build_int_cst (TREE_TYPE (type),
6040 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6044 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6045 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6046 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6047 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6048 COND is the first argument to CODE; otherwise (as in the example
6049 given here), it is the second argument. TYPE is the type of the
6050 original expression. Return NULL_TREE if no simplification is
6051 possible. */
6054 fold_binary_op_with_conditional_arg (location_t loc,
6055 enum tree_code code,
6056 tree type, tree op0, tree op1,
6057 tree cond, tree arg, int cond_first_p)
6059 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6060 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6061 tree test, true_value, false_value;
6062 tree lhs = NULL_TREE;
6063 tree rhs = NULL_TREE;
6064 enum tree_code cond_code = COND_EXPR;
6066 if (TREE_CODE (cond) == COND_EXPR
6067 || TREE_CODE (cond) == VEC_COND_EXPR)
6069 test = TREE_OPERAND (cond, 0);
6070 true_value = TREE_OPERAND (cond, 1);
6071 false_value = TREE_OPERAND (cond, 2);
6072 /* If this operand throws an exception, then it does not make
6073 sense to try to perform a logical or arithmetic operation
6074 involving it. */
6075 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6077 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6082 tree testtype = TREE_TYPE (cond);
6084 true_value = constant_boolean_node (true, testtype);
6085 false_value = constant_boolean_node (false, testtype);
6088 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6089 cond_code = VEC_COND_EXPR;
6091 /* This transformation is only worthwhile if we don't have to wrap ARG
6092 in a SAVE_EXPR and the operation can be simplified without recursing
6093 on at least one of the branches once it is pushed inside the COND_EXPR. */
6094 if (!TREE_CONSTANT (arg)
6095 && (TREE_SIDE_EFFECTS (arg)
6096 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6097 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6100 arg = fold_convert_loc (loc, arg_type, arg);
6103 true_value = fold_convert_loc (loc, cond_type, true_value);
6105 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6107 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6111 false_value = fold_convert_loc (loc, cond_type, false_value);
6113 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6115 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6118 /* Check that we have simplified at least one of the branches. */
6119 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6122 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
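/* For illustration only (hypothetical user code): the transformation
   described above.  The addition is distributed over the conditional,
   giving b ? a + 1 : a + 0.  */
static int
add_cond_sketch (int a, int b)
{
  return a + (b ? 1 : 0);
}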
6126 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6128 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6129 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6130 ADDEND is the same as X.
6132 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6133 and finite. The problematic cases are when X is zero, and its mode
6134 has signed zeros. In the case of rounding towards -infinity,
6135 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6136 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6139 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6141 if (!real_zerop (addend))
6144 /* Don't allow the fold with -fsignaling-nans. */
6145 if (HONOR_SNANS (TYPE_MODE (type)))
6148 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6149 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6152 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6153 if (TREE_CODE (addend) == REAL_CST
6154 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6157 /* The mode has signed zeros, and we have to honor their sign.
6158 In this situation, there is only one case we can return true for.
6159 X - 0 is the same as X unless rounding towards -infinity is in effect. */
6161 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
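/* Editorial aside, not part of GCC: a runnable sketch (under #if 0) of why
   the fold is restricted when signed zeros are honored, assuming IEEE 754
   arithmetic in the default round-to-nearest mode.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (signbit (x));
  assert (!signbit (x + 0.0)); /* -0.0 + 0.0 is +0.0: folding x + 0.0 to x
                                  would change the sign of zero.  */
  assert (signbit (x - 0.0));  /* -0.0 - 0.0 stays -0.0: the NEGATE case
                                  is safe outside -infinity rounding.  */
  return 0;
}
#endif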
6164 /* Subroutine of fold() that checks comparisons of built-in math
6165 functions against real constants.
6167 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6168 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6169 is the type of the result and ARG0 and ARG1 are the operands of the
6170 comparison. ARG1 must be a TREE_REAL_CST.
6172 The function returns the constant folded tree if a simplification
6173 can be made, and NULL_TREE otherwise. */
6176 fold_mathfn_compare (location_t loc,
6177 enum built_in_function fcode, enum tree_code code,
6178 tree type, tree arg0, tree arg1)
6182 if (BUILTIN_SQRT_P (fcode))
6184 tree arg = CALL_EXPR_ARG (arg0, 0);
6185 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6187 c = TREE_REAL_CST (arg1);
6188 if (REAL_VALUE_NEGATIVE (c))
6190 /* sqrt(x) < y is always false, if y is negative. */
6191 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6192 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6194 /* sqrt(x) > y is always true, if y is negative and we
6195 don't care about NaNs, i.e. negative values of x. */
6196 if (code == NE_EXPR || !HONOR_NANS (mode))
6197 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6199 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6200 return fold_build2_loc (loc, GE_EXPR, type, arg,
6201 build_real (TREE_TYPE (arg), dconst0));
6203 else if (code == GT_EXPR || code == GE_EXPR)
6207 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6208 real_convert (&c2, mode, &c2);
6210 if (REAL_VALUE_ISINF (c2))
6212 /* sqrt(x) > y is x == +Inf, when y is very large. */
6213 if (HONOR_INFINITIES (mode))
6214 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6215 build_real (TREE_TYPE (arg), c2));
6217 /* sqrt(x) > y is always false, when y is very large
6218 and we don't care about infinities. */
6219 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6222 /* sqrt(x) > c is the same as x > c*c. */
6223 return fold_build2_loc (loc, code, type, arg,
6224 build_real (TREE_TYPE (arg), c2));
6226 else if (code == LT_EXPR || code == LE_EXPR)
6230 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6231 real_convert (&c2, mode, &c2);
6233 if (REAL_VALUE_ISINF (c2))
6235 /* sqrt(x) < y is always true, when y is a very large
6236 value and we don't care about NaNs or Infinities. */
6237 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6238 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6240 /* sqrt(x) < y is x != +Inf when y is very large and we
6241 don't care about NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return fold_build2_loc (loc, NE_EXPR, type, arg,
6244 build_real (TREE_TYPE (arg), c2));
6246 /* sqrt(x) < y is x >= 0 when y is very large and we
6247 don't care about Infinities. */
6248 if (! HONOR_INFINITIES (mode))
6249 return fold_build2_loc (loc, GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg), dconst0));
6252 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6253 arg = save_expr (arg);
6254 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6255 fold_build2_loc (loc, GE_EXPR, type, arg,
6256 build_real (TREE_TYPE (arg),
6257 dconst0)),
6258 fold_build2_loc (loc, NE_EXPR, type, arg,
6259 build_real (TREE_TYPE (arg),
6260 c2)));
6263 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6264 if (! HONOR_NANS (mode))
6265 return fold_build2_loc (loc, code, type, arg,
6266 build_real (TREE_TYPE (arg), c2));
6268 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6269 arg = save_expr (arg);
6270 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6271 fold_build2_loc (loc, GE_EXPR, type, arg,
6272 build_real (TREE_TYPE (arg),
6273 dconst0)),
6274 fold_build2_loc (loc, code, type, arg,
6275 build_real (TREE_TYPE (arg),
6276 c2)));
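/* Editorial aside, not part of GCC: a plain-C spot check of the sqrt
   rewrite, under #if 0.  It assumes round-to-nearest and picks c == 3.0
   so that c*c == 9.0 is exact and no NaNs or infinities are involved.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double xs[] = { 0.0, 8.9, 9.0, 9.1, 100.0 };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    /* sqrt(x) > c is rewritten as x > c*c.  */
    assert ((sqrt (xs[i]) > 3.0) == (xs[i] > 9.0));
  return 0;
}
#endif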
6283 /* Subroutine of fold() that optimizes comparisons against Infinities,
6284 either +Inf or -Inf.
6286 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6287 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6288 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6290 The function returns the constant folded tree if a simplification
6291 can be made, and NULL_TREE otherwise. */
6294 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6295 tree arg0, tree arg1)
6297 enum machine_mode mode;
6298 REAL_VALUE_TYPE max;
6302 mode = TYPE_MODE (TREE_TYPE (arg0));
6304 /* For negative infinity swap the sense of the comparison. */
6305 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6307 code = swap_tree_comparison (code);
6312 /* x > +Inf is always false, if we ignore sNaNs. */
6313 if (HONOR_SNANS (mode))
6314 return NULL_TREE;
6315 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6318 /* x <= +Inf is always true, if we don't care about NaNs. */
6319 if (! HONOR_NANS (mode))
6320 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6322 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6323 arg0 = save_expr (arg0);
6324 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6328 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6329 real_maxval (&max, neg, mode);
6330 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6334 /* x < +Inf is always equal to x <= DBL_MAX. */
6335 real_maxval (&max, neg, mode);
6336 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6337 arg0, build_real (TREE_TYPE (arg0), max));
6340 /* x != +Inf is always equal to !(x > DBL_MAX). */
6341 real_maxval (&max, neg, mode);
6342 if (! HONOR_NANS (mode))
6343 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6344 arg0, build_real (TREE_TYPE (arg0), max));
6346 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6347 arg0, build_real (TREE_TYPE (arg0), max));
6348 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
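/* Editorial aside, not part of GCC: a runnable check (under #if 0) of the
   identities this routine relies on, assuming IEEE 754 doubles and a C99
   math.h that provides INFINITY and NAN.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { 0.0, -1.5, DBL_MAX, INFINITY, -INFINITY, NAN };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      double x = vals[i];
      /* x <= +Inf  <=>  x == x (i.e. x is not a NaN).  */
      assert ((x <= INFINITY) == (x == x));
      /* x == +Inf  <=>  x > DBL_MAX.  */
      assert ((x == INFINITY) == (x > DBL_MAX));
    }
  return 0;
}
#endif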
6357 /* Subroutine of fold() that optimizes comparisons of a division by
6358 a nonzero integer constant against an integer constant, i.e.
6359 X / C1 OP C2.
6361 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6362 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6363 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6365 The function returns the constant folded tree if a simplification
6366 can be made, and NULL_TREE otherwise. */
6369 fold_div_compare (location_t loc,
6370 enum tree_code code, tree type, tree arg0, tree arg1)
6372 tree prod, tmp, hi, lo;
6373 tree arg00 = TREE_OPERAND (arg0, 0);
6374 tree arg01 = TREE_OPERAND (arg0, 1);
6376 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6380 /* We have to do this the hard way to detect unsigned overflow.
6381 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6382 val = TREE_INT_CST (arg01)
6383 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6384 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6385 neg_overflow = false;
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1));
6393 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6394 val = TREE_INT_CST (prod)
6395 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6396 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6397 -1, overflow | TREE_OVERFLOW (prod));
6399 else if (tree_int_cst_sgn (arg01) >= 0)
6401 tmp = int_const_binop (MINUS_EXPR, arg01,
6402 build_int_cst (TREE_TYPE (arg01), 1));
6403 switch (tree_int_cst_sgn (arg1))
6406 neg_overflow = true;
6407 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6412 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6417 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6427 /* A negative divisor reverses the relational operators. */
6428 code = swap_tree_comparison (code);
6430 tmp = int_const_binop (PLUS_EXPR, arg01,
6431 build_int_cst (TREE_TYPE (arg01), 1));
6432 switch (tree_int_cst_sgn (arg1))
6435 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6440 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6445 neg_overflow = true;
6446 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6458 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6459 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6460 if (TREE_OVERFLOW (hi))
6461 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6462 if (TREE_OVERFLOW (lo))
6463 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6464 return build_range_check (loc, type, arg00, 1, lo, hi);
6467 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6468 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6469 if (TREE_OVERFLOW (hi))
6470 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6471 if (TREE_OVERFLOW (lo))
6472 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6473 return build_range_check (loc, type, arg00, 0, lo, hi);
6476 if (TREE_OVERFLOW (lo))
6478 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6479 return omit_one_operand_loc (loc, type, tmp, arg00);
6481 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6484 if (TREE_OVERFLOW (hi))
6486 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6487 return omit_one_operand_loc (loc, type, tmp, arg00);
6489 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6492 if (TREE_OVERFLOW (hi))
6494 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6495 return omit_one_operand_loc (loc, type, tmp, arg00);
6497 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6500 if (TREE_OVERFLOW (lo))
6502 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6503 return omit_one_operand_loc (loc, type, tmp, arg00);
6505 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
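/* Editorial aside, not part of GCC: the range check that this routine
   builds, verified exhaustively over a small domain in plain C (under
   #if 0).  C99 division truncates toward zero, matching TRUNC_DIV_EXPR.  */
#if 0
#include <assert.h>

int
main (void)
{
  /* x / 3 == 5 exactly when 15 <= x <= 17, i.e. lo == 15 and hi == 17
     in the terms used above.  */
  for (int x = -30; x <= 30; x++)
    assert ((x / 3 == 5) == (x >= 15 && x <= 17));
  return 0;
}
#endif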
6515 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6516 equality/inequality test, then return a simplified form of the test
6517 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6521 fold_single_bit_test_into_sign_test (location_t loc,
6522 enum tree_code code, tree arg0, tree arg1,
6525 /* If this is testing a single bit, we can optimize the test. */
6526 if ((code == NE_EXPR || code == EQ_EXPR)
6527 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6528 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6530 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6531 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6532 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6534 if (arg00 != NULL_TREE
6535 /* This is only a win if casting to a signed type is cheap,
6536 i.e. when arg00's type is not a partial mode. */
6537 && TYPE_PRECISION (TREE_TYPE (arg00))
6538 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6540 tree stype = signed_type_for (TREE_TYPE (arg00));
6541 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6543 fold_convert_loc (loc, stype, arg00),
6544 build_int_cst (stype, 0));
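/* Editorial aside, not part of GCC: the sign-bit rewrite in plain C,
   under #if 0.  It assumes 32-bit two's complement int32_t, where bit 31
   is the sign bit.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t vals[] = { 0, 1, -1, INT32_MIN, INT32_MAX };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      uint32_t u = (uint32_t) vals[i];
      /* (A & 0x80000000) != 0 becomes (int32_t) A < 0.  */
      assert (((u & UINT32_C (0x80000000)) != 0) == (vals[i] < 0));
    }
  return 0;
}
#endif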
6551 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6552 equality/inequality test, then return a simplified form of
6553 the test using shifts and logical operations. Otherwise return
6554 NULL. TYPE is the desired result type. */
6557 fold_single_bit_test (location_t loc, enum tree_code code,
6558 tree arg0, tree arg1, tree result_type)
6560 /* If this is testing a single bit, we can optimize the test. */
6561 if ((code == NE_EXPR || code == EQ_EXPR)
6562 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6563 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6565 tree inner = TREE_OPERAND (arg0, 0);
6566 tree type = TREE_TYPE (arg0);
6567 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6568 enum machine_mode operand_mode = TYPE_MODE (type);
6570 tree signed_type, unsigned_type, intermediate_type;
6573 /* First, see if we can fold the single bit test into a sign-bit test. */
6575 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6580 /* Otherwise we have (A & C) != 0 where C is a single bit,
6581 convert that into ((A >> C2) & 1), where C2 = log2(C).
6582 Similarly for (A & C) == 0. */
6584 /* If INNER is a right shift of a constant and it plus BITNUM does
6585 not overflow, adjust BITNUM and INNER. */
6586 if (TREE_CODE (inner) == RSHIFT_EXPR
6587 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6588 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6589 && bitnum < TYPE_PRECISION (type)
6590 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6591 bitnum - TYPE_PRECISION (type)))
6593 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6594 inner = TREE_OPERAND (inner, 0);
6597 /* If we are going to be able to omit the AND below, we must do our
6598 operations as unsigned. If we must use the AND, we have a choice.
6599 Normally unsigned is faster, but for some machines signed is. */
6600 #ifdef LOAD_EXTEND_OP
6601 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6602 && !flag_syntax_only) ? 0 : 1;
6603 #else
6604 ops_unsigned = 1;
6605 #endif
6607 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6608 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6609 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6610 inner = fold_convert_loc (loc, intermediate_type, inner);
6613 inner = build2 (RSHIFT_EXPR, intermediate_type,
6614 inner, size_int (bitnum));
6616 one = build_int_cst (intermediate_type, 1);
6618 if (code == EQ_EXPR)
6619 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6621 /* Put the AND last so it can combine with more things. */
6622 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6624 /* Make sure to return the proper type. */
6625 inner = fold_convert_loc (loc, result_type, inner);
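/* Editorial aside, not part of GCC: the shift-and-mask form of the single
   bit test, checked exhaustively over a small domain (under #if 0).  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 64; a++)
    for (unsigned n = 0; n < 6; n++)
      {
        /* (A & (1 << n)) != 0 becomes (A >> n) & 1 ...  */
        assert (((a & (1u << n)) != 0) == ((a >> n) & 1u));
        /* ... and the EQ_EXPR case XORs a 1 into the result.  */
        assert (((a & (1u << n)) == 0) == (((a >> n) & 1u) ^ 1u));
      }
  return 0;
}
#endif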
6632 /* Check whether we are allowed to reorder operands arg0 and arg1,
6633 such that the evaluation of arg1 occurs before arg0. */
6636 reorder_operands_p (const_tree arg0, const_tree arg1)
6638 if (! flag_evaluation_order)
6640 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642 return ! TREE_SIDE_EFFECTS (arg0)
6643 && ! TREE_SIDE_EFFECTS (arg1);
6646 /* Test whether it is preferable to swap two operands, ARG0 and
6647 ARG1, for example because ARG0 is an integer constant and ARG1
6648 isn't. If REORDER is true, only recommend swapping if we can
6649 evaluate the operands in reverse order. */
6652 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6654 STRIP_SIGN_NOPS (arg0);
6655 STRIP_SIGN_NOPS (arg1);
6657 if (TREE_CODE (arg1) == INTEGER_CST)
6659 if (TREE_CODE (arg0) == INTEGER_CST)
6662 if (TREE_CODE (arg1) == REAL_CST)
6664 if (TREE_CODE (arg0) == REAL_CST)
6667 if (TREE_CODE (arg1) == FIXED_CST)
6669 if (TREE_CODE (arg0) == FIXED_CST)
6672 if (TREE_CODE (arg1) == COMPLEX_CST)
6674 if (TREE_CODE (arg0) == COMPLEX_CST)
6677 if (TREE_CONSTANT (arg1))
6679 if (TREE_CONSTANT (arg0))
6682 if (optimize_function_for_size_p (cfun))
6685 if (reorder && flag_evaluation_order
6686 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6689 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6690 for commutative and comparison operators. Ensuring a canonical
6691 form allows the optimizers to find additional redundancies without
6692 having to explicitly check for both orderings. */
6693 if (TREE_CODE (arg0) == SSA_NAME
6694 && TREE_CODE (arg1) == SSA_NAME
6695 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6698 /* Put SSA_NAMEs last. */
6699 if (TREE_CODE (arg1) == SSA_NAME)
6701 if (TREE_CODE (arg0) == SSA_NAME)
6704 /* Put variables last. */
6713 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6714 ARG0 is extended to a wider type. */
6717 fold_widened_comparison (location_t loc, enum tree_code code,
6718 tree type, tree arg0, tree arg1)
6720 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6722 tree shorter_type, outer_type;
6726 if (arg0_unw == arg0)
6728 shorter_type = TREE_TYPE (arg0_unw);
6730 #ifdef HAVE_canonicalize_funcptr_for_compare
6731 /* Disable this optimization if we're casting a function pointer
6732 type on targets that require function pointer canonicalization. */
6733 if (HAVE_canonicalize_funcptr_for_compare
6734 && TREE_CODE (shorter_type) == POINTER_TYPE
6735 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6736 return NULL_TREE;
6737 #endif
6739 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6742 arg1_unw = get_unwidened (arg1, NULL_TREE);
6744 /* If possible, express the comparison in the shorter mode. */
6745 if ((code == EQ_EXPR || code == NE_EXPR
6746 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6747 && (TREE_TYPE (arg1_unw) == shorter_type
6748 || ((TYPE_PRECISION (shorter_type)
6749 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6750 && (TYPE_UNSIGNED (shorter_type)
6751 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6752 || (TREE_CODE (arg1_unw) == INTEGER_CST
6753 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6754 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6755 && int_fits_type_p (arg1_unw, shorter_type))))
6756 return fold_build2_loc (loc, code, type, arg0_unw,
6757 fold_convert_loc (loc, shorter_type, arg1_unw));
6759 if (TREE_CODE (arg1_unw) != INTEGER_CST
6760 || TREE_CODE (shorter_type) != INTEGER_TYPE
6761 || !int_fits_type_p (arg1_unw, shorter_type))
6764 /* If we are comparing with an integer that does not fit into the range
6765 of the shorter type, the result is known. */
6766 outer_type = TREE_TYPE (arg1_unw);
6767 min = lower_bound_in_type (outer_type, shorter_type);
6768 max = upper_bound_in_type (outer_type, shorter_type);
6770 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6772 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6779 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6784 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6790 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6792 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6797 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6799 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
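/* Editorial aside, not part of GCC: when the constant lies outside the
   range of the shorter type, the comparison has a known result.  A plain-C
   check under #if 0, using unsigned char as the shorter type.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = 0; i < 256; i++)
    {
      unsigned char c = (unsigned char) i;
      assert (((int) c == 300) == 0); /* always false: folds to 0 */
      assert (((int) c < 300) == 1);  /* always true: folds to 1 */
    }
  return 0;
}
#endif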
6808 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6809 ARG0 just the signedness is changed. */
6812 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6813 tree arg0, tree arg1)
6816 tree inner_type, outer_type;
6818 if (!CONVERT_EXPR_P (arg0))
6821 outer_type = TREE_TYPE (arg0);
6822 arg0_inner = TREE_OPERAND (arg0, 0);
6823 inner_type = TREE_TYPE (arg0_inner);
6825 #ifdef HAVE_canonicalize_funcptr_for_compare
6826 /* Disable this optimization if we're casting a function pointer
6827 type on targets that require function pointer canonicalization. */
6828 if (HAVE_canonicalize_funcptr_for_compare
6829 && TREE_CODE (inner_type) == POINTER_TYPE
6830 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6831 return NULL_TREE;
6832 #endif
6834 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6837 if (TREE_CODE (arg1) != INTEGER_CST
6838 && !(CONVERT_EXPR_P (arg1)
6839 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6842 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6847 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6850 if (TREE_CODE (arg1) == INTEGER_CST)
6851 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6852 0, TREE_OVERFLOW (arg1));
6854 arg1 = fold_convert_loc (loc, inner_type, arg1);
6856 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
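/* Editorial aside, not part of GCC: equality of same-precision values
   depends only on the bit pattern, so the signedness-changing cast can be
   stripped.  A plain-C check under #if 0, assuming 32-bit int32_t.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t xs[] = { 5, -5, 0, INT32_MIN };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    /* (uint32_t) x == 5u is equivalent to x == (int32_t) 5u.  */
    assert (((uint32_t) xs[i] == 5u) == (xs[i] == (int32_t) 5u));
  return 0;
}
#endif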
6859 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6860 the step of the array. Reconstructs s and delta in the case of s *
6861 delta being an integer constant (and thus already folded). ADDR is
6862 the address. OP1 is the multiplicative expression. If the
6863 function succeeds, the new address expression is returned.
6864 Otherwise NULL_TREE is returned. LOC is the location of the
6865 resulting expression. */
6868 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6870 tree s, delta, step;
6871 tree ref = TREE_OPERAND (addr, 0), pref;
6876 /* Strip the nops that might be added when converting op1 to sizetype. */
6879 /* Canonicalize op1 into a possibly non-constant delta
6880 and an INTEGER_CST s. */
6881 if (TREE_CODE (op1) == MULT_EXPR)
6883 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6888 if (TREE_CODE (arg0) == INTEGER_CST)
6893 else if (TREE_CODE (arg1) == INTEGER_CST)
6901 else if (TREE_CODE (op1) == INTEGER_CST)
6908 /* Treat this as delta * 1. */
6910 s = integer_one_node;
6913 /* Handle &x.array the same as we would handle &x.array[0]. */
6914 if (TREE_CODE (ref) == COMPONENT_REF
6915 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6919 /* Remember if this was a multi-dimensional array. */
6920 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6923 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6926 itype = TREE_TYPE (domain);
6928 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6929 if (TREE_CODE (step) != INTEGER_CST)
6934 if (! tree_int_cst_equal (step, s))
6939 /* Check whether delta is a multiple of step. */
6940 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6946 /* Only fold here if we can verify we do not overflow one
6947 dimension of a multi-dimensional array. */
6952 if (!TYPE_MIN_VALUE (domain)
6953 || !TYPE_MAX_VALUE (domain)
6954 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6957 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6958 fold_convert_loc (loc, itype,
6959 TYPE_MIN_VALUE (domain)),
6960 fold_convert_loc (loc, itype, delta));
6961 if (TREE_CODE (tmp) != INTEGER_CST
6962 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6966 /* We found a suitable component reference. */
6968 pref = TREE_OPERAND (addr, 0);
6969 ret = copy_node (pref);
6970 SET_EXPR_LOCATION (ret, loc);
6972 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6973 fold_build2_loc
6974 (loc, PLUS_EXPR, itype,
6975 fold_convert_loc (loc, itype,
6976 TYPE_MIN_VALUE
6977 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6978 fold_convert_loc (loc, itype, delta)),
6979 NULL_TREE, NULL_TREE);
6980 return build_fold_addr_expr_loc (loc, ret);
6985 for (;; ref = TREE_OPERAND (ref, 0))
6987 if (TREE_CODE (ref) == ARRAY_REF)
6991 /* Remember if this was a multi-dimensional array. */
6992 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6995 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6998 itype = TREE_TYPE (domain);
7000 step = array_ref_element_size (ref);
7001 if (TREE_CODE (step) != INTEGER_CST)
7006 if (! tree_int_cst_equal (step, s))
7011 /* Check whether delta is a multiple of step. */
7012 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7018 /* Only fold here if we can verify we do not overflow one
7019 dimension of a multi-dimensional array. */
7024 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7025 || !TYPE_MAX_VALUE (domain)
7026 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7029 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7030 fold_convert_loc (loc, itype,
7031 TREE_OPERAND (ref, 1)),
7032 fold_convert_loc (loc, itype, delta));
7034 || TREE_CODE (tmp) != INTEGER_CST
7035 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7044 if (!handled_component_p (ref))
7048 /* We found a suitable array reference. So copy everything up to it,
7049 and replace the index. */
7051 pref = TREE_OPERAND (addr, 0);
7052 ret = copy_node (pref);
7053 SET_EXPR_LOCATION (ret, loc);
7058 pref = TREE_OPERAND (pref, 0);
7059 TREE_OPERAND (pos, 0) = copy_node (pref);
7060 pos = TREE_OPERAND (pos, 0);
7063 TREE_OPERAND (pos, 1)
7064 = fold_build2_loc (loc, PLUS_EXPR, itype,
7065 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7066 fold_convert_loc (loc, itype, delta));
7067 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
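/* Editorial aside, not part of GCC: the pointer identity behind this
   routine, shown in plain C under #if 0.  Advancing &a[i] by
   delta * sizeof (element) bytes lands exactly on &a[i + delta].  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];
  int i = 2, delta = 3;
  char *raw = (char *) &a[i] + delta * sizeof (int);
  assert ((int *) raw == &a[i + delta]);
  return 0;
}
#endif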
7071 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7072 means A >= Y && A != MAX, but in this case we know that
7073 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7076 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7078 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7080 if (TREE_CODE (bound) == LT_EXPR)
7081 a = TREE_OPERAND (bound, 0);
7082 else if (TREE_CODE (bound) == GT_EXPR)
7083 a = TREE_OPERAND (bound, 1);
7087 typea = TREE_TYPE (a);
7088 if (!INTEGRAL_TYPE_P (typea)
7089 && !POINTER_TYPE_P (typea))
7092 if (TREE_CODE (ineq) == LT_EXPR)
7094 a1 = TREE_OPERAND (ineq, 1);
7095 y = TREE_OPERAND (ineq, 0);
7097 else if (TREE_CODE (ineq) == GT_EXPR)
7099 a1 = TREE_OPERAND (ineq, 0);
7100 y = TREE_OPERAND (ineq, 1);
7105 if (TREE_TYPE (a1) != typea)
7108 if (POINTER_TYPE_P (typea))
7110 /* Convert the pointer types into integers before taking the difference. */
7111 tree ta = fold_convert_loc (loc, ssizetype, a);
7112 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7113 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7116 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7118 if (!diff || !integer_onep (diff))
7121 return fold_build2_loc (loc, GE_EXPR, type, a, y);
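/* Editorial aside, not part of GCC: for integers, A + 1 > Y is A >= Y
   whenever A + 1 does not wrap; the bound A < X <= MAX supplies exactly
   that guarantee.  A small exhaustive check under #if 0 (the loop bounds
   keep A + 1 far from overflow).  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -10; a <= 10; a++)
    for (int y = -10; y <= 10; y++)
      assert ((a + 1 > y) == (a >= y));
  return 0;
}
#endif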
7124 /* Fold a sum or difference of at least one multiplication.
7125 Returns the folded tree or NULL if no simplification could be made. */
7128 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7129 tree arg0, tree arg1)
7131 tree arg00, arg01, arg10, arg11;
7132 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7134 /* (A * C) +- (B * C) -> (A+-B) * C.
7135 (A * C) +- A -> A * (C+-1).
7136 We are most concerned about the case where C is a constant,
7137 but other combinations show up during loop reduction. Since
7138 it is not difficult, try all four possibilities. */
7140 if (TREE_CODE (arg0) == MULT_EXPR)
7142 arg00 = TREE_OPERAND (arg0, 0);
7143 arg01 = TREE_OPERAND (arg0, 1);
7145 else if (TREE_CODE (arg0) == INTEGER_CST)
7147 arg00 = build_one_cst (type);
7152 /* We cannot generate constant 1 for fract. */
7153 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7156 arg01 = build_one_cst (type);
7158 if (TREE_CODE (arg1) == MULT_EXPR)
7160 arg10 = TREE_OPERAND (arg1, 0);
7161 arg11 = TREE_OPERAND (arg1, 1);
7163 else if (TREE_CODE (arg1) == INTEGER_CST)
7165 arg10 = build_one_cst (type);
7166 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7167 the purpose of this canonicalization. */
7168 if (TREE_INT_CST_HIGH (arg1) == -1
7169 && negate_expr_p (arg1)
7170 && code == PLUS_EXPR)
7172 arg11 = negate_expr (arg1);
7180 /* We cannot generate constant 1 for fract. */
7181 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7184 arg11 = build_one_cst (type);
7188 if (operand_equal_p (arg01, arg11, 0))
7189 same = arg01, alt0 = arg00, alt1 = arg10;
7190 else if (operand_equal_p (arg00, arg10, 0))
7191 same = arg00, alt0 = arg01, alt1 = arg11;
7192 else if (operand_equal_p (arg00, arg11, 0))
7193 same = arg00, alt0 = arg01, alt1 = arg10;
7194 else if (operand_equal_p (arg01, arg10, 0))
7195 same = arg01, alt0 = arg00, alt1 = arg11;
7197 /* No identical multiplicands; see if we can find a common
7198 power-of-two factor in non-power-of-two multiplies. This
7199 can help in multi-dimensional array access. */
7200 else if (host_integerp (arg01, 0)
7201 && host_integerp (arg11, 0))
7203 HOST_WIDE_INT int01, int11, tmp;
7206 int01 = TREE_INT_CST_LOW (arg01);
7207 int11 = TREE_INT_CST_LOW (arg11);
7209 /* Move min of absolute values to int11. */
7210 if (absu_hwi (int01) < absu_hwi (int11))
7212 tmp = int01, int01 = int11, int11 = tmp;
7213 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7220 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7221 /* The remainder should not be a constant, otherwise we
7222 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7223 increases the number of multiplications needed. */
7224 && TREE_CODE (arg10) != INTEGER_CST)
7226 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7227 build_int_cst (TREE_TYPE (arg00),
7232 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7237 return fold_build2_loc (loc, MULT_EXPR, type,
7238 fold_build2_loc (loc, code, type,
7239 fold_convert_loc (loc, type, alt0),
7240 fold_convert_loc (loc, type, alt1)),
7241 fold_convert_loc (loc, type, same));
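/* Editorial aside, not part of GCC: the distributions performed above,
   spot-checked in plain C under #if 0 over a range where no signed
   overflow occurs.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -5; a <= 5; a++)
    for (int b = -5; b <= 5; b++)
      {
        int c = 7;
        assert (a * c + b * c == (a + b) * c); /* (A*C) + (B*C) -> (A+B)*C */
        assert (a * c + a == a * (c + 1));     /* (A*C) + A -> A*(C+1) */
        /* Common power-of-two factor: a*12 + b*4 -> (a*3 + b)*4.  */
        assert (a * 12 + b * 4 == (a * 3 + b) * 4);
      }
  return 0;
}
#endif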
7246 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7247 specified by EXPR into the buffer PTR of length LEN bytes.
7248 Return the number of bytes placed in the buffer, or zero upon failure. */
7252 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7254 tree type = TREE_TYPE (expr);
7255 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7256 int byte, offset, word, words;
7257 unsigned char value;
7259 if (total_bytes > len)
7261 words = total_bytes / UNITS_PER_WORD;
7263 for (byte = 0; byte < total_bytes; byte++)
7265 int bitpos = byte * BITS_PER_UNIT;
7266 if (bitpos < HOST_BITS_PER_WIDE_INT)
7267 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7269 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7270 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7272 if (total_bytes > UNITS_PER_WORD)
7274 word = byte / UNITS_PER_WORD;
7275 if (WORDS_BIG_ENDIAN)
7276 word = (words - 1) - word;
7277 offset = word * UNITS_PER_WORD;
7278 if (BYTES_BIG_ENDIAN)
7279 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7281 offset += byte % UNITS_PER_WORD;
7284 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7285 ptr[offset] = value;
7291 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7292 specified by EXPR into the buffer PTR of length LEN bytes.
7293 Return the number of bytes placed in the buffer, or zero upon failure. */
7297 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7299 tree type = TREE_TYPE (expr);
7300 enum machine_mode mode = TYPE_MODE (type);
7301 int total_bytes = GET_MODE_SIZE (mode);
7302 FIXED_VALUE_TYPE value;
7303 tree i_value, i_type;
7305 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7308 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7310 if (NULL_TREE == i_type
7311 || TYPE_PRECISION (i_type) != total_bytes)
7314 value = TREE_FIXED_CST (expr);
7315 i_value = double_int_to_tree (i_type, value.data);
7317 return native_encode_int (i_value, ptr, len);
7321 /* Subroutine of native_encode_expr. Encode the REAL_CST
7322 specified by EXPR into the buffer PTR of length LEN bytes.
7323 Return the number of bytes placed in the buffer, or zero upon failure. */
7327 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7329 tree type = TREE_TYPE (expr);
7330 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7331 int byte, offset, word, words, bitpos;
7332 unsigned char value;
7334 /* There are always 32 bits in each long, no matter the size of
7335 the host's long. We handle floating point representations with
7336 up to 192 bits. */
7337 long tmp[6];
7339 if (total_bytes > len)
7341 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7343 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7345 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7346 bitpos += BITS_PER_UNIT)
7348 byte = (bitpos / BITS_PER_UNIT) & 3;
7349 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7351 if (UNITS_PER_WORD < 4)
7353 word = byte / UNITS_PER_WORD;
7354 if (WORDS_BIG_ENDIAN)
7355 word = (words - 1) - word;
7356 offset = word * UNITS_PER_WORD;
7357 if (BYTES_BIG_ENDIAN)
7358 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7360 offset += byte % UNITS_PER_WORD;
7363 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7364 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7369 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7370 specified by EXPR into the buffer PTR of length LEN bytes.
7371 Return the number of bytes placed in the buffer, or zero upon failure. */
7375 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7380 part = TREE_REALPART (expr);
7381 rsize = native_encode_expr (part, ptr, len);
7384 part = TREE_IMAGPART (expr);
7385 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7388 return rsize + isize;
7392 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7393 specified by EXPR into the buffer PTR of length LEN bytes.
7394 Return the number of bytes placed in the buffer, or zero upon failure. */
7398 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7405 count = VECTOR_CST_NELTS (expr);
7406 itype = TREE_TYPE (TREE_TYPE (expr));
7407 size = GET_MODE_SIZE (TYPE_MODE (itype));
7408 for (i = 0; i < count; i++)
7410 elem = VECTOR_CST_ELT (expr, i);
7411 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7419 /* Subroutine of native_encode_expr. Encode the STRING_CST
7420 specified by EXPR into the buffer PTR of length LEN bytes.
7421 Return the number of bytes placed in the buffer, or zero upon failure. */
7425 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7427 tree type = TREE_TYPE (expr);
7428 HOST_WIDE_INT total_bytes;
7430 if (TREE_CODE (type) != ARRAY_TYPE
7431 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7432 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7433 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7435 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7436 if (total_bytes > len)
7438 if (TREE_STRING_LENGTH (expr) < total_bytes)
7440 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7441 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7442 total_bytes - TREE_STRING_LENGTH (expr));
7445 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7450 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7451 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7452 buffer PTR of length LEN bytes. Return the number of bytes
7453 placed in the buffer, or zero upon failure. */
7456 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7458 switch (TREE_CODE (expr))
7461 return native_encode_int (expr, ptr, len);
7464 return native_encode_real (expr, ptr, len);
7467 return native_encode_fixed (expr, ptr, len);
7470 return native_encode_complex (expr, ptr, len);
7473 return native_encode_vector (expr, ptr, len);
7476 return native_encode_string (expr, ptr, len);
7484 /* Subroutine of native_interpret_expr. Interpret the contents of
7485 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7486 If the buffer cannot be interpreted, return NULL_TREE. */
7489 native_interpret_int (tree type, const unsigned char *ptr, int len)
7491 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7494 if (total_bytes > len
7495 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7498 result = double_int::from_buffer (ptr, total_bytes);
7500 return double_int_to_tree (type, result);
7504 /* Subroutine of native_interpret_expr. Interpret the contents of
7505 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7506 If the buffer cannot be interpreted, return NULL_TREE. */
7509 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7511 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7513 FIXED_VALUE_TYPE fixed_value;
7515 if (total_bytes > len
7516 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7519 result = double_int::from_buffer (ptr, total_bytes);
7520 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7522 return build_fixed (type, fixed_value);
7526 /* Subroutine of native_interpret_expr. Interpret the contents of
7527 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7528 If the buffer cannot be interpreted, return NULL_TREE. */
7531 native_interpret_real (tree type, const unsigned char *ptr, int len)
7533 enum machine_mode mode = TYPE_MODE (type);
7534 int total_bytes = GET_MODE_SIZE (mode);
7535 int byte, offset, word, words, bitpos;
7536 unsigned char value;
7537 /* There are always 32 bits in each long, no matter the size of
7538 the host's long. We handle floating point representations with
7539 up to 192 bits. */
7540 REAL_VALUE_TYPE r;
7541 long tmp[6];
7543 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7544 if (total_bytes > len || total_bytes > 24)
7546 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7548 memset (tmp, 0, sizeof (tmp));
7549 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7550 bitpos += BITS_PER_UNIT)
7552 byte = (bitpos / BITS_PER_UNIT) & 3;
7553 if (UNITS_PER_WORD < 4)
7555 word = byte / UNITS_PER_WORD;
7556 if (WORDS_BIG_ENDIAN)
7557 word = (words - 1) - word;
7558 offset = word * UNITS_PER_WORD;
7559 if (BYTES_BIG_ENDIAN)
7560 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7562 offset += byte % UNITS_PER_WORD;
7565 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7566 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7568 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7571 real_from_target (&r, tmp, mode);
7572 return build_real (type, r);
7576 /* Subroutine of native_interpret_expr. Interpret the contents of
7577 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7578 If the buffer cannot be interpreted, return NULL_TREE. */
7581 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7583 tree etype, rpart, ipart;
7586 etype = TREE_TYPE (type);
7587 size = GET_MODE_SIZE (TYPE_MODE (etype));
7590 rpart = native_interpret_expr (etype, ptr, size);
7593 ipart = native_interpret_expr (etype, ptr+size, size);
7596 return build_complex (type, rpart, ipart);
7600 /* Subroutine of native_interpret_expr. Interpret the contents of
7601 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7602 If the buffer cannot be interpreted, return NULL_TREE. */
7605 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7611 etype = TREE_TYPE (type);
7612 size = GET_MODE_SIZE (TYPE_MODE (etype));
7613 count = TYPE_VECTOR_SUBPARTS (type);
7614 if (size * count > len)
7617 elements = XALLOCAVEC (tree, count);
7618 for (i = count - 1; i >= 0; i--)
7620 elem = native_interpret_expr (etype, ptr+(i*size), size);
7625 return build_vector (type, elements);
7629 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7630 the buffer PTR of length LEN as a constant of type TYPE. For
7631 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7632 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7633 return NULL_TREE. */
7636 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7638 switch (TREE_CODE (type))
7644 case REFERENCE_TYPE:
7645 return native_interpret_int (type, ptr, len);
7648 return native_interpret_real (type, ptr, len);
7650 case FIXED_POINT_TYPE:
7651 return native_interpret_fixed (type, ptr, len);
7654 return native_interpret_complex (type, ptr, len);
7657 return native_interpret_vector (type, ptr, len);
7664 /* Returns true if we can interpret the contents of a native encoding as TYPE. */
7668 can_native_interpret_type_p (tree type)
7670 switch (TREE_CODE (type))
7676 case REFERENCE_TYPE:
7677 case FIXED_POINT_TYPE:
7687 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7688 TYPE at compile-time. If we're unable to perform the conversion
7689 return NULL_TREE. */
7692 fold_view_convert_expr (tree type, tree expr)
7694 /* We support up to 512-bit values (for V8DFmode). */
7695 unsigned char buffer[64];
7698 /* Check that the host and target are sane. */
7699 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7702 len = native_encode_expr (expr, buffer, sizeof (buffer));
7706 return native_interpret_expr (type, buffer, len);
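/* Editorial aside, not part of GCC: at the C level, the encode/interpret
   round trip above is what memcpy between objects of different types does.
   A sketch under #if 0, assuming a 32-bit IEEE 754 float.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t bits;
  memcpy (&bits, &f, sizeof bits);        /* the "native_encode" step */
  assert (bits == UINT32_C (0x3f800000)); /* IEEE single for 1.0 */
  float back;
  memcpy (&back, &bits, sizeof back);     /* the "native_interpret" step */
  assert (back == 1.0f);
  return 0;
}
#endif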
7709 /* Build an expression for the address of T. Folds away INDIRECT_REF
7710 to avoid confusing the gimplify process. */
7713 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7715 /* The size of the object is not relevant when talking about its address. */
7716 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7717 t = TREE_OPERAND (t, 0);
7719 if (TREE_CODE (t) == INDIRECT_REF)
7721 t = TREE_OPERAND (t, 0);
7723 if (TREE_TYPE (t) != ptrtype)
7724 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7726 else if (TREE_CODE (t) == MEM_REF
7727 && integer_zerop (TREE_OPERAND (t, 1)))
7728 return TREE_OPERAND (t, 0);
7729 else if (TREE_CODE (t) == MEM_REF
7730 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7731 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7732 TREE_OPERAND (t, 0),
7733 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7734 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7736 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7738 if (TREE_TYPE (t) != ptrtype)
7739 t = fold_convert_loc (loc, ptrtype, t);
7742 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7747 /* Build an expression for the address of T. */
7750 build_fold_addr_expr_loc (location_t loc, tree t)
7752 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7754 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7757 static bool vec_cst_ctor_to_array (tree, tree *);
7759 /* Fold a unary expression of code CODE and type TYPE with operand
7760 OP0. Return the folded expression if folding is successful.
7761 Otherwise, return NULL_TREE. */
7764 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7768 enum tree_code_class kind = TREE_CODE_CLASS (code);
7770 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7771 && TREE_CODE_LENGTH (code) == 1);
7776 if (CONVERT_EXPR_CODE_P (code)
7777 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7779 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7781 STRIP_SIGN_NOPS (arg0);
7785 /* Strip any conversions that don't change the mode. This
7786 is safe for every expression, except for a comparison
7787 expression because its signedness is derived from its
7788 operands.
7790 Note that this is done as an internal manipulation within
7791 the constant folder, in order to find the simplest
7792 representation of the arguments so that their form can be
7793 studied. In any case, the appropriate type conversions
7794 should be put back in the tree that will get out of the
7795 constant folder. */
7800 if (TREE_CODE_CLASS (code) == tcc_unary)
7802 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7803 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7804 fold_build1_loc (loc, code, type,
7805 fold_convert_loc (loc, TREE_TYPE (op0),
7806 TREE_OPERAND (arg0, 1))));
7807 else if (TREE_CODE (arg0) == COND_EXPR)
7809 tree arg01 = TREE_OPERAND (arg0, 1);
7810 tree arg02 = TREE_OPERAND (arg0, 2);
7811 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7812 arg01 = fold_build1_loc (loc, code, type,
7813 fold_convert_loc (loc,
7814 TREE_TYPE (op0), arg01));
7815 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7816 arg02 = fold_build1_loc (loc, code, type,
7817 fold_convert_loc (loc,
7818 TREE_TYPE (op0), arg02));
7819 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7822 /* If this was a conversion, and all we did was to move it
7823 inside the COND_EXPR, bring it back out. But leave it if
7824 it is a conversion from integer to integer and the
7825 result precision is no wider than a word since such a
7826 conversion is cheap and may be optimized away by combine,
7827 while it couldn't if it were outside the COND_EXPR. Then return
7828 so we don't get into an infinite recursion loop taking the
7829 conversion out and then back in. */
7831 if ((CONVERT_EXPR_CODE_P (code)
7832 || code == NON_LVALUE_EXPR)
7833 && TREE_CODE (tem) == COND_EXPR
7834 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7835 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7836 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7837 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7838 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7839 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7840 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7841 && (INTEGRAL_TYPE_P
7842 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7843 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7844 || flag_syntax_only))
7845 tem = build1_loc (loc, code, type,
7846 build3 (COND_EXPR,
7847 TREE_TYPE (TREE_OPERAND
7848 (TREE_OPERAND (tem, 1), 0)),
7849 TREE_OPERAND (tem, 0),
7850 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7851 TREE_OPERAND (TREE_OPERAND (tem, 2),
7852 0)));
7860 /* Re-association barriers around constants and other re-association
7861 barriers can be removed. */
7862 if (CONSTANT_CLASS_P (op0)
7863 || TREE_CODE (op0) == PAREN_EXPR)
7864 return fold_convert_loc (loc, type, op0);
7869 case FIX_TRUNC_EXPR:
7870 if (TREE_TYPE (op0) == type)
7873 if (COMPARISON_CLASS_P (op0))
7875 /* If we have (type) (a CMP b) and type is an integral type, return a
7876 new expression involving the new type. Canonicalize
7877 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7878 non-integral type.
7879 Do not fold the result, as that would not simplify further; also,
7880 folding again would result in infinite recursion. */
7881 if (TREE_CODE (type) == BOOLEAN_TYPE)
7882 return build2_loc (loc, TREE_CODE (op0), type,
7883 TREE_OPERAND (op0, 0),
7884 TREE_OPERAND (op0, 1));
7885 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7886 && TREE_CODE (type) != VECTOR_TYPE)
7887 return build3_loc (loc, COND_EXPR, type, op0,
7888 constant_boolean_node (true, type),
7889 constant_boolean_node (false, type));
7892 /* Handle cases of two conversions in a row. */
7893 if (CONVERT_EXPR_P (op0))
7895 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7896 tree inter_type = TREE_TYPE (op0);
7897 int inside_int = INTEGRAL_TYPE_P (inside_type);
7898 int inside_ptr = POINTER_TYPE_P (inside_type);
7899 int inside_float = FLOAT_TYPE_P (inside_type);
7900 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7901 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7902 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7903 int inter_int = INTEGRAL_TYPE_P (inter_type);
7904 int inter_ptr = POINTER_TYPE_P (inter_type);
7905 int inter_float = FLOAT_TYPE_P (inter_type);
7906 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7907 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7908 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7909 int final_int = INTEGRAL_TYPE_P (type);
7910 int final_ptr = POINTER_TYPE_P (type);
7911 int final_float = FLOAT_TYPE_P (type);
7912 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7913 unsigned int final_prec = TYPE_PRECISION (type);
7914 int final_unsignedp = TYPE_UNSIGNED (type);
7916 /* In addition to the cases of two conversions in a row
7917 handled below, if we are converting something to its own
7918 type via an object of identical or wider precision, neither
7919 conversion is needed. */
7920 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7921 && (((inter_int || inter_ptr) && final_int)
7922 || (inter_float && final_float))
7923 && inter_prec >= final_prec)
7924 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7926 /* Likewise, if the intermediate and initial types are either both
7927 float or both integer, we don't need the middle conversion if the
7928 former is wider than the latter and doesn't change the signedness
7929 (for integers). Avoid this if the final type is a pointer since
7930 then we sometimes need the middle conversion. Likewise if the
7931 final type has a precision not equal to the size of its mode. */
7932 if (((inter_int && inside_int)
7933 || (inter_float && inside_float)
7934 || (inter_vec && inside_vec))
7935 && inter_prec >= inside_prec
7936 && (inter_float || inter_vec
7937 || inter_unsignedp == inside_unsignedp)
7938 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7939 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7941 && (! final_vec || inter_prec == inside_prec))
7942 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7944 /* If we have a sign-extension of a zero-extended value, we can
7945 replace that by a single zero-extension. Likewise if the
7946 final conversion does not change precision we can drop the
7947 intermediate conversion. */
7948 if (inside_int && inter_int && final_int
7949 && ((inside_prec < inter_prec && inter_prec < final_prec
7950 && inside_unsignedp && !inter_unsignedp)
7951 || final_prec == inter_prec))
7952 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7954 /* Two conversions in a row are not needed unless:
7955 - some conversion is floating-point (overstrict for now), or
7956 - some conversion is a vector (overstrict for now), or
7957 - the intermediate type is narrower than both initial and final types, or
7959 - the intermediate type and innermost type differ in signedness,
7960 and the outermost type is wider than the intermediate, or
7961 - the initial type is a pointer type and the precisions of the
7962 intermediate and final types differ, or
7963 - the final type is a pointer type and the precisions of the
7964 initial and intermediate types differ. */
7965 if (! inside_float && ! inter_float && ! final_float
7966 && ! inside_vec && ! inter_vec && ! final_vec
7967 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7968 && ! (inside_int && inter_int
7969 && inter_unsignedp != inside_unsignedp
7970 && inter_prec < final_prec)
7971 && ((inter_unsignedp && inter_prec > inside_prec)
7972 == (final_unsignedp && final_prec > inter_prec))
7973 && ! (inside_ptr && inter_prec != final_prec)
7974 && ! (final_ptr && inside_prec != inter_prec)
7975 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7976 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7977 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7980 /* Handle (T *)&A.B.C for A being of type T and B and C
7981 living at offset zero. This occurs frequently in
7982 C++ upcasting and then accessing the base. */
7983 if (TREE_CODE (op0) == ADDR_EXPR
7984 && POINTER_TYPE_P (type)
7985 && handled_component_p (TREE_OPERAND (op0, 0)))
7987 HOST_WIDE_INT bitsize, bitpos;
7989 enum machine_mode mode;
7990 int unsignedp, volatilep;
7991 tree base = TREE_OPERAND (op0, 0);
7992 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7993 &mode, &unsignedp, &volatilep, false);
7994 /* If the reference was to a (constant) zero offset, we can use
7995 the address of the base if it has the same base type
7996 as the result type and the pointer type is unqualified. */
7997 if (! offset && bitpos == 0
7998 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7999 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8000 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8001 return fold_convert_loc (loc, type,
8002 build_fold_addr_expr_loc (loc, base));
8005 if (TREE_CODE (op0) == MODIFY_EXPR
8006 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8007 /* Detect assigning a bitfield. */
8008 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8009 && DECL_BIT_FIELD
8010 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8012 /* Don't leave an assignment inside a conversion
8013 unless assigning a bitfield. */
8014 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8015 /* First do the assignment, then return converted constant. */
8016 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8017 TREE_NO_WARNING (tem) = 1;
8018 TREE_USED (tem) = 1;
8022 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8023 constant (if x has signed type, the sign bit cannot be set
8024 in c). This folds extension into the BIT_AND_EXPR.
8025 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8026 very likely don't have maximal range for their precision and this
8027 transformation effectively doesn't preserve non-maximal ranges. */
8028 if (TREE_CODE (type) == INTEGER_TYPE
8029 && TREE_CODE (op0) == BIT_AND_EXPR
8030 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8032 tree and_expr = op0;
8033 tree and0 = TREE_OPERAND (and_expr, 0);
8034 tree and1 = TREE_OPERAND (and_expr, 1);
8037 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8038 || (TYPE_PRECISION (type)
8039 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8041 else if (TYPE_PRECISION (TREE_TYPE (and1))
8042 <= HOST_BITS_PER_WIDE_INT
8043 && host_integerp (and1, 1))
8045 unsigned HOST_WIDE_INT cst;
8047 cst = tree_low_cst (and1, 1);
8048 cst &= (HOST_WIDE_INT) -1
8049 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8050 change = (cst == 0);
8051 #ifdef LOAD_EXTEND_OP
8052 if (change
8053 && !flag_syntax_only
8054 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8055 == ZERO_EXTEND))
8057 tree uns = unsigned_type_for (TREE_TYPE (and0));
8058 and0 = fold_convert_loc (loc, uns, and0);
8059 and1 = fold_convert_loc (loc, uns, and1);
8060 #endif
8065 tem = force_fit_type_double (type, tree_to_double_int (and1),
8066 0, TREE_OVERFLOW (and1));
8067 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8068 fold_convert_loc (loc, type, and0), tem);
8072 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8073 when one of the new casts will fold away. Conservatively we assume
8074 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8075 if (POINTER_TYPE_P (type)
8076 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8077 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8078 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8079 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8080 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8082 tree arg00 = TREE_OPERAND (arg0, 0);
8083 tree arg01 = TREE_OPERAND (arg0, 1);
8085 return fold_build_pointer_plus_loc
8086 (loc, fold_convert_loc (loc, type, arg00), arg01);
8089 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8090 of the same precision, and X is an integer type not narrower than
8091 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8092 if (INTEGRAL_TYPE_P (type)
8093 && TREE_CODE (op0) == BIT_NOT_EXPR
8094 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8095 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8096 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8098 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8099 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8100 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8101 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8102 fold_convert_loc (loc, type, tem));
8105 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8106 type of X and Y (integer types only). */
8107 if (INTEGRAL_TYPE_P (type)
8108 && TREE_CODE (op0) == MULT_EXPR
8109 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8110 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8112 /* Be careful not to introduce new overflows. */
8114 if (TYPE_OVERFLOW_WRAPS (type))
8117 mult_type = unsigned_type_for (type);
8119 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8121 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8122 fold_convert_loc (loc, mult_type,
8123 TREE_OPERAND (op0, 0)),
8124 fold_convert_loc (loc, mult_type,
8125 TREE_OPERAND (op0, 1)));
8126 return fold_convert_loc (loc, type, tem);
8130 tem = fold_convert_const (code, type, op0);
8131 return tem ? tem : NULL_TREE;
8133 case ADDR_SPACE_CONVERT_EXPR:
8134 if (integer_zerop (arg0))
8135 return fold_convert_const (code, type, arg0);
8138 case FIXED_CONVERT_EXPR:
8139 tem = fold_convert_const (code, type, arg0);
8140 return tem ? tem : NULL_TREE;
8142 case VIEW_CONVERT_EXPR:
8143 if (TREE_TYPE (op0) == type)
8145 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8146 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8147 type, TREE_OPERAND (op0, 0));
8148 if (TREE_CODE (op0) == MEM_REF)
8149 return fold_build2_loc (loc, MEM_REF, type,
8150 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8152 /* For integral conversions with the same precision or pointer
8153 conversions use a NOP_EXPR instead. */
8154 if ((INTEGRAL_TYPE_P (type)
8155 || POINTER_TYPE_P (type))
8156 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8157 || POINTER_TYPE_P (TREE_TYPE (op0)))
8158 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8159 return fold_convert_loc (loc, type, op0);
8161 /* Strip inner integral conversions that do not change the precision. */
8162 if (CONVERT_EXPR_P (op0)
8163 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8164 || POINTER_TYPE_P (TREE_TYPE (op0)))
8165 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8166 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8167 && (TYPE_PRECISION (TREE_TYPE (op0))
8168 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8169 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8170 type, TREE_OPERAND (op0, 0));
8172 return fold_view_convert_expr (type, op0);
8175 tem = fold_negate_expr (loc, arg0);
8177 return fold_convert_loc (loc, type, tem);
8181 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8182 return fold_abs_const (arg0, type);
8183 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8184 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8185 /* Convert fabs((double)float) into (double)fabsf(float). */
8186 else if (TREE_CODE (arg0) == NOP_EXPR
8187 && TREE_CODE (type) == REAL_TYPE)
8189 tree targ0 = strip_float_extensions (arg0);
8191 return fold_convert_loc (loc, type,
8192 fold_build1_loc (loc, ABS_EXPR,
8196 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8197 else if (TREE_CODE (arg0) == ABS_EXPR)
8199 else if (tree_expr_nonnegative_p (arg0))
8202 /* Strip sign ops from argument. */
8203 if (TREE_CODE (type) == REAL_TYPE)
8205 tem = fold_strip_sign_ops (arg0);
8207 return fold_build1_loc (loc, ABS_EXPR, type,
8208 fold_convert_loc (loc, type, tem));
8213 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8214 return fold_convert_loc (loc, type, arg0);
8215 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8217 tree itype = TREE_TYPE (type);
8218 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8219 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8220 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8221 negate_expr (ipart));
8223 if (TREE_CODE (arg0) == COMPLEX_CST)
8225 tree itype = TREE_TYPE (type);
8226 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8227 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8228 return build_complex (type, rpart, negate_expr (ipart));
8230 if (TREE_CODE (arg0) == CONJ_EXPR)
8231 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8235 if (TREE_CODE (arg0) == INTEGER_CST)
8236 return fold_not_const (arg0, type);
8237 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8238 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8239 /* Convert ~ (-A) to A - 1. */
8240 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8241 return fold_build2_loc (loc, MINUS_EXPR, type,
8242 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8243 build_int_cst (type, 1));
8244 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8245 else if (INTEGRAL_TYPE_P (type)
8246 && ((TREE_CODE (arg0) == MINUS_EXPR
8247 && integer_onep (TREE_OPERAND (arg0, 1)))
8248 || (TREE_CODE (arg0) == PLUS_EXPR
8249 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8250 return fold_build1_loc (loc, NEGATE_EXPR, type,
8251 fold_convert_loc (loc, type,
8252 TREE_OPERAND (arg0, 0)));
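/* Both of the preceding transforms follow from the two's complement
   identity ~X == -X - 1: substituting gives ~(-A) == A - 1 and
   ~(A - 1) == -(A - 1) - 1 == -A.  */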
8253 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8254 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8255 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8256 fold_convert_loc (loc, type,
8257 TREE_OPERAND (arg0, 0)))))
8258 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8259 fold_convert_loc (loc, type,
8260 TREE_OPERAND (arg0, 1)));
8261 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8262 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8263 fold_convert_loc (loc, type,
8264 TREE_OPERAND (arg0, 1)))))
8265 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8266 fold_convert_loc (loc, type,
8267 TREE_OPERAND (arg0, 0)), tem);
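/* For example, with an 8-bit type, ~(0x0F ^ Y) folds to 0xF0 ^ Y:
   complementing flips every bit, and XOR lets that flip be absorbed
   into either operand, here the constant (~0x0F == 0xF0).  */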
8268 /* Perform BIT_NOT_EXPR on each element individually. */
8269 else if (TREE_CODE (arg0) == VECTOR_CST)
8273 unsigned count = VECTOR_CST_NELTS (arg0), i;
8275 elements = XALLOCAVEC (tree, count);
8276 for (i = 0; i < count; i++)
8278 elem = VECTOR_CST_ELT (arg0, i);
8279 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8280 if (elem == NULL_TREE)
8285 return build_vector (type, elements);
8290 case TRUTH_NOT_EXPR:
8291 /* The argument to invert_truthvalue must have Boolean type. */
8292 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8293 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8295 /* Note that the operand of this must be an int
8296 and its values must be 0 or 1.
8297 ("true" is a fixed value perhaps depending on the language,
8298 but we don't handle values other than 1 correctly yet.) */
8299 tem = fold_truth_not_expr (loc, arg0);
8302 return fold_convert_loc (loc, type, tem);
8305 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8306 return fold_convert_loc (loc, type, arg0);
8307 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8308 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8309 TREE_OPERAND (arg0, 1));
8310 if (TREE_CODE (arg0) == COMPLEX_CST)
8311 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8312 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8314 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8315 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8316 fold_build1_loc (loc, REALPART_EXPR, itype,
8317 TREE_OPERAND (arg0, 0)),
8318 fold_build1_loc (loc, REALPART_EXPR, itype,
8319 TREE_OPERAND (arg0, 1)));
8320 return fold_convert_loc (loc, type, tem);
8322 if (TREE_CODE (arg0) == CONJ_EXPR)
8324 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8325 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8326 TREE_OPERAND (arg0, 0));
8327 return fold_convert_loc (loc, type, tem);
8329 if (TREE_CODE (arg0) == CALL_EXPR)
8331 tree fn = get_callee_fndecl (arg0);
8332 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8333 switch (DECL_FUNCTION_CODE (fn))
8335 CASE_FLT_FN (BUILT_IN_CEXPI):
8336 fn = mathfn_built_in (type, BUILT_IN_COS);
8338 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8348 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8349 return build_zero_cst (type);
8350 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8351 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8352 TREE_OPERAND (arg0, 0));
8353 if (TREE_CODE (arg0) == COMPLEX_CST)
8354 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8355 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8357 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8358 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8359 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8360 TREE_OPERAND (arg0, 0)),
8361 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8362 TREE_OPERAND (arg0, 1)));
8363 return fold_convert_loc (loc, type, tem);
8365 if (TREE_CODE (arg0) == CONJ_EXPR)
8367 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8368 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8369 return fold_convert_loc (loc, type, negate_expr (tem));
8371 if (TREE_CODE (arg0) == CALL_EXPR)
8373 tree fn = get_callee_fndecl (arg0);
8374 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8375 switch (DECL_FUNCTION_CODE (fn))
8377 CASE_FLT_FN (BUILT_IN_CEXPI):
8378 fn = mathfn_built_in (type, BUILT_IN_SIN);
8380 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8390 /* Fold *&X to X if X is an lvalue. */
8391 if (TREE_CODE (op0) == ADDR_EXPR)
8393 tree op00 = TREE_OPERAND (op0, 0);
8394 if ((TREE_CODE (op00) == VAR_DECL
8395 || TREE_CODE (op00) == PARM_DECL
8396 || TREE_CODE (op00) == RESULT_DECL)
8397 && !TREE_READONLY (op00))
8402 case VEC_UNPACK_LO_EXPR:
8403 case VEC_UNPACK_HI_EXPR:
8404 case VEC_UNPACK_FLOAT_LO_EXPR:
8405 case VEC_UNPACK_FLOAT_HI_EXPR:
8407 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8409 enum tree_code subcode;
8411 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8412 if (TREE_CODE (arg0) != VECTOR_CST)
8415 elts = XALLOCAVEC (tree, nelts * 2);
8416 if (!vec_cst_ctor_to_array (arg0, elts))
8419 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8420 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8423 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8426 subcode = FLOAT_EXPR;
8428 for (i = 0; i < nelts; i++)
8430 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8431 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8435 return build_vector (type, elts);
8438 case REDUC_MIN_EXPR:
8439 case REDUC_MAX_EXPR:
8440 case REDUC_PLUS_EXPR:
8442 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8444 enum tree_code subcode;
8446 if (TREE_CODE (op0) != VECTOR_CST)
8449 elts = XALLOCAVEC (tree, nelts);
8450 if (!vec_cst_ctor_to_array (op0, elts))
8455 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8456 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8457 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8458 default: gcc_unreachable ();
8461 for (i = 1; i < nelts; i++)
8463 elts[0] = const_binop (subcode, elts[0], elts[i]);
8464 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8466 elts[i] = build_zero_cst (TREE_TYPE (type));
8469 return build_vector (type, elts);
8474 } /* switch (code) */
8478 /* If the operation was a conversion do _not_ mark a resulting constant
8479 with TREE_OVERFLOW if the original constant was not. These conversions
8480 have implementation-defined behavior and retaining the TREE_OVERFLOW
8481 flag here would confuse later passes such as VRP. */
8483 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8484 tree type, tree op0)
8486 tree res = fold_unary_loc (loc, code, type, op0);
8488 && TREE_CODE (res) == INTEGER_CST
8489 && TREE_CODE (op0) == INTEGER_CST
8490 && CONVERT_EXPR_CODE_P (code))
8491 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8496 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8497 operands OP0 and OP1. LOC is the location of the resulting expression.
8498 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8499 Return the folded expression if folding is successful. Otherwise,
8500 return NULL_TREE. */
8502 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8503 tree arg0, tree arg1, tree op0, tree op1)
8507 /* We only do these simplifications if we are optimizing. */
8511 /* Check for things like (A || B) && (A || C). We can convert this
8512 to A || (B && C). Note that either operator can be any of the four
8513 truth and/or operations and the transformation will still be
8514 valid. Also note that we only care about order for the
8515 ANDIF and ORIF operators. If B contains side effects, this
8516 might change the truth-value of A. */
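/* For example, (a || b) && (a || c) becomes a || (b && c): by the
   distributive law both forms are true exactly when a is true or both
   b and c are, and short-circuiting still skips b and c when a is
   true.  The side-effect check above is what keeps this safe for the
   ANDIF/ORIF forms.  */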
8517 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8518 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8519 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8520 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8521 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8522 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8524 tree a00 = TREE_OPERAND (arg0, 0);
8525 tree a01 = TREE_OPERAND (arg0, 1);
8526 tree a10 = TREE_OPERAND (arg1, 0);
8527 tree a11 = TREE_OPERAND (arg1, 1);
8528 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8529 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8530 && (code == TRUTH_AND_EXPR
8531 || code == TRUTH_OR_EXPR));
8533 if (operand_equal_p (a00, a10, 0))
8534 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8535 fold_build2_loc (loc, code, type, a01, a11));
8536 else if (commutative && operand_equal_p (a00, a11, 0))
8537 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8538 fold_build2_loc (loc, code, type, a01, a10));
8539 else if (commutative && operand_equal_p (a01, a10, 0))
8540 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8541 fold_build2_loc (loc, code, type, a00, a11));
8543 /* This case is tricky because we must either have commutative
8544 operators or else A10 must not have side-effects. */
8546 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8547 && operand_equal_p (a01, a11, 0))
8548 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8549 fold_build2_loc (loc, code, type, a00, a10),
8553 /* See if we can build a range comparison. */
8554 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8557 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8558 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8560 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8562 return fold_build2_loc (loc, code, type, tem, arg1);
8565 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8566 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8568 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8570 return fold_build2_loc (loc, code, type, arg0, tem);
8573 /* Check for the possibility of merging component references. If our
8574 lhs is another similar operation, try to merge its rhs with our
8575 rhs. Then try to merge our lhs and rhs. */
8576 if (TREE_CODE (arg0) == code
8577 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8578 TREE_OPERAND (arg0, 1), arg1)))
8579 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8581 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8584 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8585 && (code == TRUTH_AND_EXPR
8586 || code == TRUTH_ANDIF_EXPR
8587 || code == TRUTH_OR_EXPR
8588 || code == TRUTH_ORIF_EXPR))
8590 enum tree_code ncode, icode;
8592 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8593 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8594 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8596 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8597 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8598 We don't want to pack more than two leaves to a non-IF AND/OR expression.
8600 If the tree code of the left-hand operand isn't an AND/OR-IF code and isn't
8601 equal to IF-CODE, then we don't want to add the right-hand operand.
8602 If the inner right-hand side of the left-hand operand has
8603 side effects, or isn't simple, then we can't add to it,
8604 as otherwise we might destroy the if-sequence. */
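/* For example, on such targets (a > 0 && b > 0) && c > 0 can become
   a > 0 && ((b > 0) & (c > 0)): the inner pair is evaluated
   unconditionally as a plain TRUTH_AND, leaving only one branch for
   the outermost AND-IF.  This is only done when both inner operands
   are simple and cannot trap.  */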
8605 if (TREE_CODE (arg0) == icode
8606 && simple_operand_p_2 (arg1)
8607 /* Needed for sequence points to handle trappings, and
8609 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8611 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8613 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8616 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8617 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8618 else if (TREE_CODE (arg1) == icode
8619 && simple_operand_p_2 (arg0)
8620 /* Needed for sequence points to handle trappings, and
8622 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8624 tem = fold_build2_loc (loc, ncode, type,
8625 arg0, TREE_OPERAND (arg1, 0));
8626 return fold_build2_loc (loc, icode, type, tem,
8627 TREE_OPERAND (arg1, 1));
8629 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B) into (A OR B).
8631 For sequence point consistency, we need to check for trapping,
8632 and side effects. */
8633 else if (code == icode && simple_operand_p_2 (arg0)
8634 && simple_operand_p_2 (arg1))
8635 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8641 /* Fold a binary expression of code CODE and type TYPE with operands
8642 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8643 Return the folded expression if folding is successful. Otherwise,
8644 return NULL_TREE. */
8647 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8649 enum tree_code compl_code;
8651 if (code == MIN_EXPR)
8652 compl_code = MAX_EXPR;
8653 else if (code == MAX_EXPR)
8654 compl_code = MIN_EXPR;
8658 /* MIN (MAX (a, b), b) == b. */
8659 if (TREE_CODE (op0) == compl_code
8660 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8661 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8663 /* MIN (MAX (b, a), b) == b. */
8664 if (TREE_CODE (op0) == compl_code
8665 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8666 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8667 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8669 /* MIN (a, MAX (a, b)) == a. */
8670 if (TREE_CODE (op1) == compl_code
8671 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8672 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8673 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8675 /* MIN (a, MAX (b, a)) == a. */
8676 if (TREE_CODE (op1) == compl_code
8677 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8678 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8679 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
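/* These identities hold because MAX (a, b) >= b and MIN (a, b) <= b
   for any a: e.g. in MIN (MAX (a, b), b) the first operand is never
   smaller than b, so the result is always b.  */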
8684 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8685 by changing CODE to reduce the magnitude of constants involved in
8686 ARG0 of the comparison.
8687 Returns a canonicalized comparison tree if a simplification was
8688 possible, otherwise returns NULL_TREE.
8689 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8690 valid if signed overflow is undefined. */
8693 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8694 tree arg0, tree arg1,
8695 bool *strict_overflow_p)
8697 enum tree_code code0 = TREE_CODE (arg0);
8698 tree t, cst0 = NULL_TREE;
8702 /* Match A +- CST code arg1 and CST code arg1. We can change the
8703 first form only if overflow is undefined. */
8704 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8705 /* In principle pointers also have undefined overflow behavior,
8706 but that causes problems elsewhere. */
8707 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8708 && (code0 == MINUS_EXPR
8709 || code0 == PLUS_EXPR)
8710 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8711 || code0 == INTEGER_CST))
8714 /* Identify the constant in arg0 and its sign. */
8715 if (code0 == INTEGER_CST)
8718 cst0 = TREE_OPERAND (arg0, 1);
8719 sgn0 = tree_int_cst_sgn (cst0);
8721 /* Overflowed constants and zero will cause problems. */
8722 if (integer_zerop (cst0)
8723 || TREE_OVERFLOW (cst0))
8726 /* See if we can reduce the magnitude of the constant in
8727 arg0 by changing the comparison code. */
8728 if (code0 == INTEGER_CST)
8730 /* CST <= arg1 -> CST-1 < arg1. */
8731 if (code == LE_EXPR && sgn0 == 1)
8733 /* -CST < arg1 -> -CST-1 <= arg1. */
8734 else if (code == LT_EXPR && sgn0 == -1)
8736 /* CST > arg1 -> CST-1 >= arg1. */
8737 else if (code == GT_EXPR && sgn0 == 1)
8739 /* -CST >= arg1 -> -CST-1 > arg1. */
8740 else if (code == GE_EXPR && sgn0 == -1)
8744 /* arg1 code' CST' might be more canonical. */
8749 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8751 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8753 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8754 else if (code == GT_EXPR
8755 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8757 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8758 else if (code == LE_EXPR
8759 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8761 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8762 else if (code == GE_EXPR
8763 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8767 *strict_overflow_p = true;
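/* For example, 5 <= x becomes 4 < x, and x - 5 < y becomes
   x - 4 <= y.  The first rewrite is always valid for integers; the
   second also needs x - 5 and x - 4 not to overflow, which is why
   *strict_overflow_p is set for the A +- CST forms.  */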
8770 /* Now build the constant reduced in magnitude. But not if that
8771 would produce one outside of its type's range. */
8772 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8774 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8775 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8777 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8778 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8779 /* We cannot swap the comparison here as that would cause us to
8780 endlessly recurse. */
8783 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8784 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8785 if (code0 != INTEGER_CST)
8786 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8787 t = fold_convert (TREE_TYPE (arg1), t);
8789 /* If swapping might yield a more canonical form, do so. */
8791 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8793 return fold_build2_loc (loc, code, type, t, arg1);
8796 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8797 overflow further. Try to decrease the magnitude of constants involved
8798 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8799 and put sole constants at the second argument position.
8800 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8803 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8804 tree arg0, tree arg1)
8807 bool strict_overflow_p;
8808 const char * const warnmsg = G_("assuming signed overflow does not occur "
8809 "when reducing constant in comparison");
8811 /* Try canonicalization by simplifying arg0. */
8812 strict_overflow_p = false;
8813 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8814 &strict_overflow_p);
8817 if (strict_overflow_p)
8818 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8822 /* Try canonicalization by simplifying arg1 using the swapped
8824 code = swap_tree_comparison (code);
8825 strict_overflow_p = false;
8826 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8827 &strict_overflow_p);
8828 if (t && strict_overflow_p)
8829 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8833 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8834 space. This is used to avoid issuing overflow warnings for
8835 expressions like &p->x which cannot wrap. */
8838 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8840 double_int di_offset, total;
8842 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8848 if (offset == NULL_TREE)
8849 di_offset = double_int_zero;
8850 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8853 di_offset = TREE_INT_CST (offset);
8856 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8857 total = di_offset.add_with_sign (units, true, &overflow);
8861 if (total.high != 0)
8864 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8868 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an array. */
8870 if (TREE_CODE (base) == ADDR_EXPR)
8872 HOST_WIDE_INT base_size;
8874 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8875 if (base_size > 0 && size < base_size)
8879 return total.low > (unsigned HOST_WIDE_INT) size;
8882 /* Subroutine of fold_binary. This routine performs all of the
8883 transformations that are common to the equality/inequality
8884 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8885 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8886 fold_binary should use fold_binary instead of calling this directly. Fold a comparison with
8887 tree code CODE and type TYPE with operands OP0 and OP1. Return
8888 the folded comparison or NULL_TREE. */
8891 fold_comparison (location_t loc, enum tree_code code, tree type,
8894 tree arg0, arg1, tem;
8899 STRIP_SIGN_NOPS (arg0);
8900 STRIP_SIGN_NOPS (arg1);
8902 tem = fold_relational_const (code, type, arg0, arg1);
8903 if (tem != NULL_TREE)
8906 /* If one arg is a real or integer constant, put it last. */
8907 if (tree_swap_operands_p (arg0, arg1, true))
8908 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8910 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8911 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8912 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8913 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8914 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8915 && (TREE_CODE (arg1) == INTEGER_CST
8916 && !TREE_OVERFLOW (arg1)))
8918 tree const1 = TREE_OPERAND (arg0, 1);
8920 tree variable = TREE_OPERAND (arg0, 0);
8923 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8925 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8926 TREE_TYPE (arg1), const2, const1);
8928 /* If the constant operation overflowed this can be
8929 simplified as a comparison against INT_MAX/INT_MIN. */
8930 if (TREE_CODE (lhs) == INTEGER_CST
8931 && TREE_OVERFLOW (lhs))
8933 int const1_sgn = tree_int_cst_sgn (const1);
8934 enum tree_code code2 = code;
8936 /* Get the sign of the constant on the lhs if the
8937 operation were VARIABLE + CONST1. */
8938 if (TREE_CODE (arg0) == MINUS_EXPR)
8939 const1_sgn = -const1_sgn;
8941 /* The sign of the constant determines if we overflowed
8942 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8943 Canonicalize to the INT_MIN overflow by swapping the comparison if necessary. */
8945 if (const1_sgn == -1)
8946 code2 = swap_tree_comparison (code);
8948 /* We now can look at the canonicalized case
8949 VARIABLE + 1 CODE2 INT_MIN
8950 and decide on the result. */
8951 if (code2 == LT_EXPR
8953 || code2 == EQ_EXPR)
8954 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8955 else if (code2 == NE_EXPR
8957 || code2 == GT_EXPR)
8958 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
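/* For example, with 32-bit signed int x, x + 1 <= INT_MIN would
   require x <= INT_MIN - 1, which cannot hold when signed overflow is
   undefined, so the comparison folds to false (keeping x only for its
   side effects).  */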
8961 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8962 && (TREE_CODE (lhs) != INTEGER_CST
8963 || !TREE_OVERFLOW (lhs)))
8965 if (code != EQ_EXPR && code != NE_EXPR)
8966 fold_overflow_warning ("assuming signed overflow does not occur "
8967 "when changing X +- C1 cmp C2 to "
8969 WARN_STRICT_OVERFLOW_COMPARISON);
8970 return fold_build2_loc (loc, code, type, variable, lhs);
8974 /* For comparisons of pointers we can decompose it to a compile time
8975 comparison of the base objects and the offsets into the object.
8976 This requires at least one operand being an ADDR_EXPR or a
8977 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8978 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8979 && (TREE_CODE (arg0) == ADDR_EXPR
8980 || TREE_CODE (arg1) == ADDR_EXPR
8981 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8982 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8984 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8985 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8986 enum machine_mode mode;
8987 int volatilep, unsignedp;
8988 bool indirect_base0 = false, indirect_base1 = false;
8990 /* Get base and offset for the access. Strip ADDR_EXPR for
8991 get_inner_reference, but put it back by stripping INDIRECT_REF
8992 off the base object if possible. indirect_baseN will be true
8993 if baseN is not an address but refers to the object itself. */
8995 if (TREE_CODE (arg0) == ADDR_EXPR)
8997 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8998 &bitsize, &bitpos0, &offset0, &mode,
8999 &unsignedp, &volatilep, false);
9000 if (TREE_CODE (base0) == INDIRECT_REF)
9001 base0 = TREE_OPERAND (base0, 0);
9003 indirect_base0 = true;
9005 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9007 base0 = TREE_OPERAND (arg0, 0);
9008 STRIP_SIGN_NOPS (base0);
9009 if (TREE_CODE (base0) == ADDR_EXPR)
9011 base0 = TREE_OPERAND (base0, 0);
9012 indirect_base0 = true;
9014 offset0 = TREE_OPERAND (arg0, 1);
9015 if (host_integerp (offset0, 0))
9017 HOST_WIDE_INT off = size_low_cst (offset0);
9018 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9020 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9022 bitpos0 = off * BITS_PER_UNIT;
9023 offset0 = NULL_TREE;
9029 if (TREE_CODE (arg1) == ADDR_EXPR)
9031 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9032 &bitsize, &bitpos1, &offset1, &mode,
9033 &unsignedp, &volatilep, false);
9034 if (TREE_CODE (base1) == INDIRECT_REF)
9035 base1 = TREE_OPERAND (base1, 0);
9037 indirect_base1 = true;
9039 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9041 base1 = TREE_OPERAND (arg1, 0);
9042 STRIP_SIGN_NOPS (base1);
9043 if (TREE_CODE (base1) == ADDR_EXPR)
9045 base1 = TREE_OPERAND (base1, 0);
9046 indirect_base1 = true;
9048 offset1 = TREE_OPERAND (arg1, 1);
9049 if (host_integerp (offset1, 0))
9051 HOST_WIDE_INT off = size_low_cst (offset1);
9052 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9054 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9056 bitpos1 = off * BITS_PER_UNIT;
9057 offset1 = NULL_TREE;
9062 /* A local variable can never be pointed to by
9063 the default SSA name of an incoming parameter. */
9064 if ((TREE_CODE (arg0) == ADDR_EXPR
9066 && TREE_CODE (base0) == VAR_DECL
9067 && auto_var_in_fn_p (base0, current_function_decl)
9069 && TREE_CODE (base1) == SSA_NAME
9070 && SSA_NAME_IS_DEFAULT_DEF (base1)
9071 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9072 || (TREE_CODE (arg1) == ADDR_EXPR
9074 && TREE_CODE (base1) == VAR_DECL
9075 && auto_var_in_fn_p (base1, current_function_decl)
9077 && TREE_CODE (base0) == SSA_NAME
9078 && SSA_NAME_IS_DEFAULT_DEF (base0)
9079 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9081 if (code == NE_EXPR)
9082 return constant_boolean_node (1, type);
9083 else if (code == EQ_EXPR)
9084 return constant_boolean_node (0, type);
9086 /* If we have equivalent bases we might be able to simplify. */
9087 else if (indirect_base0 == indirect_base1
9088 && operand_equal_p (base0, base1, 0))
9090 /* We can fold this expression to a constant if the non-constant
9091 offset parts are equal. */
9092 if ((offset0 == offset1
9093 || (offset0 && offset1
9094 && operand_equal_p (offset0, offset1, 0)))
9097 || (indirect_base0 && DECL_P (base0))
9098 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9103 && bitpos0 != bitpos1
9104 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9105 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9106 fold_overflow_warning (("assuming pointer wraparound does not "
9107 "occur when comparing P +- C1 with "
9109 WARN_STRICT_OVERFLOW_CONDITIONAL);
9114 return constant_boolean_node (bitpos0 == bitpos1, type);
9116 return constant_boolean_node (bitpos0 != bitpos1, type);
9118 return constant_boolean_node (bitpos0 < bitpos1, type);
9120 return constant_boolean_node (bitpos0 <= bitpos1, type);
9122 return constant_boolean_node (bitpos0 >= bitpos1, type);
9124 return constant_boolean_node (bitpos0 > bitpos1, type);
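/* For example, with struct S { int a; int b; } s, the comparison
   &s.a < &s.b has equal bases and constant bit positions 0 and 32
   (on a typical ILP32/LP64 target), so it folds to the constant
   true.  */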
9128 /* We can simplify the comparison to a comparison of the variable
9129 offset parts if the constant offset parts are equal.
9130 Be careful to use signed sizetype here because otherwise we
9131 mess with array offsets in the wrong way. This is possible
9132 because pointer arithmetic is restricted to remain within an
9133 object and overflow on pointer differences is undefined as of
9134 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9135 else if (bitpos0 == bitpos1
9136 && ((code == EQ_EXPR || code == NE_EXPR)
9137 || (indirect_base0 && DECL_P (base0))
9138 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9140 /* By converting to signed sizetype we cover middle-end pointer
9141 arithmetic which operates on unsigned pointer types of size
9142 type size and ARRAY_REF offsets which are properly sign or
9143 zero extended from their type in case it is narrower than sizetype. */
9145 if (offset0 == NULL_TREE)
9146 offset0 = build_int_cst (ssizetype, 0);
9148 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9149 if (offset1 == NULL_TREE)
9150 offset1 = build_int_cst (ssizetype, 0);
9152 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9156 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9157 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9158 fold_overflow_warning (("assuming pointer wraparound does not "
9159 "occur when comparing P +- C1 with "
9161 WARN_STRICT_OVERFLOW_COMPARISON);
9163 return fold_build2_loc (loc, code, type, offset0, offset1);
9166 /* For non-equal bases we can simplify if they are addresses
9167 of local binding decls or constants. */
9168 else if (indirect_base0 && indirect_base1
9169 /* We know that !operand_equal_p (base0, base1, 0)
9170 because the if condition was false. But make
9171 sure two decls are not the same. */
9173 && TREE_CODE (arg0) == ADDR_EXPR
9174 && TREE_CODE (arg1) == ADDR_EXPR
9175 && (((TREE_CODE (base0) == VAR_DECL
9176 || TREE_CODE (base0) == PARM_DECL)
9177 && (targetm.binds_local_p (base0)
9178 || CONSTANT_CLASS_P (base1)))
9179 || CONSTANT_CLASS_P (base0))
9180 && (((TREE_CODE (base1) == VAR_DECL
9181 || TREE_CODE (base1) == PARM_DECL)
9182 && (targetm.binds_local_p (base1)
9183 || CONSTANT_CLASS_P (base0)))
9184 || CONSTANT_CLASS_P (base1)))
9186 if (code == EQ_EXPR)
9187 return omit_two_operands_loc (loc, type, boolean_false_node,
9189 else if (code == NE_EXPR)
9190 return omit_two_operands_loc (loc, type, boolean_true_node,
9193 /* For equal offsets we can simplify to a comparison of the base addresses. */
9195 else if (bitpos0 == bitpos1
9197 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9199 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9200 && ((offset0 == offset1)
9201 || (offset0 && offset1
9202 && operand_equal_p (offset0, offset1, 0))))
9205 base0 = build_fold_addr_expr_loc (loc, base0);
9207 base1 = build_fold_addr_expr_loc (loc, base1);
9208 return fold_build2_loc (loc, code, type, base0, base1);
9212 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9213 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9214 the resulting offset is smaller in absolute value than the original one. */
9216 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9217 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9218 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9219 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9220 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9221 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9222 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9224 tree const1 = TREE_OPERAND (arg0, 1);
9225 tree const2 = TREE_OPERAND (arg1, 1);
9226 tree variable1 = TREE_OPERAND (arg0, 0);
9227 tree variable2 = TREE_OPERAND (arg1, 0);
9229 const char * const warnmsg = G_("assuming signed overflow does not "
9230 "occur when combining constants around "
9233 /* Put the constant on the side where it doesn't overflow and is
9234 of lower absolute value than before. */
9235 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9236 ? MINUS_EXPR : PLUS_EXPR,
9238 if (!TREE_OVERFLOW (cst)
9239 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9241 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9242 return fold_build2_loc (loc, code, type,
9244 fold_build2_loc (loc,
9245 TREE_CODE (arg1), TREE_TYPE (arg1),
9249 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9250 ? MINUS_EXPR : PLUS_EXPR,
9252 if (!TREE_OVERFLOW (cst)
9253 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9255 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9256 return fold_build2_loc (loc, code, type,
9257 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9263 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9264 signed arithmetic case. That form is created by the compiler
9265 often enough for folding it to be of value. One example is in
9266 computing loop trip counts after Operator Strength Reduction. */
9267 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9268 && TREE_CODE (arg0) == MULT_EXPR
9269 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9270 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9271 && integer_zerop (arg1))
9273 tree const1 = TREE_OPERAND (arg0, 1);
9274 tree const2 = arg1; /* zero */
9275 tree variable1 = TREE_OPERAND (arg0, 0);
9276 enum tree_code cmp_code = code;
9278 /* Handle unfolded multiplication by zero. */
9279 if (integer_zerop (const1))
9280 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9282 fold_overflow_warning (("assuming signed overflow does not occur when "
9283 "eliminating multiplication in comparison "
9285 WARN_STRICT_OVERFLOW_COMPARISON);
9287 /* If const1 is negative we swap the sense of the comparison. */
9288 if (tree_int_cst_sgn (const1) < 0)
9289 cmp_code = swap_tree_comparison (cmp_code);
9291 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
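/* For example, x * 4 > 0 folds to x > 0, and x * -4 > 0 folds to
   x < 0: with undefined signed overflow the product has the sign of
   x times the sign of the constant, so a negative constant just
   swaps the comparison.  */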
9294 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9298 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9300 tree targ0 = strip_float_extensions (arg0);
9301 tree targ1 = strip_float_extensions (arg1);
9302 tree newtype = TREE_TYPE (targ0);
9304 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9305 newtype = TREE_TYPE (targ1);
9307 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9308 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9309 return fold_build2_loc (loc, code, type,
9310 fold_convert_loc (loc, newtype, targ0),
9311 fold_convert_loc (loc, newtype, targ1));
9313 /* (-a) CMP (-b) -> b CMP a */
9314 if (TREE_CODE (arg0) == NEGATE_EXPR
9315 && TREE_CODE (arg1) == NEGATE_EXPR)
9316 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9317 TREE_OPERAND (arg0, 0));
9319 if (TREE_CODE (arg1) == REAL_CST)
9321 REAL_VALUE_TYPE cst;
9322 cst = TREE_REAL_CST (arg1);
9324 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9325 if (TREE_CODE (arg0) == NEGATE_EXPR)
9326 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9327 TREE_OPERAND (arg0, 0),
9328 build_real (TREE_TYPE (arg1),
9329 real_value_negate (&cst)));
9331 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9332 /* a CMP (-0) -> a CMP 0 */
9333 if (REAL_VALUE_MINUS_ZERO (cst))
9334 return fold_build2_loc (loc, code, type, arg0,
9335 build_real (TREE_TYPE (arg1), dconst0));
9337 /* x != NaN is always true, other ops are always false. */
9338 if (REAL_VALUE_ISNAN (cst)
9339 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9341 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9342 return omit_one_operand_loc (loc, type, tem, arg0);
9345 /* Fold comparisons against infinity. */
9346 if (REAL_VALUE_ISINF (cst)
9347 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9349 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9350 if (tem != NULL_TREE)
9355 /* If this is a comparison of a real constant with a PLUS_EXPR
9356 or a MINUS_EXPR of a real constant, we can convert it into a
9357 comparison with a revised real constant as long as no overflow
9358 occurs when unsafe_math_optimizations are enabled. */
9359 if (flag_unsafe_math_optimizations
9360 && TREE_CODE (arg1) == REAL_CST
9361 && (TREE_CODE (arg0) == PLUS_EXPR
9362 || TREE_CODE (arg0) == MINUS_EXPR)
9363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9364 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9365 ? MINUS_EXPR : PLUS_EXPR,
9366 arg1, TREE_OPERAND (arg0, 1)))
9367 && !TREE_OVERFLOW (tem))
9368 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9370 /* Likewise, we can simplify a comparison of a real constant with
9371 a MINUS_EXPR whose first operand is also a real constant, i.e.
9372 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9373 floating-point types only if -fassociative-math is set. */
9374 if (flag_associative_math
9375 && TREE_CODE (arg1) == REAL_CST
9376 && TREE_CODE (arg0) == MINUS_EXPR
9377 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9378 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9380 && !TREE_OVERFLOW (tem))
9381 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9382 TREE_OPERAND (arg0, 1), tem);
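/* For example, under -fassociative-math 10.0 - x < 4.0 becomes
   x > 6.0: the constants fold to 10.0 - 4.0, and the comparison is
   swapped because x appears negated on the left.  */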
9384 /* Fold comparisons against built-in math functions. */
9385 if (TREE_CODE (arg1) == REAL_CST
9386 && flag_unsafe_math_optimizations
9387 && ! flag_errno_math)
9389 enum built_in_function fcode = builtin_mathfn_code (arg0);
9391 if (fcode != END_BUILTINS)
9393 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9394 if (tem != NULL_TREE)
9400 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9401 && CONVERT_EXPR_P (arg0))
9403 /* If we are widening one operand of an integer comparison,
9404 see if the other operand is similarly being widened. Perhaps we
9405 can do the comparison in the narrower type. */
9406 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9410 /* Or if we are changing signedness. */
9411 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9416 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9417 constant, we can simplify it. */
9418 if (TREE_CODE (arg1) == INTEGER_CST
9419 && (TREE_CODE (arg0) == MIN_EXPR
9420 || TREE_CODE (arg0) == MAX_EXPR)
9421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9423 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9428 /* Simplify comparison of something with itself. (For IEEE
9429 floating-point, we can only do some of these simplifications.) */
9430 if (operand_equal_p (arg0, arg1, 0))
9435 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9436 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9437 return constant_boolean_node (1, type);
9442 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9443 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9444 return constant_boolean_node (1, type);
9445 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9448 /* For NE, we can only do this simplification if integer
9449 or we don't honor IEEE floating point NaNs. */
9450 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9451 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9453 /* ... fall through ... */
9456 return constant_boolean_node (0, type);
9462 /* If we are comparing an expression that just has comparisons
9463 of two integer values, arithmetic expressions of those comparisons,
9464 and constants, we can simplify it. There are only three cases
9465 to check: the two values can either be equal, the first can be
9466 greater, or the second can be greater. Fold the expression for
9467 those three values. Since each value must be 0 or 1, we have
9468 eight possibilities, each of which corresponds to the constant 0
9469 or 1 or one of the six possible comparisons.
9471 This handles common cases like (a > b) == 0 but also handles
9472 expressions like ((x > y) - (y > x)) > 0, which supposedly
9473 occur in macroized code. */
9475 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9477 tree cval1 = 0, cval2 = 0;
9480 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9481 /* Don't handle degenerate cases here; they should already
9482 have been handled anyway. */
9483 && cval1 != 0 && cval2 != 0
9484 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9485 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9486 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9487 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9488 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9489 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9490 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9492 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9493 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9495 /* We can't just pass T to eval_subst in case cval1 or cval2
9496 was the same as ARG1. */
9499 = fold_build2_loc (loc, code, type,
9500 eval_subst (loc, arg0, cval1, maxval,
9504 = fold_build2_loc (loc, code, type,
9505 eval_subst (loc, arg0, cval1, maxval,
9509 = fold_build2_loc (loc, code, type,
9510 eval_subst (loc, arg0, cval1, minval,
9514 /* All three of these results should be 0 or 1. Confirm they are.
9515 Then use those values to select the proper code to use. */
9517 if (TREE_CODE (high_result) == INTEGER_CST
9518 && TREE_CODE (equal_result) == INTEGER_CST
9519 && TREE_CODE (low_result) == INTEGER_CST)
9521 /* Make a 3-bit mask with the high-order bit being the
9522 value for `>', the next for `=', and the low for `<'. */
9523 switch ((integer_onep (high_result) * 4)
9524 + (integer_onep (equal_result) * 2)
9525 + integer_onep (low_result))
9529 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9550 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9555 tem = save_expr (build2 (code, type, cval1, cval2));
9556 SET_EXPR_LOCATION (tem, loc);
9559 return fold_build2_loc (loc, code, type, cval1, cval2);
9564 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9565 into a single range test. */
9566 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9567 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9568 && TREE_CODE (arg1) == INTEGER_CST
9569 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9570 && !integer_zerop (TREE_OPERAND (arg0, 1))
9571 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9572 && !TREE_OVERFLOW (arg1))
9574 tem = fold_div_compare (loc, code, type, arg0, arg1);
9575 if (tem != NULL_TREE)
9579 /* Fold ~X op ~Y as Y op X. */
9580 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9581 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9583 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9584 return fold_build2_loc (loc, code, type,
9585 fold_convert_loc (loc, cmp_type,
9586 TREE_OPERAND (arg1, 0)),
9587 TREE_OPERAND (arg0, 0));
9590 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9591 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9592 && TREE_CODE (arg1) == INTEGER_CST)
9594 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9595 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9596 TREE_OPERAND (arg0, 0),
9597 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9598 fold_convert_loc (loc, cmp_type, arg1)));
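/* For example, ~x < 5 becomes x > -6: since ~x == -x - 1, the
   comparison ~x < 5 is -x - 1 < 5, i.e. x > -6, and -6 is exactly
   ~5.  */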
9605 /* Subroutine of fold_binary. Optimize complex multiplications of the
9606 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9607 argument EXPR represents the expression "z" of type TYPE. */
9610 fold_mult_zconjz (location_t loc, tree type, tree expr)
9612 tree itype = TREE_TYPE (type);
9613 tree rpart, ipart, tem;
9615 if (TREE_CODE (expr) == COMPLEX_EXPR)
9617 rpart = TREE_OPERAND (expr, 0);
9618 ipart = TREE_OPERAND (expr, 1);
9620 else if (TREE_CODE (expr) == COMPLEX_CST)
9622 rpart = TREE_REALPART (expr);
9623 ipart = TREE_IMAGPART (expr);
9627 expr = save_expr (expr);
9628 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9629 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9632 rpart = save_expr (rpart);
9633 ipart = save_expr (ipart);
9634 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9635 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9636 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9637 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9638 build_zero_cst (itype));
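/* This is the algebraic identity (a + bi) * (a - bi) == a*a + b*b:
   the cross terms cancel, so the imaginary part of the product is
   always zero, which is why a zero constant is built above.  */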
9642 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9643 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9644 guarantees that P and N have the same least significant log2(M) bits.
9645 N is not otherwise constrained. In particular, N is not normalized to
9646 0 <= N < M as is common. In general, the precise value of P is unknown.
9647 M is chosen as large as possible such that constant N can be determined.
9649 Returns M and sets *RESIDUE to N.
9651 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9652 account. This is not always possible due to PR 35705.
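/* For example, if EXPR is the address of a variable known to be
   16-byte aligned, the result is M == 16 with *RESIDUE == 0; for
   that address plus 4, M is still 16 and *RESIDUE becomes 4.  */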
9655 static unsigned HOST_WIDE_INT
9656 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9657 bool allow_func_align)
9659 enum tree_code code;
9663 code = TREE_CODE (expr);
9664 if (code == ADDR_EXPR)
9666 unsigned int bitalign;
9667 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9668 *residue /= BITS_PER_UNIT;
9669 return bitalign / BITS_PER_UNIT;
9671 else if (code == POINTER_PLUS_EXPR)
9674 unsigned HOST_WIDE_INT modulus;
9675 enum tree_code inner_code;
9677 op0 = TREE_OPERAND (expr, 0);
9679 modulus = get_pointer_modulus_and_residue (op0, residue,
9682 op1 = TREE_OPERAND (expr, 1);
9684 inner_code = TREE_CODE (op1);
9685 if (inner_code == INTEGER_CST)
9687 *residue += TREE_INT_CST_LOW (op1);
9690 else if (inner_code == MULT_EXPR)
9692 op1 = TREE_OPERAND (op1, 1);
9693 if (TREE_CODE (op1) == INTEGER_CST)
9695 unsigned HOST_WIDE_INT align;
9697 /* Compute the greatest power-of-2 divisor of op1. */
9698 align = TREE_INT_CST_LOW (op1);
9701 /* If align is non-zero and less than modulus, replace
9702 modulus with align. If align is 0, then either op1 is 0
9703 or the greatest power-of-2 divisor of op1 doesn't fit in an
9704 unsigned HOST_WIDE_INT. In either case, no additional
9705 constraint is imposed. */
9707 modulus = MIN (modulus, align);
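/* For example, for p + n * 12 the greatest power-of-2 divisor of 12
   is 4, so whatever was known about p, the sum is only known modulo
   MIN (modulus, 4).  */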
9714 /* If we get here, we were unable to determine anything useful about the expression. */
9719 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9720 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9723 vec_cst_ctor_to_array (tree arg, tree *elts)
9725 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9727 if (TREE_CODE (arg) == VECTOR_CST)
9729 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9730 elts[i] = VECTOR_CST_ELT (arg, i);
9732 else if (TREE_CODE (arg) == CONSTRUCTOR)
9734 constructor_elt *elt;
9736 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9737 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9740 elts[i] = elt->value;
9744 for (; i < nelts; i++)
9746 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9750 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9751 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9752 NULL_TREE otherwise. */
9755 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9757 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9759 bool need_ctor = false;
9761 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9762 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9763 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9764 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9767 elts = XALLOCAVEC (tree, nelts * 3);
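/* The scratch array is laid out as three consecutive regions:
   elts[0 .. nelts-1] holds ARG0's elements, elts[nelts .. 2*nelts-1]
   holds ARG1's, and elts[2*nelts .. 3*nelts-1] receives the selected
   result, so SEL indexes directly into the first two regions.  */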
9768 if (!vec_cst_ctor_to_array (arg0, elts)
9769 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9772 for (i = 0; i < nelts; i++)
9774 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9776 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9781 vec<constructor_elt, va_gc> *v;
9782 vec_alloc (v, nelts);
9783 for (i = 0; i < nelts; i++)
9784 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9785 return build_constructor (type, v);
9788 return build_vector (type, &elts[2 * nelts]);
9791 /* Try to fold a pointer difference of type TYPE between two address
9792 expressions of array references AREF0 and AREF1 using location LOC.
9793 Return a simplified expression for the difference or NULL_TREE. */
9796 fold_addr_of_array_ref_difference (location_t loc, tree type,
9797 tree aref0, tree aref1)
9799 tree base0 = TREE_OPERAND (aref0, 0);
9800 tree base1 = TREE_OPERAND (aref1, 0);
9801 tree base_offset = build_int_cst (type, 0);
9803 /* If the bases are array references as well, recurse. If the bases
9804 are pointer indirections compute the difference of the pointers.
9805 If the bases are equal, we are set. */
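/* For example, &a[i] - &a[j] for an array of 4-byte elements folds
   to (i - j) * 4 (a byte difference); nested array references such
   as &a[i][k] - &a[j][l] are handled by recursing on the bases
   first.  */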
9806 if ((TREE_CODE (base0) == ARRAY_REF
9807 && TREE_CODE (base1) == ARRAY_REF
9809 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9810 || (INDIRECT_REF_P (base0)
9811 && INDIRECT_REF_P (base1)
9812 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9813 TREE_OPERAND (base0, 0),
9814 TREE_OPERAND (base1, 0))))
9815 || operand_equal_p (base0, base1, 0))
9817 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9818 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9819 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9820 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9821 return fold_build2_loc (loc, PLUS_EXPR, type,
9823 fold_build2_loc (loc, MULT_EXPR, type,
9829 /* If the real or vector real constant CST of type TYPE has an exact
9830 inverse, return it, else return NULL. */
9833 exact_inverse (tree type, tree cst)
9836 tree unit_type, *elts;
9837 enum machine_mode mode;
9838 unsigned vec_nelts, i;
9840 switch (TREE_CODE (cst))
9843 r = TREE_REAL_CST (cst);
9845 if (exact_real_inverse (TYPE_MODE (type), &r))
9846 return build_real (type, r);
9851 vec_nelts = VECTOR_CST_NELTS (cst);
9852 elts = XALLOCAVEC (tree, vec_nelts);
9853 unit_type = TREE_TYPE (type);
9854 mode = TYPE_MODE (unit_type);
9856 for (i = 0; i < vec_nelts; i++)
9858 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9859 if (!exact_real_inverse (mode, &r))
9861 elts[i] = build_real (unit_type, r);
9864 return build_vector (type, elts);
9871 /* Mask out the tz least significant bits of X of type TYPE where
9872 tz is the number of trailing zeroes in Y. */
9874 mask_with_tz (tree type, double_int x, double_int y)
9876 int tz = y.trailing_zeros ();
9882 mask = ~double_int::mask (tz);
9883 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
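/* For example, if Y is 24 (binary 11000) it has three trailing
   zeroes, so the result is X with its low three bits cleared,
   i.e. X & ~7.  */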
9889 /* Fold a binary expression of code CODE and type TYPE with operands
9890 OP0 and OP1. LOC is the location of the resulting expression.
9891 Return the folded expression if folding is successful. Otherwise,
9892 return NULL_TREE. */
9895 fold_binary_loc (location_t loc,
9896 enum tree_code code, tree type, tree op0, tree op1)
9898 enum tree_code_class kind = TREE_CODE_CLASS (code);
9899 tree arg0, arg1, tem;
9900 tree t1 = NULL_TREE;
9901 bool strict_overflow_p;
9903 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9904 && TREE_CODE_LENGTH (code) == 2
9906 && op1 != NULL_TREE);
9911 /* Strip any conversions that don't change the mode. This is
9912 safe for every expression, except for a comparison expression
9913 because its signedness is derived from its operands. So, in
9914 the latter case, only strip conversions that don't change the
9915 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments preserved.
9918 Note that this is done as an internal manipulation within the
9919 constant folder, in order to find the simplest representation
9920 of the arguments so that their form can be studied. In any
9921 cases, the appropriate type conversions should be put back in
9922 the tree that will get out of the constant folder. */
9924 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9926 STRIP_SIGN_NOPS (arg0);
9927 STRIP_SIGN_NOPS (arg1);
9935 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9936 constant but we can't do arithmetic on them. */
9937 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9938 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9939 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9940 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9941 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9942 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9943 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9945 if (kind == tcc_binary)
9947 /* Make sure type and arg0 have the same saturating flag. */
9948 gcc_assert (TYPE_SATURATING (type)
9949 == TYPE_SATURATING (TREE_TYPE (arg0)));
9950 tem = const_binop (code, arg0, arg1);
9952 else if (kind == tcc_comparison)
9953 tem = fold_relational_const (code, type, arg0, arg1);
9957 if (tem != NULL_TREE)
9959 if (TREE_TYPE (tem) != type)
9960 tem = fold_convert_loc (loc, type, tem);
9965 /* If this is a commutative operation, and ARG0 is a constant, move it
9966 to ARG1 to reduce the number of tests below. */
9967 if (commutative_tree_code (code)
9968 && tree_swap_operands_p (arg0, arg1, true))
9969 return fold_build2_loc (loc, code, type, op1, op0);
9971 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9973 First check for cases where an arithmetic operation is applied to a
9974 compound, conditional, or comparison operation. Push the arithmetic
9975 operation inside the compound or conditional to see if any folding
9976 can then be done. Convert comparison to conditional for this purpose.
9977 This also optimizes non-constant cases that used to be done in expand_expr.
9980 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9981 where one of the operands is a comparison and the other is a comparison, a
9982 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9983 code below would make the expression more complex. Change it to a
9984 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9985 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9987 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9988 || code == EQ_EXPR || code == NE_EXPR)
9989 && TREE_CODE (type) != VECTOR_TYPE
9990 && ((truth_value_p (TREE_CODE (arg0))
9991 && (truth_value_p (TREE_CODE (arg1))
9992 || (TREE_CODE (arg1) == BIT_AND_EXPR
9993 && integer_onep (TREE_OPERAND (arg1, 1)))))
9994 || (truth_value_p (TREE_CODE (arg1))
9995 && (truth_value_p (TREE_CODE (arg0))
9996 || (TREE_CODE (arg0) == BIT_AND_EXPR
9997 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9999 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10000 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10003 fold_convert_loc (loc, boolean_type_node, arg0),
10004 fold_convert_loc (loc, boolean_type_node, arg1));
10006 if (code == EQ_EXPR)
10007 tem = invert_truthvalue_loc (loc, tem);
10009 return fold_convert_loc (loc, type, tem);
10012 if (TREE_CODE_CLASS (code) == tcc_binary
10013 || TREE_CODE_CLASS (code) == tcc_comparison)
10015 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10017 tem = fold_build2_loc (loc, code, type,
10018 fold_convert_loc (loc, TREE_TYPE (op0),
10019 TREE_OPERAND (arg0, 1)), op1);
10020 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10023 if (TREE_CODE (arg1) == COMPOUND_EXPR
10024 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10026 tem = fold_build2_loc (loc, code, type, op0,
10027 fold_convert_loc (loc, TREE_TYPE (op1),
10028 TREE_OPERAND (arg1, 1)));
10029 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10033 if (TREE_CODE (arg0) == COND_EXPR
10034 || TREE_CODE (arg0) == VEC_COND_EXPR
10035 || COMPARISON_CLASS_P (arg0))
10037 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10039 /*cond_first_p=*/1);
10040 if (tem != NULL_TREE)
10044 if (TREE_CODE (arg1) == COND_EXPR
10045 || TREE_CODE (arg1) == VEC_COND_EXPR
10046 || COMPARISON_CLASS_P (arg1))
10048 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10050 /*cond_first_p=*/0);
10051 if (tem != NULL_TREE)
10059 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10060 if (TREE_CODE (arg0) == ADDR_EXPR
10061 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10063 tree iref = TREE_OPERAND (arg0, 0);
10064 return fold_build2 (MEM_REF, type,
10065 TREE_OPERAND (iref, 0),
10066 int_const_binop (PLUS_EXPR, arg1,
10067 TREE_OPERAND (iref, 1)));
10068 }
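/* For example, MEM[&MEM[p, 4], 8] becomes MEM[p, 12]: the two
   constant offsets are combined by int_const_binop above.  */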
10070 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10071 if (TREE_CODE (arg0) == ADDR_EXPR
10072 && handled_component_p (TREE_OPERAND (arg0, 0)))
10073 {
10074 tree base;
10075 HOST_WIDE_INT coffset;
10076 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10077 &coffset);
10078 if (!base)
10079 return NULL_TREE;
10080 return fold_build2 (MEM_REF, type,
10081 build_fold_addr_expr (base),
10082 int_const_binop (PLUS_EXPR, arg1,
10083 size_int (coffset)));
10084 }
10086 return NULL_TREE;
10088 case POINTER_PLUS_EXPR:
10089 /* 0 +p index -> (type)index */
10090 if (integer_zerop (arg0))
10091 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10093 /* PTR +p 0 -> PTR */
10094 if (integer_zerop (arg1))
10095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10097 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10098 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10099 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10100 return fold_convert_loc (loc, type,
10101 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10102 fold_convert_loc (loc, sizetype,
10103 arg1),
10104 fold_convert_loc (loc, sizetype,
10105 arg0)));
10107 /* (PTR +p B) +p A -> PTR +p (B + A) */
10108 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10109 {
10110 tree inner;
10111 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10112 tree arg00 = TREE_OPERAND (arg0, 0);
10113 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10114 arg01, fold_convert_loc (loc, sizetype, arg1));
10115 return fold_convert_loc (loc, type,
10116 fold_build_pointer_plus_loc (loc,
10117 arg00, inner));
10118 }
10120 /* PTR_CST +p CST -> CST1 */
10121 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10122 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10123 fold_convert_loc (loc, type, arg1));
10125 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10126 of the array.  The loop optimizer sometimes produces this type of
10127 expression.  */
10128 if (TREE_CODE (arg0) == ADDR_EXPR)
10129 {
10130 tem = try_move_mult_to_index (loc, arg0,
10131 fold_convert_loc (loc,
10132 ssizetype, arg1));
10133 if (tem)
10134 return fold_convert_loc (loc, type, tem);
10135 }
10137 return NULL_TREE;
10139 case PLUS_EXPR:
10140 /* A + (-B) -> A - B */
10141 if (TREE_CODE (arg1) == NEGATE_EXPR)
10142 return fold_build2_loc (loc, MINUS_EXPR, type,
10143 fold_convert_loc (loc, type, arg0),
10144 fold_convert_loc (loc, type,
10145 TREE_OPERAND (arg1, 0)));
10146 /* (-A) + B -> B - A */
10147 if (TREE_CODE (arg0) == NEGATE_EXPR
10148 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10149 return fold_build2_loc (loc, MINUS_EXPR, type,
10150 fold_convert_loc (loc, type, arg1),
10151 fold_convert_loc (loc, type,
10152 TREE_OPERAND (arg0, 0)));
10154 if (INTEGRAL_TYPE_P (type))
10156 /* Convert ~A + 1 to -A. */
10157 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10158 && integer_onep (arg1))
10159 return fold_build1_loc (loc, NEGATE_EXPR, type,
10160 fold_convert_loc (loc, type,
10161 TREE_OPERAND (arg0, 0)));
10163 /* ~X + X is -1. */
10164 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10165 && !TYPE_OVERFLOW_TRAPS (type))
10166 {
10167 tree tem = TREE_OPERAND (arg0, 0);
10169 STRIP_NOPS (tem);
10170 if (operand_equal_p (tem, arg1, 0))
10171 {
10172 t1 = build_int_cst_type (type, -1);
10173 return omit_one_operand_loc (loc, type, t1, arg1);
10174 }
10175 }
10177 /* X + ~X is -1. */
10178 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10179 && !TYPE_OVERFLOW_TRAPS (type))
10180 {
10181 tree tem = TREE_OPERAND (arg1, 0);
10183 STRIP_NOPS (tem);
10184 if (operand_equal_p (arg0, tem, 0))
10185 {
10186 t1 = build_int_cst_type (type, -1);
10187 return omit_one_operand_loc (loc, type, t1, arg0);
10188 }
10189 }
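/* Both folds rely on ~x being -x - 1 in two's complement, so
   x + ~x is -1 for every x; e.g. for a 32-bit unsigned x,
   x + ~x folds to 0xffffffff.  The TYPE_OVERFLOW_TRAPS test
   keeps this away from trapping (-ftrapv) types.  */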
10191 /* X + (X / CST) * -CST is X % CST. */
10192 if (TREE_CODE (arg1) == MULT_EXPR
10193 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10194 && operand_equal_p (arg0,
10195 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10196 {
10197 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10198 tree cst1 = TREE_OPERAND (arg1, 1);
10199 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10200 cst0, cst1);
10201 if (sum && integer_zerop (sum))
10202 return fold_convert_loc (loc, type,
10203 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10204 TREE_TYPE (arg0), arg0,
10205 cst0));
10206 }
10207 }
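/* E.g. x + (x / 8) * -8 folds to x % 8: the fold fires only when
   the two constants cancel (CST + -CST folds to zero), which is
   exactly what the integer_zerop (sum) test above checks.  */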
10209 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10210 one. Make sure the type is not saturating and has the signedness of
10211 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10212 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10213 if ((TREE_CODE (arg0) == MULT_EXPR
10214 || TREE_CODE (arg1) == MULT_EXPR)
10215 && !TYPE_SATURATING (type)
10216 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10217 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10218 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10219 {
10220 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10221 if (tem)
10222 return tem;
10223 }
10225 if (! FLOAT_TYPE_P (type))
10227 if (integer_zerop (arg1))
10228 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10230 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10231 with a constant, and the two constants have no bits in common,
10232 we should treat this as a BIT_IOR_EXPR since this may produce more
10233 simplifications. */
10234 if (TREE_CODE (arg0) == BIT_AND_EXPR
10235 && TREE_CODE (arg1) == BIT_AND_EXPR
10236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10237 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10238 && integer_zerop (const_binop (BIT_AND_EXPR,
10239 TREE_OPERAND (arg0, 1),
10240 TREE_OPERAND (arg1, 1))))
10241 {
10242 code = BIT_IOR_EXPR;
10243 goto bit_ior;
10244 }
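/* E.g. (x & 0xF0) + (y & 0x0F): the masks share no bits, so no
   column of the addition can produce a carry and the PLUS acts
   exactly like an IOR; restarting as BIT_IOR_EXPR exposes the
   OR-based simplifications.  */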
10246 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10247 (plus (plus (mult) (mult)) (foo)) so that we can
10248 take advantage of the factoring cases below. */
10249 if (TYPE_OVERFLOW_WRAPS (type)
10250 && (((TREE_CODE (arg0) == PLUS_EXPR
10251 || TREE_CODE (arg0) == MINUS_EXPR)
10252 && TREE_CODE (arg1) == MULT_EXPR)
10253 || ((TREE_CODE (arg1) == PLUS_EXPR
10254 || TREE_CODE (arg1) == MINUS_EXPR)
10255 && TREE_CODE (arg0) == MULT_EXPR)))
10257 tree parg0, parg1, parg, marg;
10258 enum tree_code pcode;
10260 if (TREE_CODE (arg1) == MULT_EXPR)
10261 parg = arg0, marg = arg1;
10263 parg = arg1, marg = arg0;
10264 pcode = TREE_CODE (parg);
10265 parg0 = TREE_OPERAND (parg, 0);
10266 parg1 = TREE_OPERAND (parg, 1);
10267 STRIP_NOPS (parg0);
10268 STRIP_NOPS (parg1);
10270 if (TREE_CODE (parg0) == MULT_EXPR
10271 && TREE_CODE (parg1) != MULT_EXPR)
10272 return fold_build2_loc (loc, pcode, type,
10273 fold_build2_loc (loc, PLUS_EXPR, type,
10274 fold_convert_loc (loc, type,
10276 fold_convert_loc (loc, type,
10278 fold_convert_loc (loc, type, parg1));
10279 if (TREE_CODE (parg0) != MULT_EXPR
10280 && TREE_CODE (parg1) == MULT_EXPR)
10282 fold_build2_loc (loc, PLUS_EXPR, type,
10283 fold_convert_loc (loc, type, parg0),
10284 fold_build2_loc (loc, pcode, type,
10285 fold_convert_loc (loc, type, marg),
10286 fold_convert_loc (loc, type,
10292 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10293 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10294 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10296 /* Likewise if the operands are reversed. */
10297 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10300 /* Convert X + -C into X - C. */
10301 if (TREE_CODE (arg1) == REAL_CST
10302 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10304 tem = fold_negate_const (arg1, type);
10305 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10306 return fold_build2_loc (loc, MINUS_EXPR, type,
10307 fold_convert_loc (loc, type, arg0),
10308 fold_convert_loc (loc, type, tem));
10311 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10312 to __complex__ ( x, y ). This is not the same for SNaNs or
10313 if signed zeros are involved. */
10314 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10315 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10316 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10318 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10319 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10320 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10321 bool arg0rz = false, arg0iz = false;
10322 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10323 || (arg0i && (arg0iz = real_zerop (arg0i))))
10325 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10326 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10327 if (arg0rz && arg1i && real_zerop (arg1i))
10329 tree rp = arg1r ? arg1r
10330 : build1 (REALPART_EXPR, rtype, arg1);
10331 tree ip = arg0i ? arg0i
10332 : build1 (IMAGPART_EXPR, rtype, arg0);
10333 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10335 else if (arg0iz && arg1r && real_zerop (arg1r))
10337 tree rp = arg0r ? arg0r
10338 : build1 (REALPART_EXPR, rtype, arg0);
10339 tree ip = arg1i ? arg1i
10340 : build1 (IMAGPART_EXPR, rtype, arg1);
10341 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10346 if (flag_unsafe_math_optimizations
10347 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10348 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10349 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10352 /* Convert x+x into x*2.0. */
10353 if (operand_equal_p (arg0, arg1, 0)
10354 && SCALAR_FLOAT_TYPE_P (type))
10355 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10356 build_real (type, dconst2));
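/* x + x and x * 2.0 are bit-for-bit identical in binary floating
   point (even for NaNs and infinities), so unlike most float
   transforms this one needs no unsafe-math guard.  */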
10358 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10359 We associate floats only if the user has specified
10360 -fassociative-math. */
10361 if (flag_associative_math
10362 && TREE_CODE (arg1) == PLUS_EXPR
10363 && TREE_CODE (arg0) != MULT_EXPR)
10365 tree tree10 = TREE_OPERAND (arg1, 0);
10366 tree tree11 = TREE_OPERAND (arg1, 1);
10367 if (TREE_CODE (tree11) == MULT_EXPR
10368 && TREE_CODE (tree10) == MULT_EXPR)
10371 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10372 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10375 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10376 We associate floats only if the user has specified
10377 -fassociative-math. */
10378 if (flag_associative_math
10379 && TREE_CODE (arg0) == PLUS_EXPR
10380 && TREE_CODE (arg1) != MULT_EXPR)
10382 tree tree00 = TREE_OPERAND (arg0, 0);
10383 tree tree01 = TREE_OPERAND (arg0, 1);
10384 if (TREE_CODE (tree01) == MULT_EXPR
10385 && TREE_CODE (tree00) == MULT_EXPR)
10388 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10389 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10390 }
10391 }
10394 bit_rotate:
10395 /* If A is unsigned and C1+C2 is the size of A, then
10396 (A << C1) + (A >> C2) is a rotate of A by C1 bits.  */
10397 /* If A is unsigned and Z is the size of A, then
10398 (A << B) + (A >> (Z - B)) is a rotate of A by B bits.  */
10399 {
10400 enum tree_code code0, code1;
10401 tree rtype;
10402 code0 = TREE_CODE (arg0);
10403 code1 = TREE_CODE (arg1);
10404 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10405 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10406 && operand_equal_p (TREE_OPERAND (arg0, 0),
10407 TREE_OPERAND (arg1, 0), 0)
10408 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10409 TYPE_UNSIGNED (rtype))
10410 /* Only create rotates in complete modes. Other cases are not
10411 expanded properly. */
10412 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10414 tree tree01, tree11;
10415 enum tree_code code01, code11;
10417 tree01 = TREE_OPERAND (arg0, 1);
10418 tree11 = TREE_OPERAND (arg1, 1);
10419 STRIP_NOPS (tree01);
10420 STRIP_NOPS (tree11);
10421 code01 = TREE_CODE (tree01);
10422 code11 = TREE_CODE (tree11);
10423 if (code01 == INTEGER_CST
10424 && code11 == INTEGER_CST
10425 && TREE_INT_CST_HIGH (tree01) == 0
10426 && TREE_INT_CST_HIGH (tree11) == 0
10427 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10428 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10430 tem = build2_loc (loc, LROTATE_EXPR,
10431 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10432 TREE_OPERAND (arg0, 0),
10433 code0 == LSHIFT_EXPR ? tree01 : tree11);
10434 return fold_convert_loc (loc, type, tem);
10436 else if (code11 == MINUS_EXPR)
10438 tree tree110, tree111;
10439 tree110 = TREE_OPERAND (tree11, 0);
10440 tree111 = TREE_OPERAND (tree11, 1);
10441 STRIP_NOPS (tree110);
10442 STRIP_NOPS (tree111);
10443 if (TREE_CODE (tree110) == INTEGER_CST
10444 && 0 == compare_tree_int (tree110,
10446 (TREE_TYPE (TREE_OPERAND
10448 && operand_equal_p (tree01, tree111, 0))
10449 return
10450 fold_convert_loc (loc, type,
10451 build2 ((code0 == LSHIFT_EXPR
10452 ? LROTATE_EXPR
10453 : RROTATE_EXPR),
10454 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10455 TREE_OPERAND (arg0, 0), tree01));
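/* Sketch, for a 32-bit unsigned x: both
     (x << 3) + (x >> 29)        (constant counts summing to 32)
     (x << n) + (x >> (32 - n))  (variable count)
   are recognized here and become a rotate of x by 3 (resp. n)
   bits.  The PLUS is safe because the unsignedness check above
   guarantees the two shifted halves share no bits.  */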
10457 else if (code01 == MINUS_EXPR)
10459 tree tree010, tree011;
10460 tree010 = TREE_OPERAND (tree01, 0);
10461 tree011 = TREE_OPERAND (tree01, 1);
10462 STRIP_NOPS (tree010);
10463 STRIP_NOPS (tree011);
10464 if (TREE_CODE (tree010) == INTEGER_CST
10465 && 0 == compare_tree_int (tree010,
10467 (TREE_TYPE (TREE_OPERAND
10469 && operand_equal_p (tree11, tree011, 0))
10470 return fold_convert_loc
10471 (loc, type,
10472 build2 ((code0 != LSHIFT_EXPR
10473 ? LROTATE_EXPR
10474 : RROTATE_EXPR),
10475 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10476 TREE_OPERAND (arg0, 0), tree11));
10477 }
10478 }
10479 }
10481 associate:
10482 /* In most languages, we can't associate operations on floats through
10483 parentheses.  Rather than remember where the parentheses were, we
10484 don't associate floats at all, unless the user has specified
10485 -fassociative-math.
10486 And, we need to make sure type is not saturating. */
10488 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10489 && !TYPE_SATURATING (type))
10491 tree var0, con0, lit0, minus_lit0;
10492 tree var1, con1, lit1, minus_lit1;
10493 tree atype = type;
10494 bool ok = true;
10496 /* Split both trees into variables, constants, and literals. Then
10497 associate each group together, the constants with literals,
10498 then the result with variables. This increases the chances of
10499 literals being recombined later and of generating relocatable
10500 expressions for the sum of a constant and literal. */
10501 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10502 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10503 code == MINUS_EXPR);
10505 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10506 if (code == MINUS_EXPR)
10507 code = PLUS_EXPR;
10509 /* With undefined overflow prefer doing association in a type
10510 which wraps on overflow, if that is one of the operand types. */
10511 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10512 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10513 {
10514 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10515 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10516 atype = TREE_TYPE (arg0);
10517 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10518 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10519 atype = TREE_TYPE (arg1);
10520 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10521 }
10523 /* With undefined overflow we can only associate constants with one
10524 variable, and constants whose association doesn't overflow. */
10525 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10526 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10527 {
10528 if (var0 && var1)
10529 {
10530 tree tmp0 = var0;
10531 tree tmp1 = var1;
10533 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10534 tmp0 = TREE_OPERAND (tmp0, 0);
10535 if (CONVERT_EXPR_P (tmp0)
10536 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10537 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10538 <= TYPE_PRECISION (atype)))
10539 tmp0 = TREE_OPERAND (tmp0, 0);
10540 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10541 tmp1 = TREE_OPERAND (tmp1, 0);
10542 if (CONVERT_EXPR_P (tmp1)
10543 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10544 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10545 <= TYPE_PRECISION (atype)))
10546 tmp1 = TREE_OPERAND (tmp1, 0);
10547 /* The only case we can still associate with two variables
10548 is if they are the same, modulo negation and bit-pattern
10549 preserving conversions. */
10550 if (!operand_equal_p (tmp0, tmp1, 0))
10551 ok = false;
10552 }
10553 }
10555 /* Only do something if we found more than two objects. Otherwise,
10556 nothing has changed and we risk infinite recursion. */
10557 if (ok
10558 && (2 < ((var0 != 0) + (var1 != 0)
10559 + (con0 != 0) + (con1 != 0)
10560 + (lit0 != 0) + (lit1 != 0)
10561 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10563 bool any_overflows = false;
10564 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10565 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10566 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10567 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10568 var0 = associate_trees (loc, var0, var1, code, atype);
10569 con0 = associate_trees (loc, con0, con1, code, atype);
10570 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10571 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10574 /* Preserve the MINUS_EXPR if the negative part of the literal is
10575 greater than the positive part. Otherwise, the multiplicative
10576 folding code (i.e. extract_muldiv) may be fooled in case
10577 unsigned constants are subtracted, like in the following
10578 example: ((X*2 + 4) - 8U)/2. */
10579 if (minus_lit0 && lit0)
10581 if (TREE_CODE (lit0) == INTEGER_CST
10582 && TREE_CODE (minus_lit0) == INTEGER_CST
10583 && tree_int_cst_lt (lit0, minus_lit0))
10585 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10586 MINUS_EXPR, atype);
10591 lit0 = associate_trees (loc, lit0, minus_lit0,
10592 MINUS_EXPR, atype);
10597 /* Don't introduce overflows through reassociation. */
10598 if (!any_overflows
10599 && ((lit0 && TREE_OVERFLOW (lit0))
10600 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10601 return NULL_TREE;
10603 if (minus_lit0)
10604 {
10605 if (con0 == 0)
10606 return
10607 fold_convert_loc (loc, type,
10608 associate_trees (loc, var0, minus_lit0,
10609 MINUS_EXPR, atype));
10610 else
10611 {
10612 con0 = associate_trees (loc, con0, minus_lit0,
10613 MINUS_EXPR, atype);
10614 return
10615 fold_convert_loc (loc, type,
10616 associate_trees (loc, var0, con0,
10617 PLUS_EXPR, atype));
10618 }
10619 }
10621 con0 = associate_trees (loc, con0, lit0, code, atype);
10622 return
10623 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10624 code, atype));
10625 }
10626 }
10628 return NULL_TREE;
10630 case MINUS_EXPR:
10631 /* Pointer simplifications for subtraction, simple reassociations. */
10632 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10633 {
10634 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10635 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10636 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10637 {
10638 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10639 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10640 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10641 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10642 return fold_build2_loc (loc, PLUS_EXPR, type,
10643 fold_build2_loc (loc, MINUS_EXPR, type,
10644 arg00, arg10),
10645 fold_build2_loc (loc, MINUS_EXPR, type,
10646 arg01, arg11));
10647 }
10648 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10649 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10650 {
10651 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10652 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10653 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10654 fold_convert_loc (loc, type, arg1));
10655 if (tmp)
10656 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10657 }
10659 /* A - (-B) -> A + B */
10660 if (TREE_CODE (arg1) == NEGATE_EXPR)
10661 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10662 fold_convert_loc (loc, type,
10663 TREE_OPERAND (arg1, 0)));
10664 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10665 if (TREE_CODE (arg0) == NEGATE_EXPR
10666 && (FLOAT_TYPE_P (type)
10667 || INTEGRAL_TYPE_P (type))
10668 && negate_expr_p (arg1)
10669 && reorder_operands_p (arg0, arg1))
10670 return fold_build2_loc (loc, MINUS_EXPR, type,
10671 fold_convert_loc (loc, type,
10672 negate_expr (arg1)),
10673 fold_convert_loc (loc, type,
10674 TREE_OPERAND (arg0, 0)));
10675 /* Convert -A - 1 to ~A. */
10676 if (INTEGRAL_TYPE_P (type)
10677 && TREE_CODE (arg0) == NEGATE_EXPR
10678 && integer_onep (arg1)
10679 && !TYPE_OVERFLOW_TRAPS (type))
10680 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10681 fold_convert_loc (loc, type,
10682 TREE_OPERAND (arg0, 0)));
10684 /* Convert -1 - A to ~A. */
10685 if (INTEGRAL_TYPE_P (type)
10686 && integer_all_onesp (arg0))
10687 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10690 /* X - (X / CST) * CST is X % CST. */
10691 if (INTEGRAL_TYPE_P (type)
10692 && TREE_CODE (arg1) == MULT_EXPR
10693 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10694 && operand_equal_p (arg0,
10695 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10696 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10697 TREE_OPERAND (arg1, 1), 0))
10698 return
10699 fold_convert_loc (loc, type,
10700 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10701 arg0, TREE_OPERAND (arg1, 1)));
10703 if (! FLOAT_TYPE_P (type))
10705 if (integer_zerop (arg0))
10706 return negate_expr (fold_convert_loc (loc, type, arg1));
10707 if (integer_zerop (arg1))
10708 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10710 /* Fold A - (A & B) into ~B & A. */
10711 if (!TREE_SIDE_EFFECTS (arg0)
10712 && TREE_CODE (arg1) == BIT_AND_EXPR)
10714 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10716 tree arg10 = fold_convert_loc (loc, type,
10717 TREE_OPERAND (arg1, 0));
10718 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10719 fold_build1_loc (loc, BIT_NOT_EXPR,
10721 fold_convert_loc (loc, type, arg0));
10723 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10725 tree arg11 = fold_convert_loc (loc,
10726 type, TREE_OPERAND (arg1, 1));
10727 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10728 fold_build1_loc (loc, BIT_NOT_EXPR,
10730 fold_convert_loc (loc, type, arg0));
10734 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10735 any power of 2 minus 1. */
10736 if (TREE_CODE (arg0) == BIT_AND_EXPR
10737 && TREE_CODE (arg1) == BIT_AND_EXPR
10738 && operand_equal_p (TREE_OPERAND (arg0, 0),
10739 TREE_OPERAND (arg1, 0), 0))
10741 tree mask0 = TREE_OPERAND (arg0, 1);
10742 tree mask1 = TREE_OPERAND (arg1, 1);
10743 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10745 if (operand_equal_p (tem, mask1, 0))
10747 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10748 TREE_OPERAND (arg0, 0), mask1);
10749 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
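/* E.g. with B == 7, a power of 2 minus 1:
   (a & ~7) - (a & 7) becomes (a ^ 7) - 7, replacing one of the
   ANDs by a constant subtraction.  */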
10754 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10755 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10756 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10758 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10759 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10760 (-ARG1 + ARG0) reduces to -ARG1. */
10761 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10762 return negate_expr (fold_convert_loc (loc, type, arg1));
10764 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10765 __complex__ ( x, -y ). This is not the same for SNaNs or if
10766 signed zeros are involved. */
10767 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10768 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10769 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10771 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10772 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10773 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10774 bool arg0rz = false, arg0iz = false;
10775 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10776 || (arg0i && (arg0iz = real_zerop (arg0i))))
10778 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10779 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10780 if (arg0rz && arg1i && real_zerop (arg1i))
10782 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10783 arg1r ? arg1r
10784 : build1 (REALPART_EXPR, rtype, arg1));
10785 tree ip = arg0i ? arg0i
10786 : build1 (IMAGPART_EXPR, rtype, arg0);
10787 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10789 else if (arg0iz && arg1r && real_zerop (arg1r))
10791 tree rp = arg0r ? arg0r
10792 : build1 (REALPART_EXPR, rtype, arg0);
10793 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10794 arg1i ? arg1i
10795 : build1 (IMAGPART_EXPR, rtype, arg1));
10796 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10801 /* Fold &x - &x. This can happen from &x.foo - &x.
10802 This is unsafe for certain floats even in non-IEEE formats.
10803 In IEEE, it is unsafe because it does wrong for NaNs.
10804 Also note that operand_equal_p is always false if an operand
10805 is volatile.  */
10807 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10808 && operand_equal_p (arg0, arg1, 0))
10809 return build_zero_cst (type);
10811 /* A - B -> A + (-B) if B is easily negatable. */
10812 if (negate_expr_p (arg1)
10813 && ((FLOAT_TYPE_P (type)
10814 /* Avoid this transformation if B is a positive REAL_CST. */
10815 && (TREE_CODE (arg1) != REAL_CST
10816 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10817 || INTEGRAL_TYPE_P (type)))
10818 return fold_build2_loc (loc, PLUS_EXPR, type,
10819 fold_convert_loc (loc, type, arg0),
10820 fold_convert_loc (loc, type,
10821 negate_expr (arg1)));
10823 /* Try folding difference of addresses. */
10824 {
10825 HOST_WIDE_INT diff;
10827 if ((TREE_CODE (arg0) == ADDR_EXPR
10828 || TREE_CODE (arg1) == ADDR_EXPR)
10829 && ptr_difference_const (arg0, arg1, &diff))
10830 return build_int_cst_type (type, diff);
10831 }
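/* E.g. the difference of &x.foo and &x reduces to the constant
   byte offset of the member foo inside x, leaving no runtime
   subtraction.  */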
10833 /* Fold &a[i] - &a[j] to i-j. */
10834 if (TREE_CODE (arg0) == ADDR_EXPR
10835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10836 && TREE_CODE (arg1) == ADDR_EXPR
10837 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10838 {
10839 tree tem = fold_addr_of_array_ref_difference (loc, type,
10840 TREE_OPERAND (arg0, 0),
10841 TREE_OPERAND (arg1, 0));
10842 if (tem)
10843 return tem;
10844 }
10846 if (FLOAT_TYPE_P (type)
10847 && flag_unsafe_math_optimizations
10848 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10849 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10850 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10853 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10854 one. Make sure the type is not saturating and has the signedness of
10855 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10856 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10857 if ((TREE_CODE (arg0) == MULT_EXPR
10858 || TREE_CODE (arg1) == MULT_EXPR)
10859 && !TYPE_SATURATING (type)
10860 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10861 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10862 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10863 {
10864 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10865 if (tem)
10866 return tem;
10867 }
10869 goto associate;
10871 case MULT_EXPR:
10872 /* (-A) * (-B) -> A * B */
10873 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10874 return fold_build2_loc (loc, MULT_EXPR, type,
10875 fold_convert_loc (loc, type,
10876 TREE_OPERAND (arg0, 0)),
10877 fold_convert_loc (loc, type,
10878 negate_expr (arg1)));
10879 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10880 return fold_build2_loc (loc, MULT_EXPR, type,
10881 fold_convert_loc (loc, type,
10882 negate_expr (arg0)),
10883 fold_convert_loc (loc, type,
10884 TREE_OPERAND (arg1, 0)));
10886 if (! FLOAT_TYPE_P (type))
10888 if (integer_zerop (arg1))
10889 return omit_one_operand_loc (loc, type, arg1, arg0);
10890 if (integer_onep (arg1))
10891 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10892 /* Transform x * -1 into -x. Make sure to do the negation
10893 on the original operand with conversions not stripped
10894 because we can only strip non-sign-changing conversions. */
10895 if (integer_all_onesp (arg1))
10896 return fold_convert_loc (loc, type, negate_expr (op0));
10897 /* Transform x * -C into -x * C if x is easily negatable. */
10898 if (TREE_CODE (arg1) == INTEGER_CST
10899 && tree_int_cst_sgn (arg1) == -1
10900 && negate_expr_p (arg0)
10901 && (tem = negate_expr (arg1)) != arg1
10902 && !TREE_OVERFLOW (tem))
10903 return fold_build2_loc (loc, MULT_EXPR, type,
10904 fold_convert_loc (loc, type,
10905 negate_expr (arg0)),
10908 /* (a * (1 << b)) is (a << b) */
10909 if (TREE_CODE (arg1) == LSHIFT_EXPR
10910 && integer_onep (TREE_OPERAND (arg1, 0)))
10911 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10912 TREE_OPERAND (arg1, 1));
10913 if (TREE_CODE (arg0) == LSHIFT_EXPR
10914 && integer_onep (TREE_OPERAND (arg0, 0)))
10915 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10916 TREE_OPERAND (arg0, 1));
10918 /* (A + A) * C -> A * 2 * C */
10919 if (TREE_CODE (arg0) == PLUS_EXPR
10920 && TREE_CODE (arg1) == INTEGER_CST
10921 && operand_equal_p (TREE_OPERAND (arg0, 0),
10922 TREE_OPERAND (arg0, 1), 0))
10923 return fold_build2_loc (loc, MULT_EXPR, type,
10924 omit_one_operand_loc (loc, type,
10925 TREE_OPERAND (arg0, 0),
10926 TREE_OPERAND (arg0, 1)),
10927 fold_build2_loc (loc, MULT_EXPR, type,
10928 build_int_cst (type, 2) , arg1));
10930 strict_overflow_p = false;
10931 if (TREE_CODE (arg1) == INTEGER_CST
10932 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10933 &strict_overflow_p)))
10934 {
10935 if (strict_overflow_p)
10936 fold_overflow_warning (("assuming signed overflow does not "
10937 "occur when simplifying "
10938 "multiplication"),
10939 WARN_STRICT_OVERFLOW_MISC);
10940 return fold_convert_loc (loc, type, tem);
10941 }
10943 /* Optimize z * conj(z) for integer complex numbers. */
10944 if (TREE_CODE (arg0) == CONJ_EXPR
10945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10946 return fold_mult_zconjz (loc, type, arg1);
10947 if (TREE_CODE (arg1) == CONJ_EXPR
10948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10949 return fold_mult_zconjz (loc, type, arg0);
10953 /* Maybe fold x * 0 to 0. The expressions aren't the same
10954 when x is NaN, since x * 0 is also NaN. Nor are they the
10955 same in modes with signed zeros, since multiplying a
10956 negative value by 0 gives -0, not +0. */
10957 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10958 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10959 && real_zerop (arg1))
10960 return omit_one_operand_loc (loc, type, arg1, arg0);
10961 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10962 Likewise for complex arithmetic with signed zeros. */
10963 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10964 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10965 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10966 && real_onep (arg1))
10967 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10969 /* Transform x * -1.0 into -x. */
10970 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10971 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10972 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10973 && real_minus_onep (arg1))
10974 return fold_convert_loc (loc, type, negate_expr (arg0));
10976 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10977 the result for floating point types due to rounding so it is applied
10978 only if -fassociative-math was specified.  */
10979 if (flag_associative_math
10980 && TREE_CODE (arg0) == RDIV_EXPR
10981 && TREE_CODE (arg1) == REAL_CST
10982 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10983 {
10984 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10985 arg1);
10986 if (tem)
10987 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10988 TREE_OPERAND (arg0, 1));
10989 }
10991 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10992 if (operand_equal_p (arg0, arg1, 0))
10994 tree tem = fold_strip_sign_ops (arg0);
10995 if (tem != NULL_TREE)
10997 tem = fold_convert_loc (loc, type, tem);
10998 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11002 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11003 This is not the same for NaNs or if signed zeros are
11005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11006 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11007 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11008 && TREE_CODE (arg1) == COMPLEX_CST
11009 && real_zerop (TREE_REALPART (arg1)))
11011 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11012 if (real_onep (TREE_IMAGPART (arg1)))
11014 fold_build2_loc (loc, COMPLEX_EXPR, type,
11015 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11017 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11018 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11020 fold_build2_loc (loc, COMPLEX_EXPR, type,
11021 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11022 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11026 /* Optimize z * conj(z) for floating point complex numbers.
11027 Guarded by flag_unsafe_math_optimizations as non-finite
11028 imaginary components don't produce scalar results. */
11029 if (flag_unsafe_math_optimizations
11030 && TREE_CODE (arg0) == CONJ_EXPR
11031 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11032 return fold_mult_zconjz (loc, type, arg1);
11033 if (flag_unsafe_math_optimizations
11034 && TREE_CODE (arg1) == CONJ_EXPR
11035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11036 return fold_mult_zconjz (loc, type, arg0);
11038 if (flag_unsafe_math_optimizations)
11040 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11041 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11043 /* Optimizations of root(...)*root(...). */
11044 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11047 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11048 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11050 /* Optimize sqrt(x)*sqrt(x) as x. */
11051 if (BUILTIN_SQRT_P (fcode0)
11052 && operand_equal_p (arg00, arg10, 0)
11053 && ! HONOR_SNANS (TYPE_MODE (type)))
11056 /* Optimize root(x)*root(y) as root(x*y). */
11057 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11058 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11059 return build_call_expr_loc (loc, rootfn, 1, arg);
11062 /* Optimize expN(x)*expN(y) as expN(x+y). */
11063 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11065 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11066 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11067 CALL_EXPR_ARG (arg0, 0),
11068 CALL_EXPR_ARG (arg1, 0));
11069 return build_call_expr_loc (loc, expfn, 1, arg);
11070 }
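/* Sketches of the transforms above (double flavors shown; the
   f/l variants fold the same way, since fcode0 == fcode1):
     sqrt (a) * sqrt (a)  ->  a            (when sNaNs need no honoring)
     sqrt (a) * sqrt (b)  ->  sqrt (a * b)
     exp (a) * exp (b)    ->  exp (a + b)  */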
11072 /* Optimizations of pow(...)*pow(...). */
11073 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11074 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11075 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11077 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11078 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11079 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11080 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11082 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11083 if (operand_equal_p (arg01, arg11, 0))
11085 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11086 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11088 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11091 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11092 if (operand_equal_p (arg00, arg10, 0))
11094 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11095 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11097 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11101 /* Optimize tan(x)*cos(x) as sin(x). */
11102 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11103 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11104 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11105 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11106 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11107 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11108 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11109 CALL_EXPR_ARG (arg1, 0), 0))
11111 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11113 if (sinfn != NULL_TREE)
11114 return build_call_expr_loc (loc, sinfn, 1,
11115 CALL_EXPR_ARG (arg0, 0));
11116 }
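/* E.g. tan (a) * cos (a) -> sin (a), in either operand order;
   mathfn_built_in picks the sin variant matching TYPE, so the
   tanf/cosf and tanl/cosl pairs map to sinf and sinl.  */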
11118 /* Optimize x*pow(x,c) as pow(x,c+1). */
11119 if (fcode1 == BUILT_IN_POW
11120 || fcode1 == BUILT_IN_POWF
11121 || fcode1 == BUILT_IN_POWL)
11123 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11124 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11125 if (TREE_CODE (arg11) == REAL_CST
11126 && !TREE_OVERFLOW (arg11)
11127 && operand_equal_p (arg0, arg10, 0))
11129 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11133 c = TREE_REAL_CST (arg11);
11134 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11135 arg = build_real (type, c);
11136 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11140 /* Optimize pow(x,c)*x as pow(x,c+1). */
11141 if (fcode0 == BUILT_IN_POW
11142 || fcode0 == BUILT_IN_POWF
11143 || fcode0 == BUILT_IN_POWL)
11145 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11146 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11147 if (TREE_CODE (arg01) == REAL_CST
11148 && !TREE_OVERFLOW (arg01)
11149 && operand_equal_p (arg1, arg00, 0))
11151 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11155 c = TREE_REAL_CST (arg01);
11156 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11157 arg = build_real (type, c);
11158 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11162 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11163 if (!in_gimple_form
11165 && operand_equal_p (arg0, arg1, 0))
11166 {
11167 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11169 if (powfn)
11170 {
11171 tree arg = build_real (type, dconst2);
11172 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11173 }
11174 }
11175 }
11177 goto associate;
11179 case BIT_IOR_EXPR:
11180 bit_ior:
11181 if (integer_all_onesp (arg1))
11182 return omit_one_operand_loc (loc, type, arg1, arg0);
11183 if (integer_zerop (arg1))
11184 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11185 if (operand_equal_p (arg0, arg1, 0))
11186 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11188 /* ~X | X is -1. */
11189 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11190 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11192 t1 = build_zero_cst (type);
11193 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11194 return omit_one_operand_loc (loc, type, t1, arg1);
11197 /* X | ~X is -1. */
11198 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11199 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11201 t1 = build_zero_cst (type);
11202 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11203 return omit_one_operand_loc (loc, type, t1, arg0);
11206 /* Canonicalize (X & C1) | C2. */
11207 if (TREE_CODE (arg0) == BIT_AND_EXPR
11208 && TREE_CODE (arg1) == INTEGER_CST
11209 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11211 double_int c1, c2, c3, msk;
11212 int width = TYPE_PRECISION (type), w;
11213 bool try_simplify = true;
11215 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11216 c2 = tree_to_double_int (arg1);
11218 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11219 if ((c1 & c2) == c1)
11220 return omit_one_operand_loc (loc, type, arg1,
11221 TREE_OPERAND (arg0, 0));
11223 msk = double_int::mask (width);
11225 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11226 if (msk.and_not (c1 | c2).is_zero ())
11227 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11228 TREE_OPERAND (arg0, 0), arg1);
11230 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11231 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11232 mode which allows further optimizations. */
11235 c3 = c1.and_not (c2);
11236 for (w = BITS_PER_UNIT;
11237 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11240 unsigned HOST_WIDE_INT mask
11241 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11242 if (((c1.low | c2.low) & mask) == mask
11243 && (c1.low & ~mask) == 0 && c1.high == 0)
11245 c3 = double_int::from_uhwi (mask);
11250 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11251 with that optimization from the BIT_AND_EXPR optimizations.
11252 This could end up in an infinite recursion. */
11253 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11254 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11257 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11258 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11260 try_simplify = (masked != c1);
11263 if (try_simplify && c3 != c1)
11264 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11265 fold_build2_loc (loc, BIT_AND_EXPR, type,
11266 TREE_OPERAND (arg0, 0),
11267 double_int_to_tree (type,
11272 /* (X & Y) | Y is (X, Y). */
11273 if (TREE_CODE (arg0) == BIT_AND_EXPR
11274 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11275 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11276 /* (X & Y) | X is (Y, X). */
11277 if (TREE_CODE (arg0) == BIT_AND_EXPR
11278 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11279 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11280 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11281 /* X | (X & Y) is (Y, X). */
11282 if (TREE_CODE (arg1) == BIT_AND_EXPR
11283 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11284 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11285 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11286 /* X | (Y & X) is (Y, X). */
11287 if (TREE_CODE (arg1) == BIT_AND_EXPR
11288 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11289 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11290 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11292 /* (X & ~Y) | (~X & Y) is X ^ Y */
11293 if (TREE_CODE (arg0) == BIT_AND_EXPR
11294 && TREE_CODE (arg1) == BIT_AND_EXPR)
11296 tree a0, a1, l0, l1, n0, n1;
11298 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11299 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11301 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11302 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11304 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11305 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11307 if ((operand_equal_p (n0, a0, 0)
11308 && operand_equal_p (n1, a1, 0))
11309 || (operand_equal_p (n0, a1, 0)
11310 && operand_equal_p (n1, a0, 0)))
11311 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
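/* E.g. (a & ~b) | (~a & b) -> a ^ b, the classic two-AND spelling
   of XOR; the n0/n1 trees built above exist only to recognize that
   shape, and the double negation folds away in the returned XOR.  */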
11314 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11315 if (t1 != NULL_TREE)
11318 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11320 This results in more efficient code for machines without a NAND
11321 instruction. Combine will canonicalize to the first form
11322 which will allow use of NAND instructions provided by the
11323 backend if they exist. */
11324 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11325 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11328 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11329 build2 (BIT_AND_EXPR, type,
11330 fold_convert_loc (loc, type,
11331 TREE_OPERAND (arg0, 0)),
11332 fold_convert_loc (loc, type,
11333 TREE_OPERAND (arg1, 0))));
11336 /* See if this can be simplified into a rotate first.  If that
11337 is unsuccessful, continue in the association code.  */
11338 goto bit_rotate;
11340 case BIT_XOR_EXPR:
11341 if (integer_zerop (arg1))
11342 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11343 if (integer_all_onesp (arg1))
11344 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11345 if (operand_equal_p (arg0, arg1, 0))
11346 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11348 /* ~X ^ X is -1. */
11349 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11352 t1 = build_zero_cst (type);
11353 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11354 return omit_one_operand_loc (loc, type, t1, arg1);
11357 /* X ^ ~X is -1. */
11358 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11359 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11361 t1 = build_zero_cst (type);
11362 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11363 return omit_one_operand_loc (loc, type, t1, arg0);
11366 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11367 with a constant, and the two constants have no bits in common,
11368 we should treat this as a BIT_IOR_EXPR since this may produce more
11369 simplifications. */
11370 if (TREE_CODE (arg0) == BIT_AND_EXPR
11371 && TREE_CODE (arg1) == BIT_AND_EXPR
11372 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11373 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11374 && integer_zerop (const_binop (BIT_AND_EXPR,
11375 TREE_OPERAND (arg0, 1),
11376 TREE_OPERAND (arg1, 1))))
11378 code = BIT_IOR_EXPR;
11382 /* (X | Y) ^ X -> Y & ~X.  */
11383 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11384 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11386 tree t2 = TREE_OPERAND (arg0, 1);
11387 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11389 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11390 fold_convert_loc (loc, type, t2),
11391 fold_convert_loc (loc, type, t1));
11395 /* (Y | X) ^ X -> Y & ~X.  */
11396 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11397 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11399 tree t2 = TREE_OPERAND (arg0, 0);
11400 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11402 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11403 fold_convert_loc (loc, type, t2),
11404 fold_convert_loc (loc, type, t1));
11408 /* X ^ (X | Y) -> Y & ~X.  */
11409 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11410 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11412 tree t2 = TREE_OPERAND (arg1, 1);
11413 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11415 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11416 fold_convert_loc (loc, type, t2),
11417 fold_convert_loc (loc, type, t1));
11421 /* X ^ (Y | X) -> Y & ~X.  */
11422 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11423 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11425 tree t2 = TREE_OPERAND (arg1, 0);
11426 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11428 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11429 fold_convert_loc (loc, type, t2),
11430 fold_convert_loc (loc, type, t1));
11434 /* Convert ~X ^ ~Y to X ^ Y. */
11435 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11436 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11437 return fold_build2_loc (loc, code, type,
11438 fold_convert_loc (loc, type,
11439 TREE_OPERAND (arg0, 0)),
11440 fold_convert_loc (loc, type,
11441 TREE_OPERAND (arg1, 0)));
11443 /* Convert ~X ^ C to X ^ ~C. */
11444 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11445 && TREE_CODE (arg1) == INTEGER_CST)
11446 return fold_build2_loc (loc, code, type,
11447 fold_convert_loc (loc, type,
11448 TREE_OPERAND (arg0, 0)),
11449 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11451 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11452 if (TREE_CODE (arg0) == BIT_AND_EXPR
11453 && integer_onep (TREE_OPERAND (arg0, 1))
11454 && integer_onep (arg1))
11455 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11456 build_zero_cst (TREE_TYPE (arg0)));
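/* E.g. (x & 1) ^ 1 -> (x & 1) == 0: flipping the low bit of a
   0/1 value is the same as testing it for zero, and the comparison
   form is easier for later passes to combine.  */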
11458 /* Fold (X & Y) ^ Y as ~X & Y. */
11459 if (TREE_CODE (arg0) == BIT_AND_EXPR
11460 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11462 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11463 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11464 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11465 fold_convert_loc (loc, type, arg1));
11467 /* Fold (X & Y) ^ X as ~Y & X. */
11468 if (TREE_CODE (arg0) == BIT_AND_EXPR
11469 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11470 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11472 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11473 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11474 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11475 fold_convert_loc (loc, type, arg1));
11477 /* Fold X ^ (X & Y) as X & ~Y. */
11478 if (TREE_CODE (arg1) == BIT_AND_EXPR
11479 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11481 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11482 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11483 fold_convert_loc (loc, type, arg0),
11484 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11486 /* Fold X ^ (Y & X) as ~Y & X. */
11487 if (TREE_CODE (arg1) == BIT_AND_EXPR
11488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11489 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11491 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11492 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11493 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11494 fold_convert_loc (loc, type, arg0));
11497 /* See if this can be simplified into a rotate first.  If that
11498 is unsuccessful, continue in the association code.  */
11499 goto bit_rotate;
11501 case BIT_AND_EXPR:
11502 if (integer_all_onesp (arg1))
11503 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11504 if (integer_zerop (arg1))
11505 return omit_one_operand_loc (loc, type, arg1, arg0);
11506 if (operand_equal_p (arg0, arg1, 0))
11507 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11509 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11510 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11511 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11512 || (TREE_CODE (arg0) == EQ_EXPR
11513 && integer_zerop (TREE_OPERAND (arg0, 1))))
11514 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11515 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11517 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11518 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11519 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11520 || (TREE_CODE (arg1) == EQ_EXPR
11521 && integer_zerop (TREE_OPERAND (arg1, 1))))
11522 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11523 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11525 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11526 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11527 && TREE_CODE (arg1) == INTEGER_CST
11528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11530 tree tmp1 = fold_convert_loc (loc, type, arg1);
11531 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11532 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11533 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11534 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11536 fold_convert_loc (loc, type,
11537 fold_build2_loc (loc, BIT_IOR_EXPR,
11538 type, tmp2, tmp3));
11541 /* (X | Y) & Y is (X, Y). */
11542 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11543 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11544 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11545 /* (X | Y) & X is (Y, X). */
11546 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11547 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11548 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11549 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11550 /* X & (X | Y) is (Y, X). */
11551 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11552 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11553 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11554 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11555 /* X & (Y | X) is (Y, X). */
11556 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11557 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11558 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11559 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11561 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11562 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11563 && integer_onep (TREE_OPERAND (arg0, 1))
11564 && integer_onep (arg1))
11567 tem = TREE_OPERAND (arg0, 0);
11568 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11569 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11571 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11572 build_zero_cst (TREE_TYPE (tem)));
11574 /* Fold ~X & 1 as (X & 1) == 0. */
11575 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11576 && integer_onep (arg1))
11579 tem = TREE_OPERAND (arg0, 0);
11580 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11581 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11583 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11584 build_zero_cst (TREE_TYPE (tem)));
11586 /* Fold !X & 1 as X == 0. */
11587 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11588 && integer_onep (arg1))
11590 tem = TREE_OPERAND (arg0, 0);
11591 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11592 build_zero_cst (TREE_TYPE (tem)));
11595 /* Fold (X ^ Y) & Y as ~X & Y. */
11596 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11597 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11599 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11600 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11601 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11602 fold_convert_loc (loc, type, arg1));
11604 /* Fold (X ^ Y) & X as ~Y & X. */
11605 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11607 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11609 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11610 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11611 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11612 fold_convert_loc (loc, type, arg1));
11614 /* Fold X & (X ^ Y) as X & ~Y. */
11615 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11616 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11618 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11619 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11620 fold_convert_loc (loc, type, arg0),
11621 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11623 /* Fold X & (Y ^ X) as ~Y & X. */
11624 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11625 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11626 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11628 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11629 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11630 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11631 fold_convert_loc (loc, type, arg0));
11634 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11635 multiple of 1 << CST. */
11636 if (TREE_CODE (arg1) == INTEGER_CST)
11638 double_int cst1 = tree_to_double_int (arg1);
11639 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11640 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11641 if ((cst1 & ncst1) == ncst1
11642 && multiple_of_p (type, arg0,
11643 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11644 return fold_convert_loc (loc, type, arg0);
11647 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11649 if (TREE_CODE (arg1) == INTEGER_CST
11650 && TREE_CODE (arg0) == MULT_EXPR
11651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11654 = mask_with_tz (type, tree_to_double_int (arg1),
11655 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11657 if (masked.is_zero ())
11658 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11660 else if (masked != tree_to_double_int (arg1))
11661 return fold_build2_loc (loc, code, type, op0,
11662 double_int_to_tree (type, masked));
11665 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11666 ((A & N) + B) & M -> (A + B) & M
11667 Similarly if (N & M) == 0,
11668 ((A | N) + B) & M -> (A + B) & M
11669 and for - instead of + (or unary - instead of +)
11670 and/or ^ instead of |.
11671 If B is constant and (B & M) == 0, fold into A & M. */
11672 if (host_integerp (arg1, 1))
11674 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11675 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11676 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11677 && (TREE_CODE (arg0) == PLUS_EXPR
11678 || TREE_CODE (arg0) == MINUS_EXPR
11679 || TREE_CODE (arg0) == NEGATE_EXPR)
11680 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11681 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11685 unsigned HOST_WIDE_INT cst0;
11687 /* Now we know that arg0 is (C + D) or (C - D) or
11688 -C and arg1 (M) is == (1LL << cst) - 1.
11689 Store C into PMOP[0] and D into PMOP[1]. */
11690 pmop[0] = TREE_OPERAND (arg0, 0);
11692 if (TREE_CODE (arg0) != NEGATE_EXPR)
11694 pmop[1] = TREE_OPERAND (arg0, 1);
11698 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11699 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11703 for (; which >= 0; which--)
11704 switch (TREE_CODE (pmop[which]))
11709 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11712 /* tree_low_cst is not used, because we don't care about
11713 the upper bits.  */
11714 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11716 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11721 else if (cst0 != 0)
11723 /* If C or D is of the form (A & N) where
11724 (N & M) == M, or of the form (A | N) or
11725 (A ^ N) where (N & M) == 0, replace it with A. */
11726 pmop[which] = TREE_OPERAND (pmop[which], 0);
11729 /* If C or D is a N where (N & M) == 0, it can be
11730 omitted (assumed 0). */
11731 if ((TREE_CODE (arg0) == PLUS_EXPR
11732 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11733 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11734 pmop[which] = NULL;
11740 /* Only build anything new if we optimized one or both arguments
11742 if (pmop[0] != TREE_OPERAND (arg0, 0)
11743 || (TREE_CODE (arg0) != NEGATE_EXPR
11744 && pmop[1] != TREE_OPERAND (arg0, 1)))
11746 tree utype = TREE_TYPE (arg0);
11747 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11749 /* Perform the operations in a type that has defined
11750 overflow behavior. */
11751 utype = unsigned_type_for (TREE_TYPE (arg0));
11752 if (pmop[0] != NULL)
11753 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11754 if (pmop[1] != NULL)
11755 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11758 if (TREE_CODE (arg0) == NEGATE_EXPR)
11759 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11760 else if (TREE_CODE (arg0) == PLUS_EXPR)
11762 if (pmop[0] != NULL && pmop[1] != NULL)
11763 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11765 else if (pmop[0] != NULL)
11767 else if (pmop[1] != NULL)
11770 return build_int_cst (type, 0);
11772 else if (pmop[0] == NULL)
11773 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11775 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11777 /* TEM is now the new binary +, - or unary - replacement. */
11778 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11779 fold_convert_loc (loc, utype, arg1));
11780 return fold_convert_loc (loc, type, tem);
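
      /* Editorial example (added in editing): with M == 0x0f and
	 N == 0x10 we have (N & M) == 0, so ((a | 0x10) + b) & 0x0f is
	 rewritten as (a + b) & 0x0f by the transformation above.  */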

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For an arithmetic shift, if the sign bit could be set,
		 zerobits can actually contain sign bits, so no
		 transformation is possible, unless MASK masks them all
		 away.  In that case the shift needs to be converted into
		 a logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      tree newmaskt;
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}
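
      /* Editorial example (added in editing): for a 32-bit unsigned X,
	 (X >> 28) & 0x0f already has its upper 28 result bits known to be
	 zero, so ZEROBITS == 0xfffffff0 and NEWMASK becomes the full-width
	 mask 0xffffffff, which later folds can drop entirely.  */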

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
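
      /* Editorial example (added in editing): with -freciprocal-math,
	 x / 5.0 becomes x * 0.2; x / 4.0 becomes x * 0.25 whenever we are
	 optimizing, because the reciprocal of a power of two is exact.  */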

      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));
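
      /* Editorial example (added in editing): under -freciprocal-math,
	 a / b / c becomes a / (b * c), trading one division for a
	 multiplication, and a / (b / c) becomes (a / b) * c.  */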

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (arg1))
		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		       + HOST_BITS_PER_WIDE_INT;

	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      build_int_cst (integer_type_node, pow2));
	    }
	}
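
      /* Editorial example (added in editing): for signed x,
	 (x & -16) / 16 satisfies A == 16, a positive power of 2 with
	 16 + (-16) == 0, so the division folds to x >> 4.  */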

      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
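
      /* Editorial example (added in editing): for unsigned a,
	 a / (2 << n) has B == 2 and so is simplified to a >> (n + 1).  */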

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
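
      /* Editorial example (added in editing): for unsigned x, x % 16
	 becomes x & 15, and x % (1 << n) becomes x & ((1 << n) - 1)
	 through the A % (C << N) variant above.  */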

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
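
      /* Editorial example (added in editing): (x << 3) << 5 combines into
	 x << 8; for 32-bit x, (x << 30) << 4 has a combined count of
	 34 >= 32, so the result is known to be zero and only the side
	 effects of the shifted operand are preserved.  */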

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
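
      /* Editorial example (added in editing): for 32-bit unsigned x,
	 (x >> 4) << 4 becomes x & (-1 << 4), i.e. x & 0xfffffff0.  */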

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
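
      /* Editorial examples (added in editing): on a 32-bit type, a
	 left-rotate by 8 is canonicalized to a right-rotate by 24 by the
	 first rule above, and two right-rotates whose counts sum to 32
	 cancel entirely by the last one.  */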

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0),
				   arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
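
      /* Editorial example (added in editing): given file-local
	 definitions "static int a, b;", the comparison &a == &b folds to
	 0 here because the two _DECL nodes are distinct.  */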

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
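
      /* Editorial example (added in editing): by the BIT_XOR_EXPR rule
	 just above, (x ^ 5) == 3 becomes x == (5 ^ 3), i.e. x == 6.  */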

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node
					: boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
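
      /* Editorial example (added in editing): for signed x, x % 4 == 0
	 is rewritten as (unsigned) x % 4 == 0, which a later fold can
	 turn into a simple mask test.  */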

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));
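
      /* Editorial example (added in editing): (a & 8) == 8 becomes
	 (a & 8) != 0, which maps to a single bit-test on most targets.  */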

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
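
      /* Editorial example (added in editing): for 32-bit int x,
	 (x >> 31) != 0 becomes x < 0 and (x >> 31) == 0 becomes
	 x >= 0.  */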
13091 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13092 if (integer_zerop (arg1)
13093 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13094 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13095 TREE_OPERAND (arg0, 1));
13097 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13098 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13099 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13100 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13101 build_zero_cst (TREE_TYPE (arg0)));
13102 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13103 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13105 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13106 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13107 build_zero_cst (TREE_TYPE (arg0)));
13109 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13110 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13111 && TREE_CODE (arg1) == INTEGER_CST
13112 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13113 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13114 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13115 TREE_OPERAND (arg0, 1), arg1));
13117 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13118 (X & C) == 0 when C is a single bit. */
13119 if (TREE_CODE (arg0) == BIT_AND_EXPR
13120 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13121 && integer_zerop (arg1)
13122 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13124 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13125 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13126 TREE_OPERAND (arg0, 1));
13127 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13129 fold_convert_loc (loc, TREE_TYPE (arg0),
13133 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13134 constant C is a power of two, i.e. a single bit. */
13135 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13136 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13137 && integer_zerop (arg1)
13138 && integer_pow2p (TREE_OPERAND (arg0, 1))
13139 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13140 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13142 tree arg00 = TREE_OPERAND (arg0, 0);
13143 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13144 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13147 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13148 when is C is a power of two, i.e. a single bit. */
13149 if (TREE_CODE (arg0) == BIT_AND_EXPR
13150 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13151 && integer_zerop (arg1)
13152 && integer_pow2p (TREE_OPERAND (arg0, 1))
13153 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13154 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13156 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13157 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13158 arg000, TREE_OPERAND (arg0, 1));
13159 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13160 tem, build_int_cst (TREE_TYPE (tem), 0));
13163 if (integer_zerop (arg1)
13164 && tree_expr_nonzero_p (arg0))
13166 tree res = constant_boolean_node (code==NE_EXPR, type);
13167 return omit_one_operand_loc (loc, type, res, arg0);
13170 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13171 if (TREE_CODE (arg0) == NEGATE_EXPR
13172 && TREE_CODE (arg1) == NEGATE_EXPR)
13173 return fold_build2_loc (loc, code, type,
13174 TREE_OPERAND (arg0, 0),
13175 fold_convert_loc (loc, TREE_TYPE (arg0),
13176 TREE_OPERAND (arg1, 0)));
13178 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13179 if (TREE_CODE (arg0) == BIT_AND_EXPR
13180 && TREE_CODE (arg1) == BIT_AND_EXPR)
13182 tree arg00 = TREE_OPERAND (arg0, 0);
13183 tree arg01 = TREE_OPERAND (arg0, 1);
13184 tree arg10 = TREE_OPERAND (arg1, 0);
13185 tree arg11 = TREE_OPERAND (arg1, 1);
13186 tree itype = TREE_TYPE (arg0);
13188 if (operand_equal_p (arg01, arg11, 0))
13189 return fold_build2_loc (loc, code, type,
13190 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13191 fold_build2_loc (loc,
13192 BIT_XOR_EXPR, itype,
13195 build_zero_cst (itype));
13197 if (operand_equal_p (arg01, arg10, 0))
13198 return fold_build2_loc (loc, code, type,
13199 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13200 fold_build2_loc (loc,
13201 BIT_XOR_EXPR, itype,
13204 build_zero_cst (itype));
13206 if (operand_equal_p (arg00, arg11, 0))
13207 return fold_build2_loc (loc, code, type,
13208 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13209 fold_build2_loc (loc,
13210 BIT_XOR_EXPR, itype,
13213 build_zero_cst (itype));
13215 if (operand_equal_p (arg00, arg10, 0))
13216 return fold_build2_loc (loc, code, type,
13217 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13218 fold_build2_loc (loc,
13219 BIT_XOR_EXPR, itype,
13222 build_zero_cst (itype));
13225 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13226 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13228 tree arg00 = TREE_OPERAND (arg0, 0);
13229 tree arg01 = TREE_OPERAND (arg0, 1);
13230 tree arg10 = TREE_OPERAND (arg1, 0);
13231 tree arg11 = TREE_OPERAND (arg1, 1);
13232 tree itype = TREE_TYPE (arg0);
13234 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13235 operand_equal_p guarantees no side-effects so we don't need
13236 to use omit_one_operand on Z. */
13237 if (operand_equal_p (arg01, arg11, 0))
13238 return fold_build2_loc (loc, code, type, arg00,
13239 fold_convert_loc (loc, TREE_TYPE (arg00),
13241 if (operand_equal_p (arg01, arg10, 0))
13242 return fold_build2_loc (loc, code, type, arg00,
13243 fold_convert_loc (loc, TREE_TYPE (arg00),
13245 if (operand_equal_p (arg00, arg11, 0))
13246 return fold_build2_loc (loc, code, type, arg01,
13247 fold_convert_loc (loc, TREE_TYPE (arg01),
13249 if (operand_equal_p (arg00, arg10, 0))
13250 return fold_build2_loc (loc, code, type, arg01,
13251 fold_convert_loc (loc, TREE_TYPE (arg01),
13254 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13255 if (TREE_CODE (arg01) == INTEGER_CST
13256 && TREE_CODE (arg11) == INTEGER_CST)
13258 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13259 fold_convert_loc (loc, itype, arg11));
13260 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13261 return fold_build2_loc (loc, code, type, tem,
13262 fold_convert_loc (loc, itype, arg10));
13266 /* Attempt to simplify equality/inequality comparisons of complex
13267 values. Only lower the comparison if the result is known or
13268 can be simplified to a single scalar comparison. */
13269 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13270 || TREE_CODE (arg0) == COMPLEX_CST)
13271 && (TREE_CODE (arg1) == COMPLEX_EXPR
13272 || TREE_CODE (arg1) == COMPLEX_CST))
13274 tree real0, imag0, real1, imag1;
13277 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13279 real0 = TREE_OPERAND (arg0, 0);
13280 imag0 = TREE_OPERAND (arg0, 1);
13284 real0 = TREE_REALPART (arg0);
13285 imag0 = TREE_IMAGPART (arg0);
13288 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13290 real1 = TREE_OPERAND (arg1, 0);
13291 imag1 = TREE_OPERAND (arg1, 1);
13295 real1 = TREE_REALPART (arg1);
13296 imag1 = TREE_IMAGPART (arg1);
13299 rcond = fold_binary_loc (loc, code, type, real0, real1);
13300 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13302 if (integer_zerop (rcond))
13304 if (code == EQ_EXPR)
13305 return omit_two_operands_loc (loc, type, boolean_false_node,
13307 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13311 if (code == NE_EXPR)
13312 return omit_two_operands_loc (loc, type, boolean_true_node,
13314 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13318 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13319 if (icond && TREE_CODE (icond) == INTEGER_CST)
13321 if (integer_zerop (icond))
13323 if (code == EQ_EXPR)
13324 return omit_two_operands_loc (loc, type, boolean_false_node,
13326 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13330 if (code == NE_EXPR)
13331 return omit_two_operands_loc (loc, type, boolean_true_node,
13333 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13344 tem = fold_comparison (loc, code, type, op0, op1);
13345 if (tem != NULL_TREE)
13348 /* Transform comparisons of the form X +- C CMP X. */
13349 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13351 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13352 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13353 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13354 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13356 tree arg01 = TREE_OPERAND (arg0, 1);
13357 enum tree_code code0 = TREE_CODE (arg0);
13360 if (TREE_CODE (arg01) == REAL_CST)
13361 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13363 is_positive = tree_int_cst_sgn (arg01);
13365 /* (X - c) > X becomes false. */
13366 if (code == GT_EXPR
13367 && ((code0 == MINUS_EXPR && is_positive >= 0)
13368 || (code0 == PLUS_EXPR && is_positive <= 0)))
13370 if (TREE_CODE (arg01) == INTEGER_CST
13371 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13372 fold_overflow_warning (("assuming signed overflow does not "
13373 "occur when assuming that (X - c) > X "
13374 "is always false"),
13375 WARN_STRICT_OVERFLOW_ALL);
13376 return constant_boolean_node (0, type);
13379 /* Likewise (X + c) < X becomes false. */
13380 if (code == LT_EXPR
13381 && ((code0 == PLUS_EXPR && is_positive >= 0)
13382 || (code0 == MINUS_EXPR && is_positive <= 0)))
13384 if (TREE_CODE (arg01) == INTEGER_CST
13385 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13386 fold_overflow_warning (("assuming signed overflow does not "
13387 "occur when assuming that "
13388 "(X + c) < X is always false"),
13389 WARN_STRICT_OVERFLOW_ALL);
13390 return constant_boolean_node (0, type);
13393 /* Convert (X - c) <= X to true. */
13394 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13396 && ((code0 == MINUS_EXPR && is_positive >= 0)
13397 || (code0 == PLUS_EXPR && is_positive <= 0)))
13399 if (TREE_CODE (arg01) == INTEGER_CST
13400 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13401 fold_overflow_warning (("assuming signed overflow does not "
13402 "occur when assuming that "
13403 "(X - c) <= X is always true"),
13404 WARN_STRICT_OVERFLOW_ALL);
13405 return constant_boolean_node (1, type);
13408 /* Convert (X + c) >= X to true. */
13409 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13411 && ((code0 == PLUS_EXPR && is_positive >= 0)
13412 || (code0 == MINUS_EXPR && is_positive <= 0)))
13414 if (TREE_CODE (arg01) == INTEGER_CST
13415 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13416 fold_overflow_warning (("assuming signed overflow does not "
13417 "occur when assuming that "
13418 "(X + c) >= X is always true"),
13419 WARN_STRICT_OVERFLOW_ALL);
13420 return constant_boolean_node (1, type);
13423 if (TREE_CODE (arg01) == INTEGER_CST)
13425 /* Convert X + c > X and X - c < X to true for integers. */
13426 if (code == GT_EXPR
13427 && ((code0 == PLUS_EXPR && is_positive > 0)
13428 || (code0 == MINUS_EXPR && is_positive < 0)))
13430 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13431 fold_overflow_warning (("assuming signed overflow does "
13432 "not occur when assuming that "
13433 "(X + c) > X is always true"),
13434 WARN_STRICT_OVERFLOW_ALL);
13435 return constant_boolean_node (1, type);
13438 if (code == LT_EXPR
13439 && ((code0 == MINUS_EXPR && is_positive > 0)
13440 || (code0 == PLUS_EXPR && is_positive < 0)))
13442 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13443 fold_overflow_warning (("assuming signed overflow does "
13444 "not occur when assuming that "
13445 "(X - c) < X is always true"),
13446 WARN_STRICT_OVERFLOW_ALL);
13447 return constant_boolean_node (1, type);
13450 /* Convert X + c <= X and X - c >= X to false for integers. */
13451 if (code == LE_EXPR
13452 && ((code0 == PLUS_EXPR && is_positive > 0)
13453 || (code0 == MINUS_EXPR && is_positive < 0)))
13455 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13456 fold_overflow_warning (("assuming signed overflow does "
13457 "not occur when assuming that "
13458 "(X + c) <= X is always false"),
13459 WARN_STRICT_OVERFLOW_ALL);
13460 return constant_boolean_node (0, type);
13463 if (code == GE_EXPR
13464 && ((code0 == MINUS_EXPR && is_positive > 0)
13465 || (code0 == PLUS_EXPR && is_positive < 0)))
13467 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13468 fold_overflow_warning (("assuming signed overflow does "
13469 "not occur when assuming that "
13470 "(X - c) >= X is always false"),
13471 WARN_STRICT_OVERFLOW_ALL);
13472 return constant_boolean_node (0, type);
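/* Illustrative sketch, assuming "int i" with undefined signed
   overflow (no -fwrapv):

       i + 1 > i    folds to 1        i + 1 <= i   folds to 0
       i - 1 < i    folds to 1        i - 1 >= i   folds to 0

   Each fold is reported via fold_overflow_warning so that
   -Wstrict-overflow can diagnose it; with -fwrapv the comparisons
   are left alone, since i + 1 may wrap to INT_MIN.  */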
13477 /* Comparisons with the highest or lowest possible integer of
13478 the specified precision will have known values. */
13480 tree arg1_type = TREE_TYPE (arg1);
13481 unsigned int width = TYPE_PRECISION (arg1_type);
13483 if (TREE_CODE (arg1) == INTEGER_CST
13484 && width <= HOST_BITS_PER_DOUBLE_INT
13485 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13487 HOST_WIDE_INT signed_max_hi;
13488 unsigned HOST_WIDE_INT signed_max_lo;
13489 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13491 if (width <= HOST_BITS_PER_WIDE_INT)
13493 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13498 if (TYPE_UNSIGNED (arg1_type))
13500 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13506 max_lo = signed_max_lo;
13507 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13513 width -= HOST_BITS_PER_WIDE_INT;
13514 signed_max_lo = -1;
13515 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13520 if (TYPE_UNSIGNED (arg1_type))
13522 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13527 max_hi = signed_max_hi;
13528 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13532 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13533 && TREE_INT_CST_LOW (arg1) == max_lo)
13537 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13540 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13543 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13546 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13548 /* The GE_EXPR and LT_EXPR cases above are not normally
13549 reached because of previous transformations. */
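/* Illustrative sketch for "unsigned char x" (width 8, max 255,
   min 0):

       x > 255   folds to 0           x >= 255   becomes x == 255
       x < 0     folds to 0           x <= 0     becomes x == 0
       x > 254   becomes x == 255     x >= 1     becomes x != 0  */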
13554 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13556 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13560 arg1 = const_binop (PLUS_EXPR, arg1,
13561 build_int_cst (TREE_TYPE (arg1), 1));
13562 return fold_build2_loc (loc, EQ_EXPR, type,
13563 fold_convert_loc (loc,
13564 TREE_TYPE (arg1), arg0),
13567 arg1 = const_binop (PLUS_EXPR, arg1,
13568 build_int_cst (TREE_TYPE (arg1), 1));
13569 return fold_build2_loc (loc, NE_EXPR, type,
13570 fold_convert_loc (loc, TREE_TYPE (arg1),
13576 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13578 && TREE_INT_CST_LOW (arg1) == min_lo)
13582 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13585 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13588 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13591 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13596 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13598 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13602 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13603 return fold_build2_loc (loc, NE_EXPR, type,
13604 fold_convert_loc (loc,
13605 TREE_TYPE (arg1), arg0),
13608 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13609 return fold_build2_loc (loc, EQ_EXPR, type,
13610 fold_convert_loc (loc, TREE_TYPE (arg1),
13617 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13618 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13619 && TYPE_UNSIGNED (arg1_type)
13620 /* We will flip the signedness of the comparison operator
13621 associated with the mode of arg1, so the sign bit is
13622 specified by this mode. Check that arg1 is the signed
13623 max associated with this sign bit. */
13624 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13625 /* signed_type does not work on pointer types. */
13626 && INTEGRAL_TYPE_P (arg1_type))
13628 /* The following case also applies to X < signed_max+1
13629 and X >= signed_max+1 because of previous transformations. */
13630 if (code == LE_EXPR || code == GT_EXPR)
13633 st = signed_type_for (TREE_TYPE (arg1));
13634 return fold_build2_loc (loc,
13635 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13636 type, fold_convert_loc (loc, st, arg0),
13637 build_int_cst (st, 0));
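/* E.g. for "unsigned int x" on a 32-bit target, signed_max is
   0x7fffffff, so (sketch):

       x <= 0x7fffffffu   becomes (int) x >= 0
       x >  0x7fffffffu   becomes (int) x < 0

   i.e. the unsigned range test turns into a sign-bit test in the
   corresponding signed type.  */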
13643 /* If we are comparing an ABS_EXPR with a constant, we can
13644 convert all the cases into explicit comparisons, but they may
13645 well not be faster than doing the ABS and one comparison.
13646 But ABS (X) <= C is a range comparison, which becomes a subtraction
13647 and a comparison, and is probably faster. */
13648 if (code == LE_EXPR
13649 && TREE_CODE (arg1) == INTEGER_CST
13650 && TREE_CODE (arg0) == ABS_EXPR
13651 && ! TREE_SIDE_EFFECTS (arg0)
13652 && (0 != (tem = negate_expr (arg1)))
13653 && TREE_CODE (tem) == INTEGER_CST
13654 && !TREE_OVERFLOW (tem))
13655 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13656 build2 (GE_EXPR, type,
13657 TREE_OPERAND (arg0, 0), tem),
13658 build2 (LE_EXPR, type,
13659 TREE_OPERAND (arg0, 0), arg1));
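/* Illustrative sketch: for "int x",

       ABS_EXPR <x> <= 5

   becomes the range test "x >= -5 && x <= 5" (a TRUTH_ANDIF_EXPR of
   the two comparisons), which later range folds may turn into a
   single unsigned comparison such as (unsigned) (x + 5) <= 10.  */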
13661 /* Convert ABS_EXPR<x> >= 0 to true. */
13662 strict_overflow_p = false;
13663 if (code == GE_EXPR
13664 && (integer_zerop (arg1)
13665 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13666 && real_zerop (arg1)))
13667 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13669 if (strict_overflow_p)
13670 fold_overflow_warning (("assuming signed overflow does not occur "
13671 "when simplifying comparison of "
13672 "absolute value and zero"),
13673 WARN_STRICT_OVERFLOW_CONDITIONAL);
13674 return omit_one_operand_loc (loc, type,
13675 constant_boolean_node (true, type),
13679 /* Convert ABS_EXPR<x> < 0 to false. */
13680 strict_overflow_p = false;
13681 if (code == LT_EXPR
13682 && (integer_zerop (arg1) || real_zerop (arg1))
13683 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13685 if (strict_overflow_p)
13686 fold_overflow_warning (("assuming signed overflow does not occur "
13687 "when simplifying comparison of "
13688 "absolute value and zero"),
13689 WARN_STRICT_OVERFLOW_CONDITIONAL);
13690 return omit_one_operand_loc (loc, type,
13691 constant_boolean_node (false, type),
13695 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13696 and similarly for >= into !=. */
13697 if ((code == LT_EXPR || code == GE_EXPR)
13698 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13699 && TREE_CODE (arg1) == LSHIFT_EXPR
13700 && integer_onep (TREE_OPERAND (arg1, 0)))
13701 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13702 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13703 TREE_OPERAND (arg1, 1)),
13704 build_zero_cst (TREE_TYPE (arg0)));
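/* Sketch: for "unsigned int x" and any shift count y,

       x <  (1 << y)   becomes (x >> y) == 0
       x >= (1 << y)   becomes (x >> y) != 0

   which avoids materializing the power-of-two value.  */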
13706 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13707 otherwise Y might be >= # of bits in X's type and thus e.g.
13708 (unsigned char) (1 << Y) for Y == 15 might be 0.
13709 If the cast is widening, then 1 << Y should have an unsigned type,
13710 otherwise if Y is the number of bits in the signed shift type minus 1,
13711 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13712 Y == 31 might be 0xffffffff80000000. */
13713 if ((code == LT_EXPR || code == GE_EXPR)
13714 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13715 && CONVERT_EXPR_P (arg1)
13716 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13717 && (TYPE_PRECISION (TREE_TYPE (arg1))
13718 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13719 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13720 || (TYPE_PRECISION (TREE_TYPE (arg1))
13721 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13722 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13724 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13725 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13726 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13727 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13728 build_zero_cst (TREE_TYPE (arg0)));
13733 case UNORDERED_EXPR:
13741 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13743 t1 = fold_relational_const (code, type, arg0, arg1);
13744 if (t1 != NULL_TREE)
13748 /* If the first operand is NaN, the result is constant. */
13749 if (TREE_CODE (arg0) == REAL_CST
13750 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13751 && (code != LTGT_EXPR || ! flag_trapping_math))
13753 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13754 ? integer_zero_node
13755 : integer_one_node;
13756 return omit_one_operand_loc (loc, type, t1, arg1);
13759 /* If the second operand is NaN, the result is constant. */
13760 if (TREE_CODE (arg1) == REAL_CST
13761 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13762 && (code != LTGT_EXPR || ! flag_trapping_math))
13764 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13765 ? integer_zero_node
13766 : integer_one_node;
13767 return omit_one_operand_loc (loc, type, t1, arg0);
13770 /* Simplify unordered comparison of something with itself. */
13771 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13772 && operand_equal_p (arg0, arg1, 0))
13773 return constant_boolean_node (1, type);
13775 if (code == LTGT_EXPR
13776 && !flag_trapping_math
13777 && operand_equal_p (arg0, arg1, 0))
13778 return constant_boolean_node (0, type);
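/* E.g. UNLE (x, x), UNGE (x, x) and UNEQ (x, x) are true even when
   x is a NaN, because unordered comparisons are true on NaN.
   LTGT (x, x) is false, but folding it away is only safe when
   -ftrapping-math is off, since LTGT may raise an exception on a
   NaN operand.  */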
13780 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13782 tree targ0 = strip_float_extensions (arg0);
13783 tree targ1 = strip_float_extensions (arg1);
13784 tree newtype = TREE_TYPE (targ0);
13786 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13787 newtype = TREE_TYPE (targ1);
13789 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13790 return fold_build2_loc (loc, code, type,
13791 fold_convert_loc (loc, newtype, targ0),
13792 fold_convert_loc (loc, newtype, targ1));
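/* E.g. with "float f1, f2",

       (double) f1 < (double) f2

   folds to "f1 < f2": the widening conversions are exact, so the
   comparison can be done in the narrower common type.  */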
13797 case COMPOUND_EXPR:
13798 /* When pedantic, a compound expression can be neither an lvalue
13799 nor an integer constant expression. */
13800 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13802 /* Don't let (0, 0) be a null pointer constant. */
13803 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13804 : fold_convert_loc (loc, type, arg1);
13805 return pedantic_non_lvalue_loc (loc, tem);
13808 if ((TREE_CODE (arg0) == REAL_CST
13809 && TREE_CODE (arg1) == REAL_CST)
13810 || (TREE_CODE (arg0) == INTEGER_CST
13811 && TREE_CODE (arg1) == INTEGER_CST))
13812 return build_complex (type, arg0, arg1);
13813 if (TREE_CODE (arg0) == REALPART_EXPR
13814 && TREE_CODE (arg1) == IMAGPART_EXPR
13815 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13816 && operand_equal_p (TREE_OPERAND (arg0, 0),
13817 TREE_OPERAND (arg1, 0), 0))
13818 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13819 TREE_OPERAND (arg1, 0));
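/* Sketch: COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>> is
   just z itself when the types match, so re-building a complex
   value from "__real__ z" and "__imag__ z" folds back to z.  */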
13823 /* An ASSERT_EXPR should never be passed to fold_binary. */
13824 gcc_unreachable ();
13826 case VEC_PACK_TRUNC_EXPR:
13827 case VEC_PACK_FIX_TRUNC_EXPR:
13829 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13832 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13833 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13834 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13837 elts = XALLOCAVEC (tree, nelts);
13838 if (!vec_cst_ctor_to_array (arg0, elts)
13839 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13842 for (i = 0; i < nelts; i++)
13844 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13845 ? NOP_EXPR : FIX_TRUNC_EXPR,
13846 TREE_TYPE (type), elts[i]);
13847 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13851 return build_vector (type, elts);
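/* Illustrative sketch (the concrete modes are an assumption):
   packing two constant V4SI vectors {1,2,3,4} and {5,6,7,8} with
   VEC_PACK_TRUNC_EXPR into a V8HI result yields the VECTOR_CST
   {1,2,3,4,5,6,7,8}, each element truncated from 32 to 16 bits.  */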
13854 case VEC_WIDEN_MULT_LO_EXPR:
13855 case VEC_WIDEN_MULT_HI_EXPR:
13856 case VEC_WIDEN_MULT_EVEN_EXPR:
13857 case VEC_WIDEN_MULT_ODD_EXPR:
13859 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13860 unsigned int out, ofs, scale;
13863 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13864 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13865 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13868 elts = XALLOCAVEC (tree, nelts * 4);
13869 if (!vec_cst_ctor_to_array (arg0, elts)
13870 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13873 if (code == VEC_WIDEN_MULT_LO_EXPR)
13874 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13875 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13876 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13877 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13878 scale = 1, ofs = 0;
13879 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13880 scale = 1, ofs = 1;
13882 for (out = 0; out < nelts; out++)
13884 unsigned int in1 = (out << scale) + ofs;
13885 unsigned int in2 = in1 + nelts * 2;
13888 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13889 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13891 if (t1 == NULL_TREE || t2 == NULL_TREE)
13893 elts[out] = const_binop (MULT_EXPR, t1, t2);
13894 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13898 return build_vector (type, elts);
13903 } /* switch (code) */
13906 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13907 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13908 of GOTO_EXPR. */
13911 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13913 switch (TREE_CODE (*tp))
13919 *walk_subtrees = 0;
13921 /* ... fall through ... */
13928 /* Return whether the sub-tree ST contains a label which is accessible from
13929 outside the sub-tree. */
13932 contains_label_p (tree st)
13935 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
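/* This guards folds such as "0 ? x : y": the dead arm may only be
   dropped if it contains no LABEL_EXPR, since e.g. a statement
   expression arm holding "lab:" could still be the target of a
   goto from outside the conditional.  */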
13938 /* Fold a ternary expression of code CODE and type TYPE with operands
13939 OP0, OP1, and OP2. Return the folded expression if folding is
13940 successful. Otherwise, return NULL_TREE. */
13943 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13944 tree op0, tree op1, tree op2)
13947 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13948 enum tree_code_class kind = TREE_CODE_CLASS (code);
13950 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13951 && TREE_CODE_LENGTH (code) == 3);
13953 /* Strip any conversions that don't change the mode. This is safe
13954 for every expression, except for a comparison expression because
13955 its signedness is derived from its operands. So, in the latter
13956 case, only strip conversions that don't change the signedness.
13958 Note that this is done as an internal manipulation within the
13959 constant folder, in order to find the simplest representation of
13960 the arguments so that their form can be studied. In any case,
13961 the appropriate type conversions should be put back in the tree
13962 that will get out of the constant folder. */
13983 case COMPONENT_REF:
13984 if (TREE_CODE (arg0) == CONSTRUCTOR
13985 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13987 unsigned HOST_WIDE_INT idx;
13989 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13996 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13997 so all simple results must be passed through pedantic_non_lvalue. */
13998 if (TREE_CODE (arg0) == INTEGER_CST)
14000 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14001 tem = integer_zerop (arg0) ? op2 : op1;
14002 /* Only optimize constant conditions when the selected branch
14003 has the same type as the COND_EXPR. This avoids optimizing
14004 away "c ? x : throw", where the throw has a void type.
14005 Also avoid throwing away an operand that contains a label. */
14006 if ((!TREE_SIDE_EFFECTS (unused_op)
14007 || !contains_label_p (unused_op))
14008 && (! VOID_TYPE_P (TREE_TYPE (tem))
14009 || VOID_TYPE_P (type)))
14010 return pedantic_non_lvalue_loc (loc, tem);
14013 if (operand_equal_p (arg1, op2, 0))
14014 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14016 /* If we have A op B ? A : C, we may be able to convert this to a
14017 simpler expression, depending on the operation and the values
14018 of B and C. Signed zeros prevent all of these transformations,
14019 for reasons given above each one.
14021 Also try swapping the arguments and inverting the conditional. */
14022 if (COMPARISON_CLASS_P (arg0)
14023 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14024 arg1, TREE_OPERAND (arg0, 1))
14025 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14027 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14032 if (COMPARISON_CLASS_P (arg0)
14033 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14035 TREE_OPERAND (arg0, 1))
14036 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14038 location_t loc0 = expr_location_or (arg0, loc);
14039 tem = fold_truth_not_expr (loc0, arg0);
14040 if (tem && COMPARISON_CLASS_P (tem))
14042 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14048 /* If the second operand is simpler than the third, swap them
14049 since that produces better jump optimization results. */
14050 if (truth_value_p (TREE_CODE (arg0))
14051 && tree_swap_operands_p (op1, op2, false))
14053 location_t loc0 = expr_location_or (arg0, loc);
14054 /* See if this can be inverted. If it can't, possibly because
14055 it was a floating-point inequality comparison, don't do
14056 anything. */
14057 tem = fold_truth_not_expr (loc0, arg0);
14059 return fold_build3_loc (loc, code, type, tem, op2, op1);
14062 /* Convert A ? 1 : 0 to simply A. */
14063 if (integer_onep (op1)
14064 && integer_zerop (op2)
14065 /* If we try to convert OP0 to our type, the
14066 call to fold will try to move the conversion inside
14067 a COND, which will recurse. In that case, the COND_EXPR
14068 is probably the best choice, so leave it alone. */
14069 && type == TREE_TYPE (arg0))
14070 return pedantic_non_lvalue_loc (loc, arg0);
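/* E.g. "(a < b) ? 1 : 0" folds to just "a < b" when the COND_EXPR
   already has the comparison's type, avoiding a useless
   re-materialization of the truth value.  */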
14072 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14073 over COND_EXPR in cases such as floating point comparisons. */
14074 if (integer_zerop (op1)
14075 && integer_onep (op2)
14076 && truth_value_p (TREE_CODE (arg0)))
14077 return pedantic_non_lvalue_loc (loc,
14078 fold_convert_loc (loc, type,
14079 invert_truthvalue_loc (loc,
14082 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14083 if (TREE_CODE (arg0) == LT_EXPR
14084 && integer_zerop (TREE_OPERAND (arg0, 1))
14085 && integer_zerop (op2)
14086 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14088 /* sign_bit_p looks through both zero and sign extensions,
14089 but for this optimization only sign extensions are
14090 usable. */
14091 tree tem2 = TREE_OPERAND (arg0, 0);
14092 while (tem != tem2)
14094 if (TREE_CODE (tem2) != NOP_EXPR
14095 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14100 tem2 = TREE_OPERAND (tem2, 0);
14102 /* sign_bit_p only checks ARG1 bits within A's precision.
14103 If <sign bit of A> has wider type than A, bits outside
14104 of A's precision in <sign bit of A> need to be checked.
14105 If they are all 0, this optimization needs to be done
14106 in unsigned A's type; if they are all 1, in signed A's type;
14107 otherwise this can't be done. */
14109 && TYPE_PRECISION (TREE_TYPE (tem))
14110 < TYPE_PRECISION (TREE_TYPE (arg1))
14111 && TYPE_PRECISION (TREE_TYPE (tem))
14112 < TYPE_PRECISION (type))
14114 unsigned HOST_WIDE_INT mask_lo;
14115 HOST_WIDE_INT mask_hi;
14116 int inner_width, outer_width;
14119 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14120 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14121 if (outer_width > TYPE_PRECISION (type))
14122 outer_width = TYPE_PRECISION (type);
14124 if (outer_width > HOST_BITS_PER_WIDE_INT)
14126 mask_hi = ((unsigned HOST_WIDE_INT) -1
14127 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14133 mask_lo = ((unsigned HOST_WIDE_INT) -1
14134 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14136 if (inner_width > HOST_BITS_PER_WIDE_INT)
14138 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14139 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14143 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14144 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14146 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14147 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14149 tem_type = signed_type_for (TREE_TYPE (tem));
14150 tem = fold_convert_loc (loc, tem_type, tem);
14152 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14153 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14155 tem_type = unsigned_type_for (TREE_TYPE (tem));
14156 tem = fold_convert_loc (loc, tem_type, tem);
14164 fold_convert_loc (loc, type,
14165 fold_build2_loc (loc, BIT_AND_EXPR,
14166 TREE_TYPE (tem), tem,
14167 fold_convert_loc (loc,
14172 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14173 already handled above. */
14174 if (TREE_CODE (arg0) == BIT_AND_EXPR
14175 && integer_onep (TREE_OPERAND (arg0, 1))
14176 && integer_zerop (op2)
14177 && integer_pow2p (arg1))
14179 tree tem = TREE_OPERAND (arg0, 0);
14181 if (TREE_CODE (tem) == RSHIFT_EXPR
14182 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14183 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14184 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14185 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14186 TREE_OPERAND (tem, 0), arg1);
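/* Sketch: "((a >> 3) & 1) ? 8 : 0" folds to "a & 8", because the
   tested bit and the selected power of two are the same bit
   (tree_log2 (8) == 3, the shift count).  */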
14189 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14190 is probably obsolete because the first operand should be a
14191 truth value (that's why we have the two cases above), but let's
14192 leave it in until we can confirm this for all front-ends. */
14193 if (integer_zerop (op2)
14194 && TREE_CODE (arg0) == NE_EXPR
14195 && integer_zerop (TREE_OPERAND (arg0, 1))
14196 && integer_pow2p (arg1)
14197 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14198 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14199 arg1, OEP_ONLY_CONST))
14200 return pedantic_non_lvalue_loc (loc,
14201 fold_convert_loc (loc, type,
14202 TREE_OPERAND (arg0, 0)));
14204 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14205 if (integer_zerop (op2)
14206 && truth_value_p (TREE_CODE (arg0))
14207 && truth_value_p (TREE_CODE (arg1)))
14208 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14209 fold_convert_loc (loc, type, arg0),
14212 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14213 if (integer_onep (op2)
14214 && truth_value_p (TREE_CODE (arg0))
14215 && truth_value_p (TREE_CODE (arg1)))
14217 location_t loc0 = expr_location_or (arg0, loc);
14218 /* Only perform transformation if ARG0 is easily inverted. */
14219 tem = fold_truth_not_expr (loc0, arg0);
14221 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14222 fold_convert_loc (loc, type, tem),
14226 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14227 if (integer_zerop (arg1)
14228 && truth_value_p (TREE_CODE (arg0))
14229 && truth_value_p (TREE_CODE (op2)))
14231 location_t loc0 = expr_location_or (arg0, loc);
14232 /* Only perform transformation if ARG0 is easily inverted. */
14233 tem = fold_truth_not_expr (loc0, arg0);
14235 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14236 fold_convert_loc (loc, type, tem),
14240 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14241 if (integer_onep (arg1)
14242 && truth_value_p (TREE_CODE (arg0))
14243 && truth_value_p (TREE_CODE (op2)))
14244 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14245 fold_convert_loc (loc, type, arg0),
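/* Summary of the four truth-value folds above, as source-level
   sketches for truth-valued a and b:

       a ? b : 0   becomes a && b       a ? b : 1   becomes !a || b
       a ? 0 : b   becomes !a && b      a ? 1 : b   becomes a || b

   The "!a" forms are only used when fold_truth_not_expr can invert
   a cheaply.  */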
14250 case VEC_COND_EXPR:
14251 if (TREE_CODE (arg0) == VECTOR_CST)
14253 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14254 return pedantic_non_lvalue_loc (loc, op1);
14255 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14256 return pedantic_non_lvalue_loc (loc, op2);
14261 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14262 of fold_ternary on them. */
14263 gcc_unreachable ();
14265 case BIT_FIELD_REF:
14266 if ((TREE_CODE (arg0) == VECTOR_CST
14267 || (TREE_CODE (arg0) == CONSTRUCTOR
14268 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14269 && (type == TREE_TYPE (TREE_TYPE (arg0))
14270 || (TREE_CODE (type) == VECTOR_TYPE
14271 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14273 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14274 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14275 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14276 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14279 && (idx % width) == 0
14280 && (n % width) == 0
14281 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14286 if (TREE_CODE (arg0) == VECTOR_CST)
14289 return VECTOR_CST_ELT (arg0, idx);
14291 tree *vals = XALLOCAVEC (tree, n);
14292 for (unsigned i = 0; i < n; ++i)
14293 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14294 return build_vector (type, vals);
14297 /* Constructor elements can be subvectors. */
14298 unsigned HOST_WIDE_INT k = 1;
14299 if (CONSTRUCTOR_NELTS (arg0) != 0)
14301 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14302 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14303 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14306 /* We keep an exact subset of the constructor elements. */
14307 if ((idx % k) == 0 && (n % k) == 0)
14309 if (CONSTRUCTOR_NELTS (arg0) == 0)
14310 return build_constructor (type, NULL);
14315 if (idx < CONSTRUCTOR_NELTS (arg0))
14316 return CONSTRUCTOR_ELT (arg0, idx)->value;
14317 return build_zero_cst (type);
14320 vec<constructor_elt, va_gc> *vals;
14321 vec_alloc (vals, n);
14322 for (unsigned i = 0;
14323 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14325 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14327 (arg0, idx + i)->value);
14328 return build_constructor (type, vals);
14330 /* The bitfield references a single constructor element. */
14331 else if (idx + n <= (idx / k + 1) * k)
14333 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14334 return build_zero_cst (type);
14336 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14338 return fold_build3_loc (loc, code, type,
14339 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14340 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14345 /* A bit-field-ref that referenced the full argument can be stripped. */
14346 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14347 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14348 && integer_zerop (op2))
14349 return fold_convert_loc (loc, type, arg0);
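/* E.g. BIT_FIELD_REF <x, 32, 0> of a 32-bit integer x reads the
   whole value, so it folds to a plain conversion of x to the
   result type.  */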
14351 /* On constants we can use native encode/interpret to constant
14352 fold (nearly) all BIT_FIELD_REFs. */
14353 if (CONSTANT_CLASS_P (arg0)
14354 && can_native_interpret_type_p (type)
14355 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14356 /* This limitation should not be necessary; we just need to
14357 round this up to the mode size. */
14358 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14359 /* Need bit-shifting of the buffer to relax the following. */
14360 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14362 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14363 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14364 unsigned HOST_WIDE_INT clen;
14365 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14366 /* ??? We cannot tell native_encode_expr to start at
14367 some random byte only. So limit us to a reasonable amount
14368 of work. */
14371 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14372 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14374 && len * BITS_PER_UNIT >= bitpos + bitsize)
14376 tree v = native_interpret_expr (type,
14377 b + bitpos / BITS_PER_UNIT,
14378 bitsize / BITS_PER_UNIT);
14388 /* For integers we can decompose the FMA if possible. */
14389 if (TREE_CODE (arg0) == INTEGER_CST
14390 && TREE_CODE (arg1) == INTEGER_CST)
14391 return fold_build2_loc (loc, PLUS_EXPR, type,
14392 const_binop (MULT_EXPR, arg0, arg1), arg2);
14393 if (integer_zerop (arg2))
14394 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14396 return fold_fma (loc, type, arg0, arg1, arg2);
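/* Sketch: FMA_EXPR <2, 3, 4> on an integer type folds to the
   constant 10 (2*3 + 4), and FMA_EXPR <a, b, 0> decomposes to the
   plain multiplication a * b.  */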
14398 case VEC_PERM_EXPR:
14399 if (TREE_CODE (arg2) == VECTOR_CST)
14401 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14402 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14404 bool need_mask_canon = false;
14405 bool all_in_vec0 = true;
14406 bool all_in_vec1 = true;
14407 bool maybe_identity = true;
14408 bool single_arg = (op0 == op1);
14409 bool changed = false;
14411 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14412 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14413 for (i = 0; i < nelts; i++)
14415 tree val = VECTOR_CST_ELT (arg2, i);
14416 if (TREE_CODE (val) != INTEGER_CST)
14419 sel[i] = TREE_INT_CST_LOW (val) & mask;
14420 if (TREE_INT_CST_HIGH (val)
14421 || ((unsigned HOST_WIDE_INT)
14422 TREE_INT_CST_LOW (val) != sel[i]))
14423 need_mask_canon = true;
14425 if (sel[i] < nelts)
14426 all_in_vec1 = false;
14428 all_in_vec0 = false;
14430 if ((sel[i] & (nelts-1)) != i)
14431 maybe_identity = false;
14434 if (maybe_identity)
14444 else if (all_in_vec1)
14447 for (i = 0; i < nelts; i++)
14449 need_mask_canon = true;
14452 if ((TREE_CODE (op0) == VECTOR_CST
14453 || TREE_CODE (op0) == CONSTRUCTOR)
14454 && (TREE_CODE (op1) == VECTOR_CST
14455 || TREE_CODE (op1) == CONSTRUCTOR))
14457 t = fold_vec_perm (type, op0, op1, sel);
14458 if (t != NULL_TREE)
14462 if (op0 == op1 && !single_arg)
14465 if (need_mask_canon && arg2 == op2)
14467 tree *tsel = XALLOCAVEC (tree, nelts);
14468 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14469 for (i = 0; i < nelts; i++)
14470 tsel[i] = build_int_cst (eltype, sel[i]);
14471 op2 = build_vector (TREE_TYPE (arg2), tsel);
14476 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
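/* Sketch: a VEC_PERM_EXPR whose constant mask is the identity
   permutation within one input, e.g. {0, 1, 2, 3} on V4SI, selects
   that input unchanged and folds to it; out-of-range mask elements
   are first canonicalized modulo the element count.  */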
14482 } /* switch (code) */
14485 /* Perform constant folding and related simplification of EXPR.
14486 The related simplifications include x*1 => x, x*0 => 0, etc.,
14487 and application of the associative law.
14488 NOP_EXPR conversions may be removed freely (as long as we
14489 are careful not to change the type of the overall expression).
14490 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14491 but we can constant-fold them if they have constant operands. */
14493 #ifdef ENABLE_FOLD_CHECKING
14494 # define fold(x) fold_1 (x)
14495 static tree fold_1 (tree);
14501 const tree t = expr;
14502 enum tree_code code = TREE_CODE (t);
14503 enum tree_code_class kind = TREE_CODE_CLASS (code);
14505 location_t loc = EXPR_LOCATION (expr);
14507 /* Return right away if a constant. */
14508 if (kind == tcc_constant)
14511 /* CALL_EXPR-like objects with variable numbers of operands are
14512 treated specially. */
14513 if (kind == tcc_vl_exp)
14515 if (code == CALL_EXPR)
14517 tem = fold_call_expr (loc, expr, false);
14518 return tem ? tem : expr;
14523 if (IS_EXPR_CODE_CLASS (kind))
14525 tree type = TREE_TYPE (t);
14526 tree op0, op1, op2;
14528 switch (TREE_CODE_LENGTH (code))
14531 op0 = TREE_OPERAND (t, 0);
14532 tem = fold_unary_loc (loc, code, type, op0);
14533 return tem ? tem : expr;
14535 op0 = TREE_OPERAND (t, 0);
14536 op1 = TREE_OPERAND (t, 1);
14537 tem = fold_binary_loc (loc, code, type, op0, op1);
14538 return tem ? tem : expr;
14540 op0 = TREE_OPERAND (t, 0);
14541 op1 = TREE_OPERAND (t, 1);
14542 op2 = TREE_OPERAND (t, 2);
14543 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14544 return tem ? tem : expr;
14554 tree op0 = TREE_OPERAND (t, 0);
14555 tree op1 = TREE_OPERAND (t, 1);
14557 if (TREE_CODE (op1) == INTEGER_CST
14558 && TREE_CODE (op0) == CONSTRUCTOR
14559 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14561 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14562 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14563 unsigned HOST_WIDE_INT begin = 0;
14565 /* Find a matching index by means of a binary search. */
14566 while (begin != end)
14568 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14569 tree index = (*elts)[middle].index;
14571 if (TREE_CODE (index) == INTEGER_CST
14572 && tree_int_cst_lt (index, op1))
14573 begin = middle + 1;
14574 else if (TREE_CODE (index) == INTEGER_CST
14575 && tree_int_cst_lt (op1, index))
14577 else if (TREE_CODE (index) == RANGE_EXPR
14578 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14579 begin = middle + 1;
14580 else if (TREE_CODE (index) == RANGE_EXPR
14581 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14584 return (*elts)[middle].value;
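/* Sketch: this lets an ARRAY_REF like "a[1]" into a constant
   initializer { 10, 20, 30 } fold directly to 20; RANGE_EXPR
   indices are matched against both range bounds.  */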
14591 /* Return a VECTOR_CST if possible. */
14594 tree type = TREE_TYPE (t);
14595 if (TREE_CODE (type) != VECTOR_TYPE)
14598 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14599 unsigned HOST_WIDE_INT idx, pos = 0;
14602 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14604 if (!CONSTANT_CLASS_P (value))
14606 if (TREE_CODE (value) == VECTOR_CST)
14608 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14609 vec[pos++] = VECTOR_CST_ELT (value, i);
14612 vec[pos++] = value;
14614 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14615 vec[pos] = build_zero_cst (TREE_TYPE (type));
14617 return build_vector (type, vec);
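/* Sketch: a vector CONSTRUCTOR whose elements are all constants,
   e.g. { 1, 2, 3, 4 } for a V4SI type, is turned into a single
   VECTOR_CST; missing trailing elements are filled with zeros.  */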
14621 return fold (DECL_INITIAL (t));
14625 } /* switch (code) */
14628 #ifdef ENABLE_FOLD_CHECKING
14631 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14632 hash_table <pointer_hash <tree_node> >);
14633 static void fold_check_failed (const_tree, const_tree);
14634 void print_fold_checksum (const_tree);
14636 /* When --enable-checking=fold, compute a digest of EXPR before
14637 and after the actual fold call, to verify that fold did not
14638 accidentally change the original expr. */
14644 struct md5_ctx ctx;
14645 unsigned char checksum_before[16], checksum_after[16];
14646 hash_table <pointer_hash <tree_node> > ht;
14649 md5_init_ctx (&ctx);
14650 fold_checksum_tree (expr, &ctx, ht);
14651 md5_finish_ctx (&ctx, checksum_before);
14654 ret = fold_1 (expr);
14656 md5_init_ctx (&ctx);
14657 fold_checksum_tree (expr, &ctx, ht);
14658 md5_finish_ctx (&ctx, checksum_after);
14661 if (memcmp (checksum_before, checksum_after, 16))
14662 fold_check_failed (expr, ret);
14668 print_fold_checksum (const_tree expr)
14670 struct md5_ctx ctx;
14671 unsigned char checksum[16], cnt;
14672 hash_table <pointer_hash <tree_node> > ht;
14675 md5_init_ctx (&ctx);
14676 fold_checksum_tree (expr, &ctx, ht);
14677 md5_finish_ctx (&ctx, checksum);
14679 for (cnt = 0; cnt < 16; ++cnt)
14680 fprintf (stderr, "%02x", checksum[cnt]);
14681 putc ('\n', stderr);
14685 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14687 internal_error ("fold check: original tree changed by fold");
14691 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14692 hash_table <pointer_hash <tree_node> > ht)
14695 enum tree_code code;
14696 union tree_node buf;
14702 slot = ht.find_slot (expr, INSERT);
14705 *slot = CONST_CAST_TREE (expr);
14706 code = TREE_CODE (expr);
14707 if (TREE_CODE_CLASS (code) == tcc_declaration
14708 && DECL_ASSEMBLER_NAME_SET_P (expr))
14710 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14711 memcpy ((char *) &buf, expr, tree_size (expr));
14712 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14713 expr = (tree) &buf;
14715 else if (TREE_CODE_CLASS (code) == tcc_type
14716 && (TYPE_POINTER_TO (expr)
14717 || TYPE_REFERENCE_TO (expr)
14718 || TYPE_CACHED_VALUES_P (expr)
14719 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14720 || TYPE_NEXT_VARIANT (expr)))
14722 /* Allow these fields to be modified. */
14724 memcpy ((char *) &buf, expr, tree_size (expr));
14725 expr = tmp = (tree) &buf;
14726 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14727 TYPE_POINTER_TO (tmp) = NULL;
14728 TYPE_REFERENCE_TO (tmp) = NULL;
14729 TYPE_NEXT_VARIANT (tmp) = NULL;
14730 if (TYPE_CACHED_VALUES_P (tmp))
14732 TYPE_CACHED_VALUES_P (tmp) = 0;
14733 TYPE_CACHED_VALUES (tmp) = NULL;
14736 md5_process_bytes (expr, tree_size (expr), ctx);
14737 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14738 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14739 if (TREE_CODE_CLASS (code) != tcc_type
14740 && TREE_CODE_CLASS (code) != tcc_declaration
14741 && code != TREE_LIST
14742 && code != SSA_NAME
14743 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14744 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14745 switch (TREE_CODE_CLASS (code))
14751 md5_process_bytes (TREE_STRING_POINTER (expr),
14752 TREE_STRING_LENGTH (expr), ctx);
14755 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14756 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14759 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14760 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14766 case tcc_exceptional:
14770 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14771 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14772 expr = TREE_CHAIN (expr);
14773 goto recursive_label;
14776 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14777 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14783 case tcc_expression:
14784 case tcc_reference:
14785 case tcc_comparison:
14788 case tcc_statement:
14790 len = TREE_OPERAND_LENGTH (expr);
14791 for (i = 0; i < len; ++i)
14792 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14794 case tcc_declaration:
14795 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14796 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14797 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14799 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14800 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14801 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14802 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14803 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14805 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14806 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14808 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14810 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14811 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14812 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14816 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14817 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14818 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14819 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14820 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14821 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14822 if (INTEGRAL_TYPE_P (expr)
14823 || SCALAR_FLOAT_TYPE_P (expr))
14825 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14826 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14828 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14829 if (TREE_CODE (expr) == RECORD_TYPE
14830 || TREE_CODE (expr) == UNION_TYPE
14831 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14832 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14833 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14840 /* Helper function for outputting the checksum of a tree T. When
14841 debugging with gdb, you can "define mynext" to be "next" followed
14842 by "call debug_fold_checksum (op0)", then just trace down till the
14845 DEBUG_FUNCTION void
14846 debug_fold_checksum (const_tree t)
14849 unsigned char checksum[16];
14850 struct md5_ctx ctx;
14851 hash_table <pointer_hash <tree_node> > ht;
14854 md5_init_ctx (&ctx);
14855 fold_checksum_tree (t, &ctx, ht);
14856 md5_finish_ctx (&ctx, checksum);
14859 for (i = 0; i < 16; i++)
14860 fprintf (stderr, "%d ", checksum[i]);
14862 fprintf (stderr, "\n");
14867 /* Fold a unary tree expression with code CODE of type TYPE with an
14868 operand OP0. LOC is the location of the resulting expression.
14869 Return a folded expression if successful. Otherwise, return a tree
14870 expression with code CODE of type TYPE with an operand OP0. */
14873 fold_build1_stat_loc (location_t loc,
14874 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14877 #ifdef ENABLE_FOLD_CHECKING
14878 unsigned char checksum_before[16], checksum_after[16];
14879 struct md5_ctx ctx;
14880 hash_table <pointer_hash <tree_node> > ht;
14883 md5_init_ctx (&ctx);
14884 fold_checksum_tree (op0, &ctx, ht);
14885 md5_finish_ctx (&ctx, checksum_before);
14889 tem = fold_unary_loc (loc, code, type, op0);
14891 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14893 #ifdef ENABLE_FOLD_CHECKING
14894 md5_init_ctx (&ctx);
14895 fold_checksum_tree (op0, &ctx, ht);
14896 md5_finish_ctx (&ctx, checksum_after);
14899 if (memcmp (checksum_before, checksum_after, 16))
14900 fold_check_failed (op0, tem);
14905 /* Fold a binary tree expression with code CODE of type TYPE with
14906 operands OP0 and OP1. LOC is the location of the resulting
14907 expression. Return a folded expression if successful. Otherwise,
14908 return a tree expression with code CODE of type TYPE with operands
14912 fold_build2_stat_loc (location_t loc,
14913 enum tree_code code, tree type, tree op0, tree op1
14917 #ifdef ENABLE_FOLD_CHECKING
14918 unsigned char checksum_before_op0[16],
14919 checksum_before_op1[16],
14920 checksum_after_op0[16],
14921 checksum_after_op1[16];
14922 struct md5_ctx ctx;
14923 hash_table <pointer_hash <tree_node> > ht;
14926 md5_init_ctx (&ctx);
14927 fold_checksum_tree (op0, &ctx, ht);
14928 md5_finish_ctx (&ctx, checksum_before_op0);
14931 md5_init_ctx (&ctx);
14932 fold_checksum_tree (op1, &ctx, ht);
14933 md5_finish_ctx (&ctx, checksum_before_op1);
14937 tem = fold_binary_loc (loc, code, type, op0, op1);
14939 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14941 #ifdef ENABLE_FOLD_CHECKING
14942 md5_init_ctx (&ctx);
14943 fold_checksum_tree (op0, &ctx, ht);
14944 md5_finish_ctx (&ctx, checksum_after_op0);
14947 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14948 fold_check_failed (op0, tem);
14950 md5_init_ctx (&ctx);
14951 fold_checksum_tree (op1, &ctx, ht);
14952 md5_finish_ctx (&ctx, checksum_after_op1);
14955 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14956 fold_check_failed (op1, tem);
14961 /* Fold a ternary tree expression with code CODE of type TYPE with
14962 operands OP0, OP1, and OP2. Return a folded expression if
14963 successful. Otherwise, return a tree expression with code CODE of
14964 type TYPE with operands OP0, OP1, and OP2. */
14967 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14968 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14971 #ifdef ENABLE_FOLD_CHECKING
14972 unsigned char checksum_before_op0[16],
14973 checksum_before_op1[16],
14974 checksum_before_op2[16],
14975 checksum_after_op0[16],
14976 checksum_after_op1[16],
14977 checksum_after_op2[16];
14978 struct md5_ctx ctx;
14979 hash_table <pointer_hash <tree_node> > ht;
14982 md5_init_ctx (&ctx);
14983 fold_checksum_tree (op0, &ctx, ht);
14984 md5_finish_ctx (&ctx, checksum_before_op0);
14987 md5_init_ctx (&ctx);
14988 fold_checksum_tree (op1, &ctx, ht);
14989 md5_finish_ctx (&ctx, checksum_before_op1);
14992 md5_init_ctx (&ctx);
14993 fold_checksum_tree (op2, &ctx, ht);
14994 md5_finish_ctx (&ctx, checksum_before_op2);
14998 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14999 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15001 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15003 #ifdef ENABLE_FOLD_CHECKING
15004 md5_init_ctx (&ctx);
15005 fold_checksum_tree (op0, &ctx, ht);
15006 md5_finish_ctx (&ctx, checksum_after_op0);
15009 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15010 fold_check_failed (op0, tem);
15012 md5_init_ctx (&ctx);
15013 fold_checksum_tree (op1, &ctx, ht);
15014 md5_finish_ctx (&ctx, checksum_after_op1);
15017 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15018 fold_check_failed (op1, tem);
15020 md5_init_ctx (&ctx);
15021 fold_checksum_tree (op2, &ctx, ht);
15022 md5_finish_ctx (&ctx, checksum_after_op2);
15025 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15026 fold_check_failed (op2, tem);
15031 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
15032 arguments in ARGARRAY, and a null static chain.
15033 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15034 of type TYPE from the given operands as constructed by build_call_array. */
15037 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15038 int nargs, tree *argarray)
15041 #ifdef ENABLE_FOLD_CHECKING
15042 unsigned char checksum_before_fn[16],
15043 checksum_before_arglist[16],
15044 checksum_after_fn[16],
15045 checksum_after_arglist[16];
15046 struct md5_ctx ctx;
15047 hash_table <pointer_hash <tree_node> > ht;
15051 md5_init_ctx (&ctx);
15052 fold_checksum_tree (fn, &ctx, ht);
15053 md5_finish_ctx (&ctx, checksum_before_fn);
15056 md5_init_ctx (&ctx);
15057 for (i = 0; i < nargs; i++)
15058 fold_checksum_tree (argarray[i], &ctx, ht);
15059 md5_finish_ctx (&ctx, checksum_before_arglist);
15063 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15065 #ifdef ENABLE_FOLD_CHECKING
15066 md5_init_ctx (&ctx);
15067 fold_checksum_tree (fn, &ctx, ht);
15068 md5_finish_ctx (&ctx, checksum_after_fn);
15071 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15072 fold_check_failed (fn, tem);
15074 md5_init_ctx (&ctx);
15075 for (i = 0; i < nargs; i++)
15076 fold_checksum_tree (argarray[i], &ctx, ht);
15077 md5_finish_ctx (&ctx, checksum_after_arglist);
15080 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15081 fold_check_failed (NULL_TREE, tem);
15086 /* Perform constant folding and related simplification of initializer
15087 expression EXPR. These behave identically to "fold_buildN" but ignore
15088 potential run-time traps and exceptions that fold must preserve. */
15090 #define START_FOLD_INIT \
15091 int saved_signaling_nans = flag_signaling_nans;\
15092 int saved_trapping_math = flag_trapping_math;\
15093 int saved_rounding_math = flag_rounding_math;\
15094 int saved_trapv = flag_trapv;\
15095 int saved_folding_initializer = folding_initializer;\
15096 flag_signaling_nans = 0;\
15097 flag_trapping_math = 0;\
15098 flag_rounding_math = 0;\
15100 folding_initializer = 1;
15102 #define END_FOLD_INIT \
15103 flag_signaling_nans = saved_signaling_nans;\
15104 flag_trapping_math = saved_trapping_math;\
15105 flag_rounding_math = saved_rounding_math;\
15106 flag_trapv = saved_trapv;\
15107 folding_initializer = saved_folding_initializer;
15110 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15111 tree type, tree op)
15116 result = fold_build1_loc (loc, code, type, op);
15123 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15124 tree type, tree op0, tree op1)
15129 result = fold_build2_loc (loc, code, type, op0, op1);
15136 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15137 tree type, tree op0, tree op1, tree op2)
15142 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15149 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15150 int nargs, tree *argarray)
15155 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15161 #undef START_FOLD_INIT
15162 #undef END_FOLD_INIT
15164 /* Determine if the first argument is a multiple of the second argument.
15165 Return 0 if it is not, or if we cannot easily determine it to be.
15167 An example of the sort of thing we care about (at this point; this routine
15168 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15169 fold cases do now) is discovering that
15171 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15173 is a multiple of
15175 SAVE_EXPR (J * 8)
15177 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15179 This code also handles discovering that
15181 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15183 is a multiple of 8 so we don't have to worry about dealing with a
15184 possible remainder.
15186 Note that we *look* inside a SAVE_EXPR only to determine how it was
15187 calculated; it is not safe for fold to do much of anything else with the
15188 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15189 at run time. For example, the latter example above *cannot* be implemented
15190 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15191 evaluation time of the original SAVE_EXPR is not necessarily the same at
15192 the time the new expression is evaluated. The only optimization of this
15193 sort that would be valid is changing
15195 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15197 divided by 8 to
15199 SAVE_EXPR (I) * SAVE_EXPR (J)
15201 (where the same SAVE_EXPR (J) is used in the original and the
15202 transformed version). */
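/* Illustrative calls (sketch; TOP and BOTTOM stand for trees of the
   given values):

       multiple_of_p (type, J * 8, 8)    returns 1  (MULT_EXPR case)
       multiple_of_p (type, X & 24, 8)   returns 1  (BIT_AND_EXPR:
                                                     24 is a multiple of 8)
       multiple_of_p (type, I + 1, 2)    returns 0  (cannot be shown)  */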
15205 multiple_of_p (tree type, const_tree top, const_tree bottom)
15207 if (operand_equal_p (top, bottom, 0))
15210 if (TREE_CODE (type) != INTEGER_TYPE)
15213 switch (TREE_CODE (top))
15216 /* Bitwise and provides a power of two multiple. If the mask is
15217 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15218 if (!integer_pow2p (bottom))
15223 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15224 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15228 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15229 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15232 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15236 op1 = TREE_OPERAND (top, 1);
15237 /* const_binop may not detect overflow correctly,
15238 so check for it explicitly here. */
15239 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15240 > TREE_INT_CST_LOW (op1)
15241 && TREE_INT_CST_HIGH (op1) == 0
15242 && 0 != (t1 = fold_convert (type,
15243 const_binop (LSHIFT_EXPR,
15246 && !TREE_OVERFLOW (t1))
15247 return multiple_of_p (type, t1, bottom);
15252 /* Can't handle conversions from non-integral or wider integral type. */
15253 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15254 || (TYPE_PRECISION (type)
15255 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15258 /* ... fall through ... */
15261 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15264 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15265 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15268 if (TREE_CODE (bottom) != INTEGER_CST
15269 || integer_zerop (bottom)
15270 || (TYPE_UNSIGNED (type)
15271 && (tree_int_cst_sgn (top) < 0
15272 || tree_int_cst_sgn (bottom) < 0)))
15274 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15282 /* Return true if CODE or TYPE is known to be non-negative. */
15285 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15287 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15288 && truth_value_p (code))
15289 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15290 have a signed:1 type (where the values are -1 and 0). */
15295 /* Return true if (CODE OP0) is known to be non-negative. If the return
15296 value is based on the assumption that signed overflow is undefined,
15297 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15298 *STRICT_OVERFLOW_P. */
15301 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15302 bool *strict_overflow_p)
15304 if (TYPE_UNSIGNED (type))
15310 /* We can't return 1 if flag_wrapv is set because
15311 ABS_EXPR<INT_MIN> = INT_MIN. */
15312 if (!INTEGRAL_TYPE_P (type))
15314 if (TYPE_OVERFLOW_UNDEFINED (type))
15316 *strict_overflow_p = true;
15321 case NON_LVALUE_EXPR:
15323 case FIX_TRUNC_EXPR:
15324 return tree_expr_nonnegative_warnv_p (op0,
15325 strict_overflow_p);
15329 tree inner_type = TREE_TYPE (op0);
15330 tree outer_type = type;
15332 if (TREE_CODE (outer_type) == REAL_TYPE)
15334 if (TREE_CODE (inner_type) == REAL_TYPE)
15335 return tree_expr_nonnegative_warnv_p (op0,
15336 strict_overflow_p);
15337 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15339 if (TYPE_UNSIGNED (inner_type))
15341 return tree_expr_nonnegative_warnv_p (op0,
15342 strict_overflow_p);
15345 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15347 if (TREE_CODE (inner_type) == REAL_TYPE)
15348 return tree_expr_nonnegative_warnv_p (op0,
15349 strict_overflow_p);
15350 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15351 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15352 && TYPE_UNSIGNED (inner_type);
15358 return tree_simple_nonnegative_warnv_p (code, type);
15361 /* We don't know the sign of `t', so be conservative and return false. */
15365 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15366 value is based on the assumption that signed overflow is undefined,
15367 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15368 *STRICT_OVERFLOW_P. */
15371 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15372 tree op1, bool *strict_overflow_p)
15374 if (TYPE_UNSIGNED (type))
15379 case POINTER_PLUS_EXPR:
15381 if (FLOAT_TYPE_P (type))
15382 return (tree_expr_nonnegative_warnv_p (op0,
15384 && tree_expr_nonnegative_warnv_p (op1,
15385 strict_overflow_p));
15387 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15388 both unsigned and at least 2 bits shorter than the result. */
15389 if (TREE_CODE (type) == INTEGER_TYPE
15390 && TREE_CODE (op0) == NOP_EXPR
15391 && TREE_CODE (op1) == NOP_EXPR)
15393 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15394 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15395 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15396 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15398 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15399 TYPE_PRECISION (inner2)) + 1;
15400 return prec < TYPE_PRECISION (type);
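/* E.g. with "unsigned char a, b", the sum (int) a + (int) b is at
   most 255 + 255 == 510 and never negative: max (8, 8) + 1 == 9
   bits < 32.  */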
15406 if (FLOAT_TYPE_P (type))
15408 /* x * x for floating point x is always non-negative. */
15409 if (operand_equal_p (op0, op1, 0))
15411 return (tree_expr_nonnegative_warnv_p (op0,
15413 && tree_expr_nonnegative_warnv_p (op1,
15414 strict_overflow_p));
15417 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15418 both unsigned and their combined precision is less than the result's. */
15419 if (TREE_CODE (type) == INTEGER_TYPE
15420 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15421 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15423 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15424 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15426 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15427 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15430 bool unsigned0 = TYPE_UNSIGNED (inner0);
15431 bool unsigned1 = TYPE_UNSIGNED (inner1);
15433 if (TREE_CODE (op0) == INTEGER_CST)
15434 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15436 if (TREE_CODE (op1) == INTEGER_CST)
15437 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15439 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15440 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15442 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15443 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15444 : TYPE_PRECISION (inner0);
15446 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15447 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15448 : TYPE_PRECISION (inner1);
15450 return precision0 + precision1 < TYPE_PRECISION (type);
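      /* Worked example (illustrative, not in the original sources): two
	 zero-extended unsigned chars give precision0 + precision1 = 8 + 8
	 = 16, and 255 * 255 = 65025 < 2^16, so the product is known
	 non-negative whenever the result type is wider than 16 bits.  */
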
    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

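	/* Worked example (illustrative, not in the original sources): for
	   pow (x, 2.0) the exponent converts to the integer 2, (2 & 1) == 0,
	   and converting 2 back to real reproduces 2.0 exactly, so the call
	   is known non-negative for any x.  For pow (x, 2.5) the round trip
	   fails and we fall back to checking the first argument.  */
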
      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

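/* Usage sketch (illustrative, not part of the original sources): a folder
   for ABS_EXPR can drop the absolute value once its operand is provably
   non-negative, e.g.

     if (tree_expr_nonnegative_p (arg0))
       return arg0;

   the fold_overflow_warning call above fires only when that proof relied
   on signed overflow being undefined.  */
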
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

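      /* Worked example (illustrative, not in the original sources): the
	 overflow assumption is essential here.  With 32-bit wrapping
	 arithmetic 65536 * 65536 == 0 even though both factors are nonzero;
	 only when signed overflow is undefined may a product of nonzero
	 operands be assumed nonzero, hence *STRICT_OVERFLOW_P is set.  */
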
    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

static bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

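/* Worked example (illustrative, not in the original sources): for the
   expression "abc"[1] the index 1 is an INTEGER_CST smaller than the
   string length 4 (which includes the terminating NUL), the element mode
   is a one-byte integer mode, and the whole reference folds to the
   character constant 'b'.  */
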
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}

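/* Worked example (illustrative, not in the original sources): in the
   two's complement representation used here ~x == -x - 1, so
   fold_not_const applied to the constant 5 yields -6 and applied to 0
   yields the all-ones value -1.  */
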
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

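  /* Worked example (illustrative, not in the original sources): comparing
     the complex constants 1+2i and 1+3i with EQ_EXPR folds to
     (1 == 1) && (2 == 3), i.e. false.  Only equality and inequality have
     a defined meaning for complex operands, hence the NULL_TREE result
     for any other code.  */
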
  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

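  /* Worked example (illustrative, not in the original sources): 5 >= 3 is
     evaluated as the inversion of 5 < 3; LT yields 0 and the invert flag
     flips it to 1.  Likewise 3 <= 5 first swaps the operands and then
     inverts 5 < 3, giving 1 as well.  */
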
  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either lacks them we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

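/* Worked example (illustrative, not in the original sources): for an
   ignored "x + foo ()" the tcc_binary handling keeps only the operand
   with side effects, so the result is just the call to foo; an ignored
   side-effect-free "x + y" is replaced by integer_zero_node at the top
   of the function.  */
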
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

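/* Worked example (illustrative, not in the original sources): rounding 13
   up to a multiple of 8 takes the bit-twiddling branch because
   8 == (8 & -8), computing (13 + 7) & -8 == 16.  For a divisor such as
   12 that is not a power of two, the CEIL_DIV_EXPR/MULT_EXPR pair yields
   ((13 + 11) / 12) * 12 == 24.  */
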
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

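/* Worked example (illustrative, not in the original sources): rounding 13
   down to a multiple of 8 computes 13 & -8 == 8, since -8 is ...11111000
   in two's complement and masks away the low three bits; for divisor 12
   the FLOOR_DIV_EXPR/MULT_EXPR pair yields (13 / 12) * 12 == 12.  */
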
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

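/* Worked example (illustrative, not in the original sources): for
   e1 = &a[3] and e2 = &a[1] with 4-byte elements, both addresses share
   the core &a, the bit positions are 96 and 32, and *diff becomes
   (96 - 32) / 8 == 8 bytes.  */
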
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

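	  /* Illustrative note (not in the original sources): in a call such
	     as cos (copysign (x, y)) the sign of the cosine argument does
	     not matter, so the copysign call folds to its first argument;
	     omit_one_operand_loc preserves any side effects of the
	     discarded second argument.  */
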
	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}