1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
56 #include "tree-iterator.h"
58 #include "insn-config.h"
68 #include "diagnostic-core.h"
70 #include "langhooks.h"
72 #include "internal-fn.h"
78 #include "generic-match.h"
79 #include "optabs-query.h"
80 #include "gimple-fold.h"
82 #include "tree-ssa-operands.h"
83 #include "tree-into-ssa.h"
85 #ifndef LOAD_EXTEND_OP
86 #define LOAD_EXTEND_OP(M) UNKNOWN
89 /* Nonzero if we are folding constants inside an initializer; zero
91 int folding_initializer = 0;
93 /* The following constants represent a bit based encoding of GCC's
94 comparison operators. This encoding simplifies transformations
95 on relational comparison operators, such as AND and OR. */
96 enum comparison_code {
115 static bool negate_expr_p (tree);
116 static tree negate_expr (tree);
117 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
118 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
119 static enum comparison_code comparison_to_compcode (enum tree_code);
120 static enum tree_code compcode_to_comparison (enum comparison_code);
121 static int operand_equal_for_comparison_p (tree, tree, tree);
122 static int twoval_comparison_p (tree, tree *, tree *, int *);
123 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
124 static tree make_bit_field_ref (location_t, tree, tree,
125 HOST_WIDE_INT, HOST_WIDE_INT, int);
126 static tree optimize_bit_field_compare (location_t, enum tree_code,
128 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
130 machine_mode *, int *, int *,
132 static int simple_operand_p (const_tree);
133 static bool simple_operand_p_2 (tree);
134 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 static tree range_predecessor (tree);
136 static tree range_successor (tree);
137 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
139 static tree unextend (tree, int, int, tree);
140 static tree optimize_minmax_comparison (location_t, enum tree_code,
142 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
143 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
144 static tree fold_binary_op_with_conditional_arg (location_t,
145 enum tree_code, tree,
148 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
149 static bool reorder_operands_p (const_tree, const_tree);
150 static tree fold_negate_const (tree, tree);
151 static tree fold_not_const (const_tree, tree);
152 static tree fold_relational_const (enum tree_code, tree, tree, tree);
153 static tree fold_convert_const (enum tree_code, tree, tree);
154 static tree fold_view_convert_expr (tree, tree);
155 static bool vec_cst_ctor_to_array (tree, tree *);
158 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
159 Otherwise, return LOC. */
162 expr_location_or (tree t, location_t loc)
164 location_t tloc = EXPR_LOCATION (t);
165 return tloc == UNKNOWN_LOCATION ? loc : tloc;
168 /* Similar to protected_set_expr_location, but never modify x in place,
169 if location can and needs to be set, unshare it. */
172 protected_set_expr_location_unshare (tree x, location_t loc)
174 if (CAN_HAVE_LOCATION_P (x)
175 && EXPR_LOCATION (x) != loc
176 && !(TREE_CODE (x) == SAVE_EXPR
177 || TREE_CODE (x) == TARGET_EXPR
178 || TREE_CODE (x) == BIND_EXPR))
181 SET_EXPR_LOCATION (x, loc);
186 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
187 division and returns the quotient. Otherwise returns
191 div_if_zero_remainder (const_tree arg1, const_tree arg2)
195 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
197 return wide_int_to_tree (TREE_TYPE (arg1), quo);
202 /* This is nonzero if we should defer warnings about undefined
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
211 static int fold_deferring_overflow_warnings;
213 /* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
218 static const char* fold_deferred_overflow_warning;
220 /* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
223 static enum warn_strict_overflow_code fold_deferred_overflow_code;
225 /* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
229 fold_defer_overflow_warnings (void)
231 ++fold_deferring_overflow_warnings;
234 /* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
244 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
249 gcc_assert (fold_deferring_overflow_warnings > 0);
250 --fold_deferring_overflow_warnings;
251 if (fold_deferring_overflow_warnings > 0)
253 if (fold_deferred_overflow_warning != NULL
255 && code < (int) fold_deferred_overflow_code)
256 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 warnmsg = fold_deferred_overflow_warning;
261 fold_deferred_overflow_warning = NULL;
263 if (!issue || warnmsg == NULL)
266 if (gimple_no_warning_p (stmt))
269 /* Use the smallest code level when deciding to issue the
271 if (code == 0 || code > (int) fold_deferred_overflow_code)
272 code = fold_deferred_overflow_code;
274 if (!issue_strict_overflow_warning (code))
278 locus = input_location;
280 locus = gimple_location (stmt);
281 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
284 /* Stop deferring overflow warnings, ignoring any deferred
288 fold_undefer_and_ignore_overflow_warnings (void)
290 fold_undefer_overflow_warnings (false, NULL, 0);
293 /* Whether we are deferring overflow warnings. */
296 fold_deferring_overflow_warnings_p (void)
298 return fold_deferring_overflow_warnings > 0;
301 /* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
305 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
307 if (fold_deferring_overflow_warnings > 0)
309 if (fold_deferred_overflow_warning == NULL
310 || wc < fold_deferred_overflow_code)
312 fold_deferred_overflow_warning = gmsgid;
313 fold_deferred_overflow_code = wc;
316 else if (issue_strict_overflow_warning (wc))
317 warning (OPT_Wstrict_overflow, gmsgid);
320 /* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
324 negate_mathfn_p (enum built_in_function code)
328 CASE_FLT_FN (BUILT_IN_ASIN):
329 CASE_FLT_FN (BUILT_IN_ASINH):
330 CASE_FLT_FN (BUILT_IN_ATAN):
331 CASE_FLT_FN (BUILT_IN_ATANH):
332 CASE_FLT_FN (BUILT_IN_CASIN):
333 CASE_FLT_FN (BUILT_IN_CASINH):
334 CASE_FLT_FN (BUILT_IN_CATAN):
335 CASE_FLT_FN (BUILT_IN_CATANH):
336 CASE_FLT_FN (BUILT_IN_CBRT):
337 CASE_FLT_FN (BUILT_IN_CPROJ):
338 CASE_FLT_FN (BUILT_IN_CSIN):
339 CASE_FLT_FN (BUILT_IN_CSINH):
340 CASE_FLT_FN (BUILT_IN_CTAN):
341 CASE_FLT_FN (BUILT_IN_CTANH):
342 CASE_FLT_FN (BUILT_IN_ERF):
343 CASE_FLT_FN (BUILT_IN_LLROUND):
344 CASE_FLT_FN (BUILT_IN_LROUND):
345 CASE_FLT_FN (BUILT_IN_ROUND):
346 CASE_FLT_FN (BUILT_IN_SIN):
347 CASE_FLT_FN (BUILT_IN_SINH):
348 CASE_FLT_FN (BUILT_IN_TAN):
349 CASE_FLT_FN (BUILT_IN_TANH):
350 CASE_FLT_FN (BUILT_IN_TRUNC):
353 CASE_FLT_FN (BUILT_IN_LLRINT):
354 CASE_FLT_FN (BUILT_IN_LRINT):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT):
356 CASE_FLT_FN (BUILT_IN_RINT):
357 return !flag_rounding_math;
365 /* Check whether we may negate an integer constant T without causing
369 may_negate_without_overflow_p (const_tree t)
373 gcc_assert (TREE_CODE (t) == INTEGER_CST);
375 type = TREE_TYPE (t);
376 if (TYPE_UNSIGNED (type))
379 return !wi::only_sign_bit_p (t);
382 /* Determine whether an expression T can be cheaply negated using
383 the function negate_expr without introducing undefined overflow. */
386 negate_expr_p (tree t)
393 type = TREE_TYPE (t);
396 switch (TREE_CODE (t))
399 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
402 /* Check that -CST will not overflow type. */
403 return may_negate_without_overflow_p (t);
405 return (INTEGRAL_TYPE_P (type)
406 && TYPE_OVERFLOW_WRAPS (type));
412 return !TYPE_OVERFLOW_SANITIZED (type);
415 /* We want to canonicalize to positive real constants. Pretend
416 that only negative ones can be easily negated. */
417 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
420 return negate_expr_p (TREE_REALPART (t))
421 && negate_expr_p (TREE_IMAGPART (t));
425 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
428 int count = TYPE_VECTOR_SUBPARTS (type), i;
430 for (i = 0; i < count; i++)
431 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
438 return negate_expr_p (TREE_OPERAND (t, 0))
439 && negate_expr_p (TREE_OPERAND (t, 1));
442 return negate_expr_p (TREE_OPERAND (t, 0));
445 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 || HONOR_SIGNED_ZEROS (element_mode (type)))
448 /* -(A + B) -> (-B) - A. */
449 if (negate_expr_p (TREE_OPERAND (t, 1))
450 && reorder_operands_p (TREE_OPERAND (t, 0),
451 TREE_OPERAND (t, 1)))
453 /* -(A + B) -> (-A) - B. */
454 return negate_expr_p (TREE_OPERAND (t, 0));
457 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
458 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
459 && !HONOR_SIGNED_ZEROS (element_mode (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
464 if (TYPE_UNSIGNED (TREE_TYPE (t)))
470 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
471 return negate_expr_p (TREE_OPERAND (t, 1))
472 || negate_expr_p (TREE_OPERAND (t, 0));
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
485 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
487 /* If overflow is undefined then we have to be careful because
488 we ask whether it's ok to associate the negate with the
489 division which is not ok for example for
490 -((a - b) / c) where (-(a - b)) / c may invoke undefined
491 overflow because of negating INT_MIN. So do not use
492 negate_expr_p here but open-code the two important cases. */
493 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
494 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
495 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
498 else if (negate_expr_p (TREE_OPERAND (t, 0)))
500 return negate_expr_p (TREE_OPERAND (t, 1));
503 /* Negate -((double)float) as (double)(-float). */
504 if (TREE_CODE (type) == REAL_TYPE)
506 tree tem = strip_float_extensions (t);
508 return negate_expr_p (tem);
513 /* Negate -f(x) as f(-x). */
514 if (negate_mathfn_p (builtin_mathfn_code (t)))
515 return negate_expr_p (CALL_EXPR_ARG (t, 0));
519 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
520 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
522 tree op1 = TREE_OPERAND (t, 1);
523 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
534 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
535 simplification is possible.
536 If negate_expr_p would return true for T, NULL_TREE will never be
540 fold_negate_expr (location_t loc, tree t)
542 tree type = TREE_TYPE (t);
545 switch (TREE_CODE (t))
547 /* Convert - (~A) to A + 1. */
549 if (INTEGRAL_TYPE_P (type))
550 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
551 build_one_cst (type));
555 tem = fold_negate_const (t, type);
556 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
557 || (ANY_INTEGRAL_TYPE_P (type)
558 && !TYPE_OVERFLOW_TRAPS (type)
559 && TYPE_OVERFLOW_WRAPS (type))
560 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
565 tem = fold_negate_const (t, type);
569 tem = fold_negate_const (t, type);
574 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
575 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
577 return build_complex (type, rpart, ipart);
583 int count = TYPE_VECTOR_SUBPARTS (type), i;
584 tree *elts = XALLOCAVEC (tree, count);
586 for (i = 0; i < count; i++)
588 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
589 if (elts[i] == NULL_TREE)
593 return build_vector (type, elts);
597 if (negate_expr_p (t))
598 return fold_build2_loc (loc, COMPLEX_EXPR, type,
599 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
600 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
604 if (negate_expr_p (t))
605 return fold_build1_loc (loc, CONJ_EXPR, type,
606 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
610 if (!TYPE_OVERFLOW_SANITIZED (type))
611 return TREE_OPERAND (t, 0);
615 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
616 && !HONOR_SIGNED_ZEROS (element_mode (type)))
618 /* -(A + B) -> (-B) - A. */
619 if (negate_expr_p (TREE_OPERAND (t, 1))
620 && reorder_operands_p (TREE_OPERAND (t, 0),
621 TREE_OPERAND (t, 1)))
623 tem = negate_expr (TREE_OPERAND (t, 1));
624 return fold_build2_loc (loc, MINUS_EXPR, type,
625 tem, TREE_OPERAND (t, 0));
628 /* -(A + B) -> (-A) - B. */
629 if (negate_expr_p (TREE_OPERAND (t, 0)))
631 tem = negate_expr (TREE_OPERAND (t, 0));
632 return fold_build2_loc (loc, MINUS_EXPR, type,
633 tem, TREE_OPERAND (t, 1));
639 /* - (A - B) -> B - A */
640 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
641 && !HONOR_SIGNED_ZEROS (element_mode (type))
642 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
643 return fold_build2_loc (loc, MINUS_EXPR, type,
644 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
648 if (TYPE_UNSIGNED (type))
654 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
656 tem = TREE_OPERAND (t, 1);
657 if (negate_expr_p (tem))
658 return fold_build2_loc (loc, TREE_CODE (t), type,
659 TREE_OPERAND (t, 0), negate_expr (tem));
660 tem = TREE_OPERAND (t, 0);
661 if (negate_expr_p (tem))
662 return fold_build2_loc (loc, TREE_CODE (t), type,
663 negate_expr (tem), TREE_OPERAND (t, 1));
670 /* In general we can't negate A / B, because if A is INT_MIN and
671 B is 1, we may turn this into INT_MIN / -1 which is undefined
672 and actually traps on some architectures. But if overflow is
673 undefined, we can negate, because - (INT_MIN / 1) is an
675 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
677 const char * const warnmsg = G_("assuming signed overflow does not "
678 "occur when negating a division");
679 tem = TREE_OPERAND (t, 1);
680 if (negate_expr_p (tem))
682 if (INTEGRAL_TYPE_P (type)
683 && (TREE_CODE (tem) != INTEGER_CST
684 || integer_onep (tem)))
685 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
686 return fold_build2_loc (loc, TREE_CODE (t), type,
687 TREE_OPERAND (t, 0), negate_expr (tem));
689 /* If overflow is undefined then we have to be careful because
690 we ask whether it's ok to associate the negate with the
691 division which is not ok for example for
692 -((a - b) / c) where (-(a - b)) / c may invoke undefined
693 overflow because of negating INT_MIN. So do not use
694 negate_expr_p here but open-code the two important cases. */
695 tem = TREE_OPERAND (t, 0);
696 if ((INTEGRAL_TYPE_P (type)
697 && (TREE_CODE (tem) == NEGATE_EXPR
698 || (TREE_CODE (tem) == INTEGER_CST
699 && may_negate_without_overflow_p (tem))))
700 || !INTEGRAL_TYPE_P (type))
701 return fold_build2_loc (loc, TREE_CODE (t), type,
702 negate_expr (tem), TREE_OPERAND (t, 1));
707 /* Convert -((double)float) into (double)(-float). */
708 if (TREE_CODE (type) == REAL_TYPE)
710 tem = strip_float_extensions (t);
711 if (tem != t && negate_expr_p (tem))
712 return fold_convert_loc (loc, type, negate_expr (tem));
717 /* Negate -f(x) as f(-x). */
718 if (negate_mathfn_p (builtin_mathfn_code (t))
719 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
723 fndecl = get_callee_fndecl (t);
724 arg = negate_expr (CALL_EXPR_ARG (t, 0));
725 return build_call_expr_loc (loc, fndecl, 1, arg);
730 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
731 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
733 tree op1 = TREE_OPERAND (t, 1);
734 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
736 tree ntype = TYPE_UNSIGNED (type)
737 ? signed_type_for (type)
738 : unsigned_type_for (type);
739 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
740 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
741 return fold_convert_loc (loc, type, temp);
753 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
754 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
766 loc = EXPR_LOCATION (t);
767 type = TREE_TYPE (t);
770 tem = fold_negate_expr (loc, t);
772 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
773 return fold_convert_loc (loc, type, tem);
776 /* Split a tree IN into a constant, literal and variable parts that could be
777 combined with CODE to make IN. "constant" means an expression with
778 TREE_CONSTANT but that isn't an actual constant. CODE must be a
779 commutative arithmetic operation. Store the constant part into *CONP,
780 the literal in *LITP and return the variable part. If a part isn't
781 present, set it to null. If the tree does not decompose in this way,
782 return the entire tree as the variable part and the other parts as null.
784 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
785 case, we negate an operand that was subtracted. Except if it is a
786 literal for which we use *MINUS_LITP instead.
788 If NEGATE_P is true, we are negating all of IN, again except a literal
789 for which we use *MINUS_LITP instead.
791 If IN is itself a literal or constant, return it as appropriate.
793 Note that we do not guarantee that any of the three values will be the
794 same type as IN, but they will have the same signedness and mode. */
797 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
798 tree *minus_litp, int negate_p)
806 /* Strip any conversions that don't change the machine mode or signedness. */
807 STRIP_SIGN_NOPS (in);
809 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
810 || TREE_CODE (in) == FIXED_CST)
812 else if (TREE_CODE (in) == code
813 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
814 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
815 /* We can associate addition and subtraction together (even
816 though the C standard doesn't say so) for integers because
817 the value is not affected. For reals, the value might be
818 affected, so we can't. */
819 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
820 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
822 tree op0 = TREE_OPERAND (in, 0);
823 tree op1 = TREE_OPERAND (in, 1);
824 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
825 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
827 /* First see if either of the operands is a literal, then a constant. */
828 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
829 || TREE_CODE (op0) == FIXED_CST)
830 *litp = op0, op0 = 0;
831 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
832 || TREE_CODE (op1) == FIXED_CST)
833 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
835 if (op0 != 0 && TREE_CONSTANT (op0))
836 *conp = op0, op0 = 0;
837 else if (op1 != 0 && TREE_CONSTANT (op1))
838 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
840 /* If we haven't dealt with either operand, this is not a case we can
841 decompose. Otherwise, VAR is either of the ones remaining, if any. */
842 if (op0 != 0 && op1 != 0)
847 var = op1, neg_var_p = neg1_p;
849 /* Now do any needed negations. */
851 *minus_litp = *litp, *litp = 0;
853 *conp = negate_expr (*conp);
855 var = negate_expr (var);
857 else if (TREE_CODE (in) == BIT_NOT_EXPR
858 && code == PLUS_EXPR)
860 /* -X - 1 is folded to ~X, undo that here. */
861 *minus_litp = build_one_cst (TREE_TYPE (in));
862 var = negate_expr (TREE_OPERAND (in, 0));
864 else if (TREE_CONSTANT (in))
872 *minus_litp = *litp, *litp = 0;
873 else if (*minus_litp)
874 *litp = *minus_litp, *minus_litp = 0;
875 *conp = negate_expr (*conp);
876 var = negate_expr (var);
882 /* Re-associate trees split by the above function. T1 and T2 are
883 either expressions to associate or null. Return the new
884 expression, if any. LOC is the location of the new expression. If
885 we build an operation, do it in TYPE and with CODE. */
888 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
895 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
896 try to fold this since we will have infinite recursion. But do
897 deal with any NEGATE_EXPRs. */
898 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
899 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
901 if (code == PLUS_EXPR)
903 if (TREE_CODE (t1) == NEGATE_EXPR)
904 return build2_loc (loc, MINUS_EXPR, type,
905 fold_convert_loc (loc, type, t2),
906 fold_convert_loc (loc, type,
907 TREE_OPERAND (t1, 0)));
908 else if (TREE_CODE (t2) == NEGATE_EXPR)
909 return build2_loc (loc, MINUS_EXPR, type,
910 fold_convert_loc (loc, type, t1),
911 fold_convert_loc (loc, type,
912 TREE_OPERAND (t2, 0)));
913 else if (integer_zerop (t2))
914 return fold_convert_loc (loc, type, t1);
916 else if (code == MINUS_EXPR)
918 if (integer_zerop (t2))
919 return fold_convert_loc (loc, type, t1);
922 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type, t2));
926 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type, t2));
930 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
931 for use in int_const_binop, size_binop and size_diffop. */
934 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
936 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
938 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
953 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
954 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
955 && TYPE_MODE (type1) == TYPE_MODE (type2);
959 /* Combine two integer constants ARG1 and ARG2 under operation CODE
960 to produce a new constant. Return NULL_TREE if we don't know how
961 to evaluate CODE at compile-time. */
964 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
969 tree type = TREE_TYPE (arg1);
970 signop sign = TYPE_SIGN (type);
971 bool overflow = false;
973 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
974 TYPE_SIGN (TREE_TYPE (parg2)));
979 res = wi::bit_or (arg1, arg2);
983 res = wi::bit_xor (arg1, arg2);
987 res = wi::bit_and (arg1, arg2);
992 if (wi::neg_p (arg2))
995 if (code == RSHIFT_EXPR)
1001 if (code == RSHIFT_EXPR)
1002 /* It's unclear from the C standard whether shifts can overflow.
1003 The following code ignores overflow; perhaps a C standard
1004 interpretation ruling is needed. */
1005 res = wi::rshift (arg1, arg2, sign);
1007 res = wi::lshift (arg1, arg2);
1012 if (wi::neg_p (arg2))
1015 if (code == RROTATE_EXPR)
1016 code = LROTATE_EXPR;
1018 code = RROTATE_EXPR;
1021 if (code == RROTATE_EXPR)
1022 res = wi::rrotate (arg1, arg2);
1024 res = wi::lrotate (arg1, arg2);
1028 res = wi::add (arg1, arg2, sign, &overflow);
1032 res = wi::sub (arg1, arg2, sign, &overflow);
1036 res = wi::mul (arg1, arg2, sign, &overflow);
1039 case MULT_HIGHPART_EXPR:
1040 res = wi::mul_high (arg1, arg2, sign);
1043 case TRUNC_DIV_EXPR:
1044 case EXACT_DIV_EXPR:
1047 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1050 case FLOOR_DIV_EXPR:
1053 res = wi::div_floor (arg1, arg2, sign, &overflow);
1059 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1062 case ROUND_DIV_EXPR:
1065 res = wi::div_round (arg1, arg2, sign, &overflow);
1068 case TRUNC_MOD_EXPR:
1071 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1074 case FLOOR_MOD_EXPR:
1077 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1083 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1086 case ROUND_MOD_EXPR:
1089 res = wi::mod_round (arg1, arg2, sign, &overflow);
1093 res = wi::min (arg1, arg2, sign);
1097 res = wi::max (arg1, arg2, sign);
1104 t = force_fit_type (type, res, overflowable,
1105 (((sign == SIGNED || overflowable == -1)
1107 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1113 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1115 return int_const_binop_1 (code, arg1, arg2, 1);
1118 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1119 constant. We assume ARG1 and ARG2 have the same data type, or at least
1120 are the same kind of constant and the same machine mode. Return zero if
1121 combining the constants is not allowed in the current operating mode. */
1124 const_binop (enum tree_code code, tree arg1, tree arg2)
1126 /* Sanity check for the recursive cases. */
1133 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1135 if (code == POINTER_PLUS_EXPR)
1136 return int_const_binop (PLUS_EXPR,
1137 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1139 return int_const_binop (code, arg1, arg2);
1142 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1147 REAL_VALUE_TYPE value;
1148 REAL_VALUE_TYPE result;
1152 /* The following codes are handled by real_arithmetic. */
1167 d1 = TREE_REAL_CST (arg1);
1168 d2 = TREE_REAL_CST (arg2);
1170 type = TREE_TYPE (arg1);
1171 mode = TYPE_MODE (type);
1173 /* Don't perform operation if we honor signaling NaNs and
1174 either operand is a NaN. */
1175 if (HONOR_SNANS (mode)
1176 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1179 /* Don't perform operation if it would raise a division
1180 by zero exception. */
1181 if (code == RDIV_EXPR
1182 && real_equal (&d2, &dconst0)
1183 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1186 /* If either operand is a NaN, just return it. Otherwise, set up
1187 for floating-point trap; we return an overflow. */
1188 if (REAL_VALUE_ISNAN (d1))
1190 else if (REAL_VALUE_ISNAN (d2))
1193 inexact = real_arithmetic (&value, code, &d1, &d2);
1194 real_convert (&result, mode, &value);
1196 /* Don't constant fold this floating point operation if
1197 the result has overflowed and flag_trapping_math. */
1198 if (flag_trapping_math
1199 && MODE_HAS_INFINITIES (mode)
1200 && REAL_VALUE_ISINF (result)
1201 && !REAL_VALUE_ISINF (d1)
1202 && !REAL_VALUE_ISINF (d2))
1205 /* Don't constant fold this floating point operation if the
1206 result may dependent upon the run-time rounding mode and
1207 flag_rounding_math is set, or if GCC's software emulation
1208 is unable to accurately represent the result. */
1209 if ((flag_rounding_math
1210 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1211 && (inexact || !real_identical (&result, &value)))
1214 t = build_real (type, result);
1216 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1220 if (TREE_CODE (arg1) == FIXED_CST)
1222 FIXED_VALUE_TYPE f1;
1223 FIXED_VALUE_TYPE f2;
1224 FIXED_VALUE_TYPE result;
1229 /* The following codes are handled by fixed_arithmetic. */
1235 case TRUNC_DIV_EXPR:
1236 if (TREE_CODE (arg2) != FIXED_CST)
1238 f2 = TREE_FIXED_CST (arg2);
1244 if (TREE_CODE (arg2) != INTEGER_CST)
1247 f2.data.high = w2.elt (1);
1248 f2.data.low = w2.elt (0);
1257 f1 = TREE_FIXED_CST (arg1);
1258 type = TREE_TYPE (arg1);
1259 sat_p = TYPE_SATURATING (type);
1260 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1261 t = build_fixed (type, result);
1262 /* Propagate overflow flags. */
1263 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1264 TREE_OVERFLOW (t) = 1;
1268 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1270 tree type = TREE_TYPE (arg1);
1271 tree r1 = TREE_REALPART (arg1);
1272 tree i1 = TREE_IMAGPART (arg1);
1273 tree r2 = TREE_REALPART (arg2);
1274 tree i2 = TREE_IMAGPART (arg2);
1281 real = const_binop (code, r1, r2);
1282 imag = const_binop (code, i1, i2);
1286 if (COMPLEX_FLOAT_TYPE_P (type))
1287 return do_mpc_arg2 (arg1, arg2, type,
1288 /* do_nonfinite= */ folding_initializer,
1291 real = const_binop (MINUS_EXPR,
1292 const_binop (MULT_EXPR, r1, r2),
1293 const_binop (MULT_EXPR, i1, i2));
1294 imag = const_binop (PLUS_EXPR,
1295 const_binop (MULT_EXPR, r1, i2),
1296 const_binop (MULT_EXPR, i1, r2));
1300 if (COMPLEX_FLOAT_TYPE_P (type))
1301 return do_mpc_arg2 (arg1, arg2, type,
1302 /* do_nonfinite= */ folding_initializer,
1305 case TRUNC_DIV_EXPR:
1307 case FLOOR_DIV_EXPR:
1308 case ROUND_DIV_EXPR:
1309 if (flag_complex_method == 0)
1311 /* Keep this algorithm in sync with
1312 tree-complex.c:expand_complex_div_straight().
1314 Expand complex division to scalars, straightforward algorithm.
1315 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1319 = const_binop (PLUS_EXPR,
1320 const_binop (MULT_EXPR, r2, r2),
1321 const_binop (MULT_EXPR, i2, i2));
1323 = const_binop (PLUS_EXPR,
1324 const_binop (MULT_EXPR, r1, r2),
1325 const_binop (MULT_EXPR, i1, i2));
1327 = const_binop (MINUS_EXPR,
1328 const_binop (MULT_EXPR, i1, r2),
1329 const_binop (MULT_EXPR, r1, i2));
1331 real = const_binop (code, t1, magsquared);
1332 imag = const_binop (code, t2, magsquared);
1336 /* Keep this algorithm in sync with
1337 tree-complex.c:expand_complex_div_wide().
1339 Expand complex division to scalars, modified algorithm to minimize
1340 overflow with wide input ranges. */
1341 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1342 fold_abs_const (r2, TREE_TYPE (type)),
1343 fold_abs_const (i2, TREE_TYPE (type)));
1345 if (integer_nonzerop (compare))
1347 /* In the TRUE branch, we compute
1349 div = (br * ratio) + bi;
1350 tr = (ar * ratio) + ai;
1351 ti = (ai * ratio) - ar;
1354 tree ratio = const_binop (code, r2, i2);
1355 tree div = const_binop (PLUS_EXPR, i2,
1356 const_binop (MULT_EXPR, r2, ratio));
1357 real = const_binop (MULT_EXPR, r1, ratio);
1358 real = const_binop (PLUS_EXPR, real, i1);
1359 real = const_binop (code, real, div);
1361 imag = const_binop (MULT_EXPR, i1, ratio);
1362 imag = const_binop (MINUS_EXPR, imag, r1);
1363 imag = const_binop (code, imag, div);
1367 /* In the FALSE branch, we compute
1369 divisor = (d * ratio) + c;
1370 tr = (b * ratio) + a;
1371 ti = b - (a * ratio);
1374 tree ratio = const_binop (code, i2, r2);
1375 tree div = const_binop (PLUS_EXPR, r2,
1376 const_binop (MULT_EXPR, i2, ratio));
1378 real = const_binop (MULT_EXPR, i1, ratio);
1379 real = const_binop (PLUS_EXPR, real, r1);
1380 real = const_binop (code, real, div);
1382 imag = const_binop (MULT_EXPR, r1, ratio);
1383 imag = const_binop (MINUS_EXPR, i1, imag);
1384 imag = const_binop (code, imag, div);
1394 return build_complex (type, real, imag);
1397 if (TREE_CODE (arg1) == VECTOR_CST
1398 && TREE_CODE (arg2) == VECTOR_CST)
1400 tree type = TREE_TYPE (arg1);
1401 int count = TYPE_VECTOR_SUBPARTS (type), i;
1402 tree *elts = XALLOCAVEC (tree, count);
1404 for (i = 0; i < count; i++)
1406 tree elem1 = VECTOR_CST_ELT (arg1, i);
1407 tree elem2 = VECTOR_CST_ELT (arg2, i);
1409 elts[i] = const_binop (code, elem1, elem2);
1411 /* It is possible that const_binop cannot handle the given
1412 code and return NULL_TREE */
1413 if (elts[i] == NULL_TREE)
1417 return build_vector (type, elts);
1420 /* Shifts allow a scalar offset for a vector. */
1421 if (TREE_CODE (arg1) == VECTOR_CST
1422 && TREE_CODE (arg2) == INTEGER_CST)
1424 tree type = TREE_TYPE (arg1);
1425 int count = TYPE_VECTOR_SUBPARTS (type), i;
1426 tree *elts = XALLOCAVEC (tree, count);
1428 for (i = 0; i < count; i++)
1430 tree elem1 = VECTOR_CST_ELT (arg1, i);
1432 elts[i] = const_binop (code, elem1, arg2);
1434 /* It is possible that const_binop cannot handle the given
1435 code and return NULL_TREE. */
1436 if (elts[i] == NULL_TREE)
1440 return build_vector (type, elts);
1445 /* Overload that adds a TYPE parameter to be able to dispatch
1446 to fold_relational_const. */
1449 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1451 if (TREE_CODE_CLASS (code) == tcc_comparison)
1452 return fold_relational_const (code, type, arg1, arg2);
1454 /* ??? Until we make the const_binop worker take the type of the
1455 result as argument put those cases that need it here. */
1459 if ((TREE_CODE (arg1) == REAL_CST
1460 && TREE_CODE (arg2) == REAL_CST)
1461 || (TREE_CODE (arg1) == INTEGER_CST
1462 && TREE_CODE (arg2) == INTEGER_CST))
1463 return build_complex (type, arg1, arg2);
1466 case VEC_PACK_TRUNC_EXPR:
1467 case VEC_PACK_FIX_TRUNC_EXPR:
1469 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1472 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1473 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1474 if (TREE_CODE (arg1) != VECTOR_CST
1475 || TREE_CODE (arg2) != VECTOR_CST)
1478 elts = XALLOCAVEC (tree, nelts);
1479 if (!vec_cst_ctor_to_array (arg1, elts)
1480 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1483 for (i = 0; i < nelts; i++)
1485 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1486 ? NOP_EXPR : FIX_TRUNC_EXPR,
1487 TREE_TYPE (type), elts[i]);
1488 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1492 return build_vector (type, elts);
1495 case VEC_WIDEN_MULT_LO_EXPR:
1496 case VEC_WIDEN_MULT_HI_EXPR:
1497 case VEC_WIDEN_MULT_EVEN_EXPR:
1498 case VEC_WIDEN_MULT_ODD_EXPR:
1500 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1501 unsigned int out, ofs, scale;
1504 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1505 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1506 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1509 elts = XALLOCAVEC (tree, nelts * 4);
1510 if (!vec_cst_ctor_to_array (arg1, elts)
1511 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1514 if (code == VEC_WIDEN_MULT_LO_EXPR)
1515 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1516 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1517 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1518 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1520 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1523 for (out = 0; out < nelts; out++)
1525 unsigned int in1 = (out << scale) + ofs;
1526 unsigned int in2 = in1 + nelts * 2;
1529 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1530 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1532 if (t1 == NULL_TREE || t2 == NULL_TREE)
1534 elts[out] = const_binop (MULT_EXPR, t1, t2);
1535 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1539 return build_vector (type, elts);
1545 if (TREE_CODE_CLASS (code) != tcc_binary)
1548 /* Make sure type and arg0 have the same saturating flag. */
1549 gcc_checking_assert (TYPE_SATURATING (type)
1550 == TYPE_SATURATING (TREE_TYPE (arg1)));
1552 return const_binop (code, arg1, arg2);
1555 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1556 Return zero if computing the constants is not possible. */
1559 const_unop (enum tree_code code, tree type, tree arg0)
1565 case FIX_TRUNC_EXPR:
1566 case FIXED_CONVERT_EXPR:
1567 return fold_convert_const (code, type, arg0);
1569 case ADDR_SPACE_CONVERT_EXPR:
1570 if (integer_zerop (arg0))
1571 return fold_convert_const (code, type, arg0);
1574 case VIEW_CONVERT_EXPR:
1575 return fold_view_convert_expr (type, arg0);
1579 /* Can't call fold_negate_const directly here as that doesn't
1580 handle all cases and we might not be able to negate some
1582 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1583 if (tem && CONSTANT_CLASS_P (tem))
1589 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1590 return fold_abs_const (arg0, type);
1594 if (TREE_CODE (arg0) == COMPLEX_CST)
1596 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1598 return build_complex (type, TREE_REALPART (arg0), ipart);
1603 if (TREE_CODE (arg0) == INTEGER_CST)
1604 return fold_not_const (arg0, type);
1605 /* Perform BIT_NOT_EXPR on each element individually. */
1606 else if (TREE_CODE (arg0) == VECTOR_CST)
1610 unsigned count = VECTOR_CST_NELTS (arg0), i;
1612 elements = XALLOCAVEC (tree, count);
1613 for (i = 0; i < count; i++)
1615 elem = VECTOR_CST_ELT (arg0, i);
1616 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1617 if (elem == NULL_TREE)
1622 return build_vector (type, elements);
1626 case TRUTH_NOT_EXPR:
1627 if (TREE_CODE (arg0) == INTEGER_CST)
1628 return constant_boolean_node (integer_zerop (arg0), type);
1632 if (TREE_CODE (arg0) == COMPLEX_CST)
1633 return fold_convert (type, TREE_REALPART (arg0));
1637 if (TREE_CODE (arg0) == COMPLEX_CST)
1638 return fold_convert (type, TREE_IMAGPART (arg0));
1641 case VEC_UNPACK_LO_EXPR:
1642 case VEC_UNPACK_HI_EXPR:
1643 case VEC_UNPACK_FLOAT_LO_EXPR:
1644 case VEC_UNPACK_FLOAT_HI_EXPR:
1646 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1648 enum tree_code subcode;
1650 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1651 if (TREE_CODE (arg0) != VECTOR_CST)
1654 elts = XALLOCAVEC (tree, nelts * 2);
1655 if (!vec_cst_ctor_to_array (arg0, elts))
1658 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1659 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1662 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1665 subcode = FLOAT_EXPR;
1667 for (i = 0; i < nelts; i++)
1669 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1670 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1674 return build_vector (type, elts);
1677 case REDUC_MIN_EXPR:
1678 case REDUC_MAX_EXPR:
1679 case REDUC_PLUS_EXPR:
1681 unsigned int nelts, i;
1683 enum tree_code subcode;
1685 if (TREE_CODE (arg0) != VECTOR_CST)
1687 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1689 elts = XALLOCAVEC (tree, nelts);
1690 if (!vec_cst_ctor_to_array (arg0, elts))
1695 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1696 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1697 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1698 default: gcc_unreachable ();
1701 for (i = 1; i < nelts; i++)
1703 elts[0] = const_binop (subcode, elts[0], elts[i]);
1704 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1718 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1719 indicates which particular sizetype to create. */
1722 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1724 return build_int_cst (sizetype_tab[(int) kind], number);
1727 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1728 is a tree code. The type of the result is taken from the operands.
1729 Both must be equivalent integer types, ala int_binop_types_match_p.
1730 If the operands are constant, so is the result. */
1733 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1735 tree type = TREE_TYPE (arg0);
1737 if (arg0 == error_mark_node || arg1 == error_mark_node)
1738 return error_mark_node;
1740 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1743 /* Handle the special case of two integer constants faster. */
1744 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1746 /* And some specific cases even faster than that. */
1747 if (code == PLUS_EXPR)
1749 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1751 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1754 else if (code == MINUS_EXPR)
1756 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1759 else if (code == MULT_EXPR)
1761 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1765 /* Handle general case of two integer constants. For sizetype
1766 constant calculations we always want to know about overflow,
1767 even in the unsigned case. */
1768 return int_const_binop_1 (code, arg0, arg1, -1);
1771 return fold_build2_loc (loc, code, type, arg0, arg1);
1774 /* Given two values, either both of sizetype or both of bitsizetype,
1775 compute the difference between the two values. Return the value
1776 in signed type corresponding to the type of the operands. */
1779 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1781 tree type = TREE_TYPE (arg0);
1784 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1787 /* If the type is already signed, just do the simple thing. */
1788 if (!TYPE_UNSIGNED (type))
1789 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1791 if (type == sizetype)
1793 else if (type == bitsizetype)
1794 ctype = sbitsizetype;
1796 ctype = signed_type_for (type);
1798 /* If either operand is not a constant, do the conversions to the signed
1799 type and subtract. The hardware will do the right thing with any
1800 overflow in the subtraction. */
1801 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1802 return size_binop_loc (loc, MINUS_EXPR,
1803 fold_convert_loc (loc, ctype, arg0),
1804 fold_convert_loc (loc, ctype, arg1));
1806 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1807 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1808 overflow) and negate (which can't either). Special-case a result
1809 of zero while we're here. */
1810 if (tree_int_cst_equal (arg0, arg1))
1811 return build_int_cst (ctype, 0);
1812 else if (tree_int_cst_lt (arg1, arg0))
1813 return fold_convert_loc (loc, ctype,
1814 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1816 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1817 fold_convert_loc (loc, ctype,
1818 size_binop_loc (loc,
1823 /* A subroutine of fold_convert_const handling conversions of an
1824 INTEGER_CST to another integer type. */
1827 fold_convert_const_int_from_int (tree type, const_tree arg1)
1829 /* Given an integer constant, make new constant with new type,
1830 appropriately sign-extended or truncated. Use widest_int
1831 so that any extension is done according ARG1's type. */
1832 return force_fit_type (type, wi::to_widest (arg1),
1833 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1834 TREE_OVERFLOW (arg1));
1837 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1838 to an integer type. */
1841 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1843 bool overflow = false;
1846 /* The following code implements the floating point to integer
1847 conversion rules required by the Java Language Specification,
1848 that IEEE NaNs are mapped to zero and values that overflow
1849 the target precision saturate, i.e. values greater than
1850 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1851 are mapped to INT_MIN. These semantics are allowed by the
1852 C and C++ standards that simply state that the behavior of
1853 FP-to-integer conversion is unspecified upon overflow. */
1857 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1861 case FIX_TRUNC_EXPR:
1862 real_trunc (&r, VOIDmode, &x);
1869 /* If R is NaN, return zero and show we have an overflow. */
1870 if (REAL_VALUE_ISNAN (r))
1873 val = wi::zero (TYPE_PRECISION (type));
1876 /* See if R is less than the lower bound or greater than the
1881 tree lt = TYPE_MIN_VALUE (type);
1882 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1883 if (real_less (&r, &l))
1892 tree ut = TYPE_MAX_VALUE (type);
1895 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1896 if (real_less (&u, &r))
1905 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1907 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1911 /* A subroutine of fold_convert_const handling conversions of a
1912 FIXED_CST to an integer type. */
1915 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1918 double_int temp, temp_trunc;
1921 /* Right shift FIXED_CST to temp by fbit. */
1922 temp = TREE_FIXED_CST (arg1).data;
1923 mode = TREE_FIXED_CST (arg1).mode;
1924 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1926 temp = temp.rshift (GET_MODE_FBIT (mode),
1927 HOST_BITS_PER_DOUBLE_INT,
1928 SIGNED_FIXED_POINT_MODE_P (mode));
1930 /* Left shift temp to temp_trunc by fbit. */
1931 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1932 HOST_BITS_PER_DOUBLE_INT,
1933 SIGNED_FIXED_POINT_MODE_P (mode));
1937 temp = double_int_zero;
1938 temp_trunc = double_int_zero;
1941 /* If FIXED_CST is negative, we need to round the value toward 0.
1942 By checking if the fractional bits are not zero to add 1 to temp. */
1943 if (SIGNED_FIXED_POINT_MODE_P (mode)
1944 && temp_trunc.is_negative ()
1945 && TREE_FIXED_CST (arg1).data != temp_trunc)
1946 temp += double_int_one;
1948 /* Given a fixed-point constant, make new constant with new type,
1949 appropriately sign-extended or truncated. */
1950 t = force_fit_type (type, temp, -1,
1951 (temp.is_negative ()
1952 && (TYPE_UNSIGNED (type)
1953 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1954 | TREE_OVERFLOW (arg1));
1959 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1960 to another floating point type. */
1963 fold_convert_const_real_from_real (tree type, const_tree arg1)
1965 REAL_VALUE_TYPE value;
1968 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1969 t = build_real (type, value);
1971 /* If converting an infinity or NAN to a representation that doesn't
1972 have one, set the overflow bit so that we can produce some kind of
1973 error message at the appropriate point if necessary. It's not the
1974 most user-friendly message, but it's better than nothing. */
1975 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1976 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1977 TREE_OVERFLOW (t) = 1;
1978 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1979 && !MODE_HAS_NANS (TYPE_MODE (type)))
1980 TREE_OVERFLOW (t) = 1;
1981 /* Regular overflow, conversion produced an infinity in a mode that
1982 can't represent them. */
1983 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1984 && REAL_VALUE_ISINF (value)
1985 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1986 TREE_OVERFLOW (t) = 1;
1988 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1992 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1993 to a floating point type. */
1996 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1998 REAL_VALUE_TYPE value;
2001 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2002 t = build_real (type, value);
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2008 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2009 to another fixed-point type. */
2012 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2014 FIXED_VALUE_TYPE value;
2018 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2019 TYPE_SATURATING (type));
2020 t = build_fixed (type, value);
2022 /* Propagate overflow flags. */
2023 if (overflow_p | TREE_OVERFLOW (arg1))
2024 TREE_OVERFLOW (t) = 1;
2028 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2029 to a fixed-point type. */
2032 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2034 FIXED_VALUE_TYPE value;
2039 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2041 di.low = TREE_INT_CST_ELT (arg1, 0);
2042 if (TREE_INT_CST_NUNITS (arg1) == 1)
2043 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2045 di.high = TREE_INT_CST_ELT (arg1, 1);
2047 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2048 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2049 TYPE_SATURATING (type));
2050 t = build_fixed (type, value);
2052 /* Propagate overflow flags. */
2053 if (overflow_p | TREE_OVERFLOW (arg1))
2054 TREE_OVERFLOW (t) = 1;
2058 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2059 to a fixed-point type. */
2062 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2064 FIXED_VALUE_TYPE value;
2068 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2069 &TREE_REAL_CST (arg1),
2070 TYPE_SATURATING (type));
2071 t = build_fixed (type, value);
2073 /* Propagate overflow flags. */
2074 if (overflow_p | TREE_OVERFLOW (arg1))
2075 TREE_OVERFLOW (t) = 1;
2079 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2080 type TYPE. If no simplification can be done return NULL_TREE. */
2083 fold_convert_const (enum tree_code code, tree type, tree arg1)
2085 if (TREE_TYPE (arg1) == type)
2088 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2089 || TREE_CODE (type) == OFFSET_TYPE)
2091 if (TREE_CODE (arg1) == INTEGER_CST)
2092 return fold_convert_const_int_from_int (type, arg1);
2093 else if (TREE_CODE (arg1) == REAL_CST)
2094 return fold_convert_const_int_from_real (code, type, arg1);
2095 else if (TREE_CODE (arg1) == FIXED_CST)
2096 return fold_convert_const_int_from_fixed (type, arg1);
2098 else if (TREE_CODE (type) == REAL_TYPE)
2100 if (TREE_CODE (arg1) == INTEGER_CST)
2101 return build_real_from_int_cst (type, arg1);
2102 else if (TREE_CODE (arg1) == REAL_CST)
2103 return fold_convert_const_real_from_real (type, arg1);
2104 else if (TREE_CODE (arg1) == FIXED_CST)
2105 return fold_convert_const_real_from_fixed (type, arg1);
2107 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2109 if (TREE_CODE (arg1) == FIXED_CST)
2110 return fold_convert_const_fixed_from_fixed (type, arg1);
2111 else if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_fixed_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_fixed_from_real (type, arg1);
2119 /* Construct a vector of zero elements of vector type TYPE. */
2122 build_zero_vector (tree type)
2126 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2127 return build_vector_from_val (type, t);
2130 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2133 fold_convertible_p (const_tree type, const_tree arg)
2135 tree orig = TREE_TYPE (arg);
2140 if (TREE_CODE (arg) == ERROR_MARK
2141 || TREE_CODE (type) == ERROR_MARK
2142 || TREE_CODE (orig) == ERROR_MARK)
2145 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2148 switch (TREE_CODE (type))
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2153 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2154 || TREE_CODE (orig) == OFFSET_TYPE)
2156 return (TREE_CODE (orig) == VECTOR_TYPE
2157 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2160 case FIXED_POINT_TYPE:
2164 return TREE_CODE (type) == TREE_CODE (orig);
2171 /* Convert expression ARG to type TYPE. Used by the middle-end for
2172 simple conversions in preference to calling the front-end's convert. */
2175 fold_convert_loc (location_t loc, tree type, tree arg)
2177 tree orig = TREE_TYPE (arg);
2183 if (TREE_CODE (arg) == ERROR_MARK
2184 || TREE_CODE (type) == ERROR_MARK
2185 || TREE_CODE (orig) == ERROR_MARK)
2186 return error_mark_node;
2188 switch (TREE_CODE (type))
2191 case REFERENCE_TYPE:
2192 /* Handle conversions between pointers to different address spaces. */
2193 if (POINTER_TYPE_P (orig)
2194 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2195 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2196 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2199 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2203 tem = fold_convert_const (NOP_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2207 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE)
2209 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2210 if (TREE_CODE (orig) == COMPLEX_TYPE)
2211 return fold_convert_loc (loc, type,
2212 fold_build1_loc (loc, REALPART_EXPR,
2213 TREE_TYPE (orig), arg));
2214 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2215 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2216 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2219 if (TREE_CODE (arg) == INTEGER_CST)
2221 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2222 if (tem != NULL_TREE)
2225 else if (TREE_CODE (arg) == REAL_CST)
2227 tem = fold_convert_const (NOP_EXPR, type, arg);
2228 if (tem != NULL_TREE)
2231 else if (TREE_CODE (arg) == FIXED_CST)
2233 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2238 switch (TREE_CODE (orig))
2241 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2242 case POINTER_TYPE: case REFERENCE_TYPE:
2243 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2248 case FIXED_POINT_TYPE:
2249 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2252 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert_loc (loc, type, tem);
2259 case FIXED_POINT_TYPE:
2260 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2261 || TREE_CODE (arg) == REAL_CST)
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 goto fold_convert_exit;
2268 switch (TREE_CODE (orig))
2270 case FIXED_POINT_TYPE:
2275 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2278 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2279 return fold_convert_loc (loc, type, tem);
2286 switch (TREE_CODE (orig))
2289 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2290 case POINTER_TYPE: case REFERENCE_TYPE:
2292 case FIXED_POINT_TYPE:
2293 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2294 fold_convert_loc (loc, TREE_TYPE (type), arg),
2295 fold_convert_loc (loc, TREE_TYPE (type),
2296 integer_zero_node));
2301 if (TREE_CODE (arg) == COMPLEX_EXPR)
2303 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2304 TREE_OPERAND (arg, 0));
2305 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 1));
2307 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2310 arg = save_expr (arg);
2311 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2312 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2313 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2314 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2323 if (integer_zerop (arg))
2324 return build_zero_vector (type);
2325 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2326 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2327 || TREE_CODE (orig) == VECTOR_TYPE);
2328 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2331 tem = fold_ignored_result (arg);
2332 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2335 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2336 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2340 protected_set_expr_location_unshare (tem, loc);
2344 /* Return false if expr can be assumed not to be an lvalue, true
2348 maybe_lvalue_p (const_tree x)
2350 /* We only need to wrap lvalue tree codes. */
2351 switch (TREE_CODE (x))
2364 case ARRAY_RANGE_REF:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2383 /* Assume the worst for front-end tree codes. */
2384 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2392 /* Return an expr equal to X but certainly not valid as an lvalue. */
2395 non_lvalue_loc (location_t loc, tree x)
2397 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2402 if (! maybe_lvalue_p (x))
2404 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2411 pedantic_non_lvalue_loc (location_t loc, tree x)
2413 return protected_set_expr_location_unshare (x, loc);
2416 /* Given a tree comparison code, return the code that is the logical inverse.
2417 It is generally not safe to do this for floating-point comparisons, except
2418 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2419 ERROR_MARK in this case. */
2422 invert_tree_comparison (enum tree_code code, bool honor_nans)
2424 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2425 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2435 return honor_nans ? UNLE_EXPR : LE_EXPR;
2437 return honor_nans ? UNLT_EXPR : LT_EXPR;
2439 return honor_nans ? UNGE_EXPR : GE_EXPR;
2441 return honor_nans ? UNGT_EXPR : GT_EXPR;
2455 return UNORDERED_EXPR;
2456 case UNORDERED_EXPR:
2457 return ORDERED_EXPR;
2463 /* Similar, but return the comparison that results if the operands are
2464 swapped. This is safe for floating-point. */
2467 swap_tree_comparison (enum tree_code code)
2474 case UNORDERED_EXPR:
2500 /* Convert a comparison tree code from an enum tree_code representation
2501 into a compcode bit-based encoding. This function is the inverse of
2502 compcode_to_comparison. */
2504 static enum comparison_code
2505 comparison_to_compcode (enum tree_code code)
2522 return COMPCODE_ORD;
2523 case UNORDERED_EXPR:
2524 return COMPCODE_UNORD;
2526 return COMPCODE_UNLT;
2528 return COMPCODE_UNEQ;
2530 return COMPCODE_UNLE;
2532 return COMPCODE_UNGT;
2534 return COMPCODE_LTGT;
2536 return COMPCODE_UNGE;
2542 /* Convert a compcode bit-based encoding of a comparison operator back
2543 to GCC's enum tree_code representation. This function is the
2544 inverse of comparison_to_compcode. */
2546 static enum tree_code
2547 compcode_to_comparison (enum comparison_code code)
2564 return ORDERED_EXPR;
2565 case COMPCODE_UNORD:
2566 return UNORDERED_EXPR;
2584 /* Return a tree for the comparison which is the combination of
2585 doing the AND or OR (depending on CODE) of the two operations LCODE
2586 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2587 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2588 if this makes the transformation invalid. */
2591 combine_comparisons (location_t loc,
2592 enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (ll_arg);
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2666 enum tree_code tcode;
2668 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2669 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2673 /* Return nonzero if two operands (typically of the same tree node)
2674 are necessarily equal. If either argument has side-effects this
2675 function returns zero. FLAGS modifies behavior as follows:
2677 If OEP_ONLY_CONST is set, only return nonzero for constants.
2678 This function tests whether the operands are indistinguishable;
2679 it does not test whether they are equal using C's == operation.
2680 The distinction is important for IEEE floating point, because
2681 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2682 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2684 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2685 even though it may hold multiple values during a function.
2686 This is because a GCC tree node guarantees that nothing else is
2687 executed between the evaluation of its "operands" (which may often
2688 be evaluated in arbitrary order). Hence if the operands themselves
2689 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2690 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2691 unset means assuming isochronic (or instantaneous) tree equivalence.
2692 Unless comparing arbitrary expression trees, such as from different
2693 statements, this flag can usually be left unset.
2695 If OEP_PURE_SAME is set, then pure functions with identical arguments
2696 are considered the same. It is used when the caller has other ways
2697 to ensure that global memory is unchanged in between.
2699 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2700 not values of expressions. OEP_CONSTANT_ADDRESS_OF in addition to
2701 OEP_ADDRESS_OF is used for ADDR_EXPR with TREE_CONSTANT flag set and we
2702 further ignore any side effects on SAVE_EXPRs then. */
2705 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2707 /* If either is ERROR_MARK, they aren't equal. */
2708 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2709 || TREE_TYPE (arg0) == error_mark_node
2710 || TREE_TYPE (arg1) == error_mark_node)
2713 /* Similar, if either does not have a type (like a released SSA name),
2714 they aren't equal. */
2715 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2718 /* Check equality of integer constants before bailing out due to
2719 precision differences. */
2720 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2722 /* Address of INTEGER_CST is not defined; check that we did not forget
2723 to drop the OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2724 gcc_checking_assert (!(flags
2725 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2726 return tree_int_cst_equal (arg0, arg1);
2729 if (!(flags & OEP_ADDRESS_OF))
2731 /* If both types don't have the same signedness, then we can't consider
2732 them equal. We must check this before the STRIP_NOPS calls
2733 because they may change the signedness of the arguments. As pointers
2734 strictly don't have a signedness, require either two pointers or
2735 two non-pointers as well. */
2736 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2737 || POINTER_TYPE_P (TREE_TYPE (arg0))
2738 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2741 /* We cannot consider pointers to different address space equal. */
2742 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2743 && POINTER_TYPE_P (TREE_TYPE (arg1))
2744 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2745 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2748 /* If both types don't have the same precision, then it is not safe
2750 if (element_precision (TREE_TYPE (arg0))
2751 != element_precision (TREE_TYPE (arg1)))
2758 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2759 sanity check once the issue is solved. */
2761 /* Addresses of conversions and SSA_NAMEs (and many other things)
2762 are not defined. Check that we did not forget to drop the
2763 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2764 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2765 && TREE_CODE (arg0) != SSA_NAME);
2768 /* In case both args are comparisons but with different comparison
2769 code, try to swap the comparison operands of one arg to produce
2770 a match and compare that variant. */
2771 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2772 && COMPARISON_CLASS_P (arg0)
2773 && COMPARISON_CLASS_P (arg1))
2775 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2777 if (TREE_CODE (arg0) == swap_code)
2778 return operand_equal_p (TREE_OPERAND (arg0, 0),
2779 TREE_OPERAND (arg1, 1), flags)
2780 && operand_equal_p (TREE_OPERAND (arg0, 1),
2781 TREE_OPERAND (arg1, 0), flags);
2784 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2786 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2787 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2789 else if (flags & OEP_ADDRESS_OF)
2791 /* If we are interested in comparing addresses ignore
2792 MEM_REF wrappings of the base that can appear just for
2794 if (TREE_CODE (arg0) == MEM_REF
2796 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2797 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2798 && integer_zerop (TREE_OPERAND (arg0, 1)))
2800 else if (TREE_CODE (arg1) == MEM_REF
2802 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2803 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2804 && integer_zerop (TREE_OPERAND (arg1, 1)))
2812 /* This is needed for conversions and for COMPONENT_REF.
2813 Might as well play it safe and always test this. */
2814 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2815 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2816 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2819 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2820 We don't care about side effects in that case because the SAVE_EXPR
2821 takes care of that for us. In all other cases, two expressions are
2822 equal if they have no side effects. If we have two identical
2823 expressions with side effects that should be treated the same due
2824 to the only side effects being identical SAVE_EXPR's, that will
2825 be detected in the recursive calls below.
2826 If we are taking an invariant address of two identical objects
2827 they are necessarily equal as well. */
2828 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2829 && (TREE_CODE (arg0) == SAVE_EXPR
2830 || (flags & OEP_CONSTANT_ADDRESS_OF)
2831 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2834 /* Next handle constant cases, those for which we can return 1 even
2835 if ONLY_CONST is set. */
2836 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2837 switch (TREE_CODE (arg0))
2840 return tree_int_cst_equal (arg0, arg1);
2843 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2844 TREE_FIXED_CST (arg1));
2847 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2851 if (!HONOR_SIGNED_ZEROS (arg0))
2853 /* If we do not distinguish between signed and unsigned zero,
2854 consider them equal. */
2855 if (real_zerop (arg0) && real_zerop (arg1))
2864 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2867 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2869 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2870 VECTOR_CST_ELT (arg1, i), flags))
2877 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2879 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2883 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2884 && ! memcmp (TREE_STRING_POINTER (arg0),
2885 TREE_STRING_POINTER (arg1),
2886 TREE_STRING_LENGTH (arg0)));
2889 gcc_checking_assert (!(flags
2890 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2891 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2892 flags | OEP_ADDRESS_OF
2893 | OEP_CONSTANT_ADDRESS_OF);
2895 /* In GIMPLE empty constructors are allowed in initializers of
2897 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2898 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2903 if (flags & OEP_ONLY_CONST)
2906 /* Define macros to test an operand from arg0 and arg1 for equality and a
2907 variant that allows null and views null as being different from any
2908 non-null value. In the latter case, if either is null, the both
2909 must be; otherwise, do the normal comparison. */
2910 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2911 TREE_OPERAND (arg1, N), flags)
2913 #define OP_SAME_WITH_NULL(N) \
2914 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2915 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2917 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2920 /* Two conversions are equal only if signedness and modes match. */
2921 switch (TREE_CODE (arg0))
2924 case FIX_TRUNC_EXPR:
2925 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2926 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2936 case tcc_comparison:
2938 if (OP_SAME (0) && OP_SAME (1))
2941 /* For commutative ops, allow the other order. */
2942 return (commutative_tree_code (TREE_CODE (arg0))
2943 && operand_equal_p (TREE_OPERAND (arg0, 0),
2944 TREE_OPERAND (arg1, 1), flags)
2945 && operand_equal_p (TREE_OPERAND (arg0, 1),
2946 TREE_OPERAND (arg1, 0), flags));
2949 /* If either of the pointer (or reference) expressions we are
2950 dereferencing contain a side effect, these cannot be equal,
2951 but their addresses can be. */
2952 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2953 && (TREE_SIDE_EFFECTS (arg0)
2954 || TREE_SIDE_EFFECTS (arg1)))
2957 switch (TREE_CODE (arg0))
2960 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF))
2961 && (TYPE_ALIGN (TREE_TYPE (arg0))
2962 != TYPE_ALIGN (TREE_TYPE (arg1))))
2964 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2971 case TARGET_MEM_REF:
2973 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)))
2975 /* Require equal access sizes */
2976 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2977 && (!TYPE_SIZE (TREE_TYPE (arg0))
2978 || !TYPE_SIZE (TREE_TYPE (arg1))
2979 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2980 TYPE_SIZE (TREE_TYPE (arg1)),
2983 /* Verify that access happens in similar types. */
2984 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2986 /* Verify that accesses are TBAA compatible. */
2987 if (flag_strict_aliasing
2988 && (!alias_ptr_types_compatible_p
2989 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2990 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2991 || (MR_DEPENDENCE_CLIQUE (arg0)
2992 != MR_DEPENDENCE_CLIQUE (arg1))
2993 || (MR_DEPENDENCE_BASE (arg0)
2994 != MR_DEPENDENCE_BASE (arg1))))
2996 /* Verify that alignment is compatible. */
2997 if (TYPE_ALIGN (TREE_TYPE (arg0))
2998 != TYPE_ALIGN (TREE_TYPE (arg1)))
3001 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3002 return (OP_SAME (0) && OP_SAME (1)
3003 /* TARGET_MEM_REF require equal extra operands. */
3004 && (TREE_CODE (arg0) != TARGET_MEM_REF
3005 || (OP_SAME_WITH_NULL (2)
3006 && OP_SAME_WITH_NULL (3)
3007 && OP_SAME_WITH_NULL (4))));
3010 case ARRAY_RANGE_REF:
3011 /* Operands 2 and 3 may be null.
3012 Compare the array index by value if it is constant first as we
3013 may have different types but same value here. */
3016 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3017 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3018 TREE_OPERAND (arg1, 1))
3020 && OP_SAME_WITH_NULL (2)
3021 && OP_SAME_WITH_NULL (3));
3024 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3025 may be NULL when we're called to compare MEM_EXPRs. */
3026 if (!OP_SAME_WITH_NULL (0)
3029 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3030 return OP_SAME_WITH_NULL (2);
3035 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3036 return OP_SAME (1) && OP_SAME (2);
3042 case tcc_expression:
3043 switch (TREE_CODE (arg0))
3046 /* Be sure we pass right ADDRESS_OF flag. */
3047 gcc_checking_assert (!(flags
3049 | OEP_CONSTANT_ADDRESS_OF)));
3050 return operand_equal_p (TREE_OPERAND (arg0, 0),
3051 TREE_OPERAND (arg1, 0),
3052 flags | OEP_ADDRESS_OF);
3054 case TRUTH_NOT_EXPR:
3057 case TRUTH_ANDIF_EXPR:
3058 case TRUTH_ORIF_EXPR:
3059 return OP_SAME (0) && OP_SAME (1);
3062 case WIDEN_MULT_PLUS_EXPR:
3063 case WIDEN_MULT_MINUS_EXPR:
3066 /* The multiplcation operands are commutative. */
3069 case TRUTH_AND_EXPR:
3071 case TRUTH_XOR_EXPR:
3072 if (OP_SAME (0) && OP_SAME (1))
3075 /* Otherwise take into account this is a commutative operation. */
3076 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3077 TREE_OPERAND (arg1, 1), flags)
3078 && operand_equal_p (TREE_OPERAND (arg0, 1),
3079 TREE_OPERAND (arg1, 0), flags));
3084 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3091 switch (TREE_CODE (arg0))
3094 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3095 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3096 /* If not both CALL_EXPRs are either internal or normal function
3097 functions, then they are not equal. */
3099 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3101 /* If the CALL_EXPRs call different internal functions, then they
3103 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3108 /* If the CALL_EXPRs call different functions, then they are not
3110 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3116 unsigned int cef = call_expr_flags (arg0);
3117 if (flags & OEP_PURE_SAME)
3118 cef &= ECF_CONST | ECF_PURE;
3125 /* Now see if all the arguments are the same. */
3127 const_call_expr_arg_iterator iter0, iter1;
3129 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3130 a1 = first_const_call_expr_arg (arg1, &iter1);
3132 a0 = next_const_call_expr_arg (&iter0),
3133 a1 = next_const_call_expr_arg (&iter1))
3134 if (! operand_equal_p (a0, a1, flags))
3137 /* If we get here and both argument lists are exhausted
3138 then the CALL_EXPRs are equal. */
3139 return ! (a0 || a1);
3145 case tcc_declaration:
3146 /* Consider __builtin_sqrt equal to sqrt. */
3147 return (TREE_CODE (arg0) == FUNCTION_DECL
3148 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3149 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3150 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3152 case tcc_exceptional:
3153 if (TREE_CODE (arg0) == CONSTRUCTOR)
3155 /* In GIMPLE constructors are used only to build vectors from
3156 elements. Individual elements in the constructor must be
3157 indexed in increasing order and form an initial sequence.
3159 We make no effort to compare constructors in generic.
3160 (see sem_variable::equals in ipa-icf which can do so for
3162 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3163 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3166 /* Be sure that vectors constructed have the same representation.
3167 We only tested element precision and modes to match.
3168 Vectors may be BLKmode and thus also check that the number of
3170 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3171 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3174 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3175 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3176 unsigned int len = vec_safe_length (v0);
3178 if (len != vec_safe_length (v1))
3181 for (unsigned int i = 0; i < len; i++)
3183 constructor_elt *c0 = &(*v0)[i];
3184 constructor_elt *c1 = &(*v1)[i];
3186 if (!operand_equal_p (c0->value, c1->value, flags)
3187 /* In GIMPLE the indexes can be either NULL or matching i.
3188 Double check this so we won't get false
3189 positives for GENERIC. */
3191 && (TREE_CODE (c0->index) != INTEGER_CST
3192 || !compare_tree_int (c0->index, i)))
3194 && (TREE_CODE (c1->index) != INTEGER_CST
3195 || !compare_tree_int (c1->index, i))))
3207 #undef OP_SAME_WITH_NULL
3210 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3211 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3213 When in doubt, return 0. */
3216 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3218 int unsignedp1, unsignedpo;
3219 tree primarg0, primarg1, primother;
3220 unsigned int correct_width;
3222 if (operand_equal_p (arg0, arg1, 0))
3225 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3226 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3229 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3230 and see if the inner values are the same. This removes any
3231 signedness comparison, which doesn't matter here. */
3232 primarg0 = arg0, primarg1 = arg1;
3233 STRIP_NOPS (primarg0);
3234 STRIP_NOPS (primarg1);
3235 if (operand_equal_p (primarg0, primarg1, 0))
3238 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3239 actual comparison operand, ARG0.
3241 First throw away any conversions to wider types
3242 already present in the operands. */
3244 primarg1 = get_narrower (arg1, &unsignedp1);
3245 primother = get_narrower (other, &unsignedpo);
3247 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3248 if (unsignedp1 == unsignedpo
3249 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3250 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3252 tree type = TREE_TYPE (arg0);
3254 /* Make sure shorter operand is extended the right way
3255 to match the longer operand. */
3256 primarg1 = fold_convert (signed_or_unsigned_type_for
3257 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3259 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3266 /* See if ARG is an expression that is either a comparison or is performing
3267 arithmetic on comparisons. The comparisons must only be comparing
3268 two different values, which will be stored in *CVAL1 and *CVAL2; if
3269 they are nonzero it means that some operands have already been found.
3270 No variables may be used anywhere else in the expression except in the
3271 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3272 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3274 If this is true, return 1. Otherwise, return zero. */
3277 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3279 enum tree_code code = TREE_CODE (arg);
3280 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3282 /* We can handle some of the tcc_expression cases here. */
3283 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3285 else if (tclass == tcc_expression
3286 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3287 || code == COMPOUND_EXPR))
3288 tclass = tcc_binary;
3290 else if (tclass == tcc_expression && code == SAVE_EXPR
3291 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3293 /* If we've already found a CVAL1 or CVAL2, this expression is
3294 two complex to handle. */
3295 if (*cval1 || *cval2)
3305 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3308 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3309 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3310 cval1, cval2, save_p));
3315 case tcc_expression:
3316 if (code == COND_EXPR)
3317 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3318 cval1, cval2, save_p)
3319 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3320 cval1, cval2, save_p)
3321 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3322 cval1, cval2, save_p));
3325 case tcc_comparison:
3326 /* First see if we can handle the first operand, then the second. For
3327 the second operand, we know *CVAL1 can't be zero. It must be that
3328 one side of the comparison is each of the values; test for the
3329 case where this isn't true by failing if the two operands
3332 if (operand_equal_p (TREE_OPERAND (arg, 0),
3333 TREE_OPERAND (arg, 1), 0))
3337 *cval1 = TREE_OPERAND (arg, 0);
3338 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3340 else if (*cval2 == 0)
3341 *cval2 = TREE_OPERAND (arg, 0);
3342 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3347 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3349 else if (*cval2 == 0)
3350 *cval2 = TREE_OPERAND (arg, 1);
3351 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3363 /* ARG is a tree that is known to contain just arithmetic operations and
3364 comparisons. Evaluate the operations in the tree substituting NEW0 for
3365 any occurrence of OLD0 as an operand of a comparison and likewise for
3369 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3370 tree old1, tree new1)
3372 tree type = TREE_TYPE (arg);
3373 enum tree_code code = TREE_CODE (arg);
3374 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3376 /* We can handle some of the tcc_expression cases here. */
3377 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3379 else if (tclass == tcc_expression
3380 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3381 tclass = tcc_binary;
3386 return fold_build1_loc (loc, code, type,
3387 eval_subst (loc, TREE_OPERAND (arg, 0),
3388 old0, new0, old1, new1));
3391 return fold_build2_loc (loc, code, type,
3392 eval_subst (loc, TREE_OPERAND (arg, 0),
3393 old0, new0, old1, new1),
3394 eval_subst (loc, TREE_OPERAND (arg, 1),
3395 old0, new0, old1, new1));
3397 case tcc_expression:
3401 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3405 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3409 return fold_build3_loc (loc, code, type,
3410 eval_subst (loc, TREE_OPERAND (arg, 0),
3411 old0, new0, old1, new1),
3412 eval_subst (loc, TREE_OPERAND (arg, 1),
3413 old0, new0, old1, new1),
3414 eval_subst (loc, TREE_OPERAND (arg, 2),
3415 old0, new0, old1, new1));
3419 /* Fall through - ??? */
3421 case tcc_comparison:
3423 tree arg0 = TREE_OPERAND (arg, 0);
3424 tree arg1 = TREE_OPERAND (arg, 1);
3426 /* We need to check both for exact equality and tree equality. The
3427 former will be true if the operand has a side-effect. In that
3428 case, we know the operand occurred exactly once. */
3430 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3432 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3435 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3437 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3440 return fold_build2_loc (loc, code, type, arg0, arg1);
3448 /* Return a tree for the case when the result of an expression is RESULT
3449 converted to TYPE and OMITTED was previously an operand of the expression
3450 but is now not needed (e.g., we folded OMITTED * 0).
3452 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3453 the conversion of RESULT to TYPE. */
3456 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3458 tree t = fold_convert_loc (loc, type, result);
3460 /* If the resulting operand is an empty statement, just return the omitted
3461 statement casted to void. */
3462 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3463 return build1_loc (loc, NOP_EXPR, void_type_node,
3464 fold_ignored_result (omitted));
3466 if (TREE_SIDE_EFFECTS (omitted))
3467 return build2_loc (loc, COMPOUND_EXPR, type,
3468 fold_ignored_result (omitted), t);
3470 return non_lvalue_loc (loc, t);
3473 /* Return a tree for the case when the result of an expression is RESULT
3474 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3475 of the expression but are now not needed.
3477 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3478 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3479 evaluated before OMITTED2. Otherwise, if neither has side effects,
3480 just do the conversion of RESULT to TYPE. */
3483 omit_two_operands_loc (location_t loc, tree type, tree result,
3484 tree omitted1, tree omitted2)
3486 tree t = fold_convert_loc (loc, type, result);
3488 if (TREE_SIDE_EFFECTS (omitted2))
3489 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3490 if (TREE_SIDE_EFFECTS (omitted1))
3491 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3493 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3497 /* Return a simplified tree node for the truth-negation of ARG. This
3498 never alters ARG itself. We assume that ARG is an operation that
3499 returns a truth value (0 or 1).
3501 FIXME: one would think we would fold the result, but it causes
3502 problems with the dominator optimizer. */
3505 fold_truth_not_expr (location_t loc, tree arg)
3507 tree type = TREE_TYPE (arg);
3508 enum tree_code code = TREE_CODE (arg);
3509 location_t loc1, loc2;
3511 /* If this is a comparison, we can simply invert it, except for
3512 floating-point non-equality comparisons, in which case we just
3513 enclose a TRUTH_NOT_EXPR around what we have. */
3515 if (TREE_CODE_CLASS (code) == tcc_comparison)
3517 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3518 if (FLOAT_TYPE_P (op_type)
3519 && flag_trapping_math
3520 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3521 && code != NE_EXPR && code != EQ_EXPR)
3524 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3525 if (code == ERROR_MARK)
3528 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3529 TREE_OPERAND (arg, 1));
3535 return constant_boolean_node (integer_zerop (arg), type);
3537 case TRUTH_AND_EXPR:
3538 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3539 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3540 return build2_loc (loc, TRUTH_OR_EXPR, type,
3541 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3542 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3545 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3546 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3547 return build2_loc (loc, TRUTH_AND_EXPR, type,
3548 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3549 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3551 case TRUTH_XOR_EXPR:
3552 /* Here we can invert either operand. We invert the first operand
3553 unless the second operand is a TRUTH_NOT_EXPR in which case our
3554 result is the XOR of the first operand with the inside of the
3555 negation of the second operand. */
3557 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3558 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3559 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3561 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3562 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3563 TREE_OPERAND (arg, 1));
3565 case TRUTH_ANDIF_EXPR:
3566 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3567 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3568 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3569 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3570 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3572 case TRUTH_ORIF_EXPR:
3573 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3574 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3575 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3576 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3577 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3579 case TRUTH_NOT_EXPR:
3580 return TREE_OPERAND (arg, 0);
3584 tree arg1 = TREE_OPERAND (arg, 1);
3585 tree arg2 = TREE_OPERAND (arg, 2);
3587 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3588 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3590 /* A COND_EXPR may have a throw as one operand, which
3591 then has void type. Just leave void operands
3593 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3594 VOID_TYPE_P (TREE_TYPE (arg1))
3595 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3596 VOID_TYPE_P (TREE_TYPE (arg2))
3597 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3601 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3602 return build2_loc (loc, COMPOUND_EXPR, type,
3603 TREE_OPERAND (arg, 0),
3604 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3606 case NON_LVALUE_EXPR:
3607 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3608 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3611 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3612 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3614 /* ... fall through ... */
3617 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3618 return build1_loc (loc, TREE_CODE (arg), type,
3619 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3622 if (!integer_onep (TREE_OPERAND (arg, 1)))
3624 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3627 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3629 case CLEANUP_POINT_EXPR:
3630 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3631 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3632 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3639 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3640 assume that ARG is an operation that returns a truth value (0 or 1
3641 for scalars, 0 or -1 for vectors). Return the folded expression if
3642 folding is successful. Otherwise, return NULL_TREE. */
3645 fold_invert_truthvalue (location_t loc, tree arg)
3647 tree type = TREE_TYPE (arg);
3648 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3654 /* Return a simplified tree node for the truth-negation of ARG. This
3655 never alters ARG itself. We assume that ARG is an operation that
3656 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3659 invert_truthvalue_loc (location_t loc, tree arg)
3661 if (TREE_CODE (arg) == ERROR_MARK)
3664 tree type = TREE_TYPE (arg);
3665 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3671 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3672 with code CODE. This optimization is unsafe. */
3674 distribute_real_division (location_t loc, enum tree_code code, tree type,
3675 tree arg0, tree arg1)
3677 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3678 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3680 /* (A / C) +- (B / C) -> (A +- B) / C. */
3682 && operand_equal_p (TREE_OPERAND (arg0, 1),
3683 TREE_OPERAND (arg1, 1), 0))
3684 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3685 fold_build2_loc (loc, code, type,
3686 TREE_OPERAND (arg0, 0),
3687 TREE_OPERAND (arg1, 0)),
3688 TREE_OPERAND (arg0, 1));
3690 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3691 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3692 TREE_OPERAND (arg1, 0), 0)
3693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3694 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3696 REAL_VALUE_TYPE r0, r1;
3697 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3698 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3700 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3702 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3703 real_arithmetic (&r0, code, &r0, &r1);
3704 return fold_build2_loc (loc, MULT_EXPR, type,
3705 TREE_OPERAND (arg0, 0),
3706 build_real (type, r0));
3712 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3713 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3716 make_bit_field_ref (location_t loc, tree inner, tree type,
3717 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3719 tree result, bftype;
3723 tree size = TYPE_SIZE (TREE_TYPE (inner));
3724 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3725 || POINTER_TYPE_P (TREE_TYPE (inner)))
3726 && tree_fits_shwi_p (size)
3727 && tree_to_shwi (size) == bitsize)
3728 return fold_convert_loc (loc, type, inner);
3732 if (TYPE_PRECISION (bftype) != bitsize
3733 || TYPE_UNSIGNED (bftype) == !unsignedp)
3734 bftype = build_nonstandard_integer_type (bitsize, 0);
3736 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3737 size_int (bitsize), bitsize_int (bitpos));
3740 result = fold_convert_loc (loc, type, result);
3745 /* Optimize a bit-field compare.
3747 There are two cases: First is a compare against a constant and the
3748 second is a comparison of two items where the fields are at the same
3749 bit position relative to the start of a chunk (byte, halfword, word)
3750 large enough to contain it. In these cases we can avoid the shift
3751 implicit in bitfield extractions.
3753 For constants, we emit a compare of the shifted constant with the
3754 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3755 compared. For two fields at the same position, we do the ANDs with the
3756 similar mask and compare the result of the ANDs.
3758 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3759 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3760 are the left and right operands of the comparison, respectively.
3762 If the optimization described above can be done, we return the resulting
3763 tree. Otherwise we return zero. */
3766 optimize_bit_field_compare (location_t loc, enum tree_code code,
3767 tree compare_type, tree lhs, tree rhs)
3769 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3770 tree type = TREE_TYPE (lhs);
3772 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3773 machine_mode lmode, rmode, nmode;
3774 int lunsignedp, runsignedp;
3775 int lvolatilep = 0, rvolatilep = 0;
3776 tree linner, rinner = NULL_TREE;
3780 /* Get all the information about the extractions being done. If the bit size
3781 if the same as the size of the underlying object, we aren't doing an
3782 extraction at all and so can do nothing. We also don't want to
3783 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3784 then will no longer be able to replace it. */
3785 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3786 &lunsignedp, &lvolatilep, false);
3787 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3788 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3793 /* If this is not a constant, we can only do something if bit positions,
3794 sizes, and signedness are the same. */
3795 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3796 &runsignedp, &rvolatilep, false);
3798 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3799 || lunsignedp != runsignedp || offset != 0
3800 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3804 /* See if we can find a mode to refer to this field. We should be able to,
3805 but fail if we can't. */
3806 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3807 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3808 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3809 TYPE_ALIGN (TREE_TYPE (rinner))),
3811 if (nmode == VOIDmode)
3814 /* Set signed and unsigned types of the precision of this mode for the
3816 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3818 /* Compute the bit position and size for the new reference and our offset
3819 within it. If the new reference is the same size as the original, we
3820 won't optimize anything, so return zero. */
3821 nbitsize = GET_MODE_BITSIZE (nmode);
3822 nbitpos = lbitpos & ~ (nbitsize - 1);
3824 if (nbitsize == lbitsize)
3827 if (BYTES_BIG_ENDIAN)
3828 lbitpos = nbitsize - lbitsize - lbitpos;
3830 /* Make the mask to be used against the extracted field. */
3831 mask = build_int_cst_type (unsigned_type, -1);
3832 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3833 mask = const_binop (RSHIFT_EXPR, mask,
3834 size_int (nbitsize - lbitsize - lbitpos));
3837 /* If not comparing with constant, just rework the comparison
3839 return fold_build2_loc (loc, code, compare_type,
3840 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3841 make_bit_field_ref (loc, linner,
3846 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3847 make_bit_field_ref (loc, rinner,
3853 /* Otherwise, we are handling the constant case. See if the constant is too
3854 big for the field. Warn and return a tree of for 0 (false) if so. We do
3855 this not only for its own sake, but to avoid having to test for this
3856 error case below. If we didn't, we might generate wrong code.
3858 For unsigned fields, the constant shifted right by the field length should
3859 be all zero. For signed fields, the high-order bits should agree with
3864 if (wi::lrshift (rhs, lbitsize) != 0)
3866 warning (0, "comparison is always %d due to width of bit-field",
3868 return constant_boolean_node (code == NE_EXPR, compare_type);
3873 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3874 if (tem != 0 && tem != -1)
3876 warning (0, "comparison is always %d due to width of bit-field",
3878 return constant_boolean_node (code == NE_EXPR, compare_type);
3882 /* Single-bit compares should always be against zero. */
3883 if (lbitsize == 1 && ! integer_zerop (rhs))
3885 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3886 rhs = build_int_cst (type, 0);
3889 /* Make a new bitfield reference, shift the constant over the
3890 appropriate number of bits and mask it with the computed mask
3891 (in case this was a signed field). If we changed it, make a new one. */
3892 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3894 rhs = const_binop (BIT_AND_EXPR,
3895 const_binop (LSHIFT_EXPR,
3896 fold_convert_loc (loc, unsigned_type, rhs),
3897 size_int (lbitpos)),
3900 lhs = build2_loc (loc, code, compare_type,
3901 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3905 /* Subroutine for fold_truth_andor_1: decode a field reference.
3907 If EXP is a comparison reference, we return the innermost reference.
3909 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3910 set to the starting bit number.
3912 If the innermost field can be completely contained in a mode-sized
3913 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3915 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3916 otherwise it is not changed.
3918 *PUNSIGNEDP is set to the signedness of the field.
3920 *PMASK is set to the mask used. This is either contained in a
3921 BIT_AND_EXPR or derived from the width of the field.
3923 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3925 Return 0 if this is not a component reference or is one that we can't
3926 do anything with. */
3929 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3930 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3931 int *punsignedp, int *pvolatilep,
3932 tree *pmask, tree *pand_mask)
3934 tree outer_type = 0;
3936 tree mask, inner, offset;
3938 unsigned int precision;
3940 /* All the optimizations using this function assume integer fields.
3941 There are problems with FP fields since the type_for_size call
3942 below can fail for, e.g., XFmode. */
3943 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3946 /* We are interested in the bare arrangement of bits, so strip everything
3947 that doesn't affect the machine mode. However, record the type of the
3948 outermost expression if it may matter below. */
3949 if (CONVERT_EXPR_P (exp)
3950 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3951 outer_type = TREE_TYPE (exp);
3954 if (TREE_CODE (exp) == BIT_AND_EXPR)
3956 and_mask = TREE_OPERAND (exp, 1);
3957 exp = TREE_OPERAND (exp, 0);
3958 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3959 if (TREE_CODE (and_mask) != INTEGER_CST)
3963 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3964 punsignedp, pvolatilep, false);
3965 if ((inner == exp && and_mask == 0)
3966 || *pbitsize < 0 || offset != 0
3967 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3970 /* If the number of bits in the reference is the same as the bitsize of
3971 the outer type, then the outer type gives the signedness. Otherwise
3972 (in case of a small bitfield) the signedness is unchanged. */
3973 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3974 *punsignedp = TYPE_UNSIGNED (outer_type);
3976 /* Compute the mask to access the bitfield. */
3977 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3978 precision = TYPE_PRECISION (unsigned_type);
3980 mask = build_int_cst_type (unsigned_type, -1);
3982 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3983 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3985 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3987 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3988 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3991 *pand_mask = and_mask;
3995 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3996 bit positions and MASK is SIGNED. */
3999 all_ones_mask_p (const_tree mask, unsigned int size)
4001 tree type = TREE_TYPE (mask);
4002 unsigned int precision = TYPE_PRECISION (type);
4004 /* If this function returns true when the type of the mask is
4005 UNSIGNED, then there will be errors. In particular see
4006 gcc.c-torture/execute/990326-1.c. There does not appear to be
4007 any documentation paper trail as to why this is so. But the pre
4008 wide-int worked with that restriction and it has been preserved
4010 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4013 return wi::mask (size, false, precision) == mask;
4016 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4017 represents the sign bit of EXP's type. If EXP represents a sign
4018 or zero extension, also test VAL against the unextended type.
4019 The return value is the (sub)expression whose sign bit is VAL,
4020 or NULL_TREE otherwise. */
4023 sign_bit_p (tree exp, const_tree val)
4028 /* Tree EXP must have an integral type. */
4029 t = TREE_TYPE (exp);
4030 if (! INTEGRAL_TYPE_P (t))
4033 /* Tree VAL must be an integer constant. */
4034 if (TREE_CODE (val) != INTEGER_CST
4035 || TREE_OVERFLOW (val))
4038 width = TYPE_PRECISION (t);
4039 if (wi::only_sign_bit_p (val, width))
4042 /* Handle extension from a narrower type. */
4043 if (TREE_CODE (exp) == NOP_EXPR
4044 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4045 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4050 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4051 to be evaluated unconditionally. */
4054 simple_operand_p (const_tree exp)
4056 /* Strip any conversions that don't change the machine mode. */
4059 return (CONSTANT_CLASS_P (exp)
4060 || TREE_CODE (exp) == SSA_NAME
4062 && ! TREE_ADDRESSABLE (exp)
4063 && ! TREE_THIS_VOLATILE (exp)
4064 && ! DECL_NONLOCAL (exp)
4065 /* Don't regard global variables as simple. They may be
4066 allocated in ways unknown to the compiler (shared memory,
4067 #pragma weak, etc). */
4068 && ! TREE_PUBLIC (exp)
4069 && ! DECL_EXTERNAL (exp)
4070 /* Weakrefs are not safe to be read, since they can be NULL.
4071 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4072 have DECL_WEAK flag set. */
4073 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4074 /* Loading a static variable is unduly expensive, but global
4075 registers aren't expensive. */
4076 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4079 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4080 to be evaluated unconditionally.
4081 I addition to simple_operand_p, we assume that comparisons, conversions,
4082 and logic-not operations are simple, if their operands are simple, too. */
4085 simple_operand_p_2 (tree exp)
4087 enum tree_code code;
4089 if (TREE_SIDE_EFFECTS (exp)
4090 || tree_could_trap_p (exp))
4093 while (CONVERT_EXPR_P (exp))
4094 exp = TREE_OPERAND (exp, 0);
4096 code = TREE_CODE (exp);
4098 if (TREE_CODE_CLASS (code) == tcc_comparison)
4099 return (simple_operand_p (TREE_OPERAND (exp, 0))
4100 && simple_operand_p (TREE_OPERAND (exp, 1)));
4102 if (code == TRUTH_NOT_EXPR)
4103 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4105 return simple_operand_p (exp);
4109 /* The following functions are subroutines to fold_range_test and allow it to
4110 try to change a logical combination of comparisons into a range test.
4113 X == 2 || X == 3 || X == 4 || X == 5
4117 (unsigned) (X - 2) <= 3
4119 We describe each set of comparisons as being either inside or outside
4120 a range, using a variable named like IN_P, and then describe the
4121 range with a lower and upper bound. If one of the bounds is omitted,
4122 it represents either the highest or lowest value of the type.
4124 In the comments below, we represent a range by two numbers in brackets
4125 preceded by a "+" to designate being inside that range, or a "-" to
4126 designate being outside that range, so the condition can be inverted by
4127 flipping the prefix. An omitted bound is represented by a "-". For
4128 example, "- [-, 10]" means being outside the range starting at the lowest
4129 possible value and ending at 10, in other words, being greater than 10.
4130 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4133 We set up things so that the missing bounds are handled in a consistent
4134 manner so neither a missing bound nor "true" and "false" need to be
4135 handled using a special case. */
4137 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4138 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4139 and UPPER1_P are nonzero if the respective argument is an upper bound
4140 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4141 must be specified for a comparison. ARG1 will be converted to ARG0's
4142 type if both are specified. */
4145 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4146 tree arg1, int upper1_p)
4152 /* If neither arg represents infinity, do the normal operation.
4153 Else, if not a comparison, return infinity. Else handle the special
4154 comparison rules. Note that most of the cases below won't occur, but
4155 are handled for consistency. */
4157 if (arg0 != 0 && arg1 != 0)
4159 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4160 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4162 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4165 if (TREE_CODE_CLASS (code) != tcc_comparison)
4168 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4169 for neither. In real maths, we cannot assume open ended ranges are
4170 the same. But, this is computer arithmetic, where numbers are finite.
4171 We can therefore make the transformation of any unbounded range with
4172 the value Z, Z being greater than any representable number. This permits
4173 us to treat unbounded ranges as equal. */
4174 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4175 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4179 result = sgn0 == sgn1;
4182 result = sgn0 != sgn1;
4185 result = sgn0 < sgn1;
4188 result = sgn0 <= sgn1;
4191 result = sgn0 > sgn1;
4194 result = sgn0 >= sgn1;
4200 return constant_boolean_node (result, type);
4203 /* Helper routine for make_range. Perform one step for it, return
4204 new expression if the loop should continue or NULL_TREE if it should
4208 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4209 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4210 bool *strict_overflow_p)
4212 tree arg0_type = TREE_TYPE (arg0);
4213 tree n_low, n_high, low = *p_low, high = *p_high;
4214 int in_p = *p_in_p, n_in_p;
4218 case TRUTH_NOT_EXPR:
4219 /* We can only do something if the range is testing for zero. */
4220 if (low == NULL_TREE || high == NULL_TREE
4221 || ! integer_zerop (low) || ! integer_zerop (high))
4226 case EQ_EXPR: case NE_EXPR:
4227 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4228 /* We can only do something if the range is testing for zero
4229 and if the second operand is an integer constant. Note that
4230 saying something is "in" the range we make is done by
4231 complementing IN_P since it will set in the initial case of
4232 being not equal to zero; "out" is leaving it alone. */
4233 if (low == NULL_TREE || high == NULL_TREE
4234 || ! integer_zerop (low) || ! integer_zerop (high)
4235 || TREE_CODE (arg1) != INTEGER_CST)
4240 case NE_EXPR: /* - [c, c] */
4243 case EQ_EXPR: /* + [c, c] */
4244 in_p = ! in_p, low = high = arg1;
4246 case GT_EXPR: /* - [-, c] */
4247 low = 0, high = arg1;
4249 case GE_EXPR: /* + [c, -] */
4250 in_p = ! in_p, low = arg1, high = 0;
4252 case LT_EXPR: /* - [c, -] */
4253 low = arg1, high = 0;
4255 case LE_EXPR: /* + [-, c] */
4256 in_p = ! in_p, low = 0, high = arg1;
4262 /* If this is an unsigned comparison, we also know that EXP is
4263 greater than or equal to zero. We base the range tests we make
4264 on that fact, so we record it here so we can parse existing
4265 range tests. We test arg0_type since often the return type
4266 of, e.g. EQ_EXPR, is boolean. */
4267 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4269 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4271 build_int_cst (arg0_type, 0),
4275 in_p = n_in_p, low = n_low, high = n_high;
4277 /* If the high bound is missing, but we have a nonzero low
4278 bound, reverse the range so it goes from zero to the low bound
4280 if (high == 0 && low && ! integer_zerop (low))
4283 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4284 build_int_cst (TREE_TYPE (low), 1), 0);
4285 low = build_int_cst (arg0_type, 0);
4295 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4296 low and high are non-NULL, then normalize will DTRT. */
4297 if (!TYPE_UNSIGNED (arg0_type)
4298 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4300 if (low == NULL_TREE)
4301 low = TYPE_MIN_VALUE (arg0_type);
4302 if (high == NULL_TREE)
4303 high = TYPE_MAX_VALUE (arg0_type);
4306 /* (-x) IN [a,b] -> x in [-b, -a] */
4307 n_low = range_binop (MINUS_EXPR, exp_type,
4308 build_int_cst (exp_type, 0),
4310 n_high = range_binop (MINUS_EXPR, exp_type,
4311 build_int_cst (exp_type, 0),
4313 if (n_high != 0 && TREE_OVERFLOW (n_high))
4319 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4320 build_int_cst (exp_type, 1));
4324 if (TREE_CODE (arg1) != INTEGER_CST)
4327 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4328 move a constant to the other side. */
4329 if (!TYPE_UNSIGNED (arg0_type)
4330 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4333 /* If EXP is signed, any overflow in the computation is undefined,
4334 so we don't worry about it so long as our computations on
4335 the bounds don't overflow. For unsigned, overflow is defined
4336 and this is exactly the right thing. */
4337 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4338 arg0_type, low, 0, arg1, 0);
4339 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4340 arg0_type, high, 1, arg1, 0);
4341 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4342 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4345 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4346 *strict_overflow_p = true;
4349 /* Check for an unsigned range which has wrapped around the maximum
4350 value thus making n_high < n_low, and normalize it. */
4351 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4353 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4354 build_int_cst (TREE_TYPE (n_high), 1), 0);
4355 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4356 build_int_cst (TREE_TYPE (n_low), 1), 0);
4358 /* If the range is of the form +/- [ x+1, x ], we won't
4359 be able to normalize it. But then, it represents the
4360 whole range or the empty set, so make it
4362 if (tree_int_cst_equal (n_low, low)
4363 && tree_int_cst_equal (n_high, high))
4369 low = n_low, high = n_high;
4377 case NON_LVALUE_EXPR:
4378 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4381 if (! INTEGRAL_TYPE_P (arg0_type)
4382 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4383 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4386 n_low = low, n_high = high;
4389 n_low = fold_convert_loc (loc, arg0_type, n_low);
4392 n_high = fold_convert_loc (loc, arg0_type, n_high);
4394 /* If we're converting arg0 from an unsigned type, to exp,
4395 a signed type, we will be doing the comparison as unsigned.
4396 The tests above have already verified that LOW and HIGH
4399 So we have to ensure that we will handle large unsigned
4400 values the same way that the current signed bounds treat
4403 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4407 /* For fixed-point modes, we need to pass the saturating flag
4408 as the 2nd parameter. */
4409 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4411 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4412 TYPE_SATURATING (arg0_type));
4415 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4417 /* A range without an upper bound is, naturally, unbounded.
4418 Since convert would have cropped a very large value, use
4419 the max value for the destination type. */
4421 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4422 : TYPE_MAX_VALUE (arg0_type);
4424 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4425 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4426 fold_convert_loc (loc, arg0_type,
4428 build_int_cst (arg0_type, 1));
4430 /* If the low bound is specified, "and" the range with the
4431 range for which the original unsigned value will be
4435 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4436 1, fold_convert_loc (loc, arg0_type,
4441 in_p = (n_in_p == in_p);
4445 /* Otherwise, "or" the range with the range of the input
4446 that will be interpreted as negative. */
4447 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4448 1, fold_convert_loc (loc, arg0_type,
4453 in_p = (in_p != n_in_p);
4467 /* Given EXP, a logical expression, set the range it is testing into
4468 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4469 actually being tested. *PLOW and *PHIGH will be made of the same
4470 type as the returned expression. If EXP is not a comparison, we
4471 will most likely not be returning a useful value and range. Set
4472 *STRICT_OVERFLOW_P to true if the return value is only valid
4473 because signed overflow is undefined; otherwise, do not change
4474 *STRICT_OVERFLOW_P. */
4477 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4478 bool *strict_overflow_p)
4480 enum tree_code code;
4481 tree arg0, arg1 = NULL_TREE;
4482 tree exp_type, nexp;
4485 location_t loc = EXPR_LOCATION (exp);
4487 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4488 and see if we can refine the range. Some of the cases below may not
4489 happen, but it doesn't seem worth worrying about this. We "continue"
4490 the outer loop when we've changed something; otherwise we "break"
4491 the switch, which will "break" the while. */
4494 low = high = build_int_cst (TREE_TYPE (exp), 0);
4498 code = TREE_CODE (exp);
4499 exp_type = TREE_TYPE (exp);
4502 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4504 if (TREE_OPERAND_LENGTH (exp) > 0)
4505 arg0 = TREE_OPERAND (exp, 0);
4506 if (TREE_CODE_CLASS (code) == tcc_binary
4507 || TREE_CODE_CLASS (code) == tcc_comparison
4508 || (TREE_CODE_CLASS (code) == tcc_expression
4509 && TREE_OPERAND_LENGTH (exp) > 1))
4510 arg1 = TREE_OPERAND (exp, 1);
4512 if (arg0 == NULL_TREE)
4515 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4516 &high, &in_p, strict_overflow_p);
4517 if (nexp == NULL_TREE)
4522 /* If EXP is a constant, we can evaluate whether this is true or false. */
4523 if (TREE_CODE (exp) == INTEGER_CST)
4525 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4527 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4533 *pin_p = in_p, *plow = low, *phigh = high;
4537 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4538 type, TYPE, return an expression to test if EXP is in (or out of, depending
4539 on IN_P) the range. Return 0 if the test couldn't be created. */
4542 build_range_check (location_t loc, tree type, tree exp, int in_p,
4543 tree low, tree high)
4545 tree etype = TREE_TYPE (exp), value;
4547 /* Disable this optimization for function pointer expressions
4548 on targets that require function pointer canonicalization. */
4549 if (targetm.have_canonicalize_funcptr_for_compare ()
4550 && TREE_CODE (etype) == POINTER_TYPE
4551 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4556 value = build_range_check (loc, type, exp, 1, low, high);
4558 return invert_truthvalue_loc (loc, value);
4563 if (low == 0 && high == 0)
4564 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4567 return fold_build2_loc (loc, LE_EXPR, type, exp,
4568 fold_convert_loc (loc, etype, high));
4571 return fold_build2_loc (loc, GE_EXPR, type, exp,
4572 fold_convert_loc (loc, etype, low));
4574 if (operand_equal_p (low, high, 0))
4575 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4576 fold_convert_loc (loc, etype, low));
4578 if (integer_zerop (low))
4580 if (! TYPE_UNSIGNED (etype))
4582 etype = unsigned_type_for (etype);
4583 high = fold_convert_loc (loc, etype, high);
4584 exp = fold_convert_loc (loc, etype, exp);
4586 return build_range_check (loc, type, exp, 1, 0, high);
4589 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4590 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4592 int prec = TYPE_PRECISION (etype);
4594 if (wi::mask (prec - 1, false, prec) == high)
4596 if (TYPE_UNSIGNED (etype))
4598 tree signed_etype = signed_type_for (etype);
4599 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4601 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4603 etype = signed_etype;
4604 exp = fold_convert_loc (loc, etype, exp);
4606 return fold_build2_loc (loc, GT_EXPR, type, exp,
4607 build_int_cst (etype, 0));
4611 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4612 This requires wrap-around arithmetics for the type of the expression.
4613 First make sure that arithmetics in this type is valid, then make sure
4614 that it wraps around. */
4615 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4616 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4617 TYPE_UNSIGNED (etype));
4619 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4621 tree utype, minv, maxv;
4623 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4624 for the type in question, as we rely on this here. */
4625 utype = unsigned_type_for (etype);
4626 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4627 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4628 build_int_cst (TREE_TYPE (maxv), 1), 1);
4629 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4631 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4638 high = fold_convert_loc (loc, etype, high);
4639 low = fold_convert_loc (loc, etype, low);
4640 exp = fold_convert_loc (loc, etype, exp);
4642 value = const_binop (MINUS_EXPR, high, low);
4645 if (POINTER_TYPE_P (etype))
4647 if (value != 0 && !TREE_OVERFLOW (value))
4649 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4650 return build_range_check (loc, type,
4651 fold_build_pointer_plus_loc (loc, exp, low),
4652 1, build_int_cst (etype, 0), value);
4657 if (value != 0 && !TREE_OVERFLOW (value))
4658 return build_range_check (loc, type,
4659 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4660 1, build_int_cst (etype, 0), value);
4665 /* Return the predecessor of VAL in its type, handling the infinite case. */
4668 range_predecessor (tree val)
4670 tree type = TREE_TYPE (val);
4672 if (INTEGRAL_TYPE_P (type)
4673 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4676 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4677 build_int_cst (TREE_TYPE (val), 1), 0);
4680 /* Return the successor of VAL in its type, handling the infinite case. */
4683 range_successor (tree val)
4685 tree type = TREE_TYPE (val);
4687 if (INTEGRAL_TYPE_P (type)
4688 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4691 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4692 build_int_cst (TREE_TYPE (val), 1), 0);
4695 /* Given two ranges, see if we can merge them into one. Return 1 if we
4696 can, 0 if we can't. Set the output range into the specified parameters. */
4699 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4700 tree high0, int in1_p, tree low1, tree high1)
4708 int lowequal = ((low0 == 0 && low1 == 0)
4709 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4710 low0, 0, low1, 0)));
4711 int highequal = ((high0 == 0 && high1 == 0)
4712 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4713 high0, 1, high1, 1)));
4715 /* Make range 0 be the range that starts first, or ends last if they
4716 start at the same value. Swap them if it isn't. */
4717 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4720 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4721 high1, 1, high0, 1))))
4723 temp = in0_p, in0_p = in1_p, in1_p = temp;
4724 tem = low0, low0 = low1, low1 = tem;
4725 tem = high0, high0 = high1, high1 = tem;
4728 /* Now flag two cases, whether the ranges are disjoint or whether the
4729 second range is totally subsumed in the first. Note that the tests
4730 below are simplified by the ones above. */
4731 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4732 high0, 1, low1, 0));
4733 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4734 high1, 1, high0, 1));
4736 /* We now have four cases, depending on whether we are including or
4737 excluding the two ranges. */
4740 /* If they don't overlap, the result is false. If the second range
4741 is a subset it is the result. Otherwise, the range is from the start
4742 of the second to the end of the first. */
4744 in_p = 0, low = high = 0;
4746 in_p = 1, low = low1, high = high1;
4748 in_p = 1, low = low1, high = high0;
4751 else if (in0_p && ! in1_p)
4753 /* If they don't overlap, the result is the first range. If they are
4754 equal, the result is false. If the second range is a subset of the
4755 first, and the ranges begin at the same place, we go from just after
4756 the end of the second range to the end of the first. If the second
4757 range is not a subset of the first, or if it is a subset and both
4758 ranges end at the same place, the range starts at the start of the
4759 first range and ends just before the second range.
4760 Otherwise, we can't describe this as a single range. */
4762 in_p = 1, low = low0, high = high0;
4763 else if (lowequal && highequal)
4764 in_p = 0, low = high = 0;
4765 else if (subset && lowequal)
4767 low = range_successor (high1);
4772 /* We are in the weird situation where high0 > high1 but
4773 high1 has no successor. Punt. */
4777 else if (! subset || highequal)
4780 high = range_predecessor (low1);
4784 /* low0 < low1 but low1 has no predecessor. Punt. */
4792 else if (! in0_p && in1_p)
4794 /* If they don't overlap, the result is the second range. If the second
4795 is a subset of the first, the result is false. Otherwise,
4796 the range starts just after the first range and ends at the
4797 end of the second. */
4799 in_p = 1, low = low1, high = high1;
4800 else if (subset || highequal)
4801 in_p = 0, low = high = 0;
4804 low = range_successor (high0);
4809 /* high1 > high0 but high0 has no successor. Punt. */
4817 /* The case where we are excluding both ranges. Here the complex case
4818 is if they don't overlap. In that case, the only time we have a
4819 range is if they are adjacent. If the second is a subset of the
4820 first, the result is the first. Otherwise, the range to exclude
4821 starts at the beginning of the first range and ends at the end of the
4825 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4826 range_successor (high0),
4828 in_p = 0, low = low0, high = high1;
4831 /* Canonicalize - [min, x] into - [-, x]. */
4832 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4833 switch (TREE_CODE (TREE_TYPE (low0)))
4836 if (TYPE_PRECISION (TREE_TYPE (low0))
4837 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4841 if (tree_int_cst_equal (low0,
4842 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4846 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4847 && integer_zerop (low0))
4854 /* Canonicalize - [x, max] into - [x, -]. */
4855 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4856 switch (TREE_CODE (TREE_TYPE (high1)))
4859 if (TYPE_PRECISION (TREE_TYPE (high1))
4860 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4864 if (tree_int_cst_equal (high1,
4865 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4869 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4870 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4872 build_int_cst (TREE_TYPE (high1), 1),
4880 /* The ranges might be also adjacent between the maximum and
4881 minimum values of the given type. For
4882 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4883 return + [x + 1, y - 1]. */
4884 if (low0 == 0 && high1 == 0)
4886 low = range_successor (high0);
4887 high = range_predecessor (low1);
4888 if (low == 0 || high == 0)
4898 in_p = 0, low = low0, high = high0;
4900 in_p = 0, low = low0, high = high1;
4903 *pin_p = in_p, *plow = low, *phigh = high;
4908 /* Subroutine of fold, looking inside expressions of the form
4909 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4910 of the COND_EXPR. This function is being used also to optimize
4911 A op B ? C : A, by reversing the comparison first.
4913 Return a folded expression whose code is not a COND_EXPR
4914 anymore, or NULL_TREE if no folding opportunity is found. */
4917 fold_cond_expr_with_comparison (location_t loc, tree type,
4918 tree arg0, tree arg1, tree arg2)
4920 enum tree_code comp_code = TREE_CODE (arg0);
4921 tree arg00 = TREE_OPERAND (arg0, 0);
4922 tree arg01 = TREE_OPERAND (arg0, 1);
4923 tree arg1_type = TREE_TYPE (arg1);
4929 /* If we have A op 0 ? A : -A, consider applying the following
4932 A == 0? A : -A same as -A
4933 A != 0? A : -A same as A
4934 A >= 0? A : -A same as abs (A)
4935 A > 0? A : -A same as abs (A)
4936 A <= 0? A : -A same as -abs (A)
4937 A < 0? A : -A same as -abs (A)
4939 None of these transformations work for modes with signed
4940 zeros. If A is +/-0, the first two transformations will
4941 change the sign of the result (from +0 to -0, or vice
4942 versa). The last four will fix the sign of the result,
4943 even though the original expressions could be positive or
4944 negative, depending on the sign of A.
4946 Note that all these transformations are correct if A is
4947 NaN, since the two alternatives (A and -A) are also NaNs. */
4948 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4949 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4950 ? real_zerop (arg01)
4951 : integer_zerop (arg01))
4952 && ((TREE_CODE (arg2) == NEGATE_EXPR
4953 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4954 /* In the case that A is of the form X-Y, '-A' (arg2) may
4955 have already been folded to Y-X, check for that. */
4956 || (TREE_CODE (arg1) == MINUS_EXPR
4957 && TREE_CODE (arg2) == MINUS_EXPR
4958 && operand_equal_p (TREE_OPERAND (arg1, 0),
4959 TREE_OPERAND (arg2, 1), 0)
4960 && operand_equal_p (TREE_OPERAND (arg1, 1),
4961 TREE_OPERAND (arg2, 0), 0))))
4966 tem = fold_convert_loc (loc, arg1_type, arg1);
4967 return pedantic_non_lvalue_loc (loc,
4968 fold_convert_loc (loc, type,
4969 negate_expr (tem)));
4972 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4975 if (flag_trapping_math)
4980 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4981 arg1 = fold_convert_loc (loc, signed_type_for
4982 (TREE_TYPE (arg1)), arg1);
4983 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4984 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4987 if (flag_trapping_math)
4991 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4992 arg1 = fold_convert_loc (loc, signed_type_for
4993 (TREE_TYPE (arg1)), arg1);
4994 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4995 return negate_expr (fold_convert_loc (loc, type, tem));
4997 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5001 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5002 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5003 both transformations are correct when A is NaN: A != 0
5004 is then true, and A == 0 is false. */
5006 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5007 && integer_zerop (arg01) && integer_zerop (arg2))
5009 if (comp_code == NE_EXPR)
5010 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5011 else if (comp_code == EQ_EXPR)
5012 return build_zero_cst (type);
5015 /* Try some transformations of A op B ? A : B.
5017 A == B? A : B same as B
5018 A != B? A : B same as A
5019 A >= B? A : B same as max (A, B)
5020 A > B? A : B same as max (B, A)
5021 A <= B? A : B same as min (A, B)
5022 A < B? A : B same as min (B, A)
5024 As above, these transformations don't work in the presence
5025 of signed zeros. For example, if A and B are zeros of
5026 opposite sign, the first two transformations will change
5027 the sign of the result. In the last four, the original
5028 expressions give different results for (A=+0, B=-0) and
5029 (A=-0, B=+0), but the transformed expressions do not.
5031 The first two transformations are correct if either A or B
5032 is a NaN. In the first transformation, the condition will
5033 be false, and B will indeed be chosen. In the case of the
5034 second transformation, the condition A != B will be true,
5035 and A will be chosen.
5037 The conversions to max() and min() are not correct if B is
5038 a number and A is not. The conditions in the original
5039 expressions will be false, so all four give B. The min()
5040 and max() versions would give a NaN instead. */
5041 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5042 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5043 /* Avoid these transformations if the COND_EXPR may be used
5044 as an lvalue in the C++ front-end. PR c++/19199. */
5046 || VECTOR_TYPE_P (type)
5047 || (! lang_GNU_CXX ()
5048 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5049 || ! maybe_lvalue_p (arg1)
5050 || ! maybe_lvalue_p (arg2)))
5052 tree comp_op0 = arg00;
5053 tree comp_op1 = arg01;
5054 tree comp_type = TREE_TYPE (comp_op0);
5056 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5057 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5067 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5069 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5074 /* In C++ a ?: expression can be an lvalue, so put the
5075 operand which will be used if they are equal first
5076 so that we can convert this back to the
5077 corresponding COND_EXPR. */
5078 if (!HONOR_NANS (arg1))
5080 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5081 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5082 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5083 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5084 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5085 comp_op1, comp_op0);
5086 return pedantic_non_lvalue_loc (loc,
5087 fold_convert_loc (loc, type, tem));
5094 if (!HONOR_NANS (arg1))
5096 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5097 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5098 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5099 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5100 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5101 comp_op1, comp_op0);
5102 return pedantic_non_lvalue_loc (loc,
5103 fold_convert_loc (loc, type, tem));
5107 if (!HONOR_NANS (arg1))
5108 return pedantic_non_lvalue_loc (loc,
5109 fold_convert_loc (loc, type, arg2));
5112 if (!HONOR_NANS (arg1))
5113 return pedantic_non_lvalue_loc (loc,
5114 fold_convert_loc (loc, type, arg1));
5117 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5122 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5123 we might still be able to simplify this. For example,
5124 if C1 is one less or one more than C2, this might have started
5125 out as a MIN or MAX and been transformed by this function.
5126 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5128 if (INTEGRAL_TYPE_P (type)
5129 && TREE_CODE (arg01) == INTEGER_CST
5130 && TREE_CODE (arg2) == INTEGER_CST)
5134 if (TREE_CODE (arg1) == INTEGER_CST)
5136 /* We can replace A with C1 in this case. */
5137 arg1 = fold_convert_loc (loc, type, arg01);
5138 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5141 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5142 MIN_EXPR, to preserve the signedness of the comparison. */
5143 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5145 && operand_equal_p (arg01,
5146 const_binop (PLUS_EXPR, arg2,
5147 build_int_cst (type, 1)),
5150 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5151 fold_convert_loc (loc, TREE_TYPE (arg00),
5153 return pedantic_non_lvalue_loc (loc,
5154 fold_convert_loc (loc, type, tem));
5159 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5161 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5163 && operand_equal_p (arg01,
5164 const_binop (MINUS_EXPR, arg2,
5165 build_int_cst (type, 1)),
5168 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5169 fold_convert_loc (loc, TREE_TYPE (arg00),
5171 return pedantic_non_lvalue_loc (loc,
5172 fold_convert_loc (loc, type, tem));
5177 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5178 MAX_EXPR, to preserve the signedness of the comparison. */
5179 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5181 && operand_equal_p (arg01,
5182 const_binop (MINUS_EXPR, arg2,
5183 build_int_cst (type, 1)),
5186 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5187 fold_convert_loc (loc, TREE_TYPE (arg00),
5189 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5194 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5195 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5197 && operand_equal_p (arg01,
5198 const_binop (PLUS_EXPR, arg2,
5199 build_int_cst (type, 1)),
5202 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5203 fold_convert_loc (loc, TREE_TYPE (arg00),
5205 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5219 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5220 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5221 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5225 /* EXP is some logical combination of boolean tests. See if we can
5226 merge it into some range test. Return the new tree if so. */
5229 fold_range_test (location_t loc, enum tree_code code, tree type,
5232 int or_op = (code == TRUTH_ORIF_EXPR
5233 || code == TRUTH_OR_EXPR);
5234 int in0_p, in1_p, in_p;
5235 tree low0, low1, low, high0, high1, high;
5236 bool strict_overflow_p = false;
5238 const char * const warnmsg = G_("assuming signed overflow does not occur "
5239 "when simplifying range test");
5241 if (!INTEGRAL_TYPE_P (type))
5244 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5245 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5247 /* If this is an OR operation, invert both sides; we will invert
5248 again at the end. */
5250 in0_p = ! in0_p, in1_p = ! in1_p;
5252 /* If both expressions are the same, if we can merge the ranges, and we
5253 can build the range test, return it or it inverted. If one of the
5254 ranges is always true or always false, consider it to be the same
5255 expression as the other. */
5256 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5257 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5259 && 0 != (tem = (build_range_check (loc, type,
5261 : rhs != 0 ? rhs : integer_zero_node,
5264 if (strict_overflow_p)
5265 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5266 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5269 /* On machines where the branch cost is expensive, if this is a
5270 short-circuited branch and the underlying object on both sides
5271 is the same, make a non-short-circuit operation. */
5272 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5273 && lhs != 0 && rhs != 0
5274 && (code == TRUTH_ANDIF_EXPR
5275 || code == TRUTH_ORIF_EXPR)
5276 && operand_equal_p (lhs, rhs, 0))
5278 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5279 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5280 which cases we can't do this. */
5281 if (simple_operand_p (lhs))
5282 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5283 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5286 else if (!lang_hooks.decls.global_bindings_p ()
5287 && !CONTAINS_PLACEHOLDER_P (lhs))
5289 tree common = save_expr (lhs);
5291 if (0 != (lhs = build_range_check (loc, type, common,
5292 or_op ? ! in0_p : in0_p,
5294 && (0 != (rhs = build_range_check (loc, type, common,
5295 or_op ? ! in1_p : in1_p,
5298 if (strict_overflow_p)
5299 fold_overflow_warning (warnmsg,
5300 WARN_STRICT_OVERFLOW_COMPARISON);
5301 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5302 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5311 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5312 bit value. Arrange things so the extra bits will be set to zero if and
5313 only if C is signed-extended to its full width. If MASK is nonzero,
5314 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5317 unextend (tree c, int p, int unsignedp, tree mask)
5319 tree type = TREE_TYPE (c);
5320 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5323 if (p == modesize || unsignedp)
5326 /* We work by getting just the sign bit into the low-order bit, then
5327 into the high-order bit, then sign-extend. We then XOR that value
5329 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5331 /* We must use a signed type in order to get an arithmetic right shift.
5332 However, we must also avoid introducing accidental overflows, so that
5333 a subsequent call to integer_zerop will work. Hence we must
5334 do the type conversion here. At this point, the constant is either
5335 zero or one, and the conversion to a signed type can never overflow.
5336 We could get an overflow if this conversion is done anywhere else. */
5337 if (TYPE_UNSIGNED (type))
5338 temp = fold_convert (signed_type_for (type), temp);
5340 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5341 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5343 temp = const_binop (BIT_AND_EXPR, temp,
5344 fold_convert (TREE_TYPE (c), mask));
5345 /* If necessary, convert the type back to match the type of C. */
5346 if (TYPE_UNSIGNED (type))
5347 temp = fold_convert (type, temp);
5349 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5352 /* For an expression that has the form
5356 we can drop one of the inner expressions and simplify to
5360 LOC is the location of the resulting expression. OP is the inner
5361 logical operation; the left-hand side in the examples above, while CMPOP
5362 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5363 removing a condition that guards another, as in
5364 (A != NULL && A->...) || A == NULL
5365 which we must not transform. If RHS_ONLY is true, only eliminate the
5366 right-most operand of the inner logical operation. */
5369 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5372 tree type = TREE_TYPE (cmpop);
5373 enum tree_code code = TREE_CODE (cmpop);
5374 enum tree_code truthop_code = TREE_CODE (op);
5375 tree lhs = TREE_OPERAND (op, 0);
5376 tree rhs = TREE_OPERAND (op, 1);
5377 tree orig_lhs = lhs, orig_rhs = rhs;
5378 enum tree_code rhs_code = TREE_CODE (rhs);
5379 enum tree_code lhs_code = TREE_CODE (lhs);
5380 enum tree_code inv_code;
5382 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5385 if (TREE_CODE_CLASS (code) != tcc_comparison)
5388 if (rhs_code == truthop_code)
5390 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5391 if (newrhs != NULL_TREE)
5394 rhs_code = TREE_CODE (rhs);
5397 if (lhs_code == truthop_code && !rhs_only)
5399 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5400 if (newlhs != NULL_TREE)
5403 lhs_code = TREE_CODE (lhs);
5407 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5408 if (inv_code == rhs_code
5409 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5410 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5412 if (!rhs_only && inv_code == lhs_code
5413 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5414 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5416 if (rhs != orig_rhs || lhs != orig_lhs)
5417 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5422 /* Find ways of folding logical expressions of LHS and RHS:
5423 Try to merge two comparisons to the same innermost item.
5424 Look for range tests like "ch >= '0' && ch <= '9'".
5425 Look for combinations of simple terms on machines with expensive branches
5426 and evaluate the RHS unconditionally.
5428 For example, if we have p->a == 2 && p->b == 4 and we can make an
5429 object large enough to span both A and B, we can do this with a comparison
5430 against the object ANDed with the a mask.
5432 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5433 operations to do this with one comparison.
5435 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5436 function and the one above.
5438 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5439 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5441 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5444 We return the simplified tree or 0 if no optimization is possible. */
5447 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5450 /* If this is the "or" of two comparisons, we can do something if
5451 the comparisons are NE_EXPR. If this is the "and", we can do something
5452 if the comparisons are EQ_EXPR. I.e.,
5453 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5455 WANTED_CODE is this operation code. For single bit fields, we can
5456 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5457 comparison for one-bit fields. */
5459 enum tree_code wanted_code;
5460 enum tree_code lcode, rcode;
5461 tree ll_arg, lr_arg, rl_arg, rr_arg;
5462 tree ll_inner, lr_inner, rl_inner, rr_inner;
5463 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5464 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5465 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5466 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5467 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5468 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5469 machine_mode lnmode, rnmode;
5470 tree ll_mask, lr_mask, rl_mask, rr_mask;
5471 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5472 tree l_const, r_const;
5473 tree lntype, rntype, result;
5474 HOST_WIDE_INT first_bit, end_bit;
5477 /* Start by getting the comparison codes. Fail if anything is volatile.
5478 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5479 it were surrounded with a NE_EXPR. */
5481 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5484 lcode = TREE_CODE (lhs);
5485 rcode = TREE_CODE (rhs);
5487 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5489 lhs = build2 (NE_EXPR, truth_type, lhs,
5490 build_int_cst (TREE_TYPE (lhs), 0));
5494 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5496 rhs = build2 (NE_EXPR, truth_type, rhs,
5497 build_int_cst (TREE_TYPE (rhs), 0));
5501 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5502 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5505 ll_arg = TREE_OPERAND (lhs, 0);
5506 lr_arg = TREE_OPERAND (lhs, 1);
5507 rl_arg = TREE_OPERAND (rhs, 0);
5508 rr_arg = TREE_OPERAND (rhs, 1);
5510 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5511 if (simple_operand_p (ll_arg)
5512 && simple_operand_p (lr_arg))
5514 if (operand_equal_p (ll_arg, rl_arg, 0)
5515 && operand_equal_p (lr_arg, rr_arg, 0))
5517 result = combine_comparisons (loc, code, lcode, rcode,
5518 truth_type, ll_arg, lr_arg);
5522 else if (operand_equal_p (ll_arg, rr_arg, 0)
5523 && operand_equal_p (lr_arg, rl_arg, 0))
5525 result = combine_comparisons (loc, code, lcode,
5526 swap_tree_comparison (rcode),
5527 truth_type, ll_arg, lr_arg);
5533 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5534 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5536 /* If the RHS can be evaluated unconditionally and its operands are
5537 simple, it wins to evaluate the RHS unconditionally on machines
5538 with expensive branches. In this case, this isn't a comparison
5539 that can be merged. */
5541 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5543 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5544 && simple_operand_p (rl_arg)
5545 && simple_operand_p (rr_arg))
5547 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5548 if (code == TRUTH_OR_EXPR
5549 && lcode == NE_EXPR && integer_zerop (lr_arg)
5550 && rcode == NE_EXPR && integer_zerop (rr_arg)
5551 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5552 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5553 return build2_loc (loc, NE_EXPR, truth_type,
5554 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5556 build_int_cst (TREE_TYPE (ll_arg), 0));
5558 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5559 if (code == TRUTH_AND_EXPR
5560 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5561 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5562 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5563 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5564 return build2_loc (loc, EQ_EXPR, truth_type,
5565 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5567 build_int_cst (TREE_TYPE (ll_arg), 0));
5570 /* See if the comparisons can be merged. Then get all the parameters for
5573 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5574 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5578 ll_inner = decode_field_reference (loc, ll_arg,
5579 &ll_bitsize, &ll_bitpos, &ll_mode,
5580 &ll_unsignedp, &volatilep, &ll_mask,
5582 lr_inner = decode_field_reference (loc, lr_arg,
5583 &lr_bitsize, &lr_bitpos, &lr_mode,
5584 &lr_unsignedp, &volatilep, &lr_mask,
5586 rl_inner = decode_field_reference (loc, rl_arg,
5587 &rl_bitsize, &rl_bitpos, &rl_mode,
5588 &rl_unsignedp, &volatilep, &rl_mask,
5590 rr_inner = decode_field_reference (loc, rr_arg,
5591 &rr_bitsize, &rr_bitpos, &rr_mode,
5592 &rr_unsignedp, &volatilep, &rr_mask,
5595 /* It must be true that the inner operation on the lhs of each
5596 comparison must be the same if we are to be able to do anything.
5597 Then see if we have constants. If not, the same must be true for
5599 if (volatilep || ll_inner == 0 || rl_inner == 0
5600 || ! operand_equal_p (ll_inner, rl_inner, 0))
5603 if (TREE_CODE (lr_arg) == INTEGER_CST
5604 && TREE_CODE (rr_arg) == INTEGER_CST)
5605 l_const = lr_arg, r_const = rr_arg;
5606 else if (lr_inner == 0 || rr_inner == 0
5607 || ! operand_equal_p (lr_inner, rr_inner, 0))
5610 l_const = r_const = 0;
5612 /* If either comparison code is not correct for our logical operation,
5613 fail. However, we can convert a one-bit comparison against zero into
5614 the opposite comparison against that bit being set in the field. */
5616 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5617 if (lcode != wanted_code)
5619 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5621 /* Make the left operand unsigned, since we are only interested
5622 in the value of one bit. Otherwise we are doing the wrong
5631 /* This is analogous to the code for l_const above. */
5632 if (rcode != wanted_code)
5634 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5643 /* See if we can find a mode that contains both fields being compared on
5644 the left. If we can't, fail. Otherwise, update all constants and masks
5645 to be relative to a field of that size. */
5646 first_bit = MIN (ll_bitpos, rl_bitpos);
5647 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5648 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5649 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5651 if (lnmode == VOIDmode)
5654 lnbitsize = GET_MODE_BITSIZE (lnmode);
5655 lnbitpos = first_bit & ~ (lnbitsize - 1);
5656 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5657 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5659 if (BYTES_BIG_ENDIAN)
5661 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5662 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5665 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5666 size_int (xll_bitpos));
5667 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5668 size_int (xrl_bitpos));
5672 l_const = fold_convert_loc (loc, lntype, l_const);
5673 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5674 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5675 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5676 fold_build1_loc (loc, BIT_NOT_EXPR,
5679 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5681 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5686 r_const = fold_convert_loc (loc, lntype, r_const);
5687 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5688 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5689 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5690 fold_build1_loc (loc, BIT_NOT_EXPR,
5693 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5695 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5699 /* If the right sides are not constant, do the same for it. Also,
5700 disallow this optimization if a size or signedness mismatch occurs
5701 between the left and right sides. */
5704 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5705 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5706 /* Make sure the two fields on the right
5707 correspond to the left without being swapped. */
5708 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5711 first_bit = MIN (lr_bitpos, rr_bitpos);
5712 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5713 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5714 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5716 if (rnmode == VOIDmode)
5719 rnbitsize = GET_MODE_BITSIZE (rnmode);
5720 rnbitpos = first_bit & ~ (rnbitsize - 1);
5721 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5722 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5724 if (BYTES_BIG_ENDIAN)
5726 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5727 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5730 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5732 size_int (xlr_bitpos));
5733 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5735 size_int (xrr_bitpos));
5737 /* Make a mask that corresponds to both fields being compared.
5738 Do this for both items being compared. If the operands are the
5739 same size and the bits being compared are in the same position
5740 then we can do this by masking both and comparing the masked
5742 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5743 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5744 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5746 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5747 ll_unsignedp || rl_unsignedp);
5748 if (! all_ones_mask_p (ll_mask, lnbitsize))
5749 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5751 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5752 lr_unsignedp || rr_unsignedp);
5753 if (! all_ones_mask_p (lr_mask, rnbitsize))
5754 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5756 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5759 /* There is still another way we can do something: If both pairs of
5760 fields being compared are adjacent, we may be able to make a wider
5761 field containing them both.
5763 Note that we still must mask the lhs/rhs expressions. Furthermore,
5764 the mask must be shifted to account for the shift done by
5765 make_bit_field_ref. */
5766 if ((ll_bitsize + ll_bitpos == rl_bitpos
5767 && lr_bitsize + lr_bitpos == rr_bitpos)
5768 || (ll_bitpos == rl_bitpos + rl_bitsize
5769 && lr_bitpos == rr_bitpos + rr_bitsize))
5773 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5774 ll_bitsize + rl_bitsize,
5775 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5776 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5777 lr_bitsize + rr_bitsize,
5778 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5780 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5781 size_int (MIN (xll_bitpos, xrl_bitpos)));
5782 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5783 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5785 /* Convert to the smaller type before masking out unwanted bits. */
5787 if (lntype != rntype)
5789 if (lnbitsize > rnbitsize)
5791 lhs = fold_convert_loc (loc, rntype, lhs);
5792 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5795 else if (lnbitsize < rnbitsize)
5797 rhs = fold_convert_loc (loc, lntype, rhs);
5798 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5803 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5804 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5806 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5807 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5809 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5815 /* Handle the case of comparisons with constants. If there is something in
5816 common between the masks, those bits of the constants must be the same.
5817 If not, the condition is always false. Test for this to avoid generating
5818 incorrect code below. */
5819 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5820 if (! integer_zerop (result)
5821 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5822 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5824 if (wanted_code == NE_EXPR)
5826 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5827 return constant_boolean_node (true, truth_type);
5831 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5832 return constant_boolean_node (false, truth_type);
5836 /* Construct the expression we will return. First get the component
5837 reference we will make. Unless the mask is all ones the width of
5838 that field, perform the mask operation. Then compare with the
5840 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5841 ll_unsignedp || rl_unsignedp);
5843 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5844 if (! all_ones_mask_p (ll_mask, lnbitsize))
5845 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5847 return build2_loc (loc, wanted_code, truth_type, result,
5848 const_binop (BIT_IOR_EXPR, l_const, r_const));
5851 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5855 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5859 enum tree_code op_code;
5862 int consts_equal, consts_lt;
5865 STRIP_SIGN_NOPS (arg0);
5867 op_code = TREE_CODE (arg0);
5868 minmax_const = TREE_OPERAND (arg0, 1);
5869 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5870 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5871 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5872 inner = TREE_OPERAND (arg0, 0);
5874 /* If something does not permit us to optimize, return the original tree. */
5875 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5876 || TREE_CODE (comp_const) != INTEGER_CST
5877 || TREE_OVERFLOW (comp_const)
5878 || TREE_CODE (minmax_const) != INTEGER_CST
5879 || TREE_OVERFLOW (minmax_const))
5882 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5883 and GT_EXPR, doing the rest with recursive calls using logical
5887 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5890 = optimize_minmax_comparison (loc,
5891 invert_tree_comparison (code, false),
5894 return invert_truthvalue_loc (loc, tem);
5900 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5901 optimize_minmax_comparison
5902 (loc, EQ_EXPR, type, arg0, comp_const),
5903 optimize_minmax_comparison
5904 (loc, GT_EXPR, type, arg0, comp_const));
5907 if (op_code == MAX_EXPR && consts_equal)
5908 /* MAX (X, 0) == 0 -> X <= 0 */
5909 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5911 else if (op_code == MAX_EXPR && consts_lt)
5912 /* MAX (X, 0) == 5 -> X == 5 */
5913 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5915 else if (op_code == MAX_EXPR)
5916 /* MAX (X, 0) == -1 -> false */
5917 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5919 else if (consts_equal)
5920 /* MIN (X, 0) == 0 -> X >= 0 */
5921 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5924 /* MIN (X, 0) == 5 -> false */
5925 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5928 /* MIN (X, 0) == -1 -> X == -1 */
5929 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5932 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5933 /* MAX (X, 0) > 0 -> X > 0
5934 MAX (X, 0) > 5 -> X > 5 */
5935 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5937 else if (op_code == MAX_EXPR)
5938 /* MAX (X, 0) > -1 -> true */
5939 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5941 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5942 /* MIN (X, 0) > 0 -> false
5943 MIN (X, 0) > 5 -> false */
5944 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5947 /* MIN (X, 0) > -1 -> X > -1 */
5948 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5955 /* T is an integer expression that is being multiplied, divided, or taken a
5956 modulus (CODE says which and what kind of divide or modulus) by a
5957 constant C. See if we can eliminate that operation by folding it with
5958 other operations already in T. WIDE_TYPE, if non-null, is a type that
5959 should be used for the computation if wider than our type.
5961 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5962 (X * 2) + (Y * 4). We must, however, be assured that either the original
5963 expression would not overflow or that overflow is undefined for the type
5964 in the language in question.
5966 If we return a non-null expression, it is an equivalent form of the
5967 original computation, but need not be in the original type.
5969 We set *STRICT_OVERFLOW_P to true if the return values depends on
5970 signed overflow being undefined. Otherwise we do not change
5971 *STRICT_OVERFLOW_P. */
5974 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5975 bool *strict_overflow_p)
5977 /* To avoid exponential search depth, refuse to allow recursion past
5978 three levels. Beyond that (1) it's highly unlikely that we'll find
5979 something interesting and (2) we've probably processed it before
5980 when we built the inner expression. */
5989 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5996 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5997 bool *strict_overflow_p)
5999 tree type = TREE_TYPE (t);
6000 enum tree_code tcode = TREE_CODE (t);
6001 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6002 > GET_MODE_SIZE (TYPE_MODE (type)))
6003 ? wide_type : type);
6005 int same_p = tcode == code;
6006 tree op0 = NULL_TREE, op1 = NULL_TREE;
6007 bool sub_strict_overflow_p;
6009 /* Don't deal with constants of zero here; they confuse the code below. */
6010 if (integer_zerop (c))
6013 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6014 op0 = TREE_OPERAND (t, 0);
6016 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6017 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6019 /* Note that we need not handle conditional operations here since fold
6020 already handles those cases. So just do arithmetic here. */
6024 /* For a constant, we can always simplify if we are a multiply
6025 or (for divide and modulus) if it is a multiple of our constant. */
6026 if (code == MULT_EXPR
6027 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6028 return const_binop (code, fold_convert (ctype, t),
6029 fold_convert (ctype, c));
6032 CASE_CONVERT: case NON_LVALUE_EXPR:
6033 /* If op0 is an expression ... */
6034 if ((COMPARISON_CLASS_P (op0)
6035 || UNARY_CLASS_P (op0)
6036 || BINARY_CLASS_P (op0)
6037 || VL_EXP_CLASS_P (op0)
6038 || EXPRESSION_CLASS_P (op0))
6039 /* ... and has wrapping overflow, and its type is smaller
6040 than ctype, then we cannot pass through as widening. */
6041 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6042 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6043 && (TYPE_PRECISION (ctype)
6044 > TYPE_PRECISION (TREE_TYPE (op0))))
6045 /* ... or this is a truncation (t is narrower than op0),
6046 then we cannot pass through this narrowing. */
6047 || (TYPE_PRECISION (type)
6048 < TYPE_PRECISION (TREE_TYPE (op0)))
6049 /* ... or signedness changes for division or modulus,
6050 then we cannot pass through this conversion. */
6051 || (code != MULT_EXPR
6052 && (TYPE_UNSIGNED (ctype)
6053 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6054 /* ... or has undefined overflow while the converted to
6055 type has not, we cannot do the operation in the inner type
6056 as that would introduce undefined overflow. */
6057 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6058 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6059 && !TYPE_OVERFLOW_UNDEFINED (type))))
6062 /* Pass the constant down and see if we can make a simplification. If
6063 we can, replace this expression with the inner simplification for
6064 possible later conversion to our or some other type. */
6065 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6066 && TREE_CODE (t2) == INTEGER_CST
6067 && !TREE_OVERFLOW (t2)
6068 && (0 != (t1 = extract_muldiv (op0, t2, code,
6070 ? ctype : NULL_TREE,
6071 strict_overflow_p))))
6076 /* If widening the type changes it from signed to unsigned, then we
6077 must avoid building ABS_EXPR itself as unsigned. */
6078 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6080 tree cstype = (*signed_type_for) (ctype);
6081 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6084 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6085 return fold_convert (ctype, t1);
6089 /* If the constant is negative, we cannot simplify this. */
6090 if (tree_int_cst_sgn (c) == -1)
6094 /* For division and modulus, type can't be unsigned, as e.g.
6095 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6096 For signed types, even with wrapping overflow, this is fine. */
6097 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6099 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6101 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6104 case MIN_EXPR: case MAX_EXPR:
6105 /* If widening the type changes the signedness, then we can't perform
6106 this optimization as that changes the result. */
6107 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6110 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6111 sub_strict_overflow_p = false;
6112 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6113 &sub_strict_overflow_p)) != 0
6114 && (t2 = extract_muldiv (op1, c, code, wide_type,
6115 &sub_strict_overflow_p)) != 0)
6117 if (tree_int_cst_sgn (c) < 0)
6118 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6119 if (sub_strict_overflow_p)
6120 *strict_overflow_p = true;
6121 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6122 fold_convert (ctype, t2));
6126 case LSHIFT_EXPR: case RSHIFT_EXPR:
6127 /* If the second operand is constant, this is a multiplication
6128 or floor division, by a power of two, so we can treat it that
6129 way unless the multiplier or divisor overflows. Signed
6130 left-shift overflow is implementation-defined rather than
6131 undefined in C90, so do not convert signed left shift into
6133 if (TREE_CODE (op1) == INTEGER_CST
6134 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6135 /* const_binop may not detect overflow correctly,
6136 so check for it explicitly here. */
6137 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6138 && 0 != (t1 = fold_convert (ctype,
6139 const_binop (LSHIFT_EXPR,
6142 && !TREE_OVERFLOW (t1))
6143 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6144 ? MULT_EXPR : FLOOR_DIV_EXPR,
6146 fold_convert (ctype, op0),
6148 c, code, wide_type, strict_overflow_p);
6151 case PLUS_EXPR: case MINUS_EXPR:
6152 /* See if we can eliminate the operation on both sides. If we can, we
6153 can return a new PLUS or MINUS. If we can't, the only remaining
6154 cases where we can do anything are if the second operand is a
6156 sub_strict_overflow_p = false;
6157 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6158 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6159 if (t1 != 0 && t2 != 0
6160 && (code == MULT_EXPR
6161 /* If not multiplication, we can only do this if both operands
6162 are divisible by c. */
6163 || (multiple_of_p (ctype, op0, c)
6164 && multiple_of_p (ctype, op1, c))))
6166 if (sub_strict_overflow_p)
6167 *strict_overflow_p = true;
6168 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6169 fold_convert (ctype, t2));
6172 /* If this was a subtraction, negate OP1 and set it to be an addition.
6173 This simplifies the logic below. */
6174 if (tcode == MINUS_EXPR)
6176 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6177 /* If OP1 was not easily negatable, the constant may be OP0. */
6178 if (TREE_CODE (op0) == INTEGER_CST)
6180 std::swap (op0, op1);
6185 if (TREE_CODE (op1) != INTEGER_CST)
6188 /* If either OP1 or C are negative, this optimization is not safe for
6189 some of the division and remainder types while for others we need
6190 to change the code. */
6191 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6193 if (code == CEIL_DIV_EXPR)
6194 code = FLOOR_DIV_EXPR;
6195 else if (code == FLOOR_DIV_EXPR)
6196 code = CEIL_DIV_EXPR;
6197 else if (code != MULT_EXPR
6198 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6202 /* If it's a multiply or a division/modulus operation of a multiple
6203 of our constant, do the operation and verify it doesn't overflow. */
6204 if (code == MULT_EXPR
6205 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6207 op1 = const_binop (code, fold_convert (ctype, op1),
6208 fold_convert (ctype, c));
6209 /* We allow the constant to overflow with wrapping semantics. */
6211 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6217 /* If we have an unsigned type, we cannot widen the operation since it
6218 will change the result if the original computation overflowed. */
6219 if (TYPE_UNSIGNED (ctype) && ctype != type)
6222 /* If we were able to eliminate our operation from the first side,
6223 apply our operation to the second side and reform the PLUS. */
6224 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6225 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6227 /* The last case is if we are a multiply. In that case, we can
6228 apply the distributive law to commute the multiply and addition
6229 if the multiplication of the constants doesn't overflow
6230 and overflow is defined. With undefined overflow
6231 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6232 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6233 return fold_build2 (tcode, ctype,
6234 fold_build2 (code, ctype,
6235 fold_convert (ctype, op0),
6236 fold_convert (ctype, c)),
6242 /* We have a special case here if we are doing something like
6243 (C * 8) % 4 since we know that's zero. */
6244 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6245 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6246 /* If the multiplication can overflow we cannot optimize this. */
6247 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6248 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6249 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6251 *strict_overflow_p = true;
6252 return omit_one_operand (type, integer_zero_node, op0);
6255 /* ... fall through ... */
6257 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6258 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6259 /* If we can extract our operation from the LHS, do so and return a
6260 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6261 do something only if the second operand is a constant. */
6263 && (t1 = extract_muldiv (op0, c, code, wide_type,
6264 strict_overflow_p)) != 0)
6265 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6266 fold_convert (ctype, op1));
6267 else if (tcode == MULT_EXPR && code == MULT_EXPR
6268 && (t1 = extract_muldiv (op1, c, code, wide_type,
6269 strict_overflow_p)) != 0)
6270 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6271 fold_convert (ctype, t1));
6272 else if (TREE_CODE (op1) != INTEGER_CST)
6275 /* If these are the same operation types, we can associate them
6276 assuming no overflow. */
6279 bool overflow_p = false;
6280 bool overflow_mul_p;
6281 signop sign = TYPE_SIGN (ctype);
6282 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6283 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6285 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6289 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6290 TYPE_SIGN (TREE_TYPE (op1)));
6291 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6292 wide_int_to_tree (ctype, mul));
6296 /* If these operations "cancel" each other, we have the main
6297 optimizations of this pass, which occur when either constant is a
6298 multiple of the other, in which case we replace this with either an
6299 operation or CODE or TCODE.
6301 If we have an unsigned type, we cannot do this since it will change
6302 the result if the original computation overflowed. */
6303 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6304 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6305 || (tcode == MULT_EXPR
6306 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6307 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6308 && code != MULT_EXPR)))
6310 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6312 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6313 *strict_overflow_p = true;
6314 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6315 fold_convert (ctype,
6316 const_binop (TRUNC_DIV_EXPR,
6319 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6321 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6322 *strict_overflow_p = true;
6323 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6324 fold_convert (ctype,
6325 const_binop (TRUNC_DIV_EXPR,
6338 /* Return a node which has the indicated constant VALUE (either 0 or
6339 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6340 and is of the indicated TYPE. */
6343 constant_boolean_node (bool value, tree type)
6345 if (type == integer_type_node)
6346 return value ? integer_one_node : integer_zero_node;
6347 else if (type == boolean_type_node)
6348 return value ? boolean_true_node : boolean_false_node;
6349 else if (TREE_CODE (type) == VECTOR_TYPE)
6350 return build_vector_from_val (type,
6351 build_int_cst (TREE_TYPE (type),
6354 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6358 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6359 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6360 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6361 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6362 COND is the first argument to CODE; otherwise (as in the example
6363 given here), it is the second argument. TYPE is the type of the
6364 original expression. Return NULL_TREE if no simplification is
6368 fold_binary_op_with_conditional_arg (location_t loc,
6369 enum tree_code code,
6370 tree type, tree op0, tree op1,
6371 tree cond, tree arg, int cond_first_p)
6373 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6374 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6375 tree test, true_value, false_value;
6376 tree lhs = NULL_TREE;
6377 tree rhs = NULL_TREE;
6378 enum tree_code cond_code = COND_EXPR;
6380 if (TREE_CODE (cond) == COND_EXPR
6381 || TREE_CODE (cond) == VEC_COND_EXPR)
6383 test = TREE_OPERAND (cond, 0);
6384 true_value = TREE_OPERAND (cond, 1);
6385 false_value = TREE_OPERAND (cond, 2);
6386 /* If this operand throws an expression, then it does not make
6387 sense to try to perform a logical or arithmetic operation
6389 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6391 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6396 tree testtype = TREE_TYPE (cond);
6398 true_value = constant_boolean_node (true, testtype);
6399 false_value = constant_boolean_node (false, testtype);
6402 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6403 cond_code = VEC_COND_EXPR;
6405 /* This transformation is only worthwhile if we don't have to wrap ARG
6406 in a SAVE_EXPR and the operation can be simplified without recursing
6407 on at least one of the branches once its pushed inside the COND_EXPR. */
6408 if (!TREE_CONSTANT (arg)
6409 && (TREE_SIDE_EFFECTS (arg)
6410 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6411 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6414 arg = fold_convert_loc (loc, arg_type, arg);
6417 true_value = fold_convert_loc (loc, cond_type, true_value);
6419 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6421 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6425 false_value = fold_convert_loc (loc, cond_type, false_value);
6427 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6429 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6432 /* Check that we have simplified at least one of the branches. */
6433 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6436 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6440 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6442 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6443 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6444 ADDEND is the same as X.
6446 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6447 and finite. The problematic cases are when X is zero, and its mode
6448 has signed zeros. In the case of rounding towards -infinity,
6449 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6450 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6453 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6455 if (!real_zerop (addend))
6458 /* Don't allow the fold with -fsignaling-nans. */
6459 if (HONOR_SNANS (element_mode (type)))
6462 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6463 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6466 /* In a vector or complex, we would need to check the sign of all zeros. */
6467 if (TREE_CODE (addend) != REAL_CST)
6470 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6471 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6474 /* The mode has signed zeros, and we have to honor their sign.
6475 In this situation, there is only one case we can return true for.
6476 X - 0 is the same as X unless rounding towards -infinity is
6478 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6481 /* Subroutine of fold() that optimizes comparisons of a division by
6482 a nonzero integer constant against an integer constant, i.e.
6485 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6486 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6487 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6489 The function returns the constant folded tree if a simplification
6490 can be made, and NULL_TREE otherwise. */
6493 fold_div_compare (location_t loc,
6494 enum tree_code code, tree type, tree arg0, tree arg1)
6496 tree prod, tmp, hi, lo;
6497 tree arg00 = TREE_OPERAND (arg0, 0);
6498 tree arg01 = TREE_OPERAND (arg0, 1);
6499 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6500 bool neg_overflow = false;
6503 /* We have to do this the hard way to detect unsigned overflow.
6504 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6505 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6506 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6507 neg_overflow = false;
6509 if (sign == UNSIGNED)
6511 tmp = int_const_binop (MINUS_EXPR, arg01,
6512 build_int_cst (TREE_TYPE (arg01), 1));
6515 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6516 val = wi::add (prod, tmp, sign, &overflow);
6517 hi = force_fit_type (TREE_TYPE (arg00), val,
6518 -1, overflow | TREE_OVERFLOW (prod));
6520 else if (tree_int_cst_sgn (arg01) >= 0)
6522 tmp = int_const_binop (MINUS_EXPR, arg01,
6523 build_int_cst (TREE_TYPE (arg01), 1));
6524 switch (tree_int_cst_sgn (arg1))
6527 neg_overflow = true;
6528 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6533 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6538 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6548 /* A negative divisor reverses the relational operators. */
6549 code = swap_tree_comparison (code);
6551 tmp = int_const_binop (PLUS_EXPR, arg01,
6552 build_int_cst (TREE_TYPE (arg01), 1));
6553 switch (tree_int_cst_sgn (arg1))
6556 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6561 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6566 neg_overflow = true;
6567 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6579 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6580 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6581 if (TREE_OVERFLOW (hi))
6582 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6583 if (TREE_OVERFLOW (lo))
6584 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6585 return build_range_check (loc, type, arg00, 1, lo, hi);
6588 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6589 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6590 if (TREE_OVERFLOW (hi))
6591 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6592 if (TREE_OVERFLOW (lo))
6593 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6594 return build_range_check (loc, type, arg00, 0, lo, hi);
6597 if (TREE_OVERFLOW (lo))
6599 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6600 return omit_one_operand_loc (loc, type, tmp, arg00);
6602 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6605 if (TREE_OVERFLOW (hi))
6607 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6608 return omit_one_operand_loc (loc, type, tmp, arg00);
6610 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6613 if (TREE_OVERFLOW (hi))
6615 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6616 return omit_one_operand_loc (loc, type, tmp, arg00);
6618 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6621 if (TREE_OVERFLOW (lo))
6623 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6624 return omit_one_operand_loc (loc, type, tmp, arg00);
6626 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6636 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6637 equality/inequality test, then return a simplified form of the test
6638 using a sign testing. Otherwise return NULL. TYPE is the desired
6642 fold_single_bit_test_into_sign_test (location_t loc,
6643 enum tree_code code, tree arg0, tree arg1,
6646 /* If this is testing a single bit, we can optimize the test. */
6647 if ((code == NE_EXPR || code == EQ_EXPR)
6648 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6649 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6651 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6652 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6653 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6655 if (arg00 != NULL_TREE
6656 /* This is only a win if casting to a signed type is cheap,
6657 i.e. when arg00's type is not a partial mode. */
6658 && TYPE_PRECISION (TREE_TYPE (arg00))
6659 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6661 tree stype = signed_type_for (TREE_TYPE (arg00));
6662 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6664 fold_convert_loc (loc, stype, arg00),
6665 build_int_cst (stype, 0));
6672 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6673 equality/inequality test, then return a simplified form of
6674 the test using shifts and logical operations. Otherwise return
6675 NULL. TYPE is the desired result type. */
6678 fold_single_bit_test (location_t loc, enum tree_code code,
6679 tree arg0, tree arg1, tree result_type)
6681 /* If this is testing a single bit, we can optimize the test. */
6682 if ((code == NE_EXPR || code == EQ_EXPR)
6683 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6684 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6686 tree inner = TREE_OPERAND (arg0, 0);
6687 tree type = TREE_TYPE (arg0);
6688 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6689 machine_mode operand_mode = TYPE_MODE (type);
6691 tree signed_type, unsigned_type, intermediate_type;
6694 /* First, see if we can fold the single bit test into a sign-bit
6696 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6701 /* Otherwise we have (A & C) != 0 where C is a single bit,
6702 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6703 Similarly for (A & C) == 0. */
6705 /* If INNER is a right shift of a constant and it plus BITNUM does
6706 not overflow, adjust BITNUM and INNER. */
6707 if (TREE_CODE (inner) == RSHIFT_EXPR
6708 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6709 && bitnum < TYPE_PRECISION (type)
6710 && wi::ltu_p (TREE_OPERAND (inner, 1),
6711 TYPE_PRECISION (type) - bitnum))
6713 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6714 inner = TREE_OPERAND (inner, 0);
6717 /* If we are going to be able to omit the AND below, we must do our
6718 operations as unsigned. If we must use the AND, we have a choice.
6719 Normally unsigned is faster, but for some machines signed is. */
6720 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6721 && !flag_syntax_only) ? 0 : 1;
6723 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6724 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6725 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6726 inner = fold_convert_loc (loc, intermediate_type, inner);
6729 inner = build2 (RSHIFT_EXPR, intermediate_type,
6730 inner, size_int (bitnum));
6732 one = build_int_cst (intermediate_type, 1);
6734 if (code == EQ_EXPR)
6735 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6737 /* Put the AND last so it can combine with more things. */
6738 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6740 /* Make sure to return the proper type. */
6741 inner = fold_convert_loc (loc, result_type, inner);
6748 /* Check whether we are allowed to reorder operands arg0 and arg1,
6749 such that the evaluation of arg1 occurs before arg0. */
6752 reorder_operands_p (const_tree arg0, const_tree arg1)
6754 if (! flag_evaluation_order)
6756 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6758 return ! TREE_SIDE_EFFECTS (arg0)
6759 && ! TREE_SIDE_EFFECTS (arg1);
6762 /* Test whether it is preferable two swap two operands, ARG0 and
6763 ARG1, for example because ARG0 is an integer constant and ARG1
6764 isn't. If REORDER is true, only recommend swapping if we can
6765 evaluate the operands in reverse order. */
6768 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6770 if (CONSTANT_CLASS_P (arg1))
6772 if (CONSTANT_CLASS_P (arg0))
6778 if (TREE_CONSTANT (arg1))
6780 if (TREE_CONSTANT (arg0))
6783 if (reorder && flag_evaluation_order
6784 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6787 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6788 for commutative and comparison operators. Ensuring a canonical
6789 form allows the optimizers to find additional redundancies without
6790 having to explicitly check for both orderings. */
6791 if (TREE_CODE (arg0) == SSA_NAME
6792 && TREE_CODE (arg1) == SSA_NAME
6793 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6796 /* Put SSA_NAMEs last. */
6797 if (TREE_CODE (arg1) == SSA_NAME)
6799 if (TREE_CODE (arg0) == SSA_NAME)
6802 /* Put variables last. */
6812 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6813 means A >= Y && A != MAX, but in this case we know that
6814 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6817 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6819 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6821 if (TREE_CODE (bound) == LT_EXPR)
6822 a = TREE_OPERAND (bound, 0);
6823 else if (TREE_CODE (bound) == GT_EXPR)
6824 a = TREE_OPERAND (bound, 1);
6828 typea = TREE_TYPE (a);
6829 if (!INTEGRAL_TYPE_P (typea)
6830 && !POINTER_TYPE_P (typea))
6833 if (TREE_CODE (ineq) == LT_EXPR)
6835 a1 = TREE_OPERAND (ineq, 1);
6836 y = TREE_OPERAND (ineq, 0);
6838 else if (TREE_CODE (ineq) == GT_EXPR)
6840 a1 = TREE_OPERAND (ineq, 0);
6841 y = TREE_OPERAND (ineq, 1);
6846 if (TREE_TYPE (a1) != typea)
6849 if (POINTER_TYPE_P (typea))
6851 /* Convert the pointer types into integer before taking the difference. */
6852 tree ta = fold_convert_loc (loc, ssizetype, a);
6853 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6854 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6857 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6859 if (!diff || !integer_onep (diff))
6862 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6865 /* Fold a sum or difference of at least one multiplication.
6866 Returns the folded tree or NULL if no simplification could be made. */
6869 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6870 tree arg0, tree arg1)
6872 tree arg00, arg01, arg10, arg11;
6873 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6875 /* (A * C) +- (B * C) -> (A+-B) * C.
6876 (A * C) +- A -> A * (C+-1).
6877 We are most concerned about the case where C is a constant,
6878 but other combinations show up during loop reduction. Since
6879 it is not difficult, try all four possibilities. */
6881 if (TREE_CODE (arg0) == MULT_EXPR)
6883 arg00 = TREE_OPERAND (arg0, 0);
6884 arg01 = TREE_OPERAND (arg0, 1);
6886 else if (TREE_CODE (arg0) == INTEGER_CST)
6888 arg00 = build_one_cst (type);
6893 /* We cannot generate constant 1 for fract. */
6894 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6897 arg01 = build_one_cst (type);
6899 if (TREE_CODE (arg1) == MULT_EXPR)
6901 arg10 = TREE_OPERAND (arg1, 0);
6902 arg11 = TREE_OPERAND (arg1, 1);
6904 else if (TREE_CODE (arg1) == INTEGER_CST)
6906 arg10 = build_one_cst (type);
6907 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6908 the purpose of this canonicalization. */
6909 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6910 && negate_expr_p (arg1)
6911 && code == PLUS_EXPR)
6913 arg11 = negate_expr (arg1);
6921 /* We cannot generate constant 1 for fract. */
6922 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6925 arg11 = build_one_cst (type);
6929 if (operand_equal_p (arg01, arg11, 0))
6930 same = arg01, alt0 = arg00, alt1 = arg10;
6931 else if (operand_equal_p (arg00, arg10, 0))
6932 same = arg00, alt0 = arg01, alt1 = arg11;
6933 else if (operand_equal_p (arg00, arg11, 0))
6934 same = arg00, alt0 = arg01, alt1 = arg10;
6935 else if (operand_equal_p (arg01, arg10, 0))
6936 same = arg01, alt0 = arg00, alt1 = arg11;
6938 /* No identical multiplicands; see if we can find a common
6939 power-of-two factor in non-power-of-two multiplies. This
6940 can help in multi-dimensional array access. */
6941 else if (tree_fits_shwi_p (arg01)
6942 && tree_fits_shwi_p (arg11))
6944 HOST_WIDE_INT int01, int11, tmp;
6947 int01 = tree_to_shwi (arg01);
6948 int11 = tree_to_shwi (arg11);
6950 /* Move min of absolute values to int11. */
6951 if (absu_hwi (int01) < absu_hwi (int11))
6953 tmp = int01, int01 = int11, int11 = tmp;
6954 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6961 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6962 /* The remainder should not be a constant, otherwise we
6963 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6964 increased the number of multiplications necessary. */
6965 && TREE_CODE (arg10) != INTEGER_CST)
6967 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6968 build_int_cst (TREE_TYPE (arg00),
6973 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6978 return fold_build2_loc (loc, MULT_EXPR, type,
6979 fold_build2_loc (loc, code, type,
6980 fold_convert_loc (loc, type, alt0),
6981 fold_convert_loc (loc, type, alt1)),
6982 fold_convert_loc (loc, type, same));
6987 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6988 specified by EXPR into the buffer PTR of length LEN bytes.
6989 Return the number of bytes placed in the buffer, or zero
6993 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6995 tree type = TREE_TYPE (expr);
6996 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6997 int byte, offset, word, words;
6998 unsigned char value;
7000 if ((off == -1 && total_bytes > len)
7001 || off >= total_bytes)
7005 words = total_bytes / UNITS_PER_WORD;
7007 for (byte = 0; byte < total_bytes; byte++)
7009 int bitpos = byte * BITS_PER_UNIT;
7010 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7012 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7014 if (total_bytes > UNITS_PER_WORD)
7016 word = byte / UNITS_PER_WORD;
7017 if (WORDS_BIG_ENDIAN)
7018 word = (words - 1) - word;
7019 offset = word * UNITS_PER_WORD;
7020 if (BYTES_BIG_ENDIAN)
7021 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7023 offset += byte % UNITS_PER_WORD;
7026 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7028 && offset - off < len)
7029 ptr[offset - off] = value;
7031 return MIN (len, total_bytes - off);
7035 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7036 specified by EXPR into the buffer PTR of length LEN bytes.
7037 Return the number of bytes placed in the buffer, or zero
7041 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7043 tree type = TREE_TYPE (expr);
7044 machine_mode mode = TYPE_MODE (type);
7045 int total_bytes = GET_MODE_SIZE (mode);
7046 FIXED_VALUE_TYPE value;
7047 tree i_value, i_type;
7049 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7052 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7054 if (NULL_TREE == i_type
7055 || TYPE_PRECISION (i_type) != total_bytes)
7058 value = TREE_FIXED_CST (expr);
7059 i_value = double_int_to_tree (i_type, value.data);
7061 return native_encode_int (i_value, ptr, len, off);
7065 /* Subroutine of native_encode_expr. Encode the REAL_CST
7066 specified by EXPR into the buffer PTR of length LEN bytes.
7067 Return the number of bytes placed in the buffer, or zero
7071 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7073 tree type = TREE_TYPE (expr);
7074 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7075 int byte, offset, word, words, bitpos;
7076 unsigned char value;
7078 /* There are always 32 bits in each long, no matter the size of
7079 the hosts long. We handle floating point representations with
7083 if ((off == -1 && total_bytes > len)
7084 || off >= total_bytes)
7088 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7090 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7092 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7093 bitpos += BITS_PER_UNIT)
7095 byte = (bitpos / BITS_PER_UNIT) & 3;
7096 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7098 if (UNITS_PER_WORD < 4)
7100 word = byte / UNITS_PER_WORD;
7101 if (WORDS_BIG_ENDIAN)
7102 word = (words - 1) - word;
7103 offset = word * UNITS_PER_WORD;
7104 if (BYTES_BIG_ENDIAN)
7105 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7107 offset += byte % UNITS_PER_WORD;
7110 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7111 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7113 && offset - off < len)
7114 ptr[offset - off] = value;
7116 return MIN (len, total_bytes - off);
7119 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7120 specified by EXPR into the buffer PTR of length LEN bytes.
7121 Return the number of bytes placed in the buffer, or zero
7125 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7130 part = TREE_REALPART (expr);
7131 rsize = native_encode_expr (part, ptr, len, off);
7135 part = TREE_IMAGPART (expr);
7137 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7138 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7142 return rsize + isize;
7146 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7147 specified by EXPR into the buffer PTR of length LEN bytes.
7148 Return the number of bytes placed in the buffer, or zero
7152 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7159 count = VECTOR_CST_NELTS (expr);
7160 itype = TREE_TYPE (TREE_TYPE (expr));
7161 size = GET_MODE_SIZE (TYPE_MODE (itype));
7162 for (i = 0; i < count; i++)
7169 elem = VECTOR_CST_ELT (expr, i);
7170 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7171 if ((off == -1 && res != size)
7184 /* Subroutine of native_encode_expr. Encode the STRING_CST
7185 specified by EXPR into the buffer PTR of length LEN bytes.
7186 Return the number of bytes placed in the buffer, or zero
7190 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7192 tree type = TREE_TYPE (expr);
7193 HOST_WIDE_INT total_bytes;
7195 if (TREE_CODE (type) != ARRAY_TYPE
7196 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7197 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7198 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7200 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7201 if ((off == -1 && total_bytes > len)
7202 || off >= total_bytes)
7206 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7209 if (off < TREE_STRING_LENGTH (expr))
7211 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7212 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7214 memset (ptr + written, 0,
7215 MIN (total_bytes - written, len - written));
7218 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7219 return MIN (total_bytes - off, len);
7223 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7224 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7225 buffer PTR of length LEN bytes. If OFF is not -1 then start
7226 the encoding at byte offset OFF and encode at most LEN bytes.
7227 Return the number of bytes placed in the buffer, or zero upon failure. */
7230 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7232 /* We don't support starting at negative offset and -1 is special. */
7236 switch (TREE_CODE (expr))
7239 return native_encode_int (expr, ptr, len, off);
7242 return native_encode_real (expr, ptr, len, off);
7245 return native_encode_fixed (expr, ptr, len, off);
7248 return native_encode_complex (expr, ptr, len, off);
7251 return native_encode_vector (expr, ptr, len, off);
7254 return native_encode_string (expr, ptr, len, off);
7262 /* Subroutine of native_interpret_expr. Interpret the contents of
7263 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7264 If the buffer cannot be interpreted, return NULL_TREE. */
7267 native_interpret_int (tree type, const unsigned char *ptr, int len)
7269 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7271 if (total_bytes > len
7272 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7275 wide_int result = wi::from_buffer (ptr, total_bytes);
7277 return wide_int_to_tree (type, result);
7281 /* Subroutine of native_interpret_expr. Interpret the contents of
7282 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7283 If the buffer cannot be interpreted, return NULL_TREE. */
7286 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7288 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7290 FIXED_VALUE_TYPE fixed_value;
7292 if (total_bytes > len
7293 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7296 result = double_int::from_buffer (ptr, total_bytes);
7297 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7299 return build_fixed (type, fixed_value);
7303 /* Subroutine of native_interpret_expr. Interpret the contents of
7304 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7305 If the buffer cannot be interpreted, return NULL_TREE. */
7308 native_interpret_real (tree type, const unsigned char *ptr, int len)
7310 machine_mode mode = TYPE_MODE (type);
7311 int total_bytes = GET_MODE_SIZE (mode);
7312 unsigned char value;
7313 /* There are always 32 bits in each long, no matter the size of
7314 the hosts long. We handle floating point representations with
7319 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7320 if (total_bytes > len || total_bytes > 24)
7322 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7324 memset (tmp, 0, sizeof (tmp));
7325 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7326 bitpos += BITS_PER_UNIT)
7328 /* Both OFFSET and BYTE index within a long;
7329 bitpos indexes the whole float. */
7330 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7331 if (UNITS_PER_WORD < 4)
7333 int word = byte / UNITS_PER_WORD;
7334 if (WORDS_BIG_ENDIAN)
7335 word = (words - 1) - word;
7336 offset = word * UNITS_PER_WORD;
7337 if (BYTES_BIG_ENDIAN)
7338 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7340 offset += byte % UNITS_PER_WORD;
7345 if (BYTES_BIG_ENDIAN)
7347 /* Reverse bytes within each long, or within the entire float
7348 if it's smaller than a long (for HFmode). */
7349 offset = MIN (3, total_bytes - 1) - offset;
7350 gcc_assert (offset >= 0);
7353 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7355 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7358 real_from_target (&r, tmp, mode);
7359 return build_real (type, r);
7363 /* Subroutine of native_interpret_expr. Interpret the contents of
7364 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7365 If the buffer cannot be interpreted, return NULL_TREE. */
7368 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7370 tree etype, rpart, ipart;
7373 etype = TREE_TYPE (type);
7374 size = GET_MODE_SIZE (TYPE_MODE (etype));
7377 rpart = native_interpret_expr (etype, ptr, size);
7380 ipart = native_interpret_expr (etype, ptr+size, size);
7383 return build_complex (type, rpart, ipart);
7387 /* Subroutine of native_interpret_expr. Interpret the contents of
7388 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7389 If the buffer cannot be interpreted, return NULL_TREE. */
7392 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7398 etype = TREE_TYPE (type);
7399 size = GET_MODE_SIZE (TYPE_MODE (etype));
7400 count = TYPE_VECTOR_SUBPARTS (type);
7401 if (size * count > len)
7404 elements = XALLOCAVEC (tree, count);
7405 for (i = count - 1; i >= 0; i--)
7407 elem = native_interpret_expr (etype, ptr+(i*size), size);
7412 return build_vector (type, elements);
7416 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7417 the buffer PTR of length LEN as a constant of type TYPE. For
7418 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7419 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7420 return NULL_TREE. */
7423 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7425 switch (TREE_CODE (type))
7431 case REFERENCE_TYPE:
7432 return native_interpret_int (type, ptr, len);
7435 return native_interpret_real (type, ptr, len);
7437 case FIXED_POINT_TYPE:
7438 return native_interpret_fixed (type, ptr, len);
7441 return native_interpret_complex (type, ptr, len);
7444 return native_interpret_vector (type, ptr, len);
7451 /* Returns true if we can interpret the contents of a native encoding
7455 can_native_interpret_type_p (tree type)
7457 switch (TREE_CODE (type))
7463 case REFERENCE_TYPE:
7464 case FIXED_POINT_TYPE:
7474 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7475 TYPE at compile-time. If we're unable to perform the conversion
7476 return NULL_TREE. */
7479 fold_view_convert_expr (tree type, tree expr)
7481 /* We support up to 512-bit values (for V8DFmode). */
7482 unsigned char buffer[64];
7485 /* Check that the host and target are sane. */
7486 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7489 len = native_encode_expr (expr, buffer, sizeof (buffer));
7493 return native_interpret_expr (type, buffer, len);
7496 /* Build an expression for the address of T. Folds away INDIRECT_REF
7497 to avoid confusing the gimplify process. */
7500 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7502 /* The size of the object is not relevant when talking about its address. */
7503 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7504 t = TREE_OPERAND (t, 0);
7506 if (TREE_CODE (t) == INDIRECT_REF)
7508 t = TREE_OPERAND (t, 0);
7510 if (TREE_TYPE (t) != ptrtype)
7511 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7513 else if (TREE_CODE (t) == MEM_REF
7514 && integer_zerop (TREE_OPERAND (t, 1)))
7515 return TREE_OPERAND (t, 0);
7516 else if (TREE_CODE (t) == MEM_REF
7517 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7518 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7519 TREE_OPERAND (t, 0),
7520 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7521 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7523 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7525 if (TREE_TYPE (t) != ptrtype)
7526 t = fold_convert_loc (loc, ptrtype, t);
7529 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7534 /* Build an expression for the address of T. */
7537 build_fold_addr_expr_loc (location_t loc, tree t)
7539 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7541 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7544 /* Fold a unary expression of code CODE and type TYPE with operand
7545 OP0. Return the folded expression if folding is successful.
7546 Otherwise, return NULL_TREE. */
7549 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7553 enum tree_code_class kind = TREE_CODE_CLASS (code);
7555 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7556 && TREE_CODE_LENGTH (code) == 1);
7561 if (CONVERT_EXPR_CODE_P (code)
7562 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7564 /* Don't use STRIP_NOPS, because signedness of argument type
7566 STRIP_SIGN_NOPS (arg0);
7570 /* Strip any conversions that don't change the mode. This
7571 is safe for every expression, except for a comparison
7572 expression because its signedness is derived from its
7575 Note that this is done as an internal manipulation within
7576 the constant folder, in order to find the simplest
7577 representation of the arguments so that their form can be
7578 studied. In any cases, the appropriate type conversions
7579 should be put back in the tree that will get out of the
7584 if (CONSTANT_CLASS_P (arg0))
7586 tree tem = const_unop (code, type, arg0);
7589 if (TREE_TYPE (tem) != type)
7590 tem = fold_convert_loc (loc, type, tem);
7596 tem = generic_simplify (loc, code, type, op0);
7600 if (TREE_CODE_CLASS (code) == tcc_unary)
7602 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7603 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7604 fold_build1_loc (loc, code, type,
7605 fold_convert_loc (loc, TREE_TYPE (op0),
7606 TREE_OPERAND (arg0, 1))));
7607 else if (TREE_CODE (arg0) == COND_EXPR)
7609 tree arg01 = TREE_OPERAND (arg0, 1);
7610 tree arg02 = TREE_OPERAND (arg0, 2);
7611 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7612 arg01 = fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc,
7614 TREE_TYPE (op0), arg01));
7615 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7616 arg02 = fold_build1_loc (loc, code, type,
7617 fold_convert_loc (loc,
7618 TREE_TYPE (op0), arg02));
7619 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7622 /* If this was a conversion, and all we did was to move into
7623 inside the COND_EXPR, bring it back out. But leave it if
7624 it is a conversion from integer to integer and the
7625 result precision is no wider than a word since such a
7626 conversion is cheap and may be optimized away by combine,
7627 while it couldn't if it were outside the COND_EXPR. Then return
7628 so we don't get into an infinite recursion loop taking the
7629 conversion out and then back in. */
7631 if ((CONVERT_EXPR_CODE_P (code)
7632 || code == NON_LVALUE_EXPR)
7633 && TREE_CODE (tem) == COND_EXPR
7634 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7635 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7636 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7637 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7638 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7639 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7640 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7642 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7643 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7644 || flag_syntax_only))
7645 tem = build1_loc (loc, code, type,
7647 TREE_TYPE (TREE_OPERAND
7648 (TREE_OPERAND (tem, 1), 0)),
7649 TREE_OPERAND (tem, 0),
7650 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7651 TREE_OPERAND (TREE_OPERAND (tem, 2),
7659 case NON_LVALUE_EXPR:
7660 if (!maybe_lvalue_p (op0))
7661 return fold_convert_loc (loc, type, op0);
7666 case FIX_TRUNC_EXPR:
7667 if (COMPARISON_CLASS_P (op0))
7669 /* If we have (type) (a CMP b) and type is an integral type, return
7670 new expression involving the new type. Canonicalize
7671 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7673 Do not fold the result as that would not simplify further, also
7674 folding again results in recursions. */
7675 if (TREE_CODE (type) == BOOLEAN_TYPE)
7676 return build2_loc (loc, TREE_CODE (op0), type,
7677 TREE_OPERAND (op0, 0),
7678 TREE_OPERAND (op0, 1));
7679 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7680 && TREE_CODE (type) != VECTOR_TYPE)
7681 return build3_loc (loc, COND_EXPR, type, op0,
7682 constant_boolean_node (true, type),
7683 constant_boolean_node (false, type));
7686 /* Handle (T *)&A.B.C for A being of type T and B and C
7687 living at offset zero. This occurs frequently in
7688 C++ upcasting and then accessing the base. */
7689 if (TREE_CODE (op0) == ADDR_EXPR
7690 && POINTER_TYPE_P (type)
7691 && handled_component_p (TREE_OPERAND (op0, 0)))
7693 HOST_WIDE_INT bitsize, bitpos;
7696 int unsignedp, volatilep;
7697 tree base = TREE_OPERAND (op0, 0);
7698 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7699 &mode, &unsignedp, &volatilep, false);
7700 /* If the reference was to a (constant) zero offset, we can use
7701 the address of the base if it has the same base type
7702 as the result type and the pointer type is unqualified. */
7703 if (! offset && bitpos == 0
7704 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7705 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7706 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7707 return fold_convert_loc (loc, type,
7708 build_fold_addr_expr_loc (loc, base));
7711 if (TREE_CODE (op0) == MODIFY_EXPR
7712 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7713 /* Detect assigning a bitfield. */
7714 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7716 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7718 /* Don't leave an assignment inside a conversion
7719 unless assigning a bitfield. */
7720 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7721 /* First do the assignment, then return converted constant. */
7722 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7723 TREE_NO_WARNING (tem) = 1;
7724 TREE_USED (tem) = 1;
7728 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7729 constants (if x has signed type, the sign bit cannot be set
7730 in c). This folds extension into the BIT_AND_EXPR.
7731 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7732 very likely don't have maximal range for their precision and this
7733 transformation effectively doesn't preserve non-maximal ranges. */
7734 if (TREE_CODE (type) == INTEGER_TYPE
7735 && TREE_CODE (op0) == BIT_AND_EXPR
7736 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7738 tree and_expr = op0;
7739 tree and0 = TREE_OPERAND (and_expr, 0);
7740 tree and1 = TREE_OPERAND (and_expr, 1);
7743 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7744 || (TYPE_PRECISION (type)
7745 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7747 else if (TYPE_PRECISION (TREE_TYPE (and1))
7748 <= HOST_BITS_PER_WIDE_INT
7749 && tree_fits_uhwi_p (and1))
7751 unsigned HOST_WIDE_INT cst;
7753 cst = tree_to_uhwi (and1);
7754 cst &= HOST_WIDE_INT_M1U
7755 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7756 change = (cst == 0);
7758 && !flag_syntax_only
7759 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7762 tree uns = unsigned_type_for (TREE_TYPE (and0));
7763 and0 = fold_convert_loc (loc, uns, and0);
7764 and1 = fold_convert_loc (loc, uns, and1);
7769 tem = force_fit_type (type, wi::to_widest (and1), 0,
7770 TREE_OVERFLOW (and1));
7771 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7772 fold_convert_loc (loc, type, and0), tem);
7776 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7777 when one of the new casts will fold away. Conservatively we assume
7778 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7779 if (POINTER_TYPE_P (type)
7780 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7781 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7782 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7783 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7784 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7786 tree arg00 = TREE_OPERAND (arg0, 0);
7787 tree arg01 = TREE_OPERAND (arg0, 1);
7789 return fold_build_pointer_plus_loc
7790 (loc, fold_convert_loc (loc, type, arg00), arg01);
7793 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7794 of the same precision, and X is an integer type not narrower than
7795 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7796 if (INTEGRAL_TYPE_P (type)
7797 && TREE_CODE (op0) == BIT_NOT_EXPR
7798 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7799 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7800 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7802 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7803 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7804 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7805 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7806 fold_convert_loc (loc, type, tem));
7809 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7810 type of X and Y (integer types only). */
7811 if (INTEGRAL_TYPE_P (type)
7812 && TREE_CODE (op0) == MULT_EXPR
7813 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7814 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7816 /* Be careful not to introduce new overflows. */
7818 if (TYPE_OVERFLOW_WRAPS (type))
7821 mult_type = unsigned_type_for (type);
7823 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7825 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7826 fold_convert_loc (loc, mult_type,
7827 TREE_OPERAND (op0, 0)),
7828 fold_convert_loc (loc, mult_type,
7829 TREE_OPERAND (op0, 1)));
7830 return fold_convert_loc (loc, type, tem);
7836 case VIEW_CONVERT_EXPR:
7837 if (TREE_CODE (op0) == MEM_REF)
7838 return fold_build2_loc (loc, MEM_REF, type,
7839 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7844 tem = fold_negate_expr (loc, arg0);
7846 return fold_convert_loc (loc, type, tem);
7850 /* Convert fabs((double)float) into (double)fabsf(float). */
7851 if (TREE_CODE (arg0) == NOP_EXPR
7852 && TREE_CODE (type) == REAL_TYPE)
7854 tree targ0 = strip_float_extensions (arg0);
7856 return fold_convert_loc (loc, type,
7857 fold_build1_loc (loc, ABS_EXPR,
7864 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7865 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7866 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7867 fold_convert_loc (loc, type,
7868 TREE_OPERAND (arg0, 0)))))
7869 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7870 fold_convert_loc (loc, type,
7871 TREE_OPERAND (arg0, 1)));
7872 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7873 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7874 fold_convert_loc (loc, type,
7875 TREE_OPERAND (arg0, 1)))))
7876 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7877 fold_convert_loc (loc, type,
7878 TREE_OPERAND (arg0, 0)), tem);
7882 case TRUTH_NOT_EXPR:
7883 /* Note that the operand of this must be an int
7884 and its values must be 0 or 1.
7885 ("true" is a fixed value perhaps depending on the language,
7886 but we don't handle values other than 1 correctly yet.) */
7887 tem = fold_truth_not_expr (loc, arg0);
7890 return fold_convert_loc (loc, type, tem);
7893 /* Fold *&X to X if X is an lvalue. */
7894 if (TREE_CODE (op0) == ADDR_EXPR)
7896 tree op00 = TREE_OPERAND (op0, 0);
7897 if ((TREE_CODE (op00) == VAR_DECL
7898 || TREE_CODE (op00) == PARM_DECL
7899 || TREE_CODE (op00) == RESULT_DECL)
7900 && !TREE_READONLY (op00))
7907 } /* switch (code) */
7911 /* If the operation was a conversion do _not_ mark a resulting constant
7912 with TREE_OVERFLOW if the original constant was not. These conversions
7913 have implementation defined behavior and retaining the TREE_OVERFLOW
7914 flag here would confuse later passes such as VRP. */
7916 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7917 tree type, tree op0)
7919 tree res = fold_unary_loc (loc, code, type, op0);
7921 && TREE_CODE (res) == INTEGER_CST
7922 && TREE_CODE (op0) == INTEGER_CST
7923 && CONVERT_EXPR_CODE_P (code))
7924 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7929 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7930 operands OP0 and OP1. LOC is the location of the resulting expression.
7931 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7932 Return the folded expression if folding is successful. Otherwise,
7933 return NULL_TREE. */
7935 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7936 tree arg0, tree arg1, tree op0, tree op1)
7940 /* We only do these simplifications if we are optimizing. */
7944 /* Check for things like (A || B) && (A || C). We can convert this
7945 to A || (B && C). Note that either operator can be any of the four
7946 truth and/or operations and the transformation will still be
7947 valid. Also note that we only care about order for the
7948 ANDIF and ORIF operators. If B contains side effects, this
7949 might change the truth-value of A. */
7950 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7951 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7952 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7953 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7954 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7955 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7957 tree a00 = TREE_OPERAND (arg0, 0);
7958 tree a01 = TREE_OPERAND (arg0, 1);
7959 tree a10 = TREE_OPERAND (arg1, 0);
7960 tree a11 = TREE_OPERAND (arg1, 1);
7961 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7962 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7963 && (code == TRUTH_AND_EXPR
7964 || code == TRUTH_OR_EXPR));
7966 if (operand_equal_p (a00, a10, 0))
7967 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7968 fold_build2_loc (loc, code, type, a01, a11));
7969 else if (commutative && operand_equal_p (a00, a11, 0))
7970 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7971 fold_build2_loc (loc, code, type, a01, a10));
7972 else if (commutative && operand_equal_p (a01, a10, 0))
7973 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7974 fold_build2_loc (loc, code, type, a00, a11));
7976 /* This case if tricky because we must either have commutative
7977 operators or else A10 must not have side-effects. */
7979 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7980 && operand_equal_p (a01, a11, 0))
7981 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7982 fold_build2_loc (loc, code, type, a00, a10),
7986 /* See if we can build a range comparison. */
7987 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7990 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7991 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7993 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7995 return fold_build2_loc (loc, code, type, tem, arg1);
7998 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7999 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8001 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8003 return fold_build2_loc (loc, code, type, arg0, tem);
8006 /* Check for the possibility of merging component references. If our
8007 lhs is another similar operation, try to merge its rhs with our
8008 rhs. Then try to merge our lhs and rhs. */
8009 if (TREE_CODE (arg0) == code
8010 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8011 TREE_OPERAND (arg0, 1), arg1)))
8012 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8014 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8017 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8018 && (code == TRUTH_AND_EXPR
8019 || code == TRUTH_ANDIF_EXPR
8020 || code == TRUTH_OR_EXPR
8021 || code == TRUTH_ORIF_EXPR))
8023 enum tree_code ncode, icode;
8025 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8026 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8027 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8029 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8030 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8031 We don't want to pack more than two leafs to a non-IF AND/OR
8033 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8034 equal to IF-CODE, then we don't want to add right-hand operand.
8035 If the inner right-hand side of left-hand operand has
8036 side-effects, or isn't simple, then we can't add to it,
8037 as otherwise we might destroy if-sequence. */
8038 if (TREE_CODE (arg0) == icode
8039 && simple_operand_p_2 (arg1)
8040 /* Needed for sequence points to handle trappings, and
8042 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8044 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8046 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8049 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8050 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8051 else if (TREE_CODE (arg1) == icode
8052 && simple_operand_p_2 (arg0)
8053 /* Needed for sequence points to handle trappings, and
8055 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8057 tem = fold_build2_loc (loc, ncode, type,
8058 arg0, TREE_OPERAND (arg1, 0));
8059 return fold_build2_loc (loc, icode, type, tem,
8060 TREE_OPERAND (arg1, 1));
8062 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8064 For sequence point consistancy, we need to check for trapping,
8065 and side-effects. */
8066 else if (code == icode && simple_operand_p_2 (arg0)
8067 && simple_operand_p_2 (arg1))
8068 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8074 /* Fold a binary expression of code CODE and type TYPE with operands
8075 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8076 Return the folded expression if folding is successful. Otherwise,
8077 return NULL_TREE. */
8080 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8082 enum tree_code compl_code;
8084 if (code == MIN_EXPR)
8085 compl_code = MAX_EXPR;
8086 else if (code == MAX_EXPR)
8087 compl_code = MIN_EXPR;
8091 /* MIN (MAX (a, b), b) == b. */
8092 if (TREE_CODE (op0) == compl_code
8093 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8094 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8096 /* MIN (MAX (b, a), b) == b. */
8097 if (TREE_CODE (op0) == compl_code
8098 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8099 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8100 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8102 /* MIN (a, MAX (a, b)) == a. */
8103 if (TREE_CODE (op1) == compl_code
8104 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8105 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8106 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8108 /* MIN (a, MAX (b, a)) == a. */
8109 if (TREE_CODE (op1) == compl_code
8110 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8111 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8112 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8117 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8118 by changing CODE to reduce the magnitude of constants involved in
8119 ARG0 of the comparison.
8120 Returns a canonicalized comparison tree if a simplification was
8121 possible, otherwise returns NULL_TREE.
8122 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8123 valid if signed overflow is undefined. */
8126 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8127 tree arg0, tree arg1,
8128 bool *strict_overflow_p)
8130 enum tree_code code0 = TREE_CODE (arg0);
8131 tree t, cst0 = NULL_TREE;
8134 /* Match A +- CST code arg1. We can change this only if overflow
8136 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8137 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8138 /* In principle pointers also have undefined overflow behavior,
8139 but that causes problems elsewhere. */
8140 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8141 && (code0 == MINUS_EXPR
8142 || code0 == PLUS_EXPR)
8143 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8146 /* Identify the constant in arg0 and its sign. */
8147 cst0 = TREE_OPERAND (arg0, 1);
8148 sgn0 = tree_int_cst_sgn (cst0);
8150 /* Overflowed constants and zero will cause problems. */
8151 if (integer_zerop (cst0)
8152 || TREE_OVERFLOW (cst0))
8155 /* See if we can reduce the magnitude of the constant in
8156 arg0 by changing the comparison code. */
8157 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8159 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8161 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8162 else if (code == GT_EXPR
8163 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8165 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8166 else if (code == LE_EXPR
8167 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8169 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8170 else if (code == GE_EXPR
8171 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8175 *strict_overflow_p = true;
8177 /* Now build the constant reduced in magnitude. But not if that
8178 would produce one outside of its types range. */
8179 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8181 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8182 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8184 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8185 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8188 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8189 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8190 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8191 t = fold_convert (TREE_TYPE (arg1), t);
8193 return fold_build2_loc (loc, code, type, t, arg1);
8196 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8197 overflow further. Try to decrease the magnitude of constants involved
8198 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8199 and put sole constants at the second argument position.
8200 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8203 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8204 tree arg0, tree arg1)
8207 bool strict_overflow_p;
8208 const char * const warnmsg = G_("assuming signed overflow does not occur "
8209 "when reducing constant in comparison");
8211 /* Try canonicalization by simplifying arg0. */
8212 strict_overflow_p = false;
8213 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8214 &strict_overflow_p);
8217 if (strict_overflow_p)
8218 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8222 /* Try canonicalization by simplifying arg1 using the swapped
8224 code = swap_tree_comparison (code);
8225 strict_overflow_p = false;
8226 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8227 &strict_overflow_p);
8228 if (t && strict_overflow_p)
8229 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8233 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8234 space. This is used to avoid issuing overflow warnings for
8235 expressions like &p->x which can not wrap. */
8238 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8240 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8247 int precision = TYPE_PRECISION (TREE_TYPE (base));
8248 if (offset == NULL_TREE)
8249 wi_offset = wi::zero (precision);
8250 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8256 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8257 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8261 if (!wi::fits_uhwi_p (total))
8264 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8268 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8270 if (TREE_CODE (base) == ADDR_EXPR)
8272 HOST_WIDE_INT base_size;
8274 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8275 if (base_size > 0 && size < base_size)
8279 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8282 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8283 kind INTEGER_CST. This makes sure to properly sign-extend the
8286 static HOST_WIDE_INT
8287 size_low_cst (const_tree t)
8289 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8290 int prec = TYPE_PRECISION (TREE_TYPE (t));
8291 if (prec < HOST_BITS_PER_WIDE_INT)
8292 return sext_hwi (w, prec);
8296 /* Subroutine of fold_binary. This routine performs all of the
8297 transformations that are common to the equality/inequality
8298 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8299 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8300 fold_binary should call fold_binary. Fold a comparison with
8301 tree code CODE and type TYPE with operands OP0 and OP1. Return
8302 the folded comparison or NULL_TREE. */
8305 fold_comparison (location_t loc, enum tree_code code, tree type,
8308 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8309 tree arg0, arg1, tem;
8314 STRIP_SIGN_NOPS (arg0);
8315 STRIP_SIGN_NOPS (arg1);
8317 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8318 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8320 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8321 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8323 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8324 && TREE_CODE (arg1) == INTEGER_CST
8325 && !TREE_OVERFLOW (arg1))
8327 const enum tree_code
8328 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8329 tree const1 = TREE_OPERAND (arg0, 1);
8330 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8331 tree variable = TREE_OPERAND (arg0, 0);
8332 tree new_const = int_const_binop (reverse_op, const2, const1);
8334 /* If the constant operation overflowed this can be
8335 simplified as a comparison against INT_MAX/INT_MIN. */
8336 if (TREE_OVERFLOW (new_const)
8337 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8339 int const1_sgn = tree_int_cst_sgn (const1);
8340 enum tree_code code2 = code;
8342 /* Get the sign of the constant on the lhs if the
8343 operation were VARIABLE + CONST1. */
8344 if (TREE_CODE (arg0) == MINUS_EXPR)
8345 const1_sgn = -const1_sgn;
8347 /* The sign of the constant determines if we overflowed
8348 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8349 Canonicalize to the INT_MIN overflow by swapping the comparison
8351 if (const1_sgn == -1)
8352 code2 = swap_tree_comparison (code);
8354 /* We now can look at the canonicalized case
8355 VARIABLE + 1 CODE2 INT_MIN
8356 and decide on the result. */
8363 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8369 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8378 fold_overflow_warning ("assuming signed overflow does not occur "
8379 "when changing X +- C1 cmp C2 to "
8381 WARN_STRICT_OVERFLOW_COMPARISON);
8382 return fold_build2_loc (loc, code, type, variable, new_const);
8386 /* For comparisons of pointers we can decompose it to a compile time
8387 comparison of the base objects and the offsets into the object.
8388 This requires at least one operand being an ADDR_EXPR or a
8389 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8390 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8391 && (TREE_CODE (arg0) == ADDR_EXPR
8392 || TREE_CODE (arg1) == ADDR_EXPR
8393 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8394 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8396 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8397 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8399 int volatilep, unsignedp;
8400 bool indirect_base0 = false, indirect_base1 = false;
8402 /* Get base and offset for the access. Strip ADDR_EXPR for
8403 get_inner_reference, but put it back by stripping INDIRECT_REF
8404 off the base object if possible. indirect_baseN will be true
8405 if baseN is not an address but refers to the object itself. */
8407 if (TREE_CODE (arg0) == ADDR_EXPR)
8409 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8410 &bitsize, &bitpos0, &offset0, &mode,
8411 &unsignedp, &volatilep, false);
8412 if (TREE_CODE (base0) == INDIRECT_REF)
8413 base0 = TREE_OPERAND (base0, 0);
8415 indirect_base0 = true;
8417 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8419 base0 = TREE_OPERAND (arg0, 0);
8420 STRIP_SIGN_NOPS (base0);
8421 if (TREE_CODE (base0) == ADDR_EXPR)
8423 base0 = TREE_OPERAND (base0, 0);
8424 indirect_base0 = true;
8426 offset0 = TREE_OPERAND (arg0, 1);
8427 if (tree_fits_shwi_p (offset0))
8429 HOST_WIDE_INT off = size_low_cst (offset0);
8430 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8432 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8434 bitpos0 = off * BITS_PER_UNIT;
8435 offset0 = NULL_TREE;
8441 if (TREE_CODE (arg1) == ADDR_EXPR)
8443 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8444 &bitsize, &bitpos1, &offset1, &mode,
8445 &unsignedp, &volatilep, false);
8446 if (TREE_CODE (base1) == INDIRECT_REF)
8447 base1 = TREE_OPERAND (base1, 0);
8449 indirect_base1 = true;
8451 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8453 base1 = TREE_OPERAND (arg1, 0);
8454 STRIP_SIGN_NOPS (base1);
8455 if (TREE_CODE (base1) == ADDR_EXPR)
8457 base1 = TREE_OPERAND (base1, 0);
8458 indirect_base1 = true;
8460 offset1 = TREE_OPERAND (arg1, 1);
8461 if (tree_fits_shwi_p (offset1))
8463 HOST_WIDE_INT off = size_low_cst (offset1);
8464 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8466 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8468 bitpos1 = off * BITS_PER_UNIT;
8469 offset1 = NULL_TREE;
8474 /* If we have equivalent bases we might be able to simplify. */
8475 if (indirect_base0 == indirect_base1
8476 && operand_equal_p (base0, base1,
8477 indirect_base0 ? OEP_ADDRESS_OF : 0))
8479 /* We can fold this expression to a constant if the non-constant
8480 offset parts are equal. */
8481 if ((offset0 == offset1
8482 || (offset0 && offset1
8483 && operand_equal_p (offset0, offset1, 0)))
8486 || (indirect_base0 && DECL_P (base0))
8487 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8491 && bitpos0 != bitpos1
8492 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8493 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8494 fold_overflow_warning (("assuming pointer wraparound does not "
8495 "occur when comparing P +- C1 with "
8497 WARN_STRICT_OVERFLOW_CONDITIONAL);
8502 return constant_boolean_node (bitpos0 == bitpos1, type);
8504 return constant_boolean_node (bitpos0 != bitpos1, type);
8506 return constant_boolean_node (bitpos0 < bitpos1, type);
8508 return constant_boolean_node (bitpos0 <= bitpos1, type);
8510 return constant_boolean_node (bitpos0 >= bitpos1, type);
8512 return constant_boolean_node (bitpos0 > bitpos1, type);
8516 /* We can simplify the comparison to a comparison of the variable
8517 offset parts if the constant offset parts are equal.
8518 Be careful to use signed sizetype here because otherwise we
8519 mess with array offsets in the wrong way. This is possible
8520 because pointer arithmetic is restricted to retain within an
8521 object and overflow on pointer differences is undefined as of
8522 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8523 else if (bitpos0 == bitpos1
8525 || (indirect_base0 && DECL_P (base0))
8526 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8528 /* By converting to signed sizetype we cover middle-end pointer
8529 arithmetic which operates on unsigned pointer types of size
8530 type size and ARRAY_REF offsets which are properly sign or
8531 zero extended from their type in case it is narrower than
8533 if (offset0 == NULL_TREE)
8534 offset0 = build_int_cst (ssizetype, 0);
8536 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8537 if (offset1 == NULL_TREE)
8538 offset1 = build_int_cst (ssizetype, 0);
8540 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8543 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8544 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8545 fold_overflow_warning (("assuming pointer wraparound does not "
8546 "occur when comparing P +- C1 with "
8548 WARN_STRICT_OVERFLOW_COMPARISON);
8550 return fold_build2_loc (loc, code, type, offset0, offset1);
8553 /* For equal offsets we can simplify to a comparison of the
8555 else if (bitpos0 == bitpos1
8557 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8559 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8560 && ((offset0 == offset1)
8561 || (offset0 && offset1
8562 && operand_equal_p (offset0, offset1, 0))))
8565 base0 = build_fold_addr_expr_loc (loc, base0);
8567 base1 = build_fold_addr_expr_loc (loc, base1);
8568 return fold_build2_loc (loc, code, type, base0, base1);
8572 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8573 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8574 the resulting offset is smaller in absolute value than the
8575 original one and has the same sign. */
8576 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8577 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8578 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8579 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8580 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8581 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8582 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8583 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8585 tree const1 = TREE_OPERAND (arg0, 1);
8586 tree const2 = TREE_OPERAND (arg1, 1);
8587 tree variable1 = TREE_OPERAND (arg0, 0);
8588 tree variable2 = TREE_OPERAND (arg1, 0);
8590 const char * const warnmsg = G_("assuming signed overflow does not "
8591 "occur when combining constants around "
8594 /* Put the constant on the side where it doesn't overflow and is
8595 of lower absolute value and of same sign than before. */
8596 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8597 ? MINUS_EXPR : PLUS_EXPR,
8599 if (!TREE_OVERFLOW (cst)
8600 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8601 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8603 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8604 return fold_build2_loc (loc, code, type,
8606 fold_build2_loc (loc, TREE_CODE (arg1),
8611 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8612 ? MINUS_EXPR : PLUS_EXPR,
8614 if (!TREE_OVERFLOW (cst)
8615 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8616 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8618 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8619 return fold_build2_loc (loc, code, type,
8620 fold_build2_loc (loc, TREE_CODE (arg0),
8627 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8631 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8632 constant, we can simplify it. */
8633 if (TREE_CODE (arg1) == INTEGER_CST
8634 && (TREE_CODE (arg0) == MIN_EXPR
8635 || TREE_CODE (arg0) == MAX_EXPR)
8636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8638 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8643 /* If we are comparing an expression that just has comparisons
8644 of two integer values, arithmetic expressions of those comparisons,
8645 and constants, we can simplify it. There are only three cases
8646 to check: the two values can either be equal, the first can be
8647 greater, or the second can be greater. Fold the expression for
8648 those three values. Since each value must be 0 or 1, we have
8649 eight possibilities, each of which corresponds to the constant 0
8650 or 1 or one of the six possible comparisons.
8652 This handles common cases like (a > b) == 0 but also handles
8653 expressions like ((x > y) - (y > x)) > 0, which supposedly
8654 occur in macroized code. */
8656 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8658 tree cval1 = 0, cval2 = 0;
8661 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8662 /* Don't handle degenerate cases here; they should already
8663 have been handled anyway. */
8664 && cval1 != 0 && cval2 != 0
8665 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8666 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8667 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8668 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8669 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8670 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8671 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8673 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8674 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8676 /* We can't just pass T to eval_subst in case cval1 or cval2
8677 was the same as ARG1. */
8680 = fold_build2_loc (loc, code, type,
8681 eval_subst (loc, arg0, cval1, maxval,
8685 = fold_build2_loc (loc, code, type,
8686 eval_subst (loc, arg0, cval1, maxval,
8690 = fold_build2_loc (loc, code, type,
8691 eval_subst (loc, arg0, cval1, minval,
8695 /* All three of these results should be 0 or 1. Confirm they are.
8696 Then use those values to select the proper code to use. */
8698 if (TREE_CODE (high_result) == INTEGER_CST
8699 && TREE_CODE (equal_result) == INTEGER_CST
8700 && TREE_CODE (low_result) == INTEGER_CST)
8702 /* Make a 3-bit mask with the high-order bit being the
8703 value for `>', the next for '=', and the low for '<'. */
8704 switch ((integer_onep (high_result) * 4)
8705 + (integer_onep (equal_result) * 2)
8706 + integer_onep (low_result))
8710 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8731 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8736 tem = save_expr (build2 (code, type, cval1, cval2));
8737 SET_EXPR_LOCATION (tem, loc);
8740 return fold_build2_loc (loc, code, type, cval1, cval2);
8745 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8746 into a single range test. */
8747 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8748 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8749 && TREE_CODE (arg1) == INTEGER_CST
8750 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8751 && !integer_zerop (TREE_OPERAND (arg0, 1))
8752 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8753 && !TREE_OVERFLOW (arg1))
8755 tem = fold_div_compare (loc, code, type, arg0, arg1);
8756 if (tem != NULL_TREE)
8764 /* Subroutine of fold_binary. Optimize complex multiplications of the
8765 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8766 argument EXPR represents the expression "z" of type TYPE. */
8769 fold_mult_zconjz (location_t loc, tree type, tree expr)
8771 tree itype = TREE_TYPE (type);
8772 tree rpart, ipart, tem;
8774 if (TREE_CODE (expr) == COMPLEX_EXPR)
8776 rpart = TREE_OPERAND (expr, 0);
8777 ipart = TREE_OPERAND (expr, 1);
8779 else if (TREE_CODE (expr) == COMPLEX_CST)
8781 rpart = TREE_REALPART (expr);
8782 ipart = TREE_IMAGPART (expr);
8786 expr = save_expr (expr);
8787 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8788 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8791 rpart = save_expr (rpart);
8792 ipart = save_expr (ipart);
8793 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8794 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8795 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8796 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8797 build_zero_cst (itype));
8801 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8802 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8805 vec_cst_ctor_to_array (tree arg, tree *elts)
8807 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8809 if (TREE_CODE (arg) == VECTOR_CST)
8811 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8812 elts[i] = VECTOR_CST_ELT (arg, i);
8814 else if (TREE_CODE (arg) == CONSTRUCTOR)
8816 constructor_elt *elt;
8818 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8819 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8822 elts[i] = elt->value;
8826 for (; i < nelts; i++)
8828 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8832 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8833 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8834 NULL_TREE otherwise. */
8837 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8839 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8841 bool need_ctor = false;
8843 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8844 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8845 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8846 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8849 elts = XALLOCAVEC (tree, nelts * 3);
8850 if (!vec_cst_ctor_to_array (arg0, elts)
8851 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8854 for (i = 0; i < nelts; i++)
8856 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8858 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8863 vec<constructor_elt, va_gc> *v;
8864 vec_alloc (v, nelts);
8865 for (i = 0; i < nelts; i++)
8866 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8867 return build_constructor (type, v);
8870 return build_vector (type, &elts[2 * nelts]);
8873 /* Try to fold a pointer difference of type TYPE two address expressions of
8874 array references AREF0 and AREF1 using location LOC. Return a
8875 simplified expression for the difference or NULL_TREE. */
8878 fold_addr_of_array_ref_difference (location_t loc, tree type,
8879 tree aref0, tree aref1)
8881 tree base0 = TREE_OPERAND (aref0, 0);
8882 tree base1 = TREE_OPERAND (aref1, 0);
8883 tree base_offset = build_int_cst (type, 0);
8885 /* If the bases are array references as well, recurse. If the bases
8886 are pointer indirections compute the difference of the pointers.
8887 If the bases are equal, we are set. */
8888 if ((TREE_CODE (base0) == ARRAY_REF
8889 && TREE_CODE (base1) == ARRAY_REF
8891 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8892 || (INDIRECT_REF_P (base0)
8893 && INDIRECT_REF_P (base1)
8895 = fold_binary_loc (loc, MINUS_EXPR, type,
8896 fold_convert (type, TREE_OPERAND (base0, 0)),
8898 TREE_OPERAND (base1, 0)))))
8899 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8901 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8902 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8903 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8904 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8905 return fold_build2_loc (loc, PLUS_EXPR, type,
8907 fold_build2_loc (loc, MULT_EXPR, type,
8913 /* If the real or vector real constant CST of type TYPE has an exact
8914 inverse, return it, else return NULL. */
8917 exact_inverse (tree type, tree cst)
8920 tree unit_type, *elts;
8922 unsigned vec_nelts, i;
8924 switch (TREE_CODE (cst))
8927 r = TREE_REAL_CST (cst);
8929 if (exact_real_inverse (TYPE_MODE (type), &r))
8930 return build_real (type, r);
8935 vec_nelts = VECTOR_CST_NELTS (cst);
8936 elts = XALLOCAVEC (tree, vec_nelts);
8937 unit_type = TREE_TYPE (type);
8938 mode = TYPE_MODE (unit_type);
8940 for (i = 0; i < vec_nelts; i++)
8942 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8943 if (!exact_real_inverse (mode, &r))
8945 elts[i] = build_real (unit_type, r);
8948 return build_vector (type, elts);
8955 /* Mask out the tz least significant bits of X of type TYPE where
8956 tz is the number of trailing zeroes in Y. */
8958 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8960 int tz = wi::ctz (y);
8962 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8966 /* Return true when T is an address and is known to be nonzero.
8967 For floating point we further ensure that T is not denormal.
8968 Similar logic is present in nonzero_address in rtlanal.h.
8970 If the return value is based on the assumption that signed overflow
8971 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8972 change *STRICT_OVERFLOW_P. */
8975 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8977 tree type = TREE_TYPE (t);
8978 enum tree_code code;
8980 /* Doing something useful for floating point would need more work. */
8981 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8984 code = TREE_CODE (t);
8985 switch (TREE_CODE_CLASS (code))
8988 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8991 case tcc_comparison:
8992 return tree_binary_nonzero_warnv_p (code, type,
8993 TREE_OPERAND (t, 0),
8994 TREE_OPERAND (t, 1),
8997 case tcc_declaration:
8999 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9007 case TRUTH_NOT_EXPR:
9008 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9011 case TRUTH_AND_EXPR:
9013 case TRUTH_XOR_EXPR:
9014 return tree_binary_nonzero_warnv_p (code, type,
9015 TREE_OPERAND (t, 0),
9016 TREE_OPERAND (t, 1),
9024 case WITH_SIZE_EXPR:
9026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9031 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9035 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9040 tree fndecl = get_callee_fndecl (t);
9041 if (!fndecl) return false;
9042 if (flag_delete_null_pointer_checks && !flag_check_new
9043 && DECL_IS_OPERATOR_NEW (fndecl)
9044 && !TREE_NOTHROW (fndecl))
9046 if (flag_delete_null_pointer_checks
9047 && lookup_attribute ("returns_nonnull",
9048 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9050 return alloca_call_p (t);
9059 /* Return true when T is an address and is known to be nonzero.
9060 Handle warnings about undefined signed overflow. */
9063 tree_expr_nonzero_p (tree t)
9065 bool ret, strict_overflow_p;
9067 strict_overflow_p = false;
9068 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9069 if (strict_overflow_p)
9070 fold_overflow_warning (("assuming signed overflow does not occur when "
9071 "determining that expression is always "
9073 WARN_STRICT_OVERFLOW_MISC);
9077 /* Fold a binary expression of code CODE and type TYPE with operands
9078 OP0 and OP1. LOC is the location of the resulting expression.
9079 Return the folded expression if folding is successful. Otherwise,
9080 return NULL_TREE. */
9083 fold_binary_loc (location_t loc,
9084 enum tree_code code, tree type, tree op0, tree op1)
9086 enum tree_code_class kind = TREE_CODE_CLASS (code);
9087 tree arg0, arg1, tem;
9088 tree t1 = NULL_TREE;
9089 bool strict_overflow_p;
9092 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9093 && TREE_CODE_LENGTH (code) == 2
9095 && op1 != NULL_TREE);
9100 /* Strip any conversions that don't change the mode. This is
9101 safe for every expression, except for a comparison expression
9102 because its signedness is derived from its operands. So, in
9103 the latter case, only strip conversions that don't change the
9104 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9107 Note that this is done as an internal manipulation within the
9108 constant folder, in order to find the simplest representation
9109 of the arguments so that their form can be studied. In any
9110 cases, the appropriate type conversions should be put back in
9111 the tree that will get out of the constant folder. */
9113 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9115 STRIP_SIGN_NOPS (arg0);
9116 STRIP_SIGN_NOPS (arg1);
9124 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9125 constant but we can't do arithmetic on them. */
9126 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9128 tem = const_binop (code, type, arg0, arg1);
9129 if (tem != NULL_TREE)
9131 if (TREE_TYPE (tem) != type)
9132 tem = fold_convert_loc (loc, type, tem);
9137 /* If this is a commutative operation, and ARG0 is a constant, move it
9138 to ARG1 to reduce the number of tests below. */
9139 if (commutative_tree_code (code)
9140 && tree_swap_operands_p (arg0, arg1, true))
9141 return fold_build2_loc (loc, code, type, op1, op0);
9143 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9144 to ARG1 to reduce the number of tests below. */
9145 if (kind == tcc_comparison
9146 && tree_swap_operands_p (arg0, arg1, true))
9147 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9149 tem = generic_simplify (loc, code, type, op0, op1);
9153 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9155 First check for cases where an arithmetic operation is applied to a
9156 compound, conditional, or comparison operation. Push the arithmetic
9157 operation inside the compound or conditional to see if any folding
9158 can then be done. Convert comparison to conditional for this purpose.
9159 The also optimizes non-constant cases that used to be done in
9162 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9163 one of the operands is a comparison and the other is a comparison, a
9164 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9165 code below would make the expression more complex. Change it to a
9166 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9167 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9169 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9170 || code == EQ_EXPR || code == NE_EXPR)
9171 && TREE_CODE (type) != VECTOR_TYPE
9172 && ((truth_value_p (TREE_CODE (arg0))
9173 && (truth_value_p (TREE_CODE (arg1))
9174 || (TREE_CODE (arg1) == BIT_AND_EXPR
9175 && integer_onep (TREE_OPERAND (arg1, 1)))))
9176 || (truth_value_p (TREE_CODE (arg1))
9177 && (truth_value_p (TREE_CODE (arg0))
9178 || (TREE_CODE (arg0) == BIT_AND_EXPR
9179 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9181 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9182 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9185 fold_convert_loc (loc, boolean_type_node, arg0),
9186 fold_convert_loc (loc, boolean_type_node, arg1));
9188 if (code == EQ_EXPR)
9189 tem = invert_truthvalue_loc (loc, tem);
9191 return fold_convert_loc (loc, type, tem);
9194 if (TREE_CODE_CLASS (code) == tcc_binary
9195 || TREE_CODE_CLASS (code) == tcc_comparison)
9197 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9199 tem = fold_build2_loc (loc, code, type,
9200 fold_convert_loc (loc, TREE_TYPE (op0),
9201 TREE_OPERAND (arg0, 1)), op1);
9202 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9205 if (TREE_CODE (arg1) == COMPOUND_EXPR
9206 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9208 tem = fold_build2_loc (loc, code, type, op0,
9209 fold_convert_loc (loc, TREE_TYPE (op1),
9210 TREE_OPERAND (arg1, 1)));
9211 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9215 if (TREE_CODE (arg0) == COND_EXPR
9216 || TREE_CODE (arg0) == VEC_COND_EXPR
9217 || COMPARISON_CLASS_P (arg0))
9219 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9221 /*cond_first_p=*/1);
9222 if (tem != NULL_TREE)
9226 if (TREE_CODE (arg1) == COND_EXPR
9227 || TREE_CODE (arg1) == VEC_COND_EXPR
9228 || COMPARISON_CLASS_P (arg1))
9230 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9232 /*cond_first_p=*/0);
9233 if (tem != NULL_TREE)
9241 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9242 if (TREE_CODE (arg0) == ADDR_EXPR
9243 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9245 tree iref = TREE_OPERAND (arg0, 0);
9246 return fold_build2 (MEM_REF, type,
9247 TREE_OPERAND (iref, 0),
9248 int_const_binop (PLUS_EXPR, arg1,
9249 TREE_OPERAND (iref, 1)));
9252 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9253 if (TREE_CODE (arg0) == ADDR_EXPR
9254 && handled_component_p (TREE_OPERAND (arg0, 0)))
9257 HOST_WIDE_INT coffset;
9258 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9262 return fold_build2 (MEM_REF, type,
9263 build_fold_addr_expr (base),
9264 int_const_binop (PLUS_EXPR, arg1,
9265 size_int (coffset)));
9270 case POINTER_PLUS_EXPR:
9271 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9272 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9273 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9274 return fold_convert_loc (loc, type,
9275 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9276 fold_convert_loc (loc, sizetype,
9278 fold_convert_loc (loc, sizetype,
9284 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9286 /* X + (X / CST) * -CST is X % CST. */
9287 if (TREE_CODE (arg1) == MULT_EXPR
9288 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9289 && operand_equal_p (arg0,
9290 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9292 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9293 tree cst1 = TREE_OPERAND (arg1, 1);
9294 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9296 if (sum && integer_zerop (sum))
9297 return fold_convert_loc (loc, type,
9298 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9299 TREE_TYPE (arg0), arg0,
9304 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9305 one. Make sure the type is not saturating and has the signedness of
9306 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9307 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9308 if ((TREE_CODE (arg0) == MULT_EXPR
9309 || TREE_CODE (arg1) == MULT_EXPR)
9310 && !TYPE_SATURATING (type)
9311 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9312 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9313 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9315 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9320 if (! FLOAT_TYPE_P (type))
9322 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9323 (plus (plus (mult) (mult)) (foo)) so that we can
9324 take advantage of the factoring cases below. */
9325 if (ANY_INTEGRAL_TYPE_P (type)
9326 && TYPE_OVERFLOW_WRAPS (type)
9327 && (((TREE_CODE (arg0) == PLUS_EXPR
9328 || TREE_CODE (arg0) == MINUS_EXPR)
9329 && TREE_CODE (arg1) == MULT_EXPR)
9330 || ((TREE_CODE (arg1) == PLUS_EXPR
9331 || TREE_CODE (arg1) == MINUS_EXPR)
9332 && TREE_CODE (arg0) == MULT_EXPR)))
9334 tree parg0, parg1, parg, marg;
9335 enum tree_code pcode;
9337 if (TREE_CODE (arg1) == MULT_EXPR)
9338 parg = arg0, marg = arg1;
9340 parg = arg1, marg = arg0;
9341 pcode = TREE_CODE (parg);
9342 parg0 = TREE_OPERAND (parg, 0);
9343 parg1 = TREE_OPERAND (parg, 1);
9347 if (TREE_CODE (parg0) == MULT_EXPR
9348 && TREE_CODE (parg1) != MULT_EXPR)
9349 return fold_build2_loc (loc, pcode, type,
9350 fold_build2_loc (loc, PLUS_EXPR, type,
9351 fold_convert_loc (loc, type,
9353 fold_convert_loc (loc, type,
9355 fold_convert_loc (loc, type, parg1));
9356 if (TREE_CODE (parg0) != MULT_EXPR
9357 && TREE_CODE (parg1) == MULT_EXPR)
9359 fold_build2_loc (loc, PLUS_EXPR, type,
9360 fold_convert_loc (loc, type, parg0),
9361 fold_build2_loc (loc, pcode, type,
9362 fold_convert_loc (loc, type, marg),
9363 fold_convert_loc (loc, type,
9369 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9370 to __complex__ ( x, y ). This is not the same for SNaNs or
9371 if signed zeros are involved. */
9372 if (!HONOR_SNANS (element_mode (arg0))
9373 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9374 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9376 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9377 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9378 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9379 bool arg0rz = false, arg0iz = false;
9380 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9381 || (arg0i && (arg0iz = real_zerop (arg0i))))
9383 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9384 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9385 if (arg0rz && arg1i && real_zerop (arg1i))
9387 tree rp = arg1r ? arg1r
9388 : build1 (REALPART_EXPR, rtype, arg1);
9389 tree ip = arg0i ? arg0i
9390 : build1 (IMAGPART_EXPR, rtype, arg0);
9391 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9393 else if (arg0iz && arg1r && real_zerop (arg1r))
9395 tree rp = arg0r ? arg0r
9396 : build1 (REALPART_EXPR, rtype, arg0);
9397 tree ip = arg1i ? arg1i
9398 : build1 (IMAGPART_EXPR, rtype, arg1);
9399 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9404 if (flag_unsafe_math_optimizations
9405 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9406 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9407 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9410 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9411 We associate floats only if the user has specified
9412 -fassociative-math. */
9413 if (flag_associative_math
9414 && TREE_CODE (arg1) == PLUS_EXPR
9415 && TREE_CODE (arg0) != MULT_EXPR)
9417 tree tree10 = TREE_OPERAND (arg1, 0);
9418 tree tree11 = TREE_OPERAND (arg1, 1);
9419 if (TREE_CODE (tree11) == MULT_EXPR
9420 && TREE_CODE (tree10) == MULT_EXPR)
9423 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9424 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9427 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9428 We associate floats only if the user has specified
9429 -fassociative-math. */
9430 if (flag_associative_math
9431 && TREE_CODE (arg0) == PLUS_EXPR
9432 && TREE_CODE (arg1) != MULT_EXPR)
9434 tree tree00 = TREE_OPERAND (arg0, 0);
9435 tree tree01 = TREE_OPERAND (arg0, 1);
9436 if (TREE_CODE (tree01) == MULT_EXPR
9437 && TREE_CODE (tree00) == MULT_EXPR)
9440 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9441 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9447 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9448 is a rotate of A by C1 bits. */
9449 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9450 is a rotate of A by B bits. */
9452 enum tree_code code0, code1;
9454 code0 = TREE_CODE (arg0);
9455 code1 = TREE_CODE (arg1);
9456 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9457 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9458 && operand_equal_p (TREE_OPERAND (arg0, 0),
9459 TREE_OPERAND (arg1, 0), 0)
9460 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9461 TYPE_UNSIGNED (rtype))
9462 /* Only create rotates in complete modes. Other cases are not
9463 expanded properly. */
9464 && (element_precision (rtype)
9465 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9467 tree tree01, tree11;
9468 enum tree_code code01, code11;
9470 tree01 = TREE_OPERAND (arg0, 1);
9471 tree11 = TREE_OPERAND (arg1, 1);
9472 STRIP_NOPS (tree01);
9473 STRIP_NOPS (tree11);
9474 code01 = TREE_CODE (tree01);
9475 code11 = TREE_CODE (tree11);
9476 if (code01 == INTEGER_CST
9477 && code11 == INTEGER_CST
9478 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9479 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9481 tem = build2_loc (loc, LROTATE_EXPR,
9482 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9483 TREE_OPERAND (arg0, 0),
9484 code0 == LSHIFT_EXPR
9485 ? TREE_OPERAND (arg0, 1)
9486 : TREE_OPERAND (arg1, 1));
9487 return fold_convert_loc (loc, type, tem);
9489 else if (code11 == MINUS_EXPR)
9491 tree tree110, tree111;
9492 tree110 = TREE_OPERAND (tree11, 0);
9493 tree111 = TREE_OPERAND (tree11, 1);
9494 STRIP_NOPS (tree110);
9495 STRIP_NOPS (tree111);
9496 if (TREE_CODE (tree110) == INTEGER_CST
9497 && 0 == compare_tree_int (tree110,
9499 (TREE_TYPE (TREE_OPERAND
9501 && operand_equal_p (tree01, tree111, 0))
9503 fold_convert_loc (loc, type,
9504 build2 ((code0 == LSHIFT_EXPR
9507 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9508 TREE_OPERAND (arg0, 0),
9509 TREE_OPERAND (arg0, 1)));
9511 else if (code01 == MINUS_EXPR)
9513 tree tree010, tree011;
9514 tree010 = TREE_OPERAND (tree01, 0);
9515 tree011 = TREE_OPERAND (tree01, 1);
9516 STRIP_NOPS (tree010);
9517 STRIP_NOPS (tree011);
9518 if (TREE_CODE (tree010) == INTEGER_CST
9519 && 0 == compare_tree_int (tree010,
9521 (TREE_TYPE (TREE_OPERAND
9523 && operand_equal_p (tree11, tree011, 0))
9524 return fold_convert_loc
9526 build2 ((code0 != LSHIFT_EXPR
9529 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9530 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9536 /* In most languages, can't associate operations on floats through
9537 parentheses. Rather than remember where the parentheses were, we
9538 don't associate floats at all, unless the user has specified
9540 And, we need to make sure type is not saturating. */
9542 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9543 && !TYPE_SATURATING (type))
9545 tree var0, con0, lit0, minus_lit0;
9546 tree var1, con1, lit1, minus_lit1;
9550 /* Split both trees into variables, constants, and literals. Then
9551 associate each group together, the constants with literals,
9552 then the result with variables. This increases the chances of
9553 literals being recombined later and of generating relocatable
9554 expressions for the sum of a constant and literal. */
9555 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9556 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9557 code == MINUS_EXPR);
9559 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9560 if (code == MINUS_EXPR)
9563 /* With undefined overflow prefer doing association in a type
9564 which wraps on overflow, if that is one of the operand types. */
9565 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9566 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9568 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9569 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9570 atype = TREE_TYPE (arg0);
9571 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9572 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9573 atype = TREE_TYPE (arg1);
9574 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9577 /* With undefined overflow we can only associate constants with one
9578 variable, and constants whose association doesn't overflow. */
9579 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9580 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9586 bool one_neg = false;
9588 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9590 tmp0 = TREE_OPERAND (tmp0, 0);
9593 if (CONVERT_EXPR_P (tmp0)
9594 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9595 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9596 <= TYPE_PRECISION (atype)))
9597 tmp0 = TREE_OPERAND (tmp0, 0);
9598 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9600 tmp1 = TREE_OPERAND (tmp1, 0);
9603 if (CONVERT_EXPR_P (tmp1)
9604 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9605 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9606 <= TYPE_PRECISION (atype)))
9607 tmp1 = TREE_OPERAND (tmp1, 0);
9608 /* The only case we can still associate with two variables
9609 is if they cancel out. */
9611 || !operand_equal_p (tmp0, tmp1, 0))
9616 /* Only do something if we found more than two objects. Otherwise,
9617 nothing has changed and we risk infinite recursion. */
9619 && (2 < ((var0 != 0) + (var1 != 0)
9620 + (con0 != 0) + (con1 != 0)
9621 + (lit0 != 0) + (lit1 != 0)
9622 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9624 bool any_overflows = false;
9625 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9626 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9627 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9628 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9629 var0 = associate_trees (loc, var0, var1, code, atype);
9630 con0 = associate_trees (loc, con0, con1, code, atype);
9631 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9632 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9635 /* Preserve the MINUS_EXPR if the negative part of the literal is
9636 greater than the positive part. Otherwise, the multiplicative
9637 folding code (i.e extract_muldiv) may be fooled in case
9638 unsigned constants are subtracted, like in the following
9639 example: ((X*2 + 4) - 8U)/2. */
9640 if (minus_lit0 && lit0)
9642 if (TREE_CODE (lit0) == INTEGER_CST
9643 && TREE_CODE (minus_lit0) == INTEGER_CST
9644 && tree_int_cst_lt (lit0, minus_lit0))
9646 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9652 lit0 = associate_trees (loc, lit0, minus_lit0,
9658 /* Don't introduce overflows through reassociation. */
9660 && ((lit0 && TREE_OVERFLOW_P (lit0))
9661 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9668 fold_convert_loc (loc, type,
9669 associate_trees (loc, var0, minus_lit0,
9670 MINUS_EXPR, atype));
9673 con0 = associate_trees (loc, con0, minus_lit0,
9676 fold_convert_loc (loc, type,
9677 associate_trees (loc, var0, con0,
9682 con0 = associate_trees (loc, con0, lit0, code, atype);
9684 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9692 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9693 if (TREE_CODE (arg0) == NEGATE_EXPR
9694 && negate_expr_p (arg1)
9695 && reorder_operands_p (arg0, arg1))
9696 return fold_build2_loc (loc, MINUS_EXPR, type,
9697 fold_convert_loc (loc, type,
9698 negate_expr (arg1)),
9699 fold_convert_loc (loc, type,
9700 TREE_OPERAND (arg0, 0)));
9702 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9703 __complex__ ( x, -y ). This is not the same for SNaNs or if
9704 signed zeros are involved. */
9705 if (!HONOR_SNANS (element_mode (arg0))
9706 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9707 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9709 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9710 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9711 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9712 bool arg0rz = false, arg0iz = false;
9713 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9714 || (arg0i && (arg0iz = real_zerop (arg0i))))
9716 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9717 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9718 if (arg0rz && arg1i && real_zerop (arg1i))
9720 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9722 : build1 (REALPART_EXPR, rtype, arg1));
9723 tree ip = arg0i ? arg0i
9724 : build1 (IMAGPART_EXPR, rtype, arg0);
9725 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9727 else if (arg0iz && arg1r && real_zerop (arg1r))
9729 tree rp = arg0r ? arg0r
9730 : build1 (REALPART_EXPR, rtype, arg0);
9731 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9733 : build1 (IMAGPART_EXPR, rtype, arg1));
9734 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9739 /* A - B -> A + (-B) if B is easily negatable. */
9740 if (negate_expr_p (arg1)
9741 && !TYPE_OVERFLOW_SANITIZED (type)
9742 && ((FLOAT_TYPE_P (type)
9743 /* Avoid this transformation if B is a positive REAL_CST. */
9744 && (TREE_CODE (arg1) != REAL_CST
9745 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9746 || INTEGRAL_TYPE_P (type)))
9747 return fold_build2_loc (loc, PLUS_EXPR, type,
9748 fold_convert_loc (loc, type, arg0),
9749 fold_convert_loc (loc, type,
9750 negate_expr (arg1)));
9752 /* Fold &a[i] - &a[j] to i-j. */
9753 if (TREE_CODE (arg0) == ADDR_EXPR
9754 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9755 && TREE_CODE (arg1) == ADDR_EXPR
9756 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9758 tree tem = fold_addr_of_array_ref_difference (loc, type,
9759 TREE_OPERAND (arg0, 0),
9760 TREE_OPERAND (arg1, 0));
9765 if (FLOAT_TYPE_P (type)
9766 && flag_unsafe_math_optimizations
9767 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9768 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9769 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9772 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9773 one. Make sure the type is not saturating and has the signedness of
9774 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9775 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9776 if ((TREE_CODE (arg0) == MULT_EXPR
9777 || TREE_CODE (arg1) == MULT_EXPR)
9778 && !TYPE_SATURATING (type)
9779 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9780 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9781 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9783 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9791 if (! FLOAT_TYPE_P (type))
9793 /* Transform x * -C into -x * C if x is easily negatable. */
9794 if (TREE_CODE (arg1) == INTEGER_CST
9795 && tree_int_cst_sgn (arg1) == -1
9796 && negate_expr_p (arg0)
9797 && (tem = negate_expr (arg1)) != arg1
9798 && !TREE_OVERFLOW (tem))
9799 return fold_build2_loc (loc, MULT_EXPR, type,
9800 fold_convert_loc (loc, type,
9801 negate_expr (arg0)),
9804 /* (A + A) * C -> A * 2 * C */
9805 if (TREE_CODE (arg0) == PLUS_EXPR
9806 && TREE_CODE (arg1) == INTEGER_CST
9807 && operand_equal_p (TREE_OPERAND (arg0, 0),
9808 TREE_OPERAND (arg0, 1), 0))
9809 return fold_build2_loc (loc, MULT_EXPR, type,
9810 omit_one_operand_loc (loc, type,
9811 TREE_OPERAND (arg0, 0),
9812 TREE_OPERAND (arg0, 1)),
9813 fold_build2_loc (loc, MULT_EXPR, type,
9814 build_int_cst (type, 2) , arg1));
9816 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9817 sign-changing only. */
9818 if (TREE_CODE (arg1) == INTEGER_CST
9819 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9820 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9821 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9823 strict_overflow_p = false;
9824 if (TREE_CODE (arg1) == INTEGER_CST
9825 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9826 &strict_overflow_p)))
9828 if (strict_overflow_p)
9829 fold_overflow_warning (("assuming signed overflow does not "
9830 "occur when simplifying "
9832 WARN_STRICT_OVERFLOW_MISC);
9833 return fold_convert_loc (loc, type, tem);
9836 /* Optimize z * conj(z) for integer complex numbers. */
9837 if (TREE_CODE (arg0) == CONJ_EXPR
9838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9839 return fold_mult_zconjz (loc, type, arg1);
9840 if (TREE_CODE (arg1) == CONJ_EXPR
9841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9842 return fold_mult_zconjz (loc, type, arg0);
9846 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9847 This is not the same for NaNs or if signed zeros are
9849 if (!HONOR_NANS (arg0)
9850 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9851 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9852 && TREE_CODE (arg1) == COMPLEX_CST
9853 && real_zerop (TREE_REALPART (arg1)))
9855 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9856 if (real_onep (TREE_IMAGPART (arg1)))
9858 fold_build2_loc (loc, COMPLEX_EXPR, type,
9859 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9861 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9862 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9864 fold_build2_loc (loc, COMPLEX_EXPR, type,
9865 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9866 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9870 /* Optimize z * conj(z) for floating point complex numbers.
9871 Guarded by flag_unsafe_math_optimizations as non-finite
9872 imaginary components don't produce scalar results. */
9873 if (flag_unsafe_math_optimizations
9874 && TREE_CODE (arg0) == CONJ_EXPR
9875 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9876 return fold_mult_zconjz (loc, type, arg1);
9877 if (flag_unsafe_math_optimizations
9878 && TREE_CODE (arg1) == CONJ_EXPR
9879 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9880 return fold_mult_zconjz (loc, type, arg0);
9882 if (flag_unsafe_math_optimizations)
9885 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9888 && operand_equal_p (arg0, arg1, 0))
9890 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9894 tree arg = build_real (type, dconst2);
9895 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9903 /* Canonicalize (X & C1) | C2. */
9904 if (TREE_CODE (arg0) == BIT_AND_EXPR
9905 && TREE_CODE (arg1) == INTEGER_CST
9906 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9908 int width = TYPE_PRECISION (type), w;
9909 wide_int c1 = TREE_OPERAND (arg0, 1);
9912 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9913 if ((c1 & c2) == c1)
9914 return omit_one_operand_loc (loc, type, arg1,
9915 TREE_OPERAND (arg0, 0));
9917 wide_int msk = wi::mask (width, false,
9918 TYPE_PRECISION (TREE_TYPE (arg1)));
9920 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9921 if (msk.and_not (c1 | c2) == 0)
9922 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9923 TREE_OPERAND (arg0, 0), arg1);
9925 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9926 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9927 mode which allows further optimizations. */
9930 wide_int c3 = c1.and_not (c2);
9931 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9933 wide_int mask = wi::mask (w, false,
9934 TYPE_PRECISION (type));
9935 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9943 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9944 fold_build2_loc (loc, BIT_AND_EXPR, type,
9945 TREE_OPERAND (arg0, 0),
9946 wide_int_to_tree (type,
9951 /* See if this can be simplified into a rotate first. If that
9952 is unsuccessful continue in the association code. */
9956 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9957 if (TREE_CODE (arg0) == BIT_AND_EXPR
9958 && INTEGRAL_TYPE_P (type)
9959 && integer_onep (TREE_OPERAND (arg0, 1))
9960 && integer_onep (arg1))
9961 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9962 build_zero_cst (TREE_TYPE (arg0)));
9964 /* See if this can be simplified into a rotate first. If that
9965 is unsuccessful continue in the association code. */
9969 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9970 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9971 && INTEGRAL_TYPE_P (type)
9972 && integer_onep (TREE_OPERAND (arg0, 1))
9973 && integer_onep (arg1))
9976 tem = TREE_OPERAND (arg0, 0);
9977 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9978 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9980 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9981 build_zero_cst (TREE_TYPE (tem)));
9983 /* Fold ~X & 1 as (X & 1) == 0. */
9984 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9985 && INTEGRAL_TYPE_P (type)
9986 && integer_onep (arg1))
9989 tem = TREE_OPERAND (arg0, 0);
9990 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9991 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9993 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9994 build_zero_cst (TREE_TYPE (tem)));
9996 /* Fold !X & 1 as X == 0. */
9997 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9998 && integer_onep (arg1))
10000 tem = TREE_OPERAND (arg0, 0);
10001 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10002 build_zero_cst (TREE_TYPE (tem)));
10005 /* Fold (X ^ Y) & Y as ~X & Y. */
10006 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10007 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10009 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10010 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10011 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10012 fold_convert_loc (loc, type, arg1));
10014 /* Fold (X ^ Y) & X as ~Y & X. */
10015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10016 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10017 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10019 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10020 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10021 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10022 fold_convert_loc (loc, type, arg1));
10024 /* Fold X & (X ^ Y) as X & ~Y. */
10025 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10026 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10028 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10029 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10030 fold_convert_loc (loc, type, arg0),
10031 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10033 /* Fold X & (Y ^ X) as ~Y & X. */
10034 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10036 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10038 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10039 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10040 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10041 fold_convert_loc (loc, type, arg0));
10044 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10045 multiple of 1 << CST. */
10046 if (TREE_CODE (arg1) == INTEGER_CST)
10048 wide_int cst1 = arg1;
10049 wide_int ncst1 = -cst1;
10050 if ((cst1 & ncst1) == ncst1
10051 && multiple_of_p (type, arg0,
10052 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10053 return fold_convert_loc (loc, type, arg0);
10056 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10058 if (TREE_CODE (arg1) == INTEGER_CST
10059 && TREE_CODE (arg0) == MULT_EXPR
10060 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10062 wide_int warg1 = arg1;
10063 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10066 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10068 else if (masked != warg1)
10070 /* Avoid the transform if arg1 is a mask of some
10071 mode which allows further optimizations. */
10072 int pop = wi::popcount (warg1);
10073 if (!(pop >= BITS_PER_UNIT
10074 && exact_log2 (pop) != -1
10075 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10076 return fold_build2_loc (loc, code, type, op0,
10077 wide_int_to_tree (type, masked));
10081 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10082 ((A & N) + B) & M -> (A + B) & M
10083 Similarly if (N & M) == 0,
10084 ((A | N) + B) & M -> (A + B) & M
10085 and for - instead of + (or unary - instead of +)
10086 and/or ^ instead of |.
10087 If B is constant and (B & M) == 0, fold into A & M. */
10088 if (TREE_CODE (arg1) == INTEGER_CST)
10090 wide_int cst1 = arg1;
10091 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10092 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10093 && (TREE_CODE (arg0) == PLUS_EXPR
10094 || TREE_CODE (arg0) == MINUS_EXPR
10095 || TREE_CODE (arg0) == NEGATE_EXPR)
10096 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10097 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10103 /* Now we know that arg0 is (C + D) or (C - D) or
10104 -C and arg1 (M) is == (1LL << cst) - 1.
10105 Store C into PMOP[0] and D into PMOP[1]. */
10106 pmop[0] = TREE_OPERAND (arg0, 0);
10108 if (TREE_CODE (arg0) != NEGATE_EXPR)
10110 pmop[1] = TREE_OPERAND (arg0, 1);
10114 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10117 for (; which >= 0; which--)
10118 switch (TREE_CODE (pmop[which]))
10123 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10126 cst0 = TREE_OPERAND (pmop[which], 1);
10128 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10133 else if (cst0 != 0)
10135 /* If C or D is of the form (A & N) where
10136 (N & M) == M, or of the form (A | N) or
10137 (A ^ N) where (N & M) == 0, replace it with A. */
10138 pmop[which] = TREE_OPERAND (pmop[which], 0);
10141 /* If C or D is a N where (N & M) == 0, it can be
10142 omitted (assumed 0). */
10143 if ((TREE_CODE (arg0) == PLUS_EXPR
10144 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10145 && (cst1 & pmop[which]) == 0)
10146 pmop[which] = NULL;
10152 /* Only build anything new if we optimized one or both arguments
10154 if (pmop[0] != TREE_OPERAND (arg0, 0)
10155 || (TREE_CODE (arg0) != NEGATE_EXPR
10156 && pmop[1] != TREE_OPERAND (arg0, 1)))
10158 tree utype = TREE_TYPE (arg0);
10159 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10161 /* Perform the operations in a type that has defined
10162 overflow behavior. */
10163 utype = unsigned_type_for (TREE_TYPE (arg0));
10164 if (pmop[0] != NULL)
10165 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10166 if (pmop[1] != NULL)
10167 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10170 if (TREE_CODE (arg0) == NEGATE_EXPR)
10171 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10172 else if (TREE_CODE (arg0) == PLUS_EXPR)
10174 if (pmop[0] != NULL && pmop[1] != NULL)
10175 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10177 else if (pmop[0] != NULL)
10179 else if (pmop[1] != NULL)
10182 return build_int_cst (type, 0);
10184 else if (pmop[0] == NULL)
10185 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10187 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10189 /* TEM is now the new binary +, - or unary - replacement. */
10190 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10191 fold_convert_loc (loc, utype, arg1));
10192 return fold_convert_loc (loc, type, tem);
10197 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10198 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10199 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10201 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10203 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10206 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10212 /* Don't touch a floating-point divide by zero unless the mode
10213 of the constant can represent infinity. */
10214 if (TREE_CODE (arg1) == REAL_CST
10215 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10216 && real_zerop (arg1))
10219 /* (-A) / (-B) -> A / B */
10220 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10221 return fold_build2_loc (loc, RDIV_EXPR, type,
10222 TREE_OPERAND (arg0, 0),
10223 negate_expr (arg1));
10224 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10225 return fold_build2_loc (loc, RDIV_EXPR, type,
10226 negate_expr (arg0),
10227 TREE_OPERAND (arg1, 0));
10229 /* Convert A/B/C to A/(B*C). */
10230 if (flag_reciprocal_math
10231 && TREE_CODE (arg0) == RDIV_EXPR)
10232 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10233 fold_build2_loc (loc, MULT_EXPR, type,
10234 TREE_OPERAND (arg0, 1), arg1));
10236 /* Convert A/(B/C) to (A/B)*C. */
10237 if (flag_reciprocal_math
10238 && TREE_CODE (arg1) == RDIV_EXPR)
10239 return fold_build2_loc (loc, MULT_EXPR, type,
10240 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10241 TREE_OPERAND (arg1, 0)),
10242 TREE_OPERAND (arg1, 1));
10244 /* Convert C1/(X*C2) into (C1/C2)/X. */
10245 if (flag_reciprocal_math
10246 && TREE_CODE (arg1) == MULT_EXPR
10247 && TREE_CODE (arg0) == REAL_CST
10248 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10250 tree tem = const_binop (RDIV_EXPR, arg0,
10251 TREE_OPERAND (arg1, 1));
10253 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10254 TREE_OPERAND (arg1, 0));
10259 case TRUNC_DIV_EXPR:
10260 /* Optimize (X & (-A)) / A where A is a power of 2,
10262 if (TREE_CODE (arg0) == BIT_AND_EXPR
10263 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10264 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10266 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10267 arg1, TREE_OPERAND (arg0, 1));
10268 if (sum && integer_zerop (sum)) {
10269 tree pow2 = build_int_cst (integer_type_node,
10270 wi::exact_log2 (arg1));
10271 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10272 TREE_OPERAND (arg0, 0), pow2);
10278 case FLOOR_DIV_EXPR:
10279 /* Simplify A / (B << N) where A and B are positive and B is
10280 a power of 2, to A >> (N + log2(B)). */
10281 strict_overflow_p = false;
10282 if (TREE_CODE (arg1) == LSHIFT_EXPR
10283 && (TYPE_UNSIGNED (type)
10284 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10286 tree sval = TREE_OPERAND (arg1, 0);
10287 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10289 tree sh_cnt = TREE_OPERAND (arg1, 1);
10290 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10291 wi::exact_log2 (sval));
10293 if (strict_overflow_p)
10294 fold_overflow_warning (("assuming signed overflow does not "
10295 "occur when simplifying A / (B << N)"),
10296 WARN_STRICT_OVERFLOW_MISC);
10298 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10300 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10301 fold_convert_loc (loc, type, arg0), sh_cnt);
10307 case ROUND_DIV_EXPR:
10308 case CEIL_DIV_EXPR:
10309 case EXACT_DIV_EXPR:
10310 if (integer_zerop (arg1))
10313 /* Convert -A / -B to A / B when the type is signed and overflow is
10315 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10316 && TREE_CODE (arg0) == NEGATE_EXPR
10317 && negate_expr_p (arg1))
10319 if (INTEGRAL_TYPE_P (type))
10320 fold_overflow_warning (("assuming signed overflow does not occur "
10321 "when distributing negation across "
10323 WARN_STRICT_OVERFLOW_MISC);
10324 return fold_build2_loc (loc, code, type,
10325 fold_convert_loc (loc, type,
10326 TREE_OPERAND (arg0, 0)),
10327 fold_convert_loc (loc, type,
10328 negate_expr (arg1)));
10330 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10331 && TREE_CODE (arg1) == NEGATE_EXPR
10332 && negate_expr_p (arg0))
10334 if (INTEGRAL_TYPE_P (type))
10335 fold_overflow_warning (("assuming signed overflow does not occur "
10336 "when distributing negation across "
10338 WARN_STRICT_OVERFLOW_MISC);
10339 return fold_build2_loc (loc, code, type,
10340 fold_convert_loc (loc, type,
10341 negate_expr (arg0)),
10342 fold_convert_loc (loc, type,
10343 TREE_OPERAND (arg1, 0)));
10346 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10347 operation, EXACT_DIV_EXPR.
10349 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10350 At one time others generated faster code, it's not clear if they do
10351 after the last round to changes to the DIV code in expmed.c. */
10352 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10353 && multiple_of_p (type, arg0, arg1))
10354 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10355 fold_convert (type, arg0),
10356 fold_convert (type, arg1));
10358 strict_overflow_p = false;
10359 if (TREE_CODE (arg1) == INTEGER_CST
10360 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10361 &strict_overflow_p)))
10363 if (strict_overflow_p)
10364 fold_overflow_warning (("assuming signed overflow does not occur "
10365 "when simplifying division"),
10366 WARN_STRICT_OVERFLOW_MISC);
10367 return fold_convert_loc (loc, type, tem);
10372 case CEIL_MOD_EXPR:
10373 case FLOOR_MOD_EXPR:
10374 case ROUND_MOD_EXPR:
10375 case TRUNC_MOD_EXPR:
10376 strict_overflow_p = false;
10377 if (TREE_CODE (arg1) == INTEGER_CST
10378 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10379 &strict_overflow_p)))
10381 if (strict_overflow_p)
10382 fold_overflow_warning (("assuming signed overflow does not occur "
10383 "when simplifying modulus"),
10384 WARN_STRICT_OVERFLOW_MISC);
10385 return fold_convert_loc (loc, type, tem);
10394 /* Since negative shift count is not well-defined,
10395 don't try to compute it in the compiler. */
10396 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10399 prec = element_precision (type);
10401 /* If we have a rotate of a bit operation with the rotate count and
10402 the second operand of the bit operation both constant,
10403 permute the two operations. */
10404 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10405 && (TREE_CODE (arg0) == BIT_AND_EXPR
10406 || TREE_CODE (arg0) == BIT_IOR_EXPR
10407 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10408 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10409 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10410 fold_build2_loc (loc, code, type,
10411 TREE_OPERAND (arg0, 0), arg1),
10412 fold_build2_loc (loc, code, type,
10413 TREE_OPERAND (arg0, 1), arg1));
10415 /* Two consecutive rotates adding up to the some integer
10416 multiple of the precision of the type can be ignored. */
10417 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10418 && TREE_CODE (arg0) == RROTATE_EXPR
10419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10420 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10422 return TREE_OPERAND (arg0, 0);
10427 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
10433 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
10438 case TRUTH_ANDIF_EXPR:
10439 /* Note that the operands of this must be ints
10440 and their values must be 0 or 1.
10441 ("true" is a fixed value perhaps depending on the language.) */
10442 /* If first arg is constant zero, return it. */
10443 if (integer_zerop (arg0))
10444 return fold_convert_loc (loc, type, arg0);
10445 case TRUTH_AND_EXPR:
10446 /* If either arg is constant true, drop it. */
10447 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10448 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10449 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10450 /* Preserve sequence points. */
10451 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10453 /* If second arg is constant zero, result is zero, but first arg
10454 must be evaluated. */
10455 if (integer_zerop (arg1))
10456 return omit_one_operand_loc (loc, type, arg1, arg0);
10457 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10458 case will be handled here. */
10459 if (integer_zerop (arg0))
10460 return omit_one_operand_loc (loc, type, arg0, arg1);
10462 /* !X && X is always false. */
10463 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10464 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10465 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10466 /* X && !X is always false. */
10467 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10468 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10471 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10472 means A >= Y && A != MAX, but in this case we know that
10475 if (!TREE_SIDE_EFFECTS (arg0)
10476 && !TREE_SIDE_EFFECTS (arg1))
10478 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10479 if (tem && !operand_equal_p (tem, arg0, 0))
10480 return fold_build2_loc (loc, code, type, tem, arg1);
10482 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10483 if (tem && !operand_equal_p (tem, arg1, 0))
10484 return fold_build2_loc (loc, code, type, arg0, tem);
10487 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10493 case TRUTH_ORIF_EXPR:
10494 /* Note that the operands of this must be ints
10495 and their values must be 0 or true.
10496 ("true" is a fixed value perhaps depending on the language.) */
10497 /* If first arg is constant true, return it. */
10498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10499 return fold_convert_loc (loc, type, arg0);
10500 case TRUTH_OR_EXPR:
10501 /* If either arg is constant zero, drop it. */
10502 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10503 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10504 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10505 /* Preserve sequence points. */
10506 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10507 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10508 /* If second arg is constant true, result is true, but we must
10509 evaluate first arg. */
10510 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10511 return omit_one_operand_loc (loc, type, arg1, arg0);
10512 /* Likewise for first arg, but note this only occurs here for
10514 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10515 return omit_one_operand_loc (loc, type, arg0, arg1);
10517 /* !X || X is always true. */
10518 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10519 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10520 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10521 /* X || !X is always true. */
10522 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10523 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10524 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10526 /* (X && !Y) || (!X && Y) is X ^ Y */
10527 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10528 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10530 tree a0, a1, l0, l1, n0, n1;
10532 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10533 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10535 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10536 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10538 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10539 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10541 if ((operand_equal_p (n0, a0, 0)
10542 && operand_equal_p (n1, a1, 0))
10543 || (operand_equal_p (n0, a1, 0)
10544 && operand_equal_p (n1, a0, 0)))
10545 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10548 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10554 case TRUTH_XOR_EXPR:
10555 /* If the second arg is constant zero, drop it. */
10556 if (integer_zerop (arg1))
10557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10558 /* If the second arg is constant true, this is a logical inversion. */
10559 if (integer_onep (arg1))
10561 tem = invert_truthvalue_loc (loc, arg0);
10562 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10564 /* Identical arguments cancel to zero. */
10565 if (operand_equal_p (arg0, arg1, 0))
10566 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10568 /* !X ^ X is always true. */
10569 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10570 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10571 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10573 /* X ^ !X is always true. */
10574 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10575 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10576 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10585 tem = fold_comparison (loc, code, type, op0, op1);
10586 if (tem != NULL_TREE)
10589 /* bool_var != 1 becomes !bool_var. */
10590 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10591 && code == NE_EXPR)
10592 return fold_convert_loc (loc, type,
10593 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10594 TREE_TYPE (arg0), arg0));
10596 /* bool_var == 0 becomes !bool_var. */
10597 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10598 && code == EQ_EXPR)
10599 return fold_convert_loc (loc, type,
10600 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10601 TREE_TYPE (arg0), arg0));
10603 /* !exp != 0 becomes !exp */
10604 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10605 && code == NE_EXPR)
10606 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10608 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10609 if ((TREE_CODE (arg0) == PLUS_EXPR
10610 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10611 || TREE_CODE (arg0) == MINUS_EXPR)
10612 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10615 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10616 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10618 tree val = TREE_OPERAND (arg0, 1);
10619 return omit_two_operands_loc (loc, type,
10620 fold_build2_loc (loc, code, type,
10622 build_int_cst (TREE_TYPE (val),
10624 TREE_OPERAND (arg0, 0), arg1);
10627 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10628 if (TREE_CODE (arg0) == MINUS_EXPR
10629 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10630 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10633 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10635 return omit_two_operands_loc (loc, type,
10637 ? boolean_true_node : boolean_false_node,
10638 TREE_OPERAND (arg0, 1), arg1);
10641 /* If this is an EQ or NE comparison with zero and ARG0 is
10642 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10643 two operations, but the latter can be done in one less insn
10644 on machines that have only two-operand insns or on which a
10645 constant cannot be the first operand. */
10646 if (TREE_CODE (arg0) == BIT_AND_EXPR
10647 && integer_zerop (arg1))
10649 tree arg00 = TREE_OPERAND (arg0, 0);
10650 tree arg01 = TREE_OPERAND (arg0, 1);
10651 if (TREE_CODE (arg00) == LSHIFT_EXPR
10652 && integer_onep (TREE_OPERAND (arg00, 0)))
10654 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10655 arg01, TREE_OPERAND (arg00, 1));
10656 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10657 build_int_cst (TREE_TYPE (arg0), 1));
10658 return fold_build2_loc (loc, code, type,
10659 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10662 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10663 && integer_onep (TREE_OPERAND (arg01, 0)))
10665 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10666 arg00, TREE_OPERAND (arg01, 1));
10667 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10668 build_int_cst (TREE_TYPE (arg0), 1));
10669 return fold_build2_loc (loc, code, type,
10670 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10675 /* If this is an NE or EQ comparison of zero against the result of a
10676 signed MOD operation whose second operand is a power of 2, make
10677 the MOD operation unsigned since it is simpler and equivalent. */
10678 if (integer_zerop (arg1)
10679 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10680 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10681 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10682 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10683 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10684 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10686 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10687 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10688 fold_convert_loc (loc, newtype,
10689 TREE_OPERAND (arg0, 0)),
10690 fold_convert_loc (loc, newtype,
10691 TREE_OPERAND (arg0, 1)));
10693 return fold_build2_loc (loc, code, type, newmod,
10694 fold_convert_loc (loc, newtype, arg1));
10697 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10698 C1 is a valid shift constant, and C2 is a power of two, i.e.
10700 if (TREE_CODE (arg0) == BIT_AND_EXPR
10701 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10702 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10704 && integer_pow2p (TREE_OPERAND (arg0, 1))
10705 && integer_zerop (arg1))
10707 tree itype = TREE_TYPE (arg0);
10708 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10709 prec = TYPE_PRECISION (itype);
10711 /* Check for a valid shift count. */
10712 if (wi::ltu_p (arg001, prec))
10714 tree arg01 = TREE_OPERAND (arg0, 1);
10715 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10716 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10717 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10718 can be rewritten as (X & (C2 << C1)) != 0. */
10719 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10721 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10722 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10723 return fold_build2_loc (loc, code, type, tem,
10724 fold_convert_loc (loc, itype, arg1));
10726 /* Otherwise, for signed (arithmetic) shifts,
10727 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10728 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10729 else if (!TYPE_UNSIGNED (itype))
10730 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10731 arg000, build_int_cst (itype, 0));
10732 /* Otherwise, of unsigned (logical) shifts,
10733 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10734 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10736 return omit_one_operand_loc (loc, type,
10737 code == EQ_EXPR ? integer_one_node
10738 : integer_zero_node,
10743 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10744 Similarly for NE_EXPR. */
10745 if (TREE_CODE (arg0) == BIT_AND_EXPR
10746 && TREE_CODE (arg1) == INTEGER_CST
10747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10749 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10750 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10751 TREE_OPERAND (arg0, 1));
10753 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10754 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10756 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10757 if (integer_nonzerop (dandnotc))
10758 return omit_one_operand_loc (loc, type, rslt, arg0);
10761 /* If this is a comparison of a field, we may be able to simplify it. */
10762 if ((TREE_CODE (arg0) == COMPONENT_REF
10763 || TREE_CODE (arg0) == BIT_FIELD_REF)
10764 /* Handle the constant case even without -O
10765 to make sure the warnings are given. */
10766 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10768 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10773 /* Optimize comparisons of strlen vs zero to a compare of the
10774 first character of the string vs zero. To wit,
10775 strlen(ptr) == 0 => *ptr == 0
10776 strlen(ptr) != 0 => *ptr != 0
10777 Other cases should reduce to one of these two (or a constant)
10778 due to the return value of strlen being unsigned. */
10779 if (TREE_CODE (arg0) == CALL_EXPR
10780 && integer_zerop (arg1))
10782 tree fndecl = get_callee_fndecl (arg0);
10785 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10786 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10787 && call_expr_nargs (arg0) == 1
10788 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10790 tree iref = build_fold_indirect_ref_loc (loc,
10791 CALL_EXPR_ARG (arg0, 0));
10792 return fold_build2_loc (loc, code, type, iref,
10793 build_int_cst (TREE_TYPE (iref), 0));
10797 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10798 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10799 if (TREE_CODE (arg0) == RSHIFT_EXPR
10800 && integer_zerop (arg1)
10801 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10803 tree arg00 = TREE_OPERAND (arg0, 0);
10804 tree arg01 = TREE_OPERAND (arg0, 1);
10805 tree itype = TREE_TYPE (arg00);
10806 if (wi::eq_p (arg01, element_precision (itype) - 1))
10808 if (TYPE_UNSIGNED (itype))
10810 itype = signed_type_for (itype);
10811 arg00 = fold_convert_loc (loc, itype, arg00);
10813 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10814 type, arg00, build_zero_cst (itype));
10818 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10819 (X & C) == 0 when C is a single bit. */
10820 if (TREE_CODE (arg0) == BIT_AND_EXPR
10821 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10822 && integer_zerop (arg1)
10823 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10825 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10826 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10827 TREE_OPERAND (arg0, 1));
10828 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10830 fold_convert_loc (loc, TREE_TYPE (arg0),
10834 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10835 constant C is a power of two, i.e. a single bit. */
10836 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10837 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10838 && integer_zerop (arg1)
10839 && integer_pow2p (TREE_OPERAND (arg0, 1))
10840 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10841 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10843 tree arg00 = TREE_OPERAND (arg0, 0);
10844 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10845 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10848 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10849 when is C is a power of two, i.e. a single bit. */
10850 if (TREE_CODE (arg0) == BIT_AND_EXPR
10851 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10852 && integer_zerop (arg1)
10853 && integer_pow2p (TREE_OPERAND (arg0, 1))
10854 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10855 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10857 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10858 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10859 arg000, TREE_OPERAND (arg0, 1));
10860 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10861 tem, build_int_cst (TREE_TYPE (tem), 0));
10864 if (integer_zerop (arg1)
10865 && tree_expr_nonzero_p (arg0))
10867 tree res = constant_boolean_node (code==NE_EXPR, type);
10868 return omit_one_operand_loc (loc, type, res, arg0);
10871 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10872 if (TREE_CODE (arg0) == BIT_AND_EXPR
10873 && TREE_CODE (arg1) == BIT_AND_EXPR)
10875 tree arg00 = TREE_OPERAND (arg0, 0);
10876 tree arg01 = TREE_OPERAND (arg0, 1);
10877 tree arg10 = TREE_OPERAND (arg1, 0);
10878 tree arg11 = TREE_OPERAND (arg1, 1);
10879 tree itype = TREE_TYPE (arg0);
10881 if (operand_equal_p (arg01, arg11, 0))
10882 return fold_build2_loc (loc, code, type,
10883 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10884 fold_build2_loc (loc,
10885 BIT_XOR_EXPR, itype,
10888 build_zero_cst (itype));
10890 if (operand_equal_p (arg01, arg10, 0))
10891 return fold_build2_loc (loc, code, type,
10892 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10893 fold_build2_loc (loc,
10894 BIT_XOR_EXPR, itype,
10897 build_zero_cst (itype));
10899 if (operand_equal_p (arg00, arg11, 0))
10900 return fold_build2_loc (loc, code, type,
10901 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10902 fold_build2_loc (loc,
10903 BIT_XOR_EXPR, itype,
10906 build_zero_cst (itype));
10908 if (operand_equal_p (arg00, arg10, 0))
10909 return fold_build2_loc (loc, code, type,
10910 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10911 fold_build2_loc (loc,
10912 BIT_XOR_EXPR, itype,
10915 build_zero_cst (itype));
10918 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10919 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10921 tree arg00 = TREE_OPERAND (arg0, 0);
10922 tree arg01 = TREE_OPERAND (arg0, 1);
10923 tree arg10 = TREE_OPERAND (arg1, 0);
10924 tree arg11 = TREE_OPERAND (arg1, 1);
10925 tree itype = TREE_TYPE (arg0);
10927 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10928 operand_equal_p guarantees no side-effects so we don't need
10929 to use omit_one_operand on Z. */
10930 if (operand_equal_p (arg01, arg11, 0))
10931 return fold_build2_loc (loc, code, type, arg00,
10932 fold_convert_loc (loc, TREE_TYPE (arg00),
10934 if (operand_equal_p (arg01, arg10, 0))
10935 return fold_build2_loc (loc, code, type, arg00,
10936 fold_convert_loc (loc, TREE_TYPE (arg00),
10938 if (operand_equal_p (arg00, arg11, 0))
10939 return fold_build2_loc (loc, code, type, arg01,
10940 fold_convert_loc (loc, TREE_TYPE (arg01),
10942 if (operand_equal_p (arg00, arg10, 0))
10943 return fold_build2_loc (loc, code, type, arg01,
10944 fold_convert_loc (loc, TREE_TYPE (arg01),
10947 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10948 if (TREE_CODE (arg01) == INTEGER_CST
10949 && TREE_CODE (arg11) == INTEGER_CST)
10951 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10952 fold_convert_loc (loc, itype, arg11));
10953 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10954 return fold_build2_loc (loc, code, type, tem,
10955 fold_convert_loc (loc, itype, arg10));
10959 /* Attempt to simplify equality/inequality comparisons of complex
10960 values. Only lower the comparison if the result is known or
10961 can be simplified to a single scalar comparison. */
10962 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10963 || TREE_CODE (arg0) == COMPLEX_CST)
10964 && (TREE_CODE (arg1) == COMPLEX_EXPR
10965 || TREE_CODE (arg1) == COMPLEX_CST))
10967 tree real0, imag0, real1, imag1;
10970 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10972 real0 = TREE_OPERAND (arg0, 0);
10973 imag0 = TREE_OPERAND (arg0, 1);
10977 real0 = TREE_REALPART (arg0);
10978 imag0 = TREE_IMAGPART (arg0);
10981 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10983 real1 = TREE_OPERAND (arg1, 0);
10984 imag1 = TREE_OPERAND (arg1, 1);
10988 real1 = TREE_REALPART (arg1);
10989 imag1 = TREE_IMAGPART (arg1);
10992 rcond = fold_binary_loc (loc, code, type, real0, real1);
10993 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10995 if (integer_zerop (rcond))
10997 if (code == EQ_EXPR)
10998 return omit_two_operands_loc (loc, type, boolean_false_node,
11000 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11004 if (code == NE_EXPR)
11005 return omit_two_operands_loc (loc, type, boolean_true_node,
11007 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11011 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11012 if (icond && TREE_CODE (icond) == INTEGER_CST)
11014 if (integer_zerop (icond))
11016 if (code == EQ_EXPR)
11017 return omit_two_operands_loc (loc, type, boolean_false_node,
11019 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11023 if (code == NE_EXPR)
11024 return omit_two_operands_loc (loc, type, boolean_true_node,
11026 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11037 tem = fold_comparison (loc, code, type, op0, op1);
11038 if (tem != NULL_TREE)
11041 /* Transform comparisons of the form X +- C CMP X. */
11042 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11044 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11045 && !HONOR_SNANS (arg0))
11046 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11047 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11049 tree arg01 = TREE_OPERAND (arg0, 1);
11050 enum tree_code code0 = TREE_CODE (arg0);
11053 if (TREE_CODE (arg01) == REAL_CST)
11054 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11056 is_positive = tree_int_cst_sgn (arg01);
11058 /* (X - c) > X becomes false. */
11059 if (code == GT_EXPR
11060 && ((code0 == MINUS_EXPR && is_positive >= 0)
11061 || (code0 == PLUS_EXPR && is_positive <= 0)))
11063 if (TREE_CODE (arg01) == INTEGER_CST
11064 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11065 fold_overflow_warning (("assuming signed overflow does not "
11066 "occur when assuming that (X - c) > X "
11067 "is always false"),
11068 WARN_STRICT_OVERFLOW_ALL);
11069 return constant_boolean_node (0, type);
11072 /* Likewise (X + c) < X becomes false. */
11073 if (code == LT_EXPR
11074 && ((code0 == PLUS_EXPR && is_positive >= 0)
11075 || (code0 == MINUS_EXPR && is_positive <= 0)))
11077 if (TREE_CODE (arg01) == INTEGER_CST
11078 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11079 fold_overflow_warning (("assuming signed overflow does not "
11080 "occur when assuming that "
11081 "(X + c) < X is always false"),
11082 WARN_STRICT_OVERFLOW_ALL);
11083 return constant_boolean_node (0, type);
11086 /* Convert (X - c) <= X to true. */
11087 if (!HONOR_NANS (arg1)
11089 && ((code0 == MINUS_EXPR && is_positive >= 0)
11090 || (code0 == PLUS_EXPR && is_positive <= 0)))
11092 if (TREE_CODE (arg01) == INTEGER_CST
11093 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11094 fold_overflow_warning (("assuming signed overflow does not "
11095 "occur when assuming that "
11096 "(X - c) <= X is always true"),
11097 WARN_STRICT_OVERFLOW_ALL);
11098 return constant_boolean_node (1, type);
11101 /* Convert (X + c) >= X to true. */
11102 if (!HONOR_NANS (arg1)
11104 && ((code0 == PLUS_EXPR && is_positive >= 0)
11105 || (code0 == MINUS_EXPR && is_positive <= 0)))
11107 if (TREE_CODE (arg01) == INTEGER_CST
11108 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11109 fold_overflow_warning (("assuming signed overflow does not "
11110 "occur when assuming that "
11111 "(X + c) >= X is always true"),
11112 WARN_STRICT_OVERFLOW_ALL);
11113 return constant_boolean_node (1, type);
11116 if (TREE_CODE (arg01) == INTEGER_CST)
11118 /* Convert X + c > X and X - c < X to true for integers. */
11119 if (code == GT_EXPR
11120 && ((code0 == PLUS_EXPR && is_positive > 0)
11121 || (code0 == MINUS_EXPR && is_positive < 0)))
11123 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11124 fold_overflow_warning (("assuming signed overflow does "
11125 "not occur when assuming that "
11126 "(X + c) > X is always true"),
11127 WARN_STRICT_OVERFLOW_ALL);
11128 return constant_boolean_node (1, type);
11131 if (code == LT_EXPR
11132 && ((code0 == MINUS_EXPR && is_positive > 0)
11133 || (code0 == PLUS_EXPR && is_positive < 0)))
11135 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11136 fold_overflow_warning (("assuming signed overflow does "
11137 "not occur when assuming that "
11138 "(X - c) < X is always true"),
11139 WARN_STRICT_OVERFLOW_ALL);
11140 return constant_boolean_node (1, type);
11143 /* Convert X + c <= X and X - c >= X to false for integers. */
11144 if (code == LE_EXPR
11145 && ((code0 == PLUS_EXPR && is_positive > 0)
11146 || (code0 == MINUS_EXPR && is_positive < 0)))
11148 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11149 fold_overflow_warning (("assuming signed overflow does "
11150 "not occur when assuming that "
11151 "(X + c) <= X is always false"),
11152 WARN_STRICT_OVERFLOW_ALL);
11153 return constant_boolean_node (0, type);
11156 if (code == GE_EXPR
11157 && ((code0 == MINUS_EXPR && is_positive > 0)
11158 || (code0 == PLUS_EXPR && is_positive < 0)))
11160 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11161 fold_overflow_warning (("assuming signed overflow does "
11162 "not occur when assuming that "
11163 "(X - c) >= X is always false"),
11164 WARN_STRICT_OVERFLOW_ALL);
11165 return constant_boolean_node (0, type);
11170 /* If we are comparing an ABS_EXPR with a constant, we can
11171 convert all the cases into explicit comparisons, but they may
11172 well not be faster than doing the ABS and one comparison.
11173 But ABS (X) <= C is a range comparison, which becomes a subtraction
11174 and a comparison, and is probably faster. */
11175 if (code == LE_EXPR
11176 && TREE_CODE (arg1) == INTEGER_CST
11177 && TREE_CODE (arg0) == ABS_EXPR
11178 && ! TREE_SIDE_EFFECTS (arg0)
11179 && (0 != (tem = negate_expr (arg1)))
11180 && TREE_CODE (tem) == INTEGER_CST
11181 && !TREE_OVERFLOW (tem))
11182 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11183 build2 (GE_EXPR, type,
11184 TREE_OPERAND (arg0, 0), tem),
11185 build2 (LE_EXPR, type,
11186 TREE_OPERAND (arg0, 0), arg1));
11188 /* Convert ABS_EXPR<x> >= 0 to true. */
11189 strict_overflow_p = false;
11190 if (code == GE_EXPR
11191 && (integer_zerop (arg1)
11192 || (! HONOR_NANS (arg0)
11193 && real_zerop (arg1)))
11194 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11196 if (strict_overflow_p)
11197 fold_overflow_warning (("assuming signed overflow does not occur "
11198 "when simplifying comparison of "
11199 "absolute value and zero"),
11200 WARN_STRICT_OVERFLOW_CONDITIONAL);
11201 return omit_one_operand_loc (loc, type,
11202 constant_boolean_node (true, type),
11206 /* Convert ABS_EXPR<x> < 0 to false. */
11207 strict_overflow_p = false;
11208 if (code == LT_EXPR
11209 && (integer_zerop (arg1) || real_zerop (arg1))
11210 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11212 if (strict_overflow_p)
11213 fold_overflow_warning (("assuming signed overflow does not occur "
11214 "when simplifying comparison of "
11215 "absolute value and zero"),
11216 WARN_STRICT_OVERFLOW_CONDITIONAL);
11217 return omit_one_operand_loc (loc, type,
11218 constant_boolean_node (false, type),
11222 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11223 and similarly for >= into !=. */
11224 if ((code == LT_EXPR || code == GE_EXPR)
11225 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11226 && TREE_CODE (arg1) == LSHIFT_EXPR
11227 && integer_onep (TREE_OPERAND (arg1, 0)))
11228 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11229 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11230 TREE_OPERAND (arg1, 1)),
11231 build_zero_cst (TREE_TYPE (arg0)));
11233 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11234 otherwise Y might be >= # of bits in X's type and thus e.g.
11235 (unsigned char) (1 << Y) for Y 15 might be 0.
11236 If the cast is widening, then 1 << Y should have unsigned type,
11237 otherwise if Y is number of bits in the signed shift type minus 1,
11238 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11239 31 might be 0xffffffff80000000. */
11240 if ((code == LT_EXPR || code == GE_EXPR)
11241 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11242 && CONVERT_EXPR_P (arg1)
11243 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11244 && (element_precision (TREE_TYPE (arg1))
11245 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11246 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11247 || (element_precision (TREE_TYPE (arg1))
11248 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11249 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11251 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11252 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11253 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11254 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11255 build_zero_cst (TREE_TYPE (arg0)));
11260 case UNORDERED_EXPR:
11268 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11270 tree targ0 = strip_float_extensions (arg0);
11271 tree targ1 = strip_float_extensions (arg1);
11272 tree newtype = TREE_TYPE (targ0);
11274 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11275 newtype = TREE_TYPE (targ1);
11277 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11278 return fold_build2_loc (loc, code, type,
11279 fold_convert_loc (loc, newtype, targ0),
11280 fold_convert_loc (loc, newtype, targ1));
11285 case COMPOUND_EXPR:
11286 /* When pedantic, a compound expression can be neither an lvalue
11287 nor an integer constant expression. */
11288 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11290 /* Don't let (0, 0) be null pointer constant. */
11291 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11292 : fold_convert_loc (loc, type, arg1);
11293 return pedantic_non_lvalue_loc (loc, tem);
11296 /* An ASSERT_EXPR should never be passed to fold_binary. */
11297 gcc_unreachable ();
11301 } /* switch (code) */
11304 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11305 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11309 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11311 switch (TREE_CODE (*tp))
11317 *walk_subtrees = 0;
11319 /* ... fall through ... */
11326 /* Return whether the sub-tree ST contains a label which is accessible from
11327 outside the sub-tree. */
11330 contains_label_p (tree st)
11333 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11336 /* Fold a ternary expression of code CODE and type TYPE with operands
11337 OP0, OP1, and OP2. Return the folded expression if folding is
11338 successful. Otherwise, return NULL_TREE. */
11341 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11342 tree op0, tree op1, tree op2)
11345 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11346 enum tree_code_class kind = TREE_CODE_CLASS (code);
11348 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11349 && TREE_CODE_LENGTH (code) == 3);
11351 /* If this is a commutative operation, and OP0 is a constant, move it
11352 to OP1 to reduce the number of tests below. */
11353 if (commutative_ternary_tree_code (code)
11354 && tree_swap_operands_p (op0, op1, true))
11355 return fold_build3_loc (loc, code, type, op1, op0, op2);
11357 tem = generic_simplify (loc, code, type, op0, op1, op2);
11361 /* Strip any conversions that don't change the mode. This is safe
11362 for every expression, except for a comparison expression because
11363 its signedness is derived from its operands. So, in the latter
11364 case, only strip conversions that don't change the signedness.
11366 Note that this is done as an internal manipulation within the
11367 constant folder, in order to find the simplest representation of
11368 the arguments so that their form can be studied. In any cases,
11369 the appropriate type conversions should be put back in the tree
11370 that will get out of the constant folder. */
11391 case COMPONENT_REF:
11392 if (TREE_CODE (arg0) == CONSTRUCTOR
11393 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11395 unsigned HOST_WIDE_INT idx;
11397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11404 case VEC_COND_EXPR:
11405 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11406 so all simple results must be passed through pedantic_non_lvalue. */
11407 if (TREE_CODE (arg0) == INTEGER_CST)
11409 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11410 tem = integer_zerop (arg0) ? op2 : op1;
11411 /* Only optimize constant conditions when the selected branch
11412 has the same type as the COND_EXPR. This avoids optimizing
11413 away "c ? x : throw", where the throw has a void type.
11414 Avoid throwing away that operand which contains label. */
11415 if ((!TREE_SIDE_EFFECTS (unused_op)
11416 || !contains_label_p (unused_op))
11417 && (! VOID_TYPE_P (TREE_TYPE (tem))
11418 || VOID_TYPE_P (type)))
11419 return pedantic_non_lvalue_loc (loc, tem);
11422 else if (TREE_CODE (arg0) == VECTOR_CST)
11424 if ((TREE_CODE (arg1) == VECTOR_CST
11425 || TREE_CODE (arg1) == CONSTRUCTOR)
11426 && (TREE_CODE (arg2) == VECTOR_CST
11427 || TREE_CODE (arg2) == CONSTRUCTOR))
11429 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11430 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11431 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11432 for (i = 0; i < nelts; i++)
11434 tree val = VECTOR_CST_ELT (arg0, i);
11435 if (integer_all_onesp (val))
11437 else if (integer_zerop (val))
11438 sel[i] = nelts + i;
11439 else /* Currently unreachable. */
11442 tree t = fold_vec_perm (type, arg1, arg2, sel);
11443 if (t != NULL_TREE)
11448 /* If we have A op B ? A : C, we may be able to convert this to a
11449 simpler expression, depending on the operation and the values
11450 of B and C. Signed zeros prevent all of these transformations,
11451 for reasons given above each one.
11453 Also try swapping the arguments and inverting the conditional. */
11454 if (COMPARISON_CLASS_P (arg0)
11455 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11456 arg1, TREE_OPERAND (arg0, 1))
11457 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11459 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11464 if (COMPARISON_CLASS_P (arg0)
11465 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11467 TREE_OPERAND (arg0, 1))
11468 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11470 location_t loc0 = expr_location_or (arg0, loc);
11471 tem = fold_invert_truthvalue (loc0, arg0);
11472 if (tem && COMPARISON_CLASS_P (tem))
11474 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11480 /* If the second operand is simpler than the third, swap them
11481 since that produces better jump optimization results. */
11482 if (truth_value_p (TREE_CODE (arg0))
11483 && tree_swap_operands_p (op1, op2, false))
11485 location_t loc0 = expr_location_or (arg0, loc);
11486 /* See if this can be inverted. If it can't, possibly because
11487 it was a floating-point inequality comparison, don't do
11489 tem = fold_invert_truthvalue (loc0, arg0);
11491 return fold_build3_loc (loc, code, type, tem, op2, op1);
11494 /* Convert A ? 1 : 0 to simply A. */
11495 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11496 : (integer_onep (op1)
11497 && !VECTOR_TYPE_P (type)))
11498 && integer_zerop (op2)
11499 /* If we try to convert OP0 to our type, the
11500 call to fold will try to move the conversion inside
11501 a COND, which will recurse. In that case, the COND_EXPR
11502 is probably the best choice, so leave it alone. */
11503 && type == TREE_TYPE (arg0))
11504 return pedantic_non_lvalue_loc (loc, arg0);
11506 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11507 over COND_EXPR in cases such as floating point comparisons. */
11508 if (integer_zerop (op1)
11509 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11510 : (integer_onep (op2)
11511 && !VECTOR_TYPE_P (type)))
11512 && truth_value_p (TREE_CODE (arg0)))
11513 return pedantic_non_lvalue_loc (loc,
11514 fold_convert_loc (loc, type,
11515 invert_truthvalue_loc (loc,
11518 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11519 if (TREE_CODE (arg0) == LT_EXPR
11520 && integer_zerop (TREE_OPERAND (arg0, 1))
11521 && integer_zerop (op2)
11522 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11524 /* sign_bit_p looks through both zero and sign extensions,
11525 but for this optimization only sign extensions are
11527 tree tem2 = TREE_OPERAND (arg0, 0);
11528 while (tem != tem2)
11530 if (TREE_CODE (tem2) != NOP_EXPR
11531 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11536 tem2 = TREE_OPERAND (tem2, 0);
11538 /* sign_bit_p only checks ARG1 bits within A's precision.
11539 If <sign bit of A> has wider type than A, bits outside
11540 of A's precision in <sign bit of A> need to be checked.
11541 If they are all 0, this optimization needs to be done
11542 in unsigned A's type, if they are all 1 in signed A's type,
11543 otherwise this can't be done. */
11545 && TYPE_PRECISION (TREE_TYPE (tem))
11546 < TYPE_PRECISION (TREE_TYPE (arg1))
11547 && TYPE_PRECISION (TREE_TYPE (tem))
11548 < TYPE_PRECISION (type))
11550 int inner_width, outer_width;
11553 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11554 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11555 if (outer_width > TYPE_PRECISION (type))
11556 outer_width = TYPE_PRECISION (type);
11558 wide_int mask = wi::shifted_mask
11559 (inner_width, outer_width - inner_width, false,
11560 TYPE_PRECISION (TREE_TYPE (arg1)));
11562 wide_int common = mask & arg1;
11563 if (common == mask)
11565 tem_type = signed_type_for (TREE_TYPE (tem));
11566 tem = fold_convert_loc (loc, tem_type, tem);
11568 else if (common == 0)
11570 tem_type = unsigned_type_for (TREE_TYPE (tem));
11571 tem = fold_convert_loc (loc, tem_type, tem);
11579 fold_convert_loc (loc, type,
11580 fold_build2_loc (loc, BIT_AND_EXPR,
11581 TREE_TYPE (tem), tem,
11582 fold_convert_loc (loc,
11587 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11588 already handled above. */
11589 if (TREE_CODE (arg0) == BIT_AND_EXPR
11590 && integer_onep (TREE_OPERAND (arg0, 1))
11591 && integer_zerop (op2)
11592 && integer_pow2p (arg1))
11594 tree tem = TREE_OPERAND (arg0, 0);
11596 if (TREE_CODE (tem) == RSHIFT_EXPR
11597 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11598 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11599 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11600 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11601 TREE_OPERAND (tem, 0), arg1);
11604 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11605 is probably obsolete because the first operand should be a
11606 truth value (that's why we have the two cases above), but let's
11607 leave it in until we can confirm this for all front-ends. */
11608 if (integer_zerop (op2)
11609 && TREE_CODE (arg0) == NE_EXPR
11610 && integer_zerop (TREE_OPERAND (arg0, 1))
11611 && integer_pow2p (arg1)
11612 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11613 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11614 arg1, OEP_ONLY_CONST))
11615 return pedantic_non_lvalue_loc (loc,
11616 fold_convert_loc (loc, type,
11617 TREE_OPERAND (arg0, 0)));
11619 /* Disable the transformations below for vectors, since
11620 fold_binary_op_with_conditional_arg may undo them immediately,
11621 yielding an infinite loop. */
11622 if (code == VEC_COND_EXPR)
11625 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11626 if (integer_zerop (op2)
11627 && truth_value_p (TREE_CODE (arg0))
11628 && truth_value_p (TREE_CODE (arg1))
11629 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11630 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11631 : TRUTH_ANDIF_EXPR,
11632 type, fold_convert_loc (loc, type, arg0), arg1);
11634 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11635 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11636 && truth_value_p (TREE_CODE (arg0))
11637 && truth_value_p (TREE_CODE (arg1))
11638 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11640 location_t loc0 = expr_location_or (arg0, loc);
11641 /* Only perform transformation if ARG0 is easily inverted. */
11642 tem = fold_invert_truthvalue (loc0, arg0);
11644 return fold_build2_loc (loc, code == VEC_COND_EXPR
11647 type, fold_convert_loc (loc, type, tem),
11651 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11652 if (integer_zerop (arg1)
11653 && truth_value_p (TREE_CODE (arg0))
11654 && truth_value_p (TREE_CODE (op2))
11655 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11657 location_t loc0 = expr_location_or (arg0, loc);
11658 /* Only perform transformation if ARG0 is easily inverted. */
11659 tem = fold_invert_truthvalue (loc0, arg0);
11661 return fold_build2_loc (loc, code == VEC_COND_EXPR
11662 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11663 type, fold_convert_loc (loc, type, tem),
11667 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11668 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11669 && truth_value_p (TREE_CODE (arg0))
11670 && truth_value_p (TREE_CODE (op2))
11671 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11672 return fold_build2_loc (loc, code == VEC_COND_EXPR
11673 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11674 type, fold_convert_loc (loc, type, arg0), op2);
11679 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11680 of fold_ternary on them. */
11681 gcc_unreachable ();
11683 case BIT_FIELD_REF:
11684 if ((TREE_CODE (arg0) == VECTOR_CST
11685 || (TREE_CODE (arg0) == CONSTRUCTOR
11686 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11687 && (type == TREE_TYPE (TREE_TYPE (arg0))
11688 || (TREE_CODE (type) == VECTOR_TYPE
11689 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11691 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11692 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11693 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11694 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11697 && (idx % width) == 0
11698 && (n % width) == 0
11699 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11704 if (TREE_CODE (arg0) == VECTOR_CST)
11707 return VECTOR_CST_ELT (arg0, idx);
11709 tree *vals = XALLOCAVEC (tree, n);
11710 for (unsigned i = 0; i < n; ++i)
11711 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11712 return build_vector (type, vals);
11715 /* Constructor elements can be subvectors. */
11716 unsigned HOST_WIDE_INT k = 1;
11717 if (CONSTRUCTOR_NELTS (arg0) != 0)
11719 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11720 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11721 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11724 /* We keep an exact subset of the constructor elements. */
11725 if ((idx % k) == 0 && (n % k) == 0)
11727 if (CONSTRUCTOR_NELTS (arg0) == 0)
11728 return build_constructor (type, NULL);
11733 if (idx < CONSTRUCTOR_NELTS (arg0))
11734 return CONSTRUCTOR_ELT (arg0, idx)->value;
11735 return build_zero_cst (type);
11738 vec<constructor_elt, va_gc> *vals;
11739 vec_alloc (vals, n);
11740 for (unsigned i = 0;
11741 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11743 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11745 (arg0, idx + i)->value);
11746 return build_constructor (type, vals);
11748 /* The bitfield references a single constructor element. */
11749 else if (idx + n <= (idx / k + 1) * k)
11751 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11752 return build_zero_cst (type);
11754 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11756 return fold_build3_loc (loc, code, type,
11757 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11758 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11763 /* A bit-field-ref that referenced the full argument can be stripped. */
11764 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11765 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11766 && integer_zerop (op2))
11767 return fold_convert_loc (loc, type, arg0);
11769 /* On constants we can use native encode/interpret to constant
11770 fold (nearly) all BIT_FIELD_REFs. */
11771 if (CONSTANT_CLASS_P (arg0)
11772 && can_native_interpret_type_p (type)
11773 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11774 /* This limitation should not be necessary, we just need to
11775 round this up to mode size. */
11776 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11777 /* Need bit-shifting of the buffer to relax the following. */
11778 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11780 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11781 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11782 unsigned HOST_WIDE_INT clen;
11783 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11784 /* ??? We cannot tell native_encode_expr to start at
11785 some random byte only. So limit us to a reasonable amount
11789 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11790 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11792 && len * BITS_PER_UNIT >= bitpos + bitsize)
11794 tree v = native_interpret_expr (type,
11795 b + bitpos / BITS_PER_UNIT,
11796 bitsize / BITS_PER_UNIT);
11806 /* For integers we can decompose the FMA if possible. */
11807 if (TREE_CODE (arg0) == INTEGER_CST
11808 && TREE_CODE (arg1) == INTEGER_CST)
11809 return fold_build2_loc (loc, PLUS_EXPR, type,
11810 const_binop (MULT_EXPR, arg0, arg1), arg2);
11811 if (integer_zerop (arg2))
11812 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11814 return fold_fma (loc, type, arg0, arg1, arg2);
11816 case VEC_PERM_EXPR:
11817 if (TREE_CODE (arg2) == VECTOR_CST)
11819 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11820 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11821 unsigned char *sel2 = sel + nelts;
11822 bool need_mask_canon = false;
11823 bool need_mask_canon2 = false;
11824 bool all_in_vec0 = true;
11825 bool all_in_vec1 = true;
11826 bool maybe_identity = true;
11827 bool single_arg = (op0 == op1);
11828 bool changed = false;
11830 mask2 = 2 * nelts - 1;
11831 mask = single_arg ? (nelts - 1) : mask2;
11832 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11833 for (i = 0; i < nelts; i++)
11835 tree val = VECTOR_CST_ELT (arg2, i);
11836 if (TREE_CODE (val) != INTEGER_CST)
11839 /* Make sure that the perm value is in an acceptable
11842 need_mask_canon |= wi::gtu_p (t, mask);
11843 need_mask_canon2 |= wi::gtu_p (t, mask2);
11844 sel[i] = t.to_uhwi () & mask;
11845 sel2[i] = t.to_uhwi () & mask2;
11847 if (sel[i] < nelts)
11848 all_in_vec1 = false;
11850 all_in_vec0 = false;
11852 if ((sel[i] & (nelts-1)) != i)
11853 maybe_identity = false;
11856 if (maybe_identity)
11866 else if (all_in_vec1)
11869 for (i = 0; i < nelts; i++)
11871 need_mask_canon = true;
11874 if ((TREE_CODE (op0) == VECTOR_CST
11875 || TREE_CODE (op0) == CONSTRUCTOR)
11876 && (TREE_CODE (op1) == VECTOR_CST
11877 || TREE_CODE (op1) == CONSTRUCTOR))
11879 tree t = fold_vec_perm (type, op0, op1, sel);
11880 if (t != NULL_TREE)
11884 if (op0 == op1 && !single_arg)
11887 /* Some targets are deficient and fail to expand a single
11888 argument permutation while still allowing an equivalent
11889 2-argument version. */
11890 if (need_mask_canon && arg2 == op2
11891 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11892 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11894 need_mask_canon = need_mask_canon2;
11898 if (need_mask_canon && arg2 == op2)
11900 tree *tsel = XALLOCAVEC (tree, nelts);
11901 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11902 for (i = 0; i < nelts; i++)
11903 tsel[i] = build_int_cst (eltype, sel[i]);
11904 op2 = build_vector (TREE_TYPE (arg2), tsel);
11909 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11915 } /* switch (code) */
11918 /* Perform constant folding and related simplification of EXPR.
11919 The related simplifications include x*1 => x, x*0 => 0, etc.,
11920 and application of the associative law.
11921 NOP_EXPR conversions may be removed freely (as long as we
11922 are careful not to change the type of the overall expression).
11923 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11924 but we can constant-fold them if they have constant operands. */
11926 #ifdef ENABLE_FOLD_CHECKING
11927 # define fold(x) fold_1 (x)
11928 static tree fold_1 (tree);
11934 const tree t = expr;
11935 enum tree_code code = TREE_CODE (t);
11936 enum tree_code_class kind = TREE_CODE_CLASS (code);
11938 location_t loc = EXPR_LOCATION (expr);
11940 /* Return right away if a constant. */
11941 if (kind == tcc_constant)
11944 /* CALL_EXPR-like objects with variable numbers of operands are
11945 treated specially. */
11946 if (kind == tcc_vl_exp)
11948 if (code == CALL_EXPR)
11950 tem = fold_call_expr (loc, expr, false);
11951 return tem ? tem : expr;
11956 if (IS_EXPR_CODE_CLASS (kind))
11958 tree type = TREE_TYPE (t);
11959 tree op0, op1, op2;
11961 switch (TREE_CODE_LENGTH (code))
11964 op0 = TREE_OPERAND (t, 0);
11965 tem = fold_unary_loc (loc, code, type, op0);
11966 return tem ? tem : expr;
11968 op0 = TREE_OPERAND (t, 0);
11969 op1 = TREE_OPERAND (t, 1);
11970 tem = fold_binary_loc (loc, code, type, op0, op1);
11971 return tem ? tem : expr;
11973 op0 = TREE_OPERAND (t, 0);
11974 op1 = TREE_OPERAND (t, 1);
11975 op2 = TREE_OPERAND (t, 2);
11976 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11977 return tem ? tem : expr;
11987 tree op0 = TREE_OPERAND (t, 0);
11988 tree op1 = TREE_OPERAND (t, 1);
11990 if (TREE_CODE (op1) == INTEGER_CST
11991 && TREE_CODE (op0) == CONSTRUCTOR
11992 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11994 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
11995 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
11996 unsigned HOST_WIDE_INT begin = 0;
11998 /* Find a matching index by means of a binary search. */
11999 while (begin != end)
12001 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12002 tree index = (*elts)[middle].index;
12004 if (TREE_CODE (index) == INTEGER_CST
12005 && tree_int_cst_lt (index, op1))
12006 begin = middle + 1;
12007 else if (TREE_CODE (index) == INTEGER_CST
12008 && tree_int_cst_lt (op1, index))
12010 else if (TREE_CODE (index) == RANGE_EXPR
12011 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12012 begin = middle + 1;
12013 else if (TREE_CODE (index) == RANGE_EXPR
12014 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
12017 return (*elts)[middle].value;
12024 /* Return a VECTOR_CST if possible. */
12027 tree type = TREE_TYPE (t);
12028 if (TREE_CODE (type) != VECTOR_TYPE)
12031 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
12032 unsigned HOST_WIDE_INT idx, pos = 0;
12035 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12037 if (!CONSTANT_CLASS_P (value))
12039 if (TREE_CODE (value) == VECTOR_CST)
12041 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12042 vec[pos++] = VECTOR_CST_ELT (value, i);
12045 vec[pos++] = value;
12047 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12048 vec[pos] = build_zero_cst (TREE_TYPE (type));
12050 return build_vector (type, vec);
12054 return fold (DECL_INITIAL (t));
12058 } /* switch (code) */
12061 #ifdef ENABLE_FOLD_CHECKING
12064 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12065 hash_table<nofree_ptr_hash<const tree_node> > *);
12066 static void fold_check_failed (const_tree, const_tree);
12067 void print_fold_checksum (const_tree);
12069 /* When --enable-checking=fold, compute a digest of expr before
12070 and after actual fold call to see if fold did not accidentally
12071 change original expr. */
12077 struct md5_ctx ctx;
12078 unsigned char checksum_before[16], checksum_after[16];
12079 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12081 md5_init_ctx (&ctx);
12082 fold_checksum_tree (expr, &ctx, &ht);
12083 md5_finish_ctx (&ctx, checksum_before);
12086 ret = fold_1 (expr);
12088 md5_init_ctx (&ctx);
12089 fold_checksum_tree (expr, &ctx, &ht);
12090 md5_finish_ctx (&ctx, checksum_after);
12092 if (memcmp (checksum_before, checksum_after, 16))
12093 fold_check_failed (expr, ret);
12099 print_fold_checksum (const_tree expr)
12101 struct md5_ctx ctx;
12102 unsigned char checksum[16], cnt;
12103 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12105 md5_init_ctx (&ctx);
12106 fold_checksum_tree (expr, &ctx, &ht);
12107 md5_finish_ctx (&ctx, checksum);
12108 for (cnt = 0; cnt < 16; ++cnt)
12109 fprintf (stderr, "%02x", checksum[cnt]);
12110 putc ('\n', stderr);
12114 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12116 internal_error ("fold check: original tree changed by fold");
12120 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12121 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12123 const tree_node **slot;
12124 enum tree_code code;
12125 union tree_node buf;
12131 slot = ht->find_slot (expr, INSERT);
12135 code = TREE_CODE (expr);
12136 if (TREE_CODE_CLASS (code) == tcc_declaration
12137 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12139 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12140 memcpy ((char *) &buf, expr, tree_size (expr));
12141 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12142 buf.decl_with_vis.symtab_node = NULL;
12143 expr = (tree) &buf;
12145 else if (TREE_CODE_CLASS (code) == tcc_type
12146 && (TYPE_POINTER_TO (expr)
12147 || TYPE_REFERENCE_TO (expr)
12148 || TYPE_CACHED_VALUES_P (expr)
12149 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12150 || TYPE_NEXT_VARIANT (expr)))
12152 /* Allow these fields to be modified. */
12154 memcpy ((char *) &buf, expr, tree_size (expr));
12155 expr = tmp = (tree) &buf;
12156 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12157 TYPE_POINTER_TO (tmp) = NULL;
12158 TYPE_REFERENCE_TO (tmp) = NULL;
12159 TYPE_NEXT_VARIANT (tmp) = NULL;
12160 if (TYPE_CACHED_VALUES_P (tmp))
12162 TYPE_CACHED_VALUES_P (tmp) = 0;
12163 TYPE_CACHED_VALUES (tmp) = NULL;
12166 md5_process_bytes (expr, tree_size (expr), ctx);
12167 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12168 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12169 if (TREE_CODE_CLASS (code) != tcc_type
12170 && TREE_CODE_CLASS (code) != tcc_declaration
12171 && code != TREE_LIST
12172 && code != SSA_NAME
12173 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12174 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12175 switch (TREE_CODE_CLASS (code))
12181 md5_process_bytes (TREE_STRING_POINTER (expr),
12182 TREE_STRING_LENGTH (expr), ctx);
12185 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12186 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12189 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12190 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12196 case tcc_exceptional:
12200 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12201 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12202 expr = TREE_CHAIN (expr);
12203 goto recursive_label;
12206 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12207 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12213 case tcc_expression:
12214 case tcc_reference:
12215 case tcc_comparison:
12218 case tcc_statement:
12220 len = TREE_OPERAND_LENGTH (expr);
12221 for (i = 0; i < len; ++i)
12222 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12224 case tcc_declaration:
12225 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12226 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12227 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12229 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12230 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12231 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12232 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12233 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12236 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12238 if (TREE_CODE (expr) == FUNCTION_DECL)
12240 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12241 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12243 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12247 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12248 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12249 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12250 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12251 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12252 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12253 if (INTEGRAL_TYPE_P (expr)
12254 || SCALAR_FLOAT_TYPE_P (expr))
12256 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12257 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12259 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12260 if (TREE_CODE (expr) == RECORD_TYPE
12261 || TREE_CODE (expr) == UNION_TYPE
12262 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12263 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12264 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12271 /* Helper function for outputting the checksum of a tree T. When
12272 debugging with gdb, you can "define mynext" to be "next" followed
12273 by "call debug_fold_checksum (op0)", then just trace down till the
12276 DEBUG_FUNCTION void
12277 debug_fold_checksum (const_tree t)
12280 unsigned char checksum[16];
12281 struct md5_ctx ctx;
12282 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12284 md5_init_ctx (&ctx);
12285 fold_checksum_tree (t, &ctx, &ht);
12286 md5_finish_ctx (&ctx, checksum);
12289 for (i = 0; i < 16; i++)
12290 fprintf (stderr, "%d ", checksum[i]);
12292 fprintf (stderr, "\n");
12297 /* Fold a unary tree expression with code CODE of type TYPE with an
12298 operand OP0. LOC is the location of the resulting expression.
12299 Return a folded expression if successful. Otherwise, return a tree
12300 expression with code CODE of type TYPE with an operand OP0. */
12303 fold_build1_stat_loc (location_t loc,
12304 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12307 #ifdef ENABLE_FOLD_CHECKING
12308 unsigned char checksum_before[16], checksum_after[16];
12309 struct md5_ctx ctx;
12310 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12312 md5_init_ctx (&ctx);
12313 fold_checksum_tree (op0, &ctx, &ht);
12314 md5_finish_ctx (&ctx, checksum_before);
12318 tem = fold_unary_loc (loc, code, type, op0);
12320 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12322 #ifdef ENABLE_FOLD_CHECKING
12323 md5_init_ctx (&ctx);
12324 fold_checksum_tree (op0, &ctx, &ht);
12325 md5_finish_ctx (&ctx, checksum_after);
12327 if (memcmp (checksum_before, checksum_after, 16))
12328 fold_check_failed (op0, tem);
12333 /* Fold a binary tree expression with code CODE of type TYPE with
12334 operands OP0 and OP1. LOC is the location of the resulting
12335 expression. Return a folded expression if successful. Otherwise,
12336 return a tree expression with code CODE of type TYPE with operands
12340 fold_build2_stat_loc (location_t loc,
12341 enum tree_code code, tree type, tree op0, tree op1
12345 #ifdef ENABLE_FOLD_CHECKING
12346 unsigned char checksum_before_op0[16],
12347 checksum_before_op1[16],
12348 checksum_after_op0[16],
12349 checksum_after_op1[16];
12350 struct md5_ctx ctx;
12351 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12353 md5_init_ctx (&ctx);
12354 fold_checksum_tree (op0, &ctx, &ht);
12355 md5_finish_ctx (&ctx, checksum_before_op0);
12358 md5_init_ctx (&ctx);
12359 fold_checksum_tree (op1, &ctx, &ht);
12360 md5_finish_ctx (&ctx, checksum_before_op1);
12364 tem = fold_binary_loc (loc, code, type, op0, op1);
12366 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12368 #ifdef ENABLE_FOLD_CHECKING
12369 md5_init_ctx (&ctx);
12370 fold_checksum_tree (op0, &ctx, &ht);
12371 md5_finish_ctx (&ctx, checksum_after_op0);
12374 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12375 fold_check_failed (op0, tem);
12377 md5_init_ctx (&ctx);
12378 fold_checksum_tree (op1, &ctx, &ht);
12379 md5_finish_ctx (&ctx, checksum_after_op1);
12381 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12382 fold_check_failed (op1, tem);
12387 /* Fold a ternary tree expression with code CODE of type TYPE with
12388 operands OP0, OP1, and OP2. Return a folded expression if
12389 successful. Otherwise, return a tree expression with code CODE of
12390 type TYPE with operands OP0, OP1, and OP2. */
12393 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12394 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12397 #ifdef ENABLE_FOLD_CHECKING
12398 unsigned char checksum_before_op0[16],
12399 checksum_before_op1[16],
12400 checksum_before_op2[16],
12401 checksum_after_op0[16],
12402 checksum_after_op1[16],
12403 checksum_after_op2[16];
12404 struct md5_ctx ctx;
12405 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12407 md5_init_ctx (&ctx);
12408 fold_checksum_tree (op0, &ctx, &ht);
12409 md5_finish_ctx (&ctx, checksum_before_op0);
12412 md5_init_ctx (&ctx);
12413 fold_checksum_tree (op1, &ctx, &ht);
12414 md5_finish_ctx (&ctx, checksum_before_op1);
12417 md5_init_ctx (&ctx);
12418 fold_checksum_tree (op2, &ctx, &ht);
12419 md5_finish_ctx (&ctx, checksum_before_op2);
12423 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12424 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12426 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12428 #ifdef ENABLE_FOLD_CHECKING
12429 md5_init_ctx (&ctx);
12430 fold_checksum_tree (op0, &ctx, &ht);
12431 md5_finish_ctx (&ctx, checksum_after_op0);
12434 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12435 fold_check_failed (op0, tem);
12437 md5_init_ctx (&ctx);
12438 fold_checksum_tree (op1, &ctx, &ht);
12439 md5_finish_ctx (&ctx, checksum_after_op1);
12442 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12443 fold_check_failed (op1, tem);
12445 md5_init_ctx (&ctx);
12446 fold_checksum_tree (op2, &ctx, &ht);
12447 md5_finish_ctx (&ctx, checksum_after_op2);
12449 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12450 fold_check_failed (op2, tem);
12455 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12456 arguments in ARGARRAY, and a null static chain.
12457 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12458 of type TYPE from the given operands as constructed by build_call_array. */
12461 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12462 int nargs, tree *argarray)
12465 #ifdef ENABLE_FOLD_CHECKING
12466 unsigned char checksum_before_fn[16],
12467 checksum_before_arglist[16],
12468 checksum_after_fn[16],
12469 checksum_after_arglist[16];
12470 struct md5_ctx ctx;
12471 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12474 md5_init_ctx (&ctx);
12475 fold_checksum_tree (fn, &ctx, &ht);
12476 md5_finish_ctx (&ctx, checksum_before_fn);
12479 md5_init_ctx (&ctx);
12480 for (i = 0; i < nargs; i++)
12481 fold_checksum_tree (argarray[i], &ctx, &ht);
12482 md5_finish_ctx (&ctx, checksum_before_arglist);
12486 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12488 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12490 #ifdef ENABLE_FOLD_CHECKING
12491 md5_init_ctx (&ctx);
12492 fold_checksum_tree (fn, &ctx, &ht);
12493 md5_finish_ctx (&ctx, checksum_after_fn);
12496 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12497 fold_check_failed (fn, tem);
12499 md5_init_ctx (&ctx);
12500 for (i = 0; i < nargs; i++)
12501 fold_checksum_tree (argarray[i], &ctx, &ht);
12502 md5_finish_ctx (&ctx, checksum_after_arglist);
12504 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12505 fold_check_failed (NULL_TREE, tem);
12510 /* Perform constant folding and related simplification of initializer
12511 expression EXPR. These behave identically to "fold_buildN" but ignore
12512 potential run-time traps and exceptions that fold must preserve. */
12514 #define START_FOLD_INIT \
12515 int saved_signaling_nans = flag_signaling_nans;\
12516 int saved_trapping_math = flag_trapping_math;\
12517 int saved_rounding_math = flag_rounding_math;\
12518 int saved_trapv = flag_trapv;\
12519 int saved_folding_initializer = folding_initializer;\
12520 flag_signaling_nans = 0;\
12521 flag_trapping_math = 0;\
12522 flag_rounding_math = 0;\
12524 folding_initializer = 1;
12526 #define END_FOLD_INIT \
12527 flag_signaling_nans = saved_signaling_nans;\
12528 flag_trapping_math = saved_trapping_math;\
12529 flag_rounding_math = saved_rounding_math;\
12530 flag_trapv = saved_trapv;\
12531 folding_initializer = saved_folding_initializer;
12534 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12535 tree type, tree op)
12540 result = fold_build1_loc (loc, code, type, op);
12547 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12548 tree type, tree op0, tree op1)
12553 result = fold_build2_loc (loc, code, type, op0, op1);
12560 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12561 int nargs, tree *argarray)
12566 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12572 #undef START_FOLD_INIT
12573 #undef END_FOLD_INIT
12575 /* Determine if first argument is a multiple of second argument. Return 0 if
12576 it is not, or we cannot easily determined it to be.
12578 An example of the sort of thing we care about (at this point; this routine
12579 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12580 fold cases do now) is discovering that
12582 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12588 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12590 This code also handles discovering that
12592 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12594 is a multiple of 8 so we don't have to worry about dealing with a
12595 possible remainder.
12597 Note that we *look* inside a SAVE_EXPR only to determine how it was
12598 calculated; it is not safe for fold to do much of anything else with the
12599 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12600 at run time. For example, the latter example above *cannot* be implemented
12601 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12602 evaluation time of the original SAVE_EXPR is not necessarily the same at
12603 the time the new expression is evaluated. The only optimization of this
12604 sort that would be valid is changing
12606 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12610 SAVE_EXPR (I) * SAVE_EXPR (J)
12612 (where the same SAVE_EXPR (J) is used in the original and the
12613 transformed version). */
12616 multiple_of_p (tree type, const_tree top, const_tree bottom)
12618 if (operand_equal_p (top, bottom, 0))
12621 if (TREE_CODE (type) != INTEGER_TYPE)
12624 switch (TREE_CODE (top))
12627 /* Bitwise and provides a power of two multiple. If the mask is
12628 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12629 if (!integer_pow2p (bottom))
12634 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12635 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12639 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12640 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12643 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12647 op1 = TREE_OPERAND (top, 1);
12648 /* const_binop may not detect overflow correctly,
12649 so check for it explicitly here. */
12650 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12651 && 0 != (t1 = fold_convert (type,
12652 const_binop (LSHIFT_EXPR,
12655 && !TREE_OVERFLOW (t1))
12656 return multiple_of_p (type, t1, bottom);
12661 /* Can't handle conversions from non-integral or wider integral type. */
12662 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12663 || (TYPE_PRECISION (type)
12664 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12667 /* .. fall through ... */
12670 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12673 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12674 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12677 if (TREE_CODE (bottom) != INTEGER_CST
12678 || integer_zerop (bottom)
12679 || (TYPE_UNSIGNED (type)
12680 && (tree_int_cst_sgn (top) < 0
12681 || tree_int_cst_sgn (bottom) < 0)))
12683 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12691 #define tree_expr_nonnegative_warnv_p(X, Y) \
12692 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12694 #define RECURSE(X) \
12695 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12697 /* Return true if CODE or TYPE is known to be non-negative. */
12700 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12702 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12703 && truth_value_p (code))
12704 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12705 have a signed:1 type (where the value is -1 and 0). */
12710 /* Return true if (CODE OP0) is known to be non-negative. If the return
12711 value is based on the assumption that signed overflow is undefined,
12712 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12713 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12716 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12717 bool *strict_overflow_p, int depth)
12719 if (TYPE_UNSIGNED (type))
12725 /* We can't return 1 if flag_wrapv is set because
12726 ABS_EXPR<INT_MIN> = INT_MIN. */
12727 if (!ANY_INTEGRAL_TYPE_P (type))
12729 if (TYPE_OVERFLOW_UNDEFINED (type))
12731 *strict_overflow_p = true;
12736 case NON_LVALUE_EXPR:
12738 case FIX_TRUNC_EXPR:
12739 return RECURSE (op0);
12743 tree inner_type = TREE_TYPE (op0);
12744 tree outer_type = type;
12746 if (TREE_CODE (outer_type) == REAL_TYPE)
12748 if (TREE_CODE (inner_type) == REAL_TYPE)
12749 return RECURSE (op0);
12750 if (INTEGRAL_TYPE_P (inner_type))
12752 if (TYPE_UNSIGNED (inner_type))
12754 return RECURSE (op0);
12757 else if (INTEGRAL_TYPE_P (outer_type))
12759 if (TREE_CODE (inner_type) == REAL_TYPE)
12760 return RECURSE (op0);
12761 if (INTEGRAL_TYPE_P (inner_type))
12762 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12763 && TYPE_UNSIGNED (inner_type);
12769 return tree_simple_nonnegative_warnv_p (code, type);
12772 /* We don't know sign of `t', so be conservative and return false. */
12776 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12777 value is based on the assumption that signed overflow is undefined,
12778 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12779 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12782 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12783 tree op1, bool *strict_overflow_p,
12786 if (TYPE_UNSIGNED (type))
12791 case POINTER_PLUS_EXPR:
12793 if (FLOAT_TYPE_P (type))
12794 return RECURSE (op0) && RECURSE (op1);
12796 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12797 both unsigned and at least 2 bits shorter than the result. */
12798 if (TREE_CODE (type) == INTEGER_TYPE
12799 && TREE_CODE (op0) == NOP_EXPR
12800 && TREE_CODE (op1) == NOP_EXPR)
12802 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12803 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12804 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12805 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12807 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12808 TYPE_PRECISION (inner2)) + 1;
12809 return prec < TYPE_PRECISION (type);
12815 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12817 /* x * x is always non-negative for floating point x
12818 or without overflow. */
12819 if (operand_equal_p (op0, op1, 0)
12820 || (RECURSE (op0) && RECURSE (op1)))
12822 if (ANY_INTEGRAL_TYPE_P (type)
12823 && TYPE_OVERFLOW_UNDEFINED (type))
12824 *strict_overflow_p = true;
12829 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12830 both unsigned and their total bits is shorter than the result. */
12831 if (TREE_CODE (type) == INTEGER_TYPE
12832 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12833 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12835 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12836 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12838 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12839 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12842 bool unsigned0 = TYPE_UNSIGNED (inner0);
12843 bool unsigned1 = TYPE_UNSIGNED (inner1);
12845 if (TREE_CODE (op0) == INTEGER_CST)
12846 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12848 if (TREE_CODE (op1) == INTEGER_CST)
12849 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12851 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12852 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12854 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12855 ? tree_int_cst_min_precision (op0, UNSIGNED)
12856 : TYPE_PRECISION (inner0);
12858 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12859 ? tree_int_cst_min_precision (op1, UNSIGNED)
12860 : TYPE_PRECISION (inner1);
12862 return precision0 + precision1 < TYPE_PRECISION (type);
12869 return RECURSE (op0) || RECURSE (op1);
12875 case TRUNC_DIV_EXPR:
12876 case CEIL_DIV_EXPR:
12877 case FLOOR_DIV_EXPR:
12878 case ROUND_DIV_EXPR:
12879 return RECURSE (op0) && RECURSE (op1);
12881 case TRUNC_MOD_EXPR:
12882 return RECURSE (op0);
12884 case FLOOR_MOD_EXPR:
12885 return RECURSE (op1);
12887 case CEIL_MOD_EXPR:
12888 case ROUND_MOD_EXPR:
12890 return tree_simple_nonnegative_warnv_p (code, type);
12893 /* We don't know sign of `t', so be conservative and return false. */
12897 /* Return true if T is known to be non-negative. If the return
12898 value is based on the assumption that signed overflow is undefined,
12899 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12900 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12903 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12905 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12908 switch (TREE_CODE (t))
12911 return tree_int_cst_sgn (t) >= 0;
12914 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12917 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12920 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12923 /* Limit the depth of recursion to avoid quadratic behavior.
12924 This is expected to catch almost all occurrences in practice.
12925 If this code misses important cases that unbounded recursion
12926 would not, passes that need this information could be revised
12927 to provide it through dataflow propagation. */
12928 return (!name_registered_for_update_p (t)
12929 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12930 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12931 strict_overflow_p, depth));
12934 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12938 /* Return true if T is known to be non-negative. If the return
12939 value is based on the assumption that signed overflow is undefined,
12940 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12941 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12944 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
12945 bool *strict_overflow_p, int depth)
12947 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12948 switch (DECL_FUNCTION_CODE (fndecl))
12950 CASE_FLT_FN (BUILT_IN_ACOS):
12951 CASE_FLT_FN (BUILT_IN_ACOSH):
12952 CASE_FLT_FN (BUILT_IN_CABS):
12953 CASE_FLT_FN (BUILT_IN_COSH):
12954 CASE_FLT_FN (BUILT_IN_ERFC):
12955 CASE_FLT_FN (BUILT_IN_EXP):
12956 CASE_FLT_FN (BUILT_IN_EXP10):
12957 CASE_FLT_FN (BUILT_IN_EXP2):
12958 CASE_FLT_FN (BUILT_IN_FABS):
12959 CASE_FLT_FN (BUILT_IN_FDIM):
12960 CASE_FLT_FN (BUILT_IN_HYPOT):
12961 CASE_FLT_FN (BUILT_IN_POW10):
12962 CASE_INT_FN (BUILT_IN_FFS):
12963 CASE_INT_FN (BUILT_IN_PARITY):
12964 CASE_INT_FN (BUILT_IN_POPCOUNT):
12965 CASE_INT_FN (BUILT_IN_CLZ):
12966 CASE_INT_FN (BUILT_IN_CLRSB):
12967 case BUILT_IN_BSWAP32:
12968 case BUILT_IN_BSWAP64:
12972 CASE_FLT_FN (BUILT_IN_SQRT):
12973 /* sqrt(-0.0) is -0.0. */
12974 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12976 return RECURSE (arg0);
12978 CASE_FLT_FN (BUILT_IN_ASINH):
12979 CASE_FLT_FN (BUILT_IN_ATAN):
12980 CASE_FLT_FN (BUILT_IN_ATANH):
12981 CASE_FLT_FN (BUILT_IN_CBRT):
12982 CASE_FLT_FN (BUILT_IN_CEIL):
12983 CASE_FLT_FN (BUILT_IN_ERF):
12984 CASE_FLT_FN (BUILT_IN_EXPM1):
12985 CASE_FLT_FN (BUILT_IN_FLOOR):
12986 CASE_FLT_FN (BUILT_IN_FMOD):
12987 CASE_FLT_FN (BUILT_IN_FREXP):
12988 CASE_FLT_FN (BUILT_IN_ICEIL):
12989 CASE_FLT_FN (BUILT_IN_IFLOOR):
12990 CASE_FLT_FN (BUILT_IN_IRINT):
12991 CASE_FLT_FN (BUILT_IN_IROUND):
12992 CASE_FLT_FN (BUILT_IN_LCEIL):
12993 CASE_FLT_FN (BUILT_IN_LDEXP):
12994 CASE_FLT_FN (BUILT_IN_LFLOOR):
12995 CASE_FLT_FN (BUILT_IN_LLCEIL):
12996 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12997 CASE_FLT_FN (BUILT_IN_LLRINT):
12998 CASE_FLT_FN (BUILT_IN_LLROUND):
12999 CASE_FLT_FN (BUILT_IN_LRINT):
13000 CASE_FLT_FN (BUILT_IN_LROUND):
13001 CASE_FLT_FN (BUILT_IN_MODF):
13002 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13003 CASE_FLT_FN (BUILT_IN_RINT):
13004 CASE_FLT_FN (BUILT_IN_ROUND):
13005 CASE_FLT_FN (BUILT_IN_SCALB):
13006 CASE_FLT_FN (BUILT_IN_SCALBLN):
13007 CASE_FLT_FN (BUILT_IN_SCALBN):
13008 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13009 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13010 CASE_FLT_FN (BUILT_IN_SINH):
13011 CASE_FLT_FN (BUILT_IN_TANH):
13012 CASE_FLT_FN (BUILT_IN_TRUNC):
13013 /* True if the 1st argument is nonnegative. */
13014 return RECURSE (arg0);
13016 CASE_FLT_FN (BUILT_IN_FMAX):
13017 /* True if the 1st OR 2nd arguments are nonnegative. */
13018 return RECURSE (arg0) || RECURSE (arg1);
13020 CASE_FLT_FN (BUILT_IN_FMIN):
13021 /* True if the 1st AND 2nd arguments are nonnegative. */
13022 return RECURSE (arg0) && RECURSE (arg1);
13024 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13025 /* True if the 2nd argument is nonnegative. */
13026 return RECURSE (arg1);
13028 CASE_FLT_FN (BUILT_IN_POWI):
13029 /* True if the 1st argument is nonnegative or the second
13030 argument is an even integer. */
13031 if (TREE_CODE (arg1) == INTEGER_CST
13032 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13034 return RECURSE (arg0);
13036 CASE_FLT_FN (BUILT_IN_POW):
13037 /* True if the 1st argument is nonnegative or the second
13038 argument is an even integer valued real. */
13039 if (TREE_CODE (arg1) == REAL_CST)
13044 c = TREE_REAL_CST (arg1);
13045 n = real_to_integer (&c);
13048 REAL_VALUE_TYPE cint;
13049 real_from_integer (&cint, VOIDmode, n, SIGNED);
13050 if (real_identical (&c, &cint))
13054 return RECURSE (arg0);
13059 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13062 /* Return true if T is known to be non-negative. If the return
13063 value is based on the assumption that signed overflow is undefined,
13064 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13065 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13068 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13070 enum tree_code code = TREE_CODE (t);
13071 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13078 tree temp = TARGET_EXPR_SLOT (t);
13079 t = TARGET_EXPR_INITIAL (t);
13081 /* If the initializer is non-void, then it's a normal expression
13082 that will be assigned to the slot. */
13083 if (!VOID_TYPE_P (t))
13084 return RECURSE (t);
13086 /* Otherwise, the initializer sets the slot in some way. One common
13087 way is an assignment statement at the end of the initializer. */
13090 if (TREE_CODE (t) == BIND_EXPR)
13091 t = expr_last (BIND_EXPR_BODY (t));
13092 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13093 || TREE_CODE (t) == TRY_CATCH_EXPR)
13094 t = expr_last (TREE_OPERAND (t, 0));
13095 else if (TREE_CODE (t) == STATEMENT_LIST)
13100 if (TREE_CODE (t) == MODIFY_EXPR
13101 && TREE_OPERAND (t, 0) == temp)
13102 return RECURSE (TREE_OPERAND (t, 1));
13109 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13110 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13112 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13113 get_callee_fndecl (t),
13116 strict_overflow_p, depth);
13118 case COMPOUND_EXPR:
13120 return RECURSE (TREE_OPERAND (t, 1));
13123 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13126 return RECURSE (TREE_OPERAND (t, 0));
13129 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13134 #undef tree_expr_nonnegative_warnv_p
13136 /* Return true if T is known to be non-negative. If the return
13137 value is based on the assumption that signed overflow is undefined,
13138 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13139 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13142 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13144 enum tree_code code;
13145 if (t == error_mark_node)
13148 code = TREE_CODE (t);
13149 switch (TREE_CODE_CLASS (code))
13152 case tcc_comparison:
13153 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13155 TREE_OPERAND (t, 0),
13156 TREE_OPERAND (t, 1),
13157 strict_overflow_p, depth);
13160 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13162 TREE_OPERAND (t, 0),
13163 strict_overflow_p, depth);
13166 case tcc_declaration:
13167 case tcc_reference:
13168 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13176 case TRUTH_AND_EXPR:
13177 case TRUTH_OR_EXPR:
13178 case TRUTH_XOR_EXPR:
13179 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13181 TREE_OPERAND (t, 0),
13182 TREE_OPERAND (t, 1),
13183 strict_overflow_p, depth);
13184 case TRUTH_NOT_EXPR:
13185 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13187 TREE_OPERAND (t, 0),
13188 strict_overflow_p, depth);
13195 case WITH_SIZE_EXPR:
13197 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13200 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13204 /* Return true if `t' is known to be non-negative. Handle warnings
13205 about undefined signed overflow. */
13208 tree_expr_nonnegative_p (tree t)
13210 bool ret, strict_overflow_p;
13212 strict_overflow_p = false;
13213 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13214 if (strict_overflow_p)
13215 fold_overflow_warning (("assuming signed overflow does not occur when "
13216 "determining that expression is always "
13218 WARN_STRICT_OVERFLOW_MISC);
13223 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13224 For floating point we further ensure that T is not denormal.
13225 Similar logic is present in nonzero_address in rtlanal.h.
13227 If the return value is based on the assumption that signed overflow
13228 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13229 change *STRICT_OVERFLOW_P. */
13232 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13233 bool *strict_overflow_p)
13238 return tree_expr_nonzero_warnv_p (op0,
13239 strict_overflow_p);
13243 tree inner_type = TREE_TYPE (op0);
13244 tree outer_type = type;
13246 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13247 && tree_expr_nonzero_warnv_p (op0,
13248 strict_overflow_p));
13252 case NON_LVALUE_EXPR:
13253 return tree_expr_nonzero_warnv_p (op0,
13254 strict_overflow_p);
13263 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13264 For floating point we further ensure that T is not denormal.
13265 Similar logic is present in nonzero_address in rtlanal.h.
13267 If the return value is based on the assumption that signed overflow
13268 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13269 change *STRICT_OVERFLOW_P. */
13272 tree_binary_nonzero_warnv_p (enum tree_code code,
13275 tree op1, bool *strict_overflow_p)
13277 bool sub_strict_overflow_p;
13280 case POINTER_PLUS_EXPR:
13282 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13284 /* With the presence of negative values it is hard
13285 to say something. */
13286 sub_strict_overflow_p = false;
13287 if (!tree_expr_nonnegative_warnv_p (op0,
13288 &sub_strict_overflow_p)
13289 || !tree_expr_nonnegative_warnv_p (op1,
13290 &sub_strict_overflow_p))
13292 /* One of operands must be positive and the other non-negative. */
13293 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13294 overflows, on a twos-complement machine the sum of two
13295 nonnegative numbers can never be zero. */
13296 return (tree_expr_nonzero_warnv_p (op0,
13298 || tree_expr_nonzero_warnv_p (op1,
13299 strict_overflow_p));
13304 if (TYPE_OVERFLOW_UNDEFINED (type))
13306 if (tree_expr_nonzero_warnv_p (op0,
13308 && tree_expr_nonzero_warnv_p (op1,
13309 strict_overflow_p))
13311 *strict_overflow_p = true;
13318 sub_strict_overflow_p = false;
13319 if (tree_expr_nonzero_warnv_p (op0,
13320 &sub_strict_overflow_p)
13321 && tree_expr_nonzero_warnv_p (op1,
13322 &sub_strict_overflow_p))
13324 if (sub_strict_overflow_p)
13325 *strict_overflow_p = true;
13330 sub_strict_overflow_p = false;
13331 if (tree_expr_nonzero_warnv_p (op0,
13332 &sub_strict_overflow_p))
13334 if (sub_strict_overflow_p)
13335 *strict_overflow_p = true;
13337 /* When both operands are nonzero, then MAX must be too. */
13338 if (tree_expr_nonzero_warnv_p (op1,
13339 strict_overflow_p))
13342 /* MAX where operand 0 is positive is positive. */
13343 return tree_expr_nonnegative_warnv_p (op0,
13344 strict_overflow_p);
13346 /* MAX where operand 1 is positive is positive. */
13347 else if (tree_expr_nonzero_warnv_p (op1,
13348 &sub_strict_overflow_p)
13349 && tree_expr_nonnegative_warnv_p (op1,
13350 &sub_strict_overflow_p))
13352 if (sub_strict_overflow_p)
13353 *strict_overflow_p = true;
13359 return (tree_expr_nonzero_warnv_p (op1,
13361 || tree_expr_nonzero_warnv_p (op0,
13362 strict_overflow_p));
13371 /* Return true when T is an address and is known to be nonzero.
13372 For floating point we further ensure that T is not denormal.
13373 Similar logic is present in nonzero_address in rtlanal.h.
13375 If the return value is based on the assumption that signed overflow
13376 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13377 change *STRICT_OVERFLOW_P. */
13380 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13382 bool sub_strict_overflow_p;
13383 switch (TREE_CODE (t))
13386 return !integer_zerop (t);
13390 tree base = TREE_OPERAND (t, 0);
13392 if (!DECL_P (base))
13393 base = get_base_address (base);
13398 /* For objects in symbol table check if we know they are non-zero.
13399 Don't do anything for variables and functions before symtab is built;
13400 it is quite possible that they will be declared weak later. */
13401 if (DECL_P (base) && decl_in_symtab_p (base))
13403 struct symtab_node *symbol;
13405 symbol = symtab_node::get_create (base);
13407 return symbol->nonzero_address ();
13412 /* Function local objects are never NULL. */
13414 && (DECL_CONTEXT (base)
13415 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13416 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13419 /* Constants are never weak. */
13420 if (CONSTANT_CLASS_P (base))
13427 sub_strict_overflow_p = false;
13428 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13429 &sub_strict_overflow_p)
13430 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13431 &sub_strict_overflow_p))
13433 if (sub_strict_overflow_p)
13434 *strict_overflow_p = true;
13445 #define integer_valued_real_p(X) \
13446 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13448 #define RECURSE(X) \
13449 ((integer_valued_real_p) (X, depth + 1))
13451 /* Return true if the floating point result of (CODE OP0) has an
13452 integer value. We also allow +Inf, -Inf and NaN to be considered
13455 DEPTH is the current nesting depth of the query. */
13458 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13466 return RECURSE (op0);
13470 tree type = TREE_TYPE (op0);
13471 if (TREE_CODE (type) == INTEGER_TYPE)
13473 if (TREE_CODE (type) == REAL_TYPE)
13474 return RECURSE (op0);
13484 /* Return true if the floating point result of (CODE OP0 OP1) has an
13485 integer value. We also allow +Inf, -Inf and NaN to be considered
13488 DEPTH is the current nesting depth of the query. */
13491 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13500 return RECURSE (op0) && RECURSE (op1);
13508 /* Return true if the floating point result of calling FNDECL with arguments
13509 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13510 considered integer values. If FNDECL takes fewer than 2 arguments,
13511 the remaining ARGn are null.
13513 DEPTH is the current nesting depth of the query. */
13516 integer_valued_real_call_p (tree fndecl, tree arg0, tree arg1, int depth)
13518 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13519 switch (DECL_FUNCTION_CODE (fndecl))
13521 CASE_FLT_FN (BUILT_IN_CEIL):
13522 CASE_FLT_FN (BUILT_IN_FLOOR):
13523 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13524 CASE_FLT_FN (BUILT_IN_RINT):
13525 CASE_FLT_FN (BUILT_IN_ROUND):
13526 CASE_FLT_FN (BUILT_IN_TRUNC):
13529 CASE_FLT_FN (BUILT_IN_FMIN):
13530 CASE_FLT_FN (BUILT_IN_FMAX):
13531 return RECURSE (arg0) && RECURSE (arg1);
13539 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13540 has an integer value. We also allow +Inf, -Inf and NaN to be
13541 considered integer values.
13543 DEPTH is the current nesting depth of the query. */
13546 integer_valued_real_single_p (tree t, int depth)
13548 switch (TREE_CODE (t))
13551 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13554 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13557 /* Limit the depth of recursion to avoid quadratic behavior.
13558 This is expected to catch almost all occurrences in practice.
13559 If this code misses important cases that unbounded recursion
13560 would not, passes that need this information could be revised
13561 to provide it through dataflow propagation. */
13562 return (!name_registered_for_update_p (t)
13563 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13564 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13573 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13574 has an integer value. We also allow +Inf, -Inf and NaN to be
13575 considered integer values.
13577 DEPTH is the current nesting depth of the query. */
13580 integer_valued_real_invalid_p (tree t, int depth)
13582 switch (TREE_CODE (t))
13584 case COMPOUND_EXPR:
13587 return RECURSE (TREE_OPERAND (t, 1));
13590 return RECURSE (TREE_OPERAND (t, 0));
13599 #undef integer_valued_real_p
13601 /* Return true if the floating point expression T has an integer value.
13602 We also allow +Inf, -Inf and NaN to be considered integer values.
13604 DEPTH is the current nesting depth of the query. */
13607 integer_valued_real_p (tree t, int depth)
13609 if (t == error_mark_node)
13612 tree_code code = TREE_CODE (t);
13613 switch (TREE_CODE_CLASS (code))
13616 case tcc_comparison:
13617 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13618 TREE_OPERAND (t, 1), depth);
13621 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13624 case tcc_declaration:
13625 case tcc_reference:
13626 return integer_valued_real_single_p (t, depth);
13636 return integer_valued_real_single_p (t, depth);
13640 tree arg0 = (call_expr_nargs (t) > 0
13641 ? CALL_EXPR_ARG (t, 0)
13643 tree arg1 = (call_expr_nargs (t) > 1
13644 ? CALL_EXPR_ARG (t, 1)
13646 return integer_valued_real_call_p (get_callee_fndecl (t),
13647 arg0, arg1, depth);
13651 return integer_valued_real_invalid_p (t, depth);
13655 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13656 attempt to fold the expression to a constant without modifying TYPE,
13659 If the expression could be simplified to a constant, then return
13660 the constant. If the expression would not be simplified to a
13661 constant, then return NULL_TREE. */
13664 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13666 tree tem = fold_binary (code, type, op0, op1);
13667 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13670 /* Given the components of a unary expression CODE, TYPE and OP0,
13671 attempt to fold the expression to a constant without modifying
13674 If the expression could be simplified to a constant, then return
13675 the constant. If the expression would not be simplified to a
13676 constant, then return NULL_TREE. */
13679 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13681 tree tem = fold_unary (code, type, op0);
13682 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13685 /* If EXP represents referencing an element in a constant string
13686 (either via pointer arithmetic or array indexing), return the
13687 tree representing the value accessed, otherwise return NULL. */
13690 fold_read_from_constant_string (tree exp)
13692 if ((TREE_CODE (exp) == INDIRECT_REF
13693 || TREE_CODE (exp) == ARRAY_REF)
13694 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13696 tree exp1 = TREE_OPERAND (exp, 0);
13699 location_t loc = EXPR_LOCATION (exp);
13701 if (TREE_CODE (exp) == INDIRECT_REF)
13702 string = string_constant (exp1, &index);
13705 tree low_bound = array_ref_low_bound (exp);
13706 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13708 /* Optimize the special-case of a zero lower bound.
13710 We convert the low_bound to sizetype to avoid some problems
13711 with constant folding. (E.g. suppose the lower bound is 1,
13712 and its mode is QI. Without the conversion,l (ARRAY
13713 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13714 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13715 if (! integer_zerop (low_bound))
13716 index = size_diffop_loc (loc, index,
13717 fold_convert_loc (loc, sizetype, low_bound));
13723 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13724 && TREE_CODE (string) == STRING_CST
13725 && TREE_CODE (index) == INTEGER_CST
13726 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13727 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13729 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13730 return build_int_cst_type (TREE_TYPE (exp),
13731 (TREE_STRING_POINTER (string)
13732 [TREE_INT_CST_LOW (index)]));
13737 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13738 an integer constant, real, or fixed-point constant.
13740 TYPE is the type of the result. */
13743 fold_negate_const (tree arg0, tree type)
13745 tree t = NULL_TREE;
13747 switch (TREE_CODE (arg0))
13752 wide_int val = wi::neg (arg0, &overflow);
13753 t = force_fit_type (type, val, 1,
13754 (overflow | TREE_OVERFLOW (arg0))
13755 && !TYPE_UNSIGNED (type));
13760 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13765 FIXED_VALUE_TYPE f;
13766 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13767 &(TREE_FIXED_CST (arg0)), NULL,
13768 TYPE_SATURATING (type));
13769 t = build_fixed (type, f);
13770 /* Propagate overflow flags. */
13771 if (overflow_p | TREE_OVERFLOW (arg0))
13772 TREE_OVERFLOW (t) = 1;
13777 gcc_unreachable ();
13783 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13784 an integer constant or real constant.
13786 TYPE is the type of the result. */
13789 fold_abs_const (tree arg0, tree type)
13791 tree t = NULL_TREE;
13793 switch (TREE_CODE (arg0))
13797 /* If the value is unsigned or non-negative, then the absolute value
13798 is the same as the ordinary value. */
13799 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13802 /* If the value is negative, then the absolute value is
13807 wide_int val = wi::neg (arg0, &overflow);
13808 t = force_fit_type (type, val, -1,
13809 overflow | TREE_OVERFLOW (arg0));
13815 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13816 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13822 gcc_unreachable ();
13828 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13829 constant. TYPE is the type of the result. */
13832 fold_not_const (const_tree arg0, tree type)
13834 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13836 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13839 /* Given CODE, a relational operator, the target type, TYPE and two
13840 constant operands OP0 and OP1, return the result of the
13841 relational operation. If the result is not a compile time
13842 constant, then return NULL_TREE. */
13845 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13847 int result, invert;
13849 /* From here on, the only cases we handle are when the result is
13850 known to be a constant. */
13852 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13854 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13855 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13857 /* Handle the cases where either operand is a NaN. */
13858 if (real_isnan (c0) || real_isnan (c1))
13868 case UNORDERED_EXPR:
13882 if (flag_trapping_math)
13888 gcc_unreachable ();
13891 return constant_boolean_node (result, type);
13894 return constant_boolean_node (real_compare (code, c0, c1), type);
13897 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13899 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13900 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13901 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13904 /* Handle equality/inequality of complex constants. */
13905 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13907 tree rcond = fold_relational_const (code, type,
13908 TREE_REALPART (op0),
13909 TREE_REALPART (op1));
13910 tree icond = fold_relational_const (code, type,
13911 TREE_IMAGPART (op0),
13912 TREE_IMAGPART (op1));
13913 if (code == EQ_EXPR)
13914 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13915 else if (code == NE_EXPR)
13916 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13921 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13923 unsigned count = VECTOR_CST_NELTS (op0);
13924 tree *elts = XALLOCAVEC (tree, count);
13925 gcc_assert (VECTOR_CST_NELTS (op1) == count
13926 && TYPE_VECTOR_SUBPARTS (type) == count);
13928 for (unsigned i = 0; i < count; i++)
13930 tree elem_type = TREE_TYPE (type);
13931 tree elem0 = VECTOR_CST_ELT (op0, i);
13932 tree elem1 = VECTOR_CST_ELT (op1, i);
13934 tree tem = fold_relational_const (code, elem_type,
13937 if (tem == NULL_TREE)
13940 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13943 return build_vector (type, elts);
13946 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13948 To compute GT, swap the arguments and do LT.
13949 To compute GE, do LT and invert the result.
13950 To compute LE, swap the arguments, do LT and invert the result.
13951 To compute NE, do EQ and invert the result.
13953 Therefore, the code below must handle only EQ and LT. */
13955 if (code == LE_EXPR || code == GT_EXPR)
13957 std::swap (op0, op1);
13958 code = swap_tree_comparison (code);
13961 /* Note that it is safe to invert for real values here because we
13962 have already handled the one case that it matters. */
13965 if (code == NE_EXPR || code == GE_EXPR)
13968 code = invert_tree_comparison (code, false);
13971 /* Compute a result for LT or EQ if args permit;
13972 Otherwise return T. */
13973 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13975 if (code == EQ_EXPR)
13976 result = tree_int_cst_equal (op0, op1);
13978 result = tree_int_cst_lt (op0, op1);
13985 return constant_boolean_node (result, type);
13988 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13989 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13993 fold_build_cleanup_point_expr (tree type, tree expr)
13995 /* If the expression does not have side effects then we don't have to wrap
13996 it with a cleanup point expression. */
13997 if (!TREE_SIDE_EFFECTS (expr))
14000 /* If the expression is a return, check to see if the expression inside the
14001 return has no side effects or the right hand side of the modify expression
14002 inside the return. If either don't have side effects set we don't need to
14003 wrap the expression in a cleanup point expression. Note we don't check the
14004 left hand side of the modify because it should always be a return decl. */
14005 if (TREE_CODE (expr) == RETURN_EXPR)
14007 tree op = TREE_OPERAND (expr, 0);
14008 if (!op || !TREE_SIDE_EFFECTS (op))
14010 op = TREE_OPERAND (op, 1);
14011 if (!TREE_SIDE_EFFECTS (op))
14015 return build1 (CLEANUP_POINT_EXPR, type, expr);
14018 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14019 of an indirection through OP0, or NULL_TREE if no simplification is
14023 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14029 subtype = TREE_TYPE (sub);
14030 if (!POINTER_TYPE_P (subtype))
14033 if (TREE_CODE (sub) == ADDR_EXPR)
14035 tree op = TREE_OPERAND (sub, 0);
14036 tree optype = TREE_TYPE (op);
14037 /* *&CONST_DECL -> to the value of the const decl. */
14038 if (TREE_CODE (op) == CONST_DECL)
14039 return DECL_INITIAL (op);
14040 /* *&p => p; make sure to handle *&"str"[cst] here. */
14041 if (type == optype)
14043 tree fop = fold_read_from_constant_string (op);
14049 /* *(foo *)&fooarray => fooarray[0] */
14050 else if (TREE_CODE (optype) == ARRAY_TYPE
14051 && type == TREE_TYPE (optype)
14052 && (!in_gimple_form
14053 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14055 tree type_domain = TYPE_DOMAIN (optype);
14056 tree min_val = size_zero_node;
14057 if (type_domain && TYPE_MIN_VALUE (type_domain))
14058 min_val = TYPE_MIN_VALUE (type_domain);
14060 && TREE_CODE (min_val) != INTEGER_CST)
14062 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14063 NULL_TREE, NULL_TREE);
14065 /* *(foo *)&complexfoo => __real__ complexfoo */
14066 else if (TREE_CODE (optype) == COMPLEX_TYPE
14067 && type == TREE_TYPE (optype))
14068 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14069 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14070 else if (TREE_CODE (optype) == VECTOR_TYPE
14071 && type == TREE_TYPE (optype))
14073 tree part_width = TYPE_SIZE (type);
14074 tree index = bitsize_int (0);
14075 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14079 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14080 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14082 tree op00 = TREE_OPERAND (sub, 0);
14083 tree op01 = TREE_OPERAND (sub, 1);
14086 if (TREE_CODE (op00) == ADDR_EXPR)
14089 op00 = TREE_OPERAND (op00, 0);
14090 op00type = TREE_TYPE (op00);
14092 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14093 if (TREE_CODE (op00type) == VECTOR_TYPE
14094 && type == TREE_TYPE (op00type))
14096 HOST_WIDE_INT offset = tree_to_shwi (op01);
14097 tree part_width = TYPE_SIZE (type);
14098 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14099 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14100 tree index = bitsize_int (indexi);
14102 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14103 return fold_build3_loc (loc,
14104 BIT_FIELD_REF, type, op00,
14105 part_width, index);
14108 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14109 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14110 && type == TREE_TYPE (op00type))
14112 tree size = TYPE_SIZE_UNIT (type);
14113 if (tree_int_cst_equal (size, op01))
14114 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14116 /* ((foo *)&fooarray)[1] => fooarray[1] */
14117 else if (TREE_CODE (op00type) == ARRAY_TYPE
14118 && type == TREE_TYPE (op00type))
14120 tree type_domain = TYPE_DOMAIN (op00type);
14121 tree min_val = size_zero_node;
14122 if (type_domain && TYPE_MIN_VALUE (type_domain))
14123 min_val = TYPE_MIN_VALUE (type_domain);
14124 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14125 TYPE_SIZE_UNIT (type));
14126 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14127 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14128 NULL_TREE, NULL_TREE);
14133 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14134 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14135 && type == TREE_TYPE (TREE_TYPE (subtype))
14136 && (!in_gimple_form
14137 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14140 tree min_val = size_zero_node;
14141 sub = build_fold_indirect_ref_loc (loc, sub);
14142 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14143 if (type_domain && TYPE_MIN_VALUE (type_domain))
14144 min_val = TYPE_MIN_VALUE (type_domain);
14146 && TREE_CODE (min_val) != INTEGER_CST)
14148 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14155 /* Builds an expression for an indirection through T, simplifying some
14159 build_fold_indirect_ref_loc (location_t loc, tree t)
14161 tree type = TREE_TYPE (TREE_TYPE (t));
14162 tree sub = fold_indirect_ref_1 (loc, type, t);
14167 return build1_loc (loc, INDIRECT_REF, type, t);
14170 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14173 fold_indirect_ref_loc (location_t loc, tree t)
14175 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14183 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14184 whose result is ignored. The type of the returned tree need not be
14185 the same as the original expression. */
14188 fold_ignored_result (tree t)
14190 if (!TREE_SIDE_EFFECTS (t))
14191 return integer_zero_node;
14194 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14197 t = TREE_OPERAND (t, 0);
14201 case tcc_comparison:
14202 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14203 t = TREE_OPERAND (t, 0);
14204 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14205 t = TREE_OPERAND (t, 1);
14210 case tcc_expression:
14211 switch (TREE_CODE (t))
14213 case COMPOUND_EXPR:
14214 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14216 t = TREE_OPERAND (t, 0);
14220 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14221 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14223 t = TREE_OPERAND (t, 0);
14236 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14239 round_up_loc (location_t loc, tree value, unsigned int divisor)
14241 tree div = NULL_TREE;
14246 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14247 have to do anything. Only do this when we are not given a const,
14248 because in that case, this check is more expensive than just
14250 if (TREE_CODE (value) != INTEGER_CST)
14252 div = build_int_cst (TREE_TYPE (value), divisor);
14254 if (multiple_of_p (TREE_TYPE (value), value, div))
14258 /* If divisor is a power of two, simplify this to bit manipulation. */
14259 if (divisor == (divisor & -divisor))
14261 if (TREE_CODE (value) == INTEGER_CST)
14263 wide_int val = value;
14266 if ((val & (divisor - 1)) == 0)
14269 overflow_p = TREE_OVERFLOW (value);
14270 val += divisor - 1;
14271 val &= - (int) divisor;
14275 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14281 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14282 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14283 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14284 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14290 div = build_int_cst (TREE_TYPE (value), divisor);
14291 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14292 value = size_binop_loc (loc, MULT_EXPR, value, div);
14298 /* Likewise, but round down. */
14301 round_down_loc (location_t loc, tree value, int divisor)
14303 tree div = NULL_TREE;
14305 gcc_assert (divisor > 0);
14309 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14310 have to do anything. Only do this when we are not given a const,
14311 because in that case, this check is more expensive than just
14313 if (TREE_CODE (value) != INTEGER_CST)
14315 div = build_int_cst (TREE_TYPE (value), divisor);
14317 if (multiple_of_p (TREE_TYPE (value), value, div))
14321 /* If divisor is a power of two, simplify this to bit manipulation. */
14322 if (divisor == (divisor & -divisor))
14326 t = build_int_cst (TREE_TYPE (value), -divisor);
14327 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14332 div = build_int_cst (TREE_TYPE (value), divisor);
14333 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14334 value = size_binop_loc (loc, MULT_EXPR, value, div);
14340 /* Returns the pointer to the base of the object addressed by EXP and
14341 extracts the information about the offset of the access, storing it
14342 to PBITPOS and POFFSET. */
14345 split_address_to_core_and_offset (tree exp,
14346 HOST_WIDE_INT *pbitpos, tree *poffset)
14350 int unsignedp, volatilep;
14351 HOST_WIDE_INT bitsize;
14352 location_t loc = EXPR_LOCATION (exp);
14354 if (TREE_CODE (exp) == ADDR_EXPR)
14356 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14357 poffset, &mode, &unsignedp, &volatilep,
14359 core = build_fold_addr_expr_loc (loc, core);
14365 *poffset = NULL_TREE;
14371 /* Returns true if addresses of E1 and E2 differ by a constant, false
14372 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14375 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14378 HOST_WIDE_INT bitpos1, bitpos2;
14379 tree toffset1, toffset2, tdiff, type;
14381 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14382 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14384 if (bitpos1 % BITS_PER_UNIT != 0
14385 || bitpos2 % BITS_PER_UNIT != 0
14386 || !operand_equal_p (core1, core2, 0))
14389 if (toffset1 && toffset2)
14391 type = TREE_TYPE (toffset1);
14392 if (type != TREE_TYPE (toffset2))
14393 toffset2 = fold_convert (type, toffset2);
14395 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14396 if (!cst_and_fits_in_hwi (tdiff))
14399 *diff = int_cst_value (tdiff);
14401 else if (toffset1 || toffset2)
14403 /* If only one of the offsets is non-constant, the difference cannot
14410 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14414 /* Return OFF converted to a pointer offset type suitable as offset for
14415 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14417 convert_to_ptrofftype_loc (location_t loc, tree off)
14419 return fold_convert_loc (loc, sizetype, off);
14422 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14424 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14426 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14427 ptr, convert_to_ptrofftype_loc (loc, off));
14430 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14432 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14434 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14435 ptr, size_int (off));