1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
56 #include "tree-iterator.h"
58 #include "insn-config.h"
68 #include "diagnostic-core.h"
70 #include "langhooks.h"
72 #include "internal-fn.h"
78 #include "generic-match.h"
79 #include "optabs-query.h"
80 #include "gimple-fold.h"
82 #include "tree-ssa-operands.h"
83 #include "tree-into-ssa.h"
85 #ifndef LOAD_EXTEND_OP
86 #define LOAD_EXTEND_OP(M) UNKNOWN
89 /* Nonzero if we are folding constants inside an initializer; zero
/* NOTE(review): the comment above is cut mid-sentence in this chunk;
   presumably it continues "otherwise" -- confirm against the full file.
   Flag toggled by callers to request initializer-context folding.  */
91 int folding_initializer = 0;
93 /* The following constants represent a bit based encoding of GCC's
94 comparison operators. This encoding simplifies transformations
95 on relational comparison operators, such as AND and OR. */
96 enum comparison_code {
115 static bool negate_mathfn_p (enum built_in_function);
116 static bool negate_expr_p (tree);
117 static tree negate_expr (tree);
118 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
119 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
120 static enum comparison_code comparison_to_compcode (enum tree_code);
121 static enum tree_code compcode_to_comparison (enum comparison_code);
122 static int operand_equal_for_comparison_p (tree, tree, tree);
123 static int twoval_comparison_p (tree, tree *, tree *, int *);
124 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
125 static tree make_bit_field_ref (location_t, tree, tree,
126 HOST_WIDE_INT, HOST_WIDE_INT, int);
127 static tree optimize_bit_field_compare (location_t, enum tree_code,
129 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
131 machine_mode *, int *, int *,
133 static int simple_operand_p (const_tree);
134 static bool simple_operand_p_2 (tree);
135 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
136 static tree range_predecessor (tree);
137 static tree range_successor (tree);
138 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
139 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
140 static tree unextend (tree, int, int, tree);
141 static tree optimize_minmax_comparison (location_t, enum tree_code,
143 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
144 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
145 static tree fold_binary_op_with_conditional_arg (location_t,
146 enum tree_code, tree,
149 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
150 static bool reorder_operands_p (const_tree, const_tree);
151 static tree fold_negate_const (tree, tree);
152 static tree fold_not_const (const_tree, tree);
153 static tree fold_relational_const (enum tree_code, tree, tree, tree);
154 static tree fold_convert_const (enum tree_code, tree, tree);
155 static tree fold_view_convert_expr (tree, tree);
156 static bool vec_cst_ctor_to_array (tree, tree *);
159 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
160 Otherwise, return LOC. */
/* NOTE(review): the return-type line and braces of this definition are
   not visible in this chunk (lossy sample); code kept byte-identical.  */
163 expr_location_or (tree t, location_t loc)
165 location_t tloc = EXPR_LOCATION (t);
/* Fall back to the caller-supplied LOC only when T carries no location.  */
166 return tloc == UNKNOWN_LOCATION ? loc : tloc;
169 /* Similar to protected_set_expr_location, but never modify x in place,
170 if location can and needs to be set, unshare it. */
173 protected_set_expr_location_unshare (tree x, location_t loc)
/* Only touch nodes that can carry a location, whose location actually
   differs, and that are not SAVE_EXPR/TARGET_EXPR/BIND_EXPR (those are
   deliberately shared and must not be copied).  */
175 if (CAN_HAVE_LOCATION_P (x)
176 && EXPR_LOCATION (x) != loc
177 && !(TREE_CODE (x) == SAVE_EXPR
178 || TREE_CODE (x) == TARGET_EXPR
179 || TREE_CODE (x) == BIND_EXPR))
/* NOTE(review): the lines between the condition and the SET below are
   missing in this chunk -- presumably the copy_node() unsharing step and
   the function's return; confirm against the full file.  */
182 SET_EXPR_LOCATION (x, loc);
187 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
188 division and returns the quotient. Otherwise returns
/* NOTE(review): doc comment and body are truncated in this chunk; the
   missing lines presumably declare the QUO out-variable passed to
   wi::multiple_of_p and return NULL_TREE on failure -- confirm.  */
192 div_if_zero_remainder (const_tree arg1, const_tree arg2)
196 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
/* Quotient is materialized in ARG1's type.  */
198 return wide_int_to_tree (TREE_TYPE (arg1), quo);
203 /* This is nonzero if we should defer warnings about undefined
204 overflow. This facility exists because these warnings are a
205 special case. The code to estimate loop iterations does not want
206 to issue any warnings, since it works with expressions which do not
207 occur in user code. Various bits of cleanup code call fold(), but
208 only use the result if it has certain characteristics (e.g., is a
209 constant); that code only wants to issue a warning if the result is
/* Nesting counter: incremented by fold_defer_overflow_warnings and
   decremented by fold_undefer_overflow_warnings below.  */
212 static int fold_deferring_overflow_warnings;
214 /* If a warning about undefined overflow is deferred, this is the
215 warning. Note that this may cause us to turn two warnings into
216 one, but that is fine since it is sufficient to only give one
217 warning per expression. */
/* Pending warning text (a gmsgid), or NULL when nothing is pending.  */
219 static const char* fold_deferred_overflow_warning;
221 /* If a warning about undefined overflow is deferred, this is the
222 level at which the warning should be emitted. */
224 static enum warn_strict_overflow_code fold_deferred_overflow_code;
226 /* Start deferring overflow warnings. We could use a stack here to
227 permit nested calls, but at present it is not necessary. */
230 fold_defer_overflow_warnings (void)
/* Paired with fold_undefer_overflow_warnings; nests via the counter.  */
232 ++fold_deferring_overflow_warnings;
235 /* Stop deferring overflow warnings. If there is a pending warning,
236 and ISSUE is true, then issue the warning if appropriate. STMT is
237 the statement with which the warning should be associated (used for
238 location information); STMT may be NULL. CODE is the level of the
239 warning--a warn_strict_overflow_code value. This function will use
240 the smaller of CODE and the deferred code when deciding whether to
241 issue the warning. CODE may be zero to mean to always use the
/* NOTE(review): this chunk is a lossy sample -- local declarations
   (warnmsg, locus), braces and several early `return`s are missing
   between the numbered lines.  Code kept byte-identical.  */
245 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
250 gcc_assert (fold_deferring_overflow_warnings > 0);
251 --fold_deferring_overflow_warnings;
/* Still nested: just lower the recorded level if CODE is stricter,
   and keep the warning pending for the outermost undefer.  */
252 if (fold_deferring_overflow_warnings > 0)
254 if (fold_deferred_overflow_warning != NULL
256 && code < (int) fold_deferred_overflow_code)
257 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
/* Outermost undefer: consume the pending warning.  */
261 warnmsg = fold_deferred_overflow_warning;
262 fold_deferred_overflow_warning = NULL;
264 if (!issue || warnmsg == NULL)
/* Suppress if the associated statement already carries a no-warning flag.  */
267 if (gimple_no_warning_p (stmt))
270 /* Use the smallest code level when deciding to issue the
272 if (code == 0 || code > (int) fold_deferred_overflow_code)
273 code = fold_deferred_overflow_code;
275 if (!issue_strict_overflow_warning (code))
/* Prefer the statement's location; fall back to input_location.  */
279 locus = input_location;
281 locus = gimple_location (stmt);
282 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
285 /* Stop deferring overflow warnings, ignoring any deferred
/* Convenience wrapper: undefer with issue==false, discarding any
   pending warning.  */
289 fold_undefer_and_ignore_overflow_warnings (void)
291 fold_undefer_overflow_warnings (false, NULL, 0);
294 /* Whether we are deferring overflow warnings. */
297 fold_deferring_overflow_warnings_p (void)
/* True while at least one fold_defer_overflow_warnings is active.  */
299 return fold_deferring_overflow_warnings > 0;
302 /* This is called when we fold something based on the fact that signed
303 overflow is undefined. */
/* If deferring, record GMSGID/WC -- keeping the pending warning with the
   smallest (strictest) code; otherwise emit immediately when
   -Wstrict-overflow is active at level WC.  */
306 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
308 if (fold_deferring_overflow_warnings > 0)
310 if (fold_deferred_overflow_warning == NULL
311 || wc < fold_deferred_overflow_code)
313 fold_deferred_overflow_warning = gmsgid;
314 fold_deferred_overflow_code = wc;
317 else if (issue_strict_overflow_warning (wc))
318 warning (OPT_Wstrict_overflow, gmsgid);
321 /* Return true if the built-in mathematical function specified by CODE
322 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): lossy sample -- the `switch (code)` header, the
   `return true;` shared by the first case group, the default case and
   the closing brace are missing between the numbered lines.  */
325 negate_mathfn_p (enum built_in_function code)
329 CASE_FLT_FN (BUILT_IN_ASIN):
330 CASE_FLT_FN (BUILT_IN_ASINH):
331 CASE_FLT_FN (BUILT_IN_ATAN):
332 CASE_FLT_FN (BUILT_IN_ATANH):
333 CASE_FLT_FN (BUILT_IN_CASIN):
334 CASE_FLT_FN (BUILT_IN_CASINH):
335 CASE_FLT_FN (BUILT_IN_CATAN):
336 CASE_FLT_FN (BUILT_IN_CATANH):
337 CASE_FLT_FN (BUILT_IN_CBRT):
338 CASE_FLT_FN (BUILT_IN_CPROJ):
339 CASE_FLT_FN (BUILT_IN_CSIN):
340 CASE_FLT_FN (BUILT_IN_CSINH):
341 CASE_FLT_FN (BUILT_IN_CTAN):
342 CASE_FLT_FN (BUILT_IN_CTANH):
343 CASE_FLT_FN (BUILT_IN_ERF):
344 CASE_FLT_FN (BUILT_IN_LLROUND):
345 CASE_FLT_FN (BUILT_IN_LROUND):
346 CASE_FLT_FN (BUILT_IN_ROUND):
347 CASE_FLT_FN (BUILT_IN_SIN):
348 CASE_FLT_FN (BUILT_IN_SINH):
349 CASE_FLT_FN (BUILT_IN_TAN):
350 CASE_FLT_FN (BUILT_IN_TANH):
351 CASE_FLT_FN (BUILT_IN_TRUNC):
/* The rint family is only odd when the rounding mode cannot be observed.  */
354 CASE_FLT_FN (BUILT_IN_LLRINT):
355 CASE_FLT_FN (BUILT_IN_LRINT):
356 CASE_FLT_FN (BUILT_IN_NEARBYINT):
357 CASE_FLT_FN (BUILT_IN_RINT):
358 return !flag_rounding_math;
366 /* Check whether we may negate an integer constant T without causing
370 may_negate_without_overflow_p (const_tree t)
374 gcc_assert (TREE_CODE (t) == INTEGER_CST);
376 type = TREE_TYPE (t);
/* NOTE(review): the unsigned branch's return is missing in this chunk;
   presumably unsigned constants are always safely negatable -- confirm.  */
377 if (TYPE_UNSIGNED (type))
/* Signed: only the most-negative value (just the sign bit set) overflows.  */
380 return !wi::only_sign_bit_p (t);
383 /* Determine whether an expression T can be cheaply negated using
384 the function negate_expr without introducing undefined overflow. */
/* NOTE(review): lossy sample -- every `case` label, several braces and
   early returns are missing between the numbered lines.  The comments
   added below describe only what the visible lines show; code is kept
   byte-identical.  */
387 negate_expr_p (tree t)
394 type = TREE_TYPE (t);
397 switch (TREE_CODE (t))
/* (presumably INTEGER_CST) wrapping arithmetic can always negate.  */
400 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
403 /* Check that -CST will not overflow type. */
404 return may_negate_without_overflow_p (t);
406 return (INTEGRAL_TYPE_P (type)
407 && TYPE_OVERFLOW_WRAPS (type))
413 return !TYPE_OVERFLOW_SANITIZED (type);
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Complex constants: both parts must be negatable.  */
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
426 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
/* Vector constants: every element must be negatable.  */
429 int count = TYPE_VECTOR_SUBPARTS (type), i;
431 for (i = 0; i < count; i++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
443 return negate_expr_p (TREE_OPERAND (t, 0));
/* (presumably PLUS_EXPR) cannot distribute the negation when
   sign-dependent rounding or signed zeros are honored.  */
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
447 || HONOR_SIGNED_ZEROS (element_mode (type)))
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
460 && !HONOR_SIGNED_ZEROS (element_mode (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* (presumably MULT/RDIV) negating either operand suffices.  */
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
479 /* In general we can't negate A / B, because if A is INT_MIN and
480 B is 1, we may turn this into INT_MIN / -1 which is undefined
481 and actually traps on some architectures. But if overflow is
482 undefined, we can negate, because - (INT_MIN / 1) is an
484 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
486 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
488 /* If overflow is undefined then we have to be careful because
489 we ask whether it's ok to associate the negate with the
490 division which is not ok for example for
491 -((a - b) / c) where (-(a - b)) / c may invoke undefined
492 overflow because of negating INT_MIN. So do not use
493 negate_expr_p here but open-code the two important cases. */
494 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
495 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
496 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
499 else if (negate_expr_p (TREE_OPERAND (t, 0)))
501 return negate_expr_p (TREE_OPERAND (t, 1));
504 /* Negate -((double)float) as (double)(-float). */
505 if (TREE_CODE (type) == REAL_TYPE)
507 tree tem = strip_float_extensions (t);
509 return negate_expr_p (tem);
514 /* Negate -f(x) as f(-x). */
515 if (negate_mathfn_p (builtin_mathfn_code (t)))
516 return negate_expr_p (CALL_EXPR_ARG (t, 0));
520 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
521 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
523 tree op1 = TREE_OPERAND (t, 1);
524 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
535 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
536 simplification is possible.
537 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): lossy sample -- `case` labels, braces, `break`s and the
   final `return NULL_TREE;` are missing between the numbered lines.
   Comments below describe only what the visible lines show; the case
   guesses mirror negate_expr_p above and must be confirmed against the
   full file.  Code kept byte-identical.  */
541 fold_negate_expr (location_t loc, tree t)
543 tree type = TREE_TYPE (t);
546 switch (TREE_CODE (t))
548 /* Convert - (~A) to A + 1. */
550 if (INTEGRAL_TYPE_P (type))
551 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
552 build_one_cst (type));
/* (presumably INTEGER_CST) fold the negated constant, but only keep it
   when overflow state is acceptable or not sanitized.  */
556 tem = fold_negate_const (t, type);
557 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
558 || (ANY_INTEGRAL_TYPE_P (type)
559 && !TYPE_OVERFLOW_TRAPS (type)
560 && TYPE_OVERFLOW_WRAPS (type))
561 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
566 tem = fold_negate_const (t, type);
570 tem = fold_negate_const (t, type);
/* Complex constant: negate both parts recursively.  */
575 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
576 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
578 return build_complex (type, rpart, ipart);
/* Vector constant: negate element-wise; bail if any element fails.  */
584 int count = TYPE_VECTOR_SUBPARTS (type), i;
585 tree *elts = XALLOCAVEC (tree, count);
587 for (i = 0; i < count; i++)
589 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
590 if (elts[i] == NULL_TREE)
594 return build_vector (type, elts);
598 if (negate_expr_p (t))
599 return fold_build2_loc (loc, COMPLEX_EXPR, type,
600 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
601 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
605 if (negate_expr_p (t))
606 return fold_build1_loc (loc, CONJ_EXPR, type,
607 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
/* (presumably NEGATE_EXPR) - -A -> A, unless UBSan needs the negation.  */
611 if (!TYPE_OVERFLOW_SANITIZED (type))
612 return TREE_OPERAND (t, 0);
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
617 && !HONOR_SIGNED_ZEROS (element_mode (type)))
619 /* -(A + B) -> (-B) - A. */
620 if (negate_expr_p (TREE_OPERAND (t, 1))
621 && reorder_operands_p (TREE_OPERAND (t, 0),
622 TREE_OPERAND (t, 1)))
624 tem = negate_expr (TREE_OPERAND (t, 1));
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 tem, TREE_OPERAND (t, 0));
629 /* -(A + B) -> (-A) - B. */
630 if (negate_expr_p (TREE_OPERAND (t, 0)))
632 tem = negate_expr (TREE_OPERAND (t, 0));
633 return fold_build2_loc (loc, MINUS_EXPR, type,
634 tem, TREE_OPERAND (t, 1));
640 /* - (A - B) -> B - A */
641 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
642 && !HONOR_SIGNED_ZEROS (element_mode (type))
643 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
644 return fold_build2_loc (loc, MINUS_EXPR, type,
645 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
649 if (TYPE_UNSIGNED (type))
/* (presumably MULT/RDIV) push the negation into whichever operand
   accepts it, trying operand 1 first.  */
655 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
657 tem = TREE_OPERAND (t, 1);
658 if (negate_expr_p (tem))
659 return fold_build2_loc (loc, TREE_CODE (t), type,
660 TREE_OPERAND (t, 0), negate_expr (tem));
661 tem = TREE_OPERAND (t, 0);
662 if (negate_expr_p (tem))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (tem), TREE_OPERAND (t, 1));
671 /* In general we can't negate A / B, because if A is INT_MIN and
672 B is 1, we may turn this into INT_MIN / -1 which is undefined
673 and actually traps on some architectures. But if overflow is
674 undefined, we can negate, because - (INT_MIN / 1) is an
676 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
678 const char * const warnmsg = G_("assuming signed overflow does not "
679 "occur when negating a division");
680 tem = TREE_OPERAND (t, 1);
681 if (negate_expr_p (tem))
/* Warn when the transformation relies on undefined signed overflow
   (divisor not a constant, or is the constant one).  */
683 if (INTEGRAL_TYPE_P (type)
684 && (TREE_CODE (tem) != INTEGER_CST
685 || integer_onep (tem)))
686 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
687 return fold_build2_loc (loc, TREE_CODE (t), type,
688 TREE_OPERAND (t, 0), negate_expr (tem));
690 /* If overflow is undefined then we have to be careful because
691 we ask whether it's ok to associate the negate with the
692 division which is not ok for example for
693 -((a - b) / c) where (-(a - b)) / c may invoke undefined
694 overflow because of negating INT_MIN. So do not use
695 negate_expr_p here but open-code the two important cases. */
696 tem = TREE_OPERAND (t, 0);
697 if ((INTEGRAL_TYPE_P (type)
698 && (TREE_CODE (tem) == NEGATE_EXPR
699 || (TREE_CODE (tem) == INTEGER_CST
700 && may_negate_without_overflow_p (tem))))
701 || !INTEGRAL_TYPE_P (type))
702 return fold_build2_loc (loc, TREE_CODE (t), type,
703 negate_expr (tem), TREE_OPERAND (t, 1));
708 /* Convert -((double)float) into (double)(-float). */
709 if (TREE_CODE (type) == REAL_TYPE)
711 tem = strip_float_extensions (t);
712 if (tem != t && negate_expr_p (tem))
713 return fold_convert_loc (loc, type, negate_expr (tem));
718 /* Negate -f(x) as f(-x). */
719 if (negate_mathfn_p (builtin_mathfn_code (t))
720 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
724 fndecl = get_callee_fndecl (t);
725 arg = negate_expr (CALL_EXPR_ARG (t, 0));
726 return build_call_expr_loc (loc, fndecl, 1, arg);
731 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
732 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
734 tree op1 = TREE_OPERAND (t, 1);
/* Only a full arithmetic shift by precision-1 (sign extraction) is safe
   to rewrite by flipping the signedness of the shifted operand.  */
735 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
737 tree ntype = TYPE_UNSIGNED (type)
738 ? signed_type_for (type)
739 : unsigned_type_for (type);
740 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
741 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
742 return fold_convert_loc (loc, type, temp);
754 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
755 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
/* NOTE(review): the function signature, local declarations, NULL check
   and STRIP_SIGN_NOPS presumably sit in the lines missing from this
   chunk -- confirm against the full file.  */
767 loc = EXPR_LOCATION (t);
768 type = TREE_TYPE (t);
/* Try the folding simplifications first; build an explicit NEGATE_EXPR
   only when fold_negate_expr found nothing.  */
771 tem = fold_negate_expr (loc, t);
773 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
774 return fold_convert_loc (loc, type, tem);
777 /* Split a tree IN into a constant, literal and variable parts that could be
778 combined with CODE to make IN. "constant" means an expression with
779 TREE_CONSTANT but that isn't an actual constant. CODE must be a
780 commutative arithmetic operation. Store the constant part into *CONP,
781 the literal in *LITP and return the variable part. If a part isn't
782 present, set it to null. If the tree does not decompose in this way,
783 return the entire tree as the variable part and the other parts as null.
785 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
786 case, we negate an operand that was subtracted. Except if it is a
787 literal for which we use *MINUS_LITP instead.
789 If NEGATE_P is true, we are negating all of IN, again except a literal
790 for which we use *MINUS_LITP instead.
792 If IN is itself a literal or constant, return it as appropriate.
794 Note that we do not guarantee that any of the three values will be the
795 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): lossy sample -- the VAR declaration, out-parameter
   initialization, several branch bodies and the final `return var;` are
   missing between the numbered lines.  Code kept byte-identical.  */
798 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
799 tree *minus_litp, int negate_p)
807 /* Strip any conversions that don't change the machine mode or signedness. */
808 STRIP_SIGN_NOPS (in);
/* Case 1: IN is itself a literal.  */
810 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
811 || TREE_CODE (in) == FIXED_CST)
/* Case 2: IN is a CODE (or associable PLUS/MINUS) binary node.  */
813 else if (TREE_CODE (in) == code
814 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
815 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
816 /* We can associate addition and subtraction together (even
817 though the C standard doesn't say so) for integers because
818 the value is not affected. For reals, the value might be
819 affected, so we can't. */
820 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
821 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
823 tree op0 = TREE_OPERAND (in, 0);
824 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 is effectively subtracted.  */
825 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
826 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
828 /* First see if either of the operands is a literal, then a constant. */
829 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
830 || TREE_CODE (op0) == FIXED_CST)
831 *litp = op0, op0 = 0;
832 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
833 || TREE_CODE (op1) == FIXED_CST)
834 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
836 if (op0 != 0 && TREE_CONSTANT (op0))
837 *conp = op0, op0 = 0;
838 else if (op1 != 0 && TREE_CONSTANT (op1))
839 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
841 /* If we haven't dealt with either operand, this is not a case we can
842 decompose. Otherwise, VAR is either of the ones remaining, if any. */
843 if (op0 != 0 && op1 != 0)
848 var = op1, neg_var_p = neg1_p;
850 /* Now do any needed negations. */
852 *minus_litp = *litp, *litp = 0;
854 *conp = negate_expr (*conp);
856 var = negate_expr (var);
/* Case 3: ~X under PLUS_EXPR is split as (-X) - 1.  */
858 else if (TREE_CODE (in) == BIT_NOT_EXPR
859 && code == PLUS_EXPR)
861 /* -X - 1 is folded to ~X, undo that here. */
862 *minus_litp = build_one_cst (TREE_TYPE (in));
863 var = negate_expr (TREE_OPERAND (in, 0));
/* Case 4: IN is TREE_CONSTANT (but not a literal).  */
865 else if (TREE_CONSTANT (in))
/* (presumably the NEGATE_P fixup) swap litp/minus_litp and negate the
   constant and variable parts.  */
873 *minus_litp = *litp, *litp = 0;
874 else if (*minus_litp)
875 *litp = *minus_litp, *minus_litp = 0;
876 *conp = negate_expr (*conp);
877 var = negate_expr (var);
883 /* Re-associate trees split by the above function. T1 and T2 are
884 either expressions to associate or null. Return the new
885 expression, if any. LOC is the location of the new expression. If
886 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): lossy sample -- the null-argument early returns that
   presumably precede the first `if` are missing from this chunk.  */
889 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
896 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
897 try to fold this since we will have infinite recursion. But do
898 deal with any NEGATE_EXPRs. */
899 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
900 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
902 if (code == PLUS_EXPR)
/* X + (-Y) -> X - Y (either side), built unfolded via build2_loc.  */
904 if (TREE_CODE (t1) == NEGATE_EXPR)
905 return build2_loc (loc, MINUS_EXPR, type,
906 fold_convert_loc (loc, type, t2),
907 fold_convert_loc (loc, type,
908 TREE_OPERAND (t1, 0)));
909 else if (TREE_CODE (t2) == NEGATE_EXPR)
910 return build2_loc (loc, MINUS_EXPR, type,
911 fold_convert_loc (loc, type, t1),
912 fold_convert_loc (loc, type,
913 TREE_OPERAND (t2, 0)));
914 else if (integer_zerop (t2))
915 return fold_convert_loc (loc, type, t1);
917 else if (code == MINUS_EXPR)
919 if (integer_zerop (t2))
920 return fold_convert_loc (loc, type, t1);
/* Recursion-prone shapes: build without folding.  */
923 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
924 fold_convert_loc (loc, type, t2));
/* Safe shapes: let fold simplify the combination.  */
927 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
928 fold_convert_loc (loc, type, t2));
931 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
932 for use in int_const_binop, size_binop and size_diffop. */
935 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
/* Both types must be integral or pointer; the early `return false`s
   following each test are missing in this chunk -- confirm.  */
937 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
939 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
/* Equivalence = same signedness, precision and machine mode.  */
954 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
955 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
956 && TYPE_MODE (type1) == TYPE_MODE (type2);
960 /* Combine two integer constants ARG1 and ARG2 under operation CODE
961 to produce a new constant. Return NULL_TREE if we don't know how
962 to evaluate CODE at compile-time. */
/* NOTE(review): lossy sample -- the OVERFLOWABLE parameter, locals
   (res, t), most `case` labels, `break`s, divide-by-zero guards and the
   final `return t;` are missing between the numbered lines.  Code kept
   byte-identical.  */
965 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
970 tree type = TREE_TYPE (arg1);
971 signop sign = TYPE_SIGN (type);
972 bool overflow = false;
/* Extend/truncate the second operand to ARG1's precision up front so all
   wi:: operations see matching precisions.  */
974 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
975 TYPE_SIGN (TREE_TYPE (parg2)));
980 res = wi::bit_or (arg1, arg2);
984 res = wi::bit_xor (arg1, arg2);
988 res = wi::bit_and (arg1, arg2);
/* Shifts: a negative count flips the direction (missing lines presumably
   negate arg2 and swap RSHIFT/LSHIFT -- confirm).  */
993 if (wi::neg_p (arg2))
996 if (code == RSHIFT_EXPR)
1002 if (code == RSHIFT_EXPR)
1003 /* It's unclear from the C standard whether shifts can overflow.
1004 The following code ignores overflow; perhaps a C standard
1005 interpretation ruling is needed. */
1006 res = wi::rshift (arg1, arg2, sign);
1008 res = wi::lshift (arg1, arg2);
/* Rotates: a negative count flips the rotate direction.  */
1013 if (wi::neg_p (arg2))
1016 if (code == RROTATE_EXPR)
1017 code = LROTATE_EXPR;
1019 code = RROTATE_EXPR;
1022 if (code == RROTATE_EXPR)
1023 res = wi::rrotate (arg1, arg2);
1025 res = wi::lrotate (arg1, arg2);
1029 res = wi::add (arg1, arg2, sign, &overflow);
1033 res = wi::sub (arg1, arg2, sign, &overflow);
1037 res = wi::mul (arg1, arg2, sign, &overflow);
1040 case MULT_HIGHPART_EXPR:
1041 res = wi::mul_high (arg1, arg2, sign);
1044 case TRUNC_DIV_EXPR:
1045 case EXACT_DIV_EXPR:
1048 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1051 case FLOOR_DIV_EXPR:
1054 res = wi::div_floor (arg1, arg2, sign, &overflow);
1060 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1063 case ROUND_DIV_EXPR:
1066 res = wi::div_round (arg1, arg2, sign, &overflow);
1069 case TRUNC_MOD_EXPR:
1072 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1075 case FLOOR_MOD_EXPR:
1078 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1084 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1087 case ROUND_MOD_EXPR:
1090 res = wi::mod_round (arg1, arg2, sign, &overflow);
1094 res = wi::min (arg1, arg2, sign);
1098 res = wi::max (arg1, arg2, sign);
/* Fit the wide result back into TYPE, propagating overflow flags from
   both inputs and from the operation itself.  */
1105 t = force_fit_type (type, res, overflowable,
1106 (((sign == SIGNED || overflowable == -1)
1108 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
/* Public wrapper: combine ARG1 and ARG2 under CODE with overflowable==1
   (allow the result to be marked as overflowed).  */
1114 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1116 return int_const_binop_1 (code, arg1, arg2, 1);
1119 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1120 constant. We assume ARG1 and ARG2 have the same data type, or at least
1121 are the same kind of constant and the same machine mode. Return zero if
1122 combining the constants is not allowed in the current operating mode. */
1125 const_binop (enum tree_code code, tree arg1, tree arg2)
1127 /* Sanity check for the recursive cases. */
1134 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1136 if (code == POINTER_PLUS_EXPR)
1137 return int_const_binop (PLUS_EXPR,
1138 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1140 return int_const_binop (code, arg1, arg2);
1143 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1148 REAL_VALUE_TYPE value;
1149 REAL_VALUE_TYPE result;
1153 /* The following codes are handled by real_arithmetic. */
1168 d1 = TREE_REAL_CST (arg1);
1169 d2 = TREE_REAL_CST (arg2);
1171 type = TREE_TYPE (arg1);
1172 mode = TYPE_MODE (type);
1174 /* Don't perform operation if we honor signaling NaNs and
1175 either operand is a NaN. */
1176 if (HONOR_SNANS (mode)
1177 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1180 /* Don't perform operation if it would raise a division
1181 by zero exception. */
1182 if (code == RDIV_EXPR
1183 && real_equal (&d2, &dconst0)
1184 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1187 /* If either operand is a NaN, just return it. Otherwise, set up
1188 for floating-point trap; we return an overflow. */
1189 if (REAL_VALUE_ISNAN (d1))
1191 else if (REAL_VALUE_ISNAN (d2))
1194 inexact = real_arithmetic (&value, code, &d1, &d2);
1195 real_convert (&result, mode, &value);
1197 /* Don't constant fold this floating point operation if
1198 the result has overflowed and flag_trapping_math. */
1199 if (flag_trapping_math
1200 && MODE_HAS_INFINITIES (mode)
1201 && REAL_VALUE_ISINF (result)
1202 && !REAL_VALUE_ISINF (d1)
1203 && !REAL_VALUE_ISINF (d2))
1206 /* Don't constant fold this floating point operation if the
1207 result may dependent upon the run-time rounding mode and
1208 flag_rounding_math is set, or if GCC's software emulation
1209 is unable to accurately represent the result. */
1210 if ((flag_rounding_math
1211 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1212 && (inexact || !real_identical (&result, &value)))
1215 t = build_real (type, result);
1217 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1221 if (TREE_CODE (arg1) == FIXED_CST)
1223 FIXED_VALUE_TYPE f1;
1224 FIXED_VALUE_TYPE f2;
1225 FIXED_VALUE_TYPE result;
1230 /* The following codes are handled by fixed_arithmetic. */
1236 case TRUNC_DIV_EXPR:
1237 if (TREE_CODE (arg2) != FIXED_CST)
1239 f2 = TREE_FIXED_CST (arg2);
1245 if (TREE_CODE (arg2) != INTEGER_CST)
1248 f2.data.high = w2.elt (1);
1249 f2.data.low = w2.elt (0);
1258 f1 = TREE_FIXED_CST (arg1);
1259 type = TREE_TYPE (arg1);
1260 sat_p = TYPE_SATURATING (type);
1261 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1262 t = build_fixed (type, result);
1263 /* Propagate overflow flags. */
1264 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1265 TREE_OVERFLOW (t) = 1;
1269 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1271 tree type = TREE_TYPE (arg1);
1272 tree r1 = TREE_REALPART (arg1);
1273 tree i1 = TREE_IMAGPART (arg1);
1274 tree r2 = TREE_REALPART (arg2);
1275 tree i2 = TREE_IMAGPART (arg2);
1282 real = const_binop (code, r1, r2);
1283 imag = const_binop (code, i1, i2);
1287 if (COMPLEX_FLOAT_TYPE_P (type))
1288 return do_mpc_arg2 (arg1, arg2, type,
1289 /* do_nonfinite= */ folding_initializer,
1292 real = const_binop (MINUS_EXPR,
1293 const_binop (MULT_EXPR, r1, r2),
1294 const_binop (MULT_EXPR, i1, i2));
1295 imag = const_binop (PLUS_EXPR,
1296 const_binop (MULT_EXPR, r1, i2),
1297 const_binop (MULT_EXPR, i1, r2));
1301 if (COMPLEX_FLOAT_TYPE_P (type))
1302 return do_mpc_arg2 (arg1, arg2, type,
1303 /* do_nonfinite= */ folding_initializer,
1306 case TRUNC_DIV_EXPR:
1308 case FLOOR_DIV_EXPR:
1309 case ROUND_DIV_EXPR:
1310 if (flag_complex_method == 0)
1312 /* Keep this algorithm in sync with
1313 tree-complex.c:expand_complex_div_straight().
1315 Expand complex division to scalars, straightforward algorithm.
1316 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1320 = const_binop (PLUS_EXPR,
1321 const_binop (MULT_EXPR, r2, r2),
1322 const_binop (MULT_EXPR, i2, i2));
1324 = const_binop (PLUS_EXPR,
1325 const_binop (MULT_EXPR, r1, r2),
1326 const_binop (MULT_EXPR, i1, i2));
1328 = const_binop (MINUS_EXPR,
1329 const_binop (MULT_EXPR, i1, r2),
1330 const_binop (MULT_EXPR, r1, i2));
1332 real = const_binop (code, t1, magsquared);
1333 imag = const_binop (code, t2, magsquared);
1337 /* Keep this algorithm in sync with
1338 tree-complex.c:expand_complex_div_wide().
1340 Expand complex division to scalars, modified algorithm to minimize
1341 overflow with wide input ranges. */
1342 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1343 fold_abs_const (r2, TREE_TYPE (type)),
1344 fold_abs_const (i2, TREE_TYPE (type)));
1346 if (integer_nonzerop (compare))
1348 /* In the TRUE branch, we compute
1350 div = (br * ratio) + bi;
1351 tr = (ar * ratio) + ai;
1352 ti = (ai * ratio) - ar;
1355 tree ratio = const_binop (code, r2, i2);
1356 tree div = const_binop (PLUS_EXPR, i2,
1357 const_binop (MULT_EXPR, r2, ratio));
1358 real = const_binop (MULT_EXPR, r1, ratio);
1359 real = const_binop (PLUS_EXPR, real, i1);
1360 real = const_binop (code, real, div);
1362 imag = const_binop (MULT_EXPR, i1, ratio);
1363 imag = const_binop (MINUS_EXPR, imag, r1);
1364 imag = const_binop (code, imag, div);
1368 /* In the FALSE branch, we compute
1370 divisor = (d * ratio) + c;
1371 tr = (b * ratio) + a;
1372 ti = b - (a * ratio);
1375 tree ratio = const_binop (code, i2, r2);
1376 tree div = const_binop (PLUS_EXPR, r2,
1377 const_binop (MULT_EXPR, i2, ratio));
1379 real = const_binop (MULT_EXPR, i1, ratio);
1380 real = const_binop (PLUS_EXPR, real, r1);
1381 real = const_binop (code, real, div);
1383 imag = const_binop (MULT_EXPR, r1, ratio);
1384 imag = const_binop (MINUS_EXPR, i1, imag);
1385 imag = const_binop (code, imag, div);
1395 return build_complex (type, real, imag);
1398 if (TREE_CODE (arg1) == VECTOR_CST
1399 && TREE_CODE (arg2) == VECTOR_CST)
1401 tree type = TREE_TYPE (arg1);
1402 int count = TYPE_VECTOR_SUBPARTS (type), i;
1403 tree *elts = XALLOCAVEC (tree, count);
1405 for (i = 0; i < count; i++)
1407 tree elem1 = VECTOR_CST_ELT (arg1, i);
1408 tree elem2 = VECTOR_CST_ELT (arg2, i);
1410 elts[i] = const_binop (code, elem1, elem2);
1412 /* It is possible that const_binop cannot handle the given
1413 code and return NULL_TREE */
1414 if (elts[i] == NULL_TREE)
1418 return build_vector (type, elts);
1421 /* Shifts allow a scalar offset for a vector. */
1422 if (TREE_CODE (arg1) == VECTOR_CST
1423 && TREE_CODE (arg2) == INTEGER_CST)
1425 tree type = TREE_TYPE (arg1);
1426 int count = TYPE_VECTOR_SUBPARTS (type), i;
1427 tree *elts = XALLOCAVEC (tree, count);
1429 for (i = 0; i < count; i++)
1431 tree elem1 = VECTOR_CST_ELT (arg1, i);
1433 elts[i] = const_binop (code, elem1, arg2);
1435 /* It is possible that const_binop cannot handle the given
1436 code and return NULL_TREE. */
1437 if (elts[i] == NULL_TREE)
1441 return build_vector (type, elts);
1446 /* Overload that adds a TYPE parameter to be able to dispatch
1447 to fold_relational_const. */
/* NOTE(review): this excerpt drops a number of physical lines (the
   return type, braces, and several early returns between the numbered
   lines); confirm the exact control flow against the full
   fold-const.c before editing.  */
1450 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1452 if (TREE_CODE_CLASS (code) == tcc_comparison)
1453 return fold_relational_const (code, type, arg1, arg2);
1455 /* ??? Until we make the const_binop worker take the type of the
1456 result as argument put those cases that need it here. */
/* Presumably the COMPLEX_EXPR case (label dropped in this excerpt):
   two matching scalar constants fold directly into a COMPLEX_CST.  */
1460 if ((TREE_CODE (arg1) == REAL_CST
1461 && TREE_CODE (arg2) == REAL_CST)
1462 || (TREE_CODE (arg1) == INTEGER_CST
1463 && TREE_CODE (arg2) == INTEGER_CST))
1464 return build_complex (type, arg1, arg2);
1467 case VEC_PACK_TRUNC_EXPR:
1468 case VEC_PACK_FIX_TRUNC_EXPR:
/* Pack two NELTS/2-element input vectors into one NELTS-element
   vector, narrowing each element with NOP_EXPR or FIX_TRUNC_EXPR.  */
1470 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1473 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1474 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1475 if (TREE_CODE (arg1) != VECTOR_CST
1476 || TREE_CODE (arg2) != VECTOR_CST)
1479 elts = XALLOCAVEC (tree, nelts);
1480 if (!vec_cst_ctor_to_array (arg1, elts)
1481 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1484 for (i = 0; i < nelts; i++)
1486 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1487 ? NOP_EXPR : FIX_TRUNC_EXPR,
1488 TREE_TYPE (type), elts[i]);
/* Bail out (return dropped in excerpt) if an element did not fold
   to a constant.  */
1489 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1493 return build_vector (type, elts);
1496 case VEC_WIDEN_MULT_LO_EXPR:
1497 case VEC_WIDEN_MULT_HI_EXPR:
1498 case VEC_WIDEN_MULT_EVEN_EXPR:
1499 case VEC_WIDEN_MULT_ODD_EXPR:
/* Widening multiplies: select NELTS element pairs from the two
   2*NELTS-element inputs (which half or stride depends on the code
   and on BYTES_BIG_ENDIAN), widen each pair and multiply.  */
1501 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1502 unsigned int out, ofs, scale;
1505 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1506 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1507 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1510 elts = XALLOCAVEC (tree, nelts * 4);
1511 if (!vec_cst_ctor_to_array (arg1, elts)
1512 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1515 if (code == VEC_WIDEN_MULT_LO_EXPR)
1516 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1517 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1518 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1519 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1521 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1524 for (out = 0; out < nelts; out++)
1526 unsigned int in1 = (out << scale) + ofs;
1527 unsigned int in2 = in1 + nelts * 2;
1530 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1531 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1533 if (t1 == NULL_TREE || t2 == NULL_TREE)
1535 elts[out] = const_binop (MULT_EXPR, t1, t2);
1536 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1540 return build_vector (type, elts);
/* All remaining codes are handed to the two-operand worker, which
   takes the result type from the operands.  */
1546 if (TREE_CODE_CLASS (code) != tcc_binary)
1549 /* Make sure type and arg0 have the same saturating flag. */
1550 gcc_checking_assert (TYPE_SATURATING (type)
1551 == TYPE_SATURATING (TREE_TYPE (arg1)));
1553 return const_binop (code, arg1, arg2);
1556 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1557 Return zero if computing the constants is not possible. */
/* NOTE(review): case labels and several statements are missing from
   this excerpt (line numbers jump); the switch structure below must be
   verified against the complete fold-const.c.  */
1560 const_unop (enum tree_code code, tree type, tree arg0)
1566 case FIX_TRUNC_EXPR:
1567 case FIXED_CONVERT_EXPR:
1568 return fold_convert_const (code, type, arg0);
1570 case ADDR_SPACE_CONVERT_EXPR:
/* A zero pointer converts to zero in any address space.  */
1571 if (integer_zerop (arg0))
1572 return fold_convert_const (code, type, arg0);
1575 case VIEW_CONVERT_EXPR:
1576 return fold_view_convert_expr (type, arg0);
/* Presumably the NEGATE_EXPR case (label dropped in excerpt).  */
1580 /* Can't call fold_negate_const directly here as that doesn't
1581 handle all cases and we might not be able to negate some
1583 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1584 if (tem && CONSTANT_CLASS_P (tem))
/* Presumably the ABS_EXPR case.  */
1590 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1591 return fold_abs_const (arg0, type);
/* Presumably the CONJ_EXPR case: keep the real part, negate the
   imaginary part.  */
1595 if (TREE_CODE (arg0) == COMPLEX_CST)
1597 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1599 return build_complex (type, TREE_REALPART (arg0), ipart);
/* Presumably the BIT_NOT_EXPR case.  */
1604 if (TREE_CODE (arg0) == INTEGER_CST)
1605 return fold_not_const (arg0, type);
1606 /* Perform BIT_NOT_EXPR on each element individually. */
1607 else if (TREE_CODE (arg0) == VECTOR_CST)
1611 unsigned count = VECTOR_CST_NELTS (arg0), i;
1613 elements = XALLOCAVEC (tree, count);
1614 for (i = 0; i < count; i++)
1616 elem = VECTOR_CST_ELT (arg0, i);
1617 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1618 if (elem == NULL_TREE)
1623 return build_vector (type, elements);
1627 case TRUTH_NOT_EXPR:
1628 if (TREE_CODE (arg0) == INTEGER_CST)
1629 return constant_boolean_node (integer_zerop (arg0), type);
/* Presumably the REALPART_EXPR case.  */
1633 if (TREE_CODE (arg0) == COMPLEX_CST)
1634 return fold_convert (type, TREE_REALPART (arg0));
/* Presumably the IMAGPART_EXPR case.  */
1638 if (TREE_CODE (arg0) == COMPLEX_CST)
1639 return fold_convert (type, TREE_IMAGPART (arg0));
1642 case VEC_UNPACK_LO_EXPR:
1643 case VEC_UNPACK_HI_EXPR:
1644 case VEC_UNPACK_FLOAT_LO_EXPR:
1645 case VEC_UNPACK_FLOAT_HI_EXPR:
/* Unpack half of a 2*NELTS-element vector into NELTS wider elements;
   which half depends on the code XORed with endianness.  */
1647 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1649 enum tree_code subcode;
1651 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1652 if (TREE_CODE (arg0) != VECTOR_CST)
1655 elts = XALLOCAVEC (tree, nelts * 2);
1656 if (!vec_cst_ctor_to_array (arg0, elts))
1659 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1660 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1663 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1666 subcode = FLOAT_EXPR;
1668 for (i = 0; i < nelts; i++)
1670 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1671 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1675 return build_vector (type, elts);
1678 case REDUC_MIN_EXPR:
1679 case REDUC_MAX_EXPR:
1680 case REDUC_PLUS_EXPR:
/* Reduce a constant vector to a scalar by folding the matching
   binary operation across all elements into elts[0].  */
1682 unsigned int nelts, i;
1684 enum tree_code subcode;
1686 if (TREE_CODE (arg0) != VECTOR_CST)
1688 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1690 elts = XALLOCAVEC (tree, nelts);
1691 if (!vec_cst_ctor_to_array (arg0, elts))
1696 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1697 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1698 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1699 default: gcc_unreachable ();
1702 for (i = 1; i < nelts; i++)
1704 elts[0] = const_binop (subcode, elts[0], elts[i]);
1705 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1719 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1720 indicates which particular sizetype to create. */
1723 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1725 return build_int_cst (sizetype_tab[(int) kind], number);
1728 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1729 is a tree code. The type of the result is taken from the operands.
1730 Both must be equivalent integer types, ala int_binop_types_match_p.
1731 If the operands are constant, so is the result. */
/* NOTE(review): the early returns of the fast-path identity checks
   below (e.g. "return arg1;" after the integer_zerop tests) are
   missing from this excerpt — verify against the complete file.  */
1734 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1736 tree type = TREE_TYPE (arg0);
1738 if (arg0 == error_mark_node || arg1 == error_mark_node)
1739 return error_mark_node;
1741 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1744 /* Handle the special case of two integer constants faster. */
1745 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1747 /* And some specific cases even faster than that. */
/* x + 0 and 0 + x fold to the other operand (overflow-free only).  */
1748 if (code == PLUS_EXPR)
1750 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1752 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
/* x - 0 folds to x.  */
1755 else if (code == MINUS_EXPR)
1757 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
/* 1 * x folds to x.  */
1760 else if (code == MULT_EXPR)
1762 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1766 /* Handle general case of two integer constants. For sizetype
1767 constant calculations we always want to know about overflow,
1768 even in the unsigned case. */
1769 return int_const_binop_1 (code, arg0, arg1, -1);
/* Non-constant operands: build a regular folded binary expression.  */
1772 return fold_build2_loc (loc, code, type, arg0, arg1);
1775 /* Given two values, either both of sizetype or both of bitsizetype,
1776 compute the difference between the two values. Return the value
1777 in signed type corresponding to the type of the operands. */
/* NOTE(review): the assignments selecting ssizetype for the sizetype
   branch appear to be dropped from this excerpt; confirm against the
   complete file.  */
1780 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1782 tree type = TREE_TYPE (arg0);
1785 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1788 /* If the type is already signed, just do the simple thing. */
1789 if (!TYPE_UNSIGNED (type))
1790 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
/* Select the signed counterpart CTYPE of the unsigned size type.  */
1792 if (type == sizetype)
1794 else if (type == bitsizetype)
1795 ctype = sbitsizetype;
1797 ctype = signed_type_for (type);
1799 /* If either operand is not a constant, do the conversions to the signed
1800 type and subtract. The hardware will do the right thing with any
1801 overflow in the subtraction. */
1802 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1803 return size_binop_loc (loc, MINUS_EXPR,
1804 fold_convert_loc (loc, ctype, arg0),
1805 fold_convert_loc (loc, ctype, arg1));
1807 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1808 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1809 overflow) and negate (which can't either). Special-case a result
1810 of zero while we're here. */
1811 if (tree_int_cst_equal (arg0, arg1))
1812 return build_int_cst (ctype, 0);
1813 else if (tree_int_cst_lt (arg1, arg0))
1814 return fold_convert_loc (loc, ctype,
1815 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) as 0 - (ctype)(arg1 - arg0).  */
1817 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1818 fold_convert_loc (loc, ctype,
1819 size_binop_loc (loc,
1824 /* A subroutine of fold_convert_const handling conversions of an
1825 INTEGER_CST to another integer type. */
1828 fold_convert_const_int_from_int (tree type, const_tree arg1)
1830 /* Given an integer constant, make new constant with new type,
1831 appropriately sign-extended or truncated. Use widest_int
1832 so that any extension is done according ARG1's type. */
1833 return force_fit_type (type, wi::to_widest (arg1),
1834 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1835 TREE_OVERFLOW (arg1));
1838 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1839 to an integer type. */
/* NOTE(review): variable declarations (r, val, t) and the overflow
   goto targets are missing from this excerpt; verify the full
   function in upstream fold-const.c.  */
1842 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1844 bool overflow = false;
1847 /* The following code implements the floating point to integer
1848 conversion rules required by the Java Language Specification,
1849 that IEEE NaNs are mapped to zero and values that overflow
1850 the target precision saturate, i.e. values greater than
1851 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1852 are mapped to INT_MIN. These semantics are allowed by the
1853 C and C++ standards that simply state that the behavior of
1854 FP-to-integer conversion is unspecified upon overflow. */
1858 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding implied by CODE; only truncation is visible in
   this excerpt.  */
1862 case FIX_TRUNC_EXPR:
1863 real_trunc (&r, VOIDmode, &x);
1870 /* If R is NaN, return zero and show we have an overflow. */
1871 if (REAL_VALUE_ISNAN (r))
1874 val = wi::zero (TYPE_PRECISION (type));
1877 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE if R is below the representable range.  */
1882 tree lt = TYPE_MIN_VALUE (type);
1883 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1884 if (real_less (&r, &l))
/* Saturate at TYPE_MAX_VALUE if R is above the representable range.  */
1893 tree ut = TYPE_MAX_VALUE (type);
1896 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1897 if (real_less (&u, &r))
1906 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1908 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1912 /* A subroutine of fold_convert_const handling conversions of a
1913 FIXED_CST to an integer type. */
/* NOTE(review): some declarations, the else branch guard, and the
   final return are missing from this excerpt; verify against the
   complete file.  */
1916 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1919 double_int temp, temp_trunc;
1922 /* Right shift FIXED_CST to temp by fbit. */
1923 temp = TREE_FIXED_CST (arg1).data;
1924 mode = TREE_FIXED_CST (arg1).mode;
1925 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
/* Shift out the fractional bits, arithmetic shift for signed modes.  */
1927 temp = temp.rshift (GET_MODE_FBIT (mode),
1928 HOST_BITS_PER_DOUBLE_INT,
1929 SIGNED_FIXED_POINT_MODE_P (mode));
1931 /* Left shift temp to temp_trunc by fbit. */
1932 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1933 HOST_BITS_PER_DOUBLE_INT,
1934 SIGNED_FIXED_POINT_MODE_P (mode));
/* Presumably the else branch for fbit >= HOST_BITS_PER_DOUBLE_INT:
   the whole value is fractional, so both become zero.  */
1938 temp = double_int_zero;
1939 temp_trunc = double_int_zero;
1942 /* If FIXED_CST is negative, we need to round the value toward 0.
1943 By checking if the fractional bits are not zero to add 1 to temp. */
1944 if (SIGNED_FIXED_POINT_MODE_P (mode)
1945 && temp_trunc.is_negative ()
1946 && TREE_FIXED_CST (arg1).data != temp_trunc)
1947 temp += double_int_one;
1949 /* Given a fixed-point constant, make new constant with new type,
1950 appropriately sign-extended or truncated. */
1951 t = force_fit_type (type, temp, -1,
1952 (temp.is_negative ()
1953 && (TYPE_UNSIGNED (type)
1954 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1955 | TREE_OVERFLOW (arg1));
1960 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1961 to another floating point type. */
1964 fold_convert_const_real_from_real (tree type, const_tree arg1)
1966 REAL_VALUE_TYPE value;
1969 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1970 t = build_real (type, value);
1972 /* If converting an infinity or NAN to a representation that doesn't
1973 have one, set the overflow bit so that we can produce some kind of
1974 error message at the appropriate point if necessary. It's not the
1975 most user-friendly message, but it's better than nothing. */
1976 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1977 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1978 TREE_OVERFLOW (t) = 1;
1979 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1980 && !MODE_HAS_NANS (TYPE_MODE (type)))
1981 TREE_OVERFLOW (t) = 1;
1982 /* Regular overflow, conversion produced an infinity in a mode that
1983 can't represent them. */
1984 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1985 && REAL_VALUE_ISINF (value)
1986 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1987 TREE_OVERFLOW (t) = 1;
1989 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1993 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1994 to a floating point type. */
1997 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1999 REAL_VALUE_TYPE value;
2002 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2003 t = build_real (type, value);
2005 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2009 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2010 to another fixed-point type. */
2013 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2015 FIXED_VALUE_TYPE value;
2019 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2020 TYPE_SATURATING (type));
2021 t = build_fixed (type, value);
2023 /* Propagate overflow flags. */
2024 if (overflow_p | TREE_OVERFLOW (arg1))
2025 TREE_OVERFLOW (t) = 1;
2029 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2030 to a fixed-point type. */
2033 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2035 FIXED_VALUE_TYPE value;
2040 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2042 di.low = TREE_INT_CST_ELT (arg1, 0);
2043 if (TREE_INT_CST_NUNITS (arg1) == 1)
2044 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2046 di.high = TREE_INT_CST_ELT (arg1, 1);
2048 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2049 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2050 TYPE_SATURATING (type));
2051 t = build_fixed (type, value);
2053 /* Propagate overflow flags. */
2054 if (overflow_p | TREE_OVERFLOW (arg1))
2055 TREE_OVERFLOW (t) = 1;
2059 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2060 to a fixed-point type. */
2063 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2065 FIXED_VALUE_TYPE value;
2069 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2070 &TREE_REAL_CST (arg1),
2071 TYPE_SATURATING (type));
2072 t = build_fixed (type, value);
2074 /* Propagate overflow flags. */
2075 if (overflow_p | TREE_OVERFLOW (arg1))
2076 TREE_OVERFLOW (t) = 1;
2080 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2081 type TYPE. If no simplification can be done return NULL_TREE. */
/* NOTE(review): the early "return arg1;" for the same-type case and
   the final "return NULL_TREE;" appear to be dropped from this
   excerpt; verify against the complete file.  */
2084 fold_convert_const (enum tree_code code, tree type, tree arg1)
2086 if (TREE_TYPE (arg1) == type)
/* Dispatch on the target type class, then on the constant kind.  */
2089 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2090 || TREE_CODE (type) == OFFSET_TYPE)
2092 if (TREE_CODE (arg1) == INTEGER_CST)
2093 return fold_convert_const_int_from_int (type, arg1);
2094 else if (TREE_CODE (arg1) == REAL_CST)
2095 return fold_convert_const_int_from_real (code, type, arg1);
2096 else if (TREE_CODE (arg1) == FIXED_CST)
2097 return fold_convert_const_int_from_fixed (type, arg1);
2099 else if (TREE_CODE (type) == REAL_TYPE)
2101 if (TREE_CODE (arg1) == INTEGER_CST)
2102 return build_real_from_int_cst (type, arg1);
2103 else if (TREE_CODE (arg1) == REAL_CST)
2104 return fold_convert_const_real_from_real (type, arg1);
2105 else if (TREE_CODE (arg1) == FIXED_CST)
2106 return fold_convert_const_real_from_fixed (type, arg1);
2108 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2110 if (TREE_CODE (arg1) == FIXED_CST)
2111 return fold_convert_const_fixed_from_fixed (type, arg1);
2112 else if (TREE_CODE (arg1) == INTEGER_CST)
2113 return fold_convert_const_fixed_from_int (type, arg1);
2114 else if (TREE_CODE (arg1) == REAL_CST)
2115 return fold_convert_const_fixed_from_real (type, arg1);
2120 /* Construct a vector of zero elements of vector type TYPE. */
2123 build_zero_vector (tree type)
2127 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2128 return build_vector_from_val (type, t);
2131 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* NOTE(review): several "return true/false" lines and switch case
   labels are dropped from this excerpt; verify the complete function
   before relying on the branch structure.  */
2134 fold_convertible_p (const_tree type, const_tree arg)
2136 tree orig = TREE_TYPE (arg);
/* Error nodes are never convertible.  */
2141 if (TREE_CODE (arg) == ERROR_MARK
2142 || TREE_CODE (type) == ERROR_MARK
2143 || TREE_CODE (orig) == ERROR_MARK)
/* Identical main variants are trivially convertible.  */
2146 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2149 switch (TREE_CODE (type))
2151 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152 case POINTER_TYPE: case REFERENCE_TYPE:
2154 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2155 || TREE_CODE (orig) == OFFSET_TYPE)
/* A vector is NOP-convertible only if it has the same size.  */
2157 return (TREE_CODE (orig) == VECTOR_TYPE
2158 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2161 case FIXED_POINT_TYPE:
2165 return TREE_CODE (type) == TREE_CODE (orig);
2172 /* Convert expression ARG to type TYPE. Used by the middle-end for
2173 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): this excerpt drops many lines (braces, case labels
   such as POINTER_TYPE/REAL_TYPE/COMPLEX_TYPE/VECTOR_TYPE/VOID_TYPE,
   several gotos and the fold_convert_exit label); treat the structure
   below as a sampled outline and verify against the complete file.  */
2176 fold_convert_loc (location_t loc, tree type, tree arg)
2178 tree orig = TREE_TYPE (arg);
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return error_mark_node;
2189 switch (TREE_CODE (type))
2192 case REFERENCE_TYPE:
2193 /* Handle conversions between pointers to different address spaces. */
2194 if (POINTER_TYPE_P (orig)
2195 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2196 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2197 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2200 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
/* Integer targets: try constant folding first, then a plain NOP.  */
2202 if (TREE_CODE (arg) == INTEGER_CST)
2204 tem = fold_convert_const (NOP_EXPR, type, arg);
2205 if (tem != NULL_TREE)
2208 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2209 || TREE_CODE (orig) == OFFSET_TYPE)
2210 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* Complex source: convert the real part only.  */
2211 if (TREE_CODE (orig) == COMPLEX_TYPE)
2212 return fold_convert_loc (loc, type,
2213 fold_build1_loc (loc, REALPART_EXPR,
2214 TREE_TYPE (orig), arg));
2215 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2216 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2217 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* Presumably the REAL_TYPE case (label dropped in excerpt): try
   constant folding by source constant kind.  */
2220 if (TREE_CODE (arg) == INTEGER_CST)
2222 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2223 if (tem != NULL_TREE)
2226 else if (TREE_CODE (arg) == REAL_CST)
2228 tem = fold_convert_const (NOP_EXPR, type, arg);
2229 if (tem != NULL_TREE)
2232 else if (TREE_CODE (arg) == FIXED_CST)
2234 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2239 switch (TREE_CODE (orig))
2242 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2243 case POINTER_TYPE: case REFERENCE_TYPE:
2244 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2247 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2249 case FIXED_POINT_TYPE:
2250 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
/* Complex source feeding a real target: take the real part.  */
2253 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2254 return fold_convert_loc (loc, type, tem);
2260 case FIXED_POINT_TYPE:
2261 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2262 || TREE_CODE (arg) == REAL_CST)
2264 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2265 if (tem != NULL_TREE)
2266 goto fold_convert_exit;
2269 switch (TREE_CODE (orig))
2271 case FIXED_POINT_TYPE:
2276 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2279 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2280 return fold_convert_loc (loc, type, tem);
/* Presumably the COMPLEX_TYPE case (label dropped in excerpt).  */
2287 switch (TREE_CODE (orig))
2290 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2291 case POINTER_TYPE: case REFERENCE_TYPE:
2293 case FIXED_POINT_TYPE:
/* Scalar -> complex: pair the converted value with a zero
   imaginary part.  */
2294 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2295 fold_convert_loc (loc, TREE_TYPE (type), arg),
2296 fold_convert_loc (loc, TREE_TYPE (type),
2297 integer_zero_node));
2302 if (TREE_CODE (arg) == COMPLEX_EXPR)
2304 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2305 TREE_OPERAND (arg, 0));
2306 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2307 TREE_OPERAND (arg, 1));
2308 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
/* General complex -> complex: wrap ARG in a SAVE_EXPR so it is
   evaluated once, then convert both parts.  */
2311 arg = save_expr (arg);
2312 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2313 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2314 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2315 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2316 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
/* Presumably the VECTOR_TYPE case (label dropped in excerpt).  */
2324 if (integer_zerop (arg))
2325 return build_zero_vector (type);
2326 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2327 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2328 || TREE_CODE (orig) == VECTOR_TYPE);
2329 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
/* Presumably the VOID_TYPE case: discard the value.  */
2332 tem = fold_ignored_result (arg);
2333 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2336 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2337 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2341 protected_set_expr_location_unshare (tem, loc);
2345 /* Return false if expr can be assumed not to be an lvalue, true
/* NOTE(review): most of the switch's case labels (VAR_DECL,
   PARM_DECL, COMPONENT_REF, ARRAY_REF, etc.) and the true/false
   returns are dropped from this excerpt; verify against the complete
   file.  */
2349 maybe_lvalue_p (const_tree x)
2351 /* We only need to wrap lvalue tree codes. */
2352 switch (TREE_CODE (x))
2365 case ARRAY_RANGE_REF:
2371 case PREINCREMENT_EXPR:
2372 case PREDECREMENT_EXPR:
2374 case TRY_CATCH_EXPR:
2375 case WITH_CLEANUP_EXPR:
2384 /* Assume the worst for front-end tree codes. */
2385 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2393 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): the GIMPLE early return and the "return x;" following
   the maybe_lvalue_p check are dropped from this excerpt.  */
2396 non_lvalue_loc (location_t loc, tree x)
2398 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X can't be an lvalue anyway, no wrapping is needed.  */
2403 if (! maybe_lvalue_p (x))
2405 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2408 /* When pedantic, return an expr equal to X but certainly not valid as a
2409 pedantic lvalue. Otherwise, return X. */
2412 pedantic_non_lvalue_loc (location_t loc, tree x)
2414 return protected_set_expr_location_unshare (x, loc);
2417 /* Given a tree comparison code, return the code that is the logical inverse.
2418 It is generally not safe to do this for floating-point comparisons, except
2419 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2420 ERROR_MARK in this case. */
/* NOTE(review): the switch's case labels (EQ_EXPR, NE_EXPR, GT_EXPR,
   ...) are dropped from this excerpt; only the returns are visible.
   Verify pairings against the complete file.  */
2423 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inverting an ordering comparison could
   change trap behavior, so refuse (return ERROR_MARK, dropped).  */
2425 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2426 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
/* Each ordering comparison inverts to its unordered counterpart when
   NaNs must be honored, else to the plain opposite.  */
2436 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 return honor_nans ? UNGT_EXPR : GT_EXPR;
2456 return UNORDERED_EXPR;
2457 case UNORDERED_EXPR:
2458 return ORDERED_EXPR;
2464 /* Similar, but return the comparison that results if the operands are
2465 swapped. This is safe for floating-point. */
/* NOTE(review): almost the whole body of this function is dropped
   from this excerpt (only one case label survives); do not edit
   without the complete file.  */
2468 swap_tree_comparison (enum tree_code code)
2475 case UNORDERED_EXPR:
2501 /* Convert a comparison tree code from an enum tree_code representation
2502 into a compcode bit-based encoding. This function is the inverse of
2503 compcode_to_comparison. */
/* NOTE(review): most case labels (LT_EXPR, EQ_EXPR, ...) are dropped
   from this excerpt; only the returns for the unordered family are
   fully visible.  */
2505 static enum comparison_code
2506 comparison_to_compcode (enum tree_code code)
2523 return COMPCODE_ORD;
2524 case UNORDERED_EXPR:
2525 return COMPCODE_UNORD;
2527 return COMPCODE_UNLT;
2529 return COMPCODE_UNEQ;
2531 return COMPCODE_UNLE;
2533 return COMPCODE_UNGT;
2535 return COMPCODE_LTGT;
2537 return COMPCODE_UNGE;
2543 /* Convert a compcode bit-based encoding of a comparison operator back
2544 to GCC's enum tree_code representation. This function is the
2545 inverse of comparison_to_compcode. */
/* NOTE(review): most case labels and returns are dropped from this
   excerpt; only the ORD/UNORD pair is visible.  */
2547 static enum tree_code
2548 compcode_to_comparison (enum comparison_code code)
2565 return ORDERED_EXPR;
2566 case COMPCODE_UNORD:
2567 return UNORDERED_EXPR;
2585 /* Return a tree for the comparison which is the combination of
2586 doing the AND or OR (depending on CODE) of the two operations LCODE
2587 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2588 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2589 if this makes the transformation invalid. */
/* NOTE(review): a few lines (default switch case, some returns) are
   dropped from this excerpt; verify against the complete file.  */
2592 combine_comparisons (location_t loc,
2593 enum tree_code code, enum tree_code lcode,
2594 enum tree_code rcode, tree truth_type,
2595 tree ll_arg, tree lr_arg)
2597 bool honor_nans = HONOR_NANS (ll_arg);
2598 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2599 enum comparison_code rcompcode = comparison_to_compcode (rcode);
/* In the bit encoding, ANDing/ORing the comparisons is simply
   ANDing/ORing the compcodes.  */
2604 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2605 compcode = lcompcode & rcompcode;
2608 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2609 compcode = lcompcode | rcompcode;
/* Without NaNs, the unordered bit is meaningless; simplify.  */
2618 /* Eliminate unordered comparisons, as well as LTGT and ORD
2619 which are not used unless the mode has NaNs. */
2620 compcode &= ~COMPCODE_UNORD;
2621 if (compcode == COMPCODE_LTGT)
2622 compcode = COMPCODE_NE;
2623 else if (compcode == COMPCODE_ORD)
2624 compcode = COMPCODE_TRUE;
2626 else if (flag_trapping_math)
2628 /* Check that the original operation and the optimized ones will trap
2629 under the same condition. */
2630 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2631 && (lcompcode != COMPCODE_EQ)
2632 && (lcompcode != COMPCODE_ORD);
2633 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2634 && (rcompcode != COMPCODE_EQ)
2635 && (rcompcode != COMPCODE_ORD);
2636 bool trap = (compcode & COMPCODE_UNORD) == 0
2637 && (compcode != COMPCODE_EQ)
2638 && (compcode != COMPCODE_ORD);
2640 /* In a short-circuited boolean expression the LHS might be
2641 such that the RHS, if evaluated, will never trap. For
2642 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2643 if neither x nor y is NaN. (This is a mixed blessing: for
2644 example, the expression above will never trap, hence
2645 optimizing it to x < y would be invalid). */
2646 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2647 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2650 /* If the comparison was short-circuited, and only the RHS
2651 trapped, we may now generate a spurious trap. */
2653 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2656 /* If we changed the conditions that cause a trap, we lose. */
2657 if ((ltrap || rtrap) != trap)
/* Constant results fold to boolean constants; anything else maps the
   combined compcode back to a tree comparison code.  */
2661 if (compcode == COMPCODE_TRUE)
2662 return constant_boolean_node (true, truth_type);
2663 else if (compcode == COMPCODE_FALSE)
2664 return constant_boolean_node (false, truth_type);
2667 enum tree_code tcode;
2669 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2670 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2674 /* Return nonzero if two operands (typically of the same tree node)
2675 are necessarily equal. If either argument has side-effects this
2676 function returns zero. FLAGS modifies behavior as follows:
2678 If OEP_ONLY_CONST is set, only return nonzero for constants.
2679 This function tests whether the operands are indistinguishable;
2680 it does not test whether they are equal using C's == operation.
2681 The distinction is important for IEEE floating point, because
2682 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2683 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2685 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2686 even though it may hold multiple values during a function.
2687 This is because a GCC tree node guarantees that nothing else is
2688 executed between the evaluation of its "operands" (which may often
2689 be evaluated in arbitrary order). Hence if the operands themselves
2690 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2691 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2692 unset means assuming isochronic (or instantaneous) tree equivalence.
2693 Unless comparing arbitrary expression trees, such as from different
2694 statements, this flag can usually be left unset.
2696 If OEP_PURE_SAME is set, then pure functions with identical arguments
2697 are considered the same. It is used when the caller has other ways
2698 to ensure that global memory is unchanged in between.
2700 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2701 not values of expressions. OEP_CONSTANT_ADDRESS_OF in addition to
2702 OEP_ADDRESS_OF is used for ADDR_EXPR with TREE_CONSTANT flag set and we
2703 further ignore any side effects on SAVE_EXPRs then. */
2706 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2708 /* If either is ERROR_MARK, they aren't equal. */
2709 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2710 || TREE_TYPE (arg0) == error_mark_node
2711 || TREE_TYPE (arg1) == error_mark_node)
2714 /* Similar, if either does not have a type (like a released SSA name),
2715 they aren't equal. */
2716 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2719 /* Check equality of integer constants before bailing out due to
2720 precision differences. */
2721 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2723 /* Address of INTEGER_CST is not defined; check that we did not forget
2724 to drop the OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2725 gcc_checking_assert (!(flags
2726 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2727 return tree_int_cst_equal (arg0, arg1);
2730 if (!(flags & OEP_ADDRESS_OF))
2732 /* If both types don't have the same signedness, then we can't consider
2733 them equal. We must check this before the STRIP_NOPS calls
2734 because they may change the signedness of the arguments. As pointers
2735 strictly don't have a signedness, require either two pointers or
2736 two non-pointers as well. */
2737 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2738 || POINTER_TYPE_P (TREE_TYPE (arg0))
2739 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2742 /* We cannot consider pointers to different address space equal. */
2743 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2744 && POINTER_TYPE_P (TREE_TYPE (arg1))
2745 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2746 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2749 /* If both types don't have the same precision, then it is not safe
2751 if (element_precision (TREE_TYPE (arg0))
2752 != element_precision (TREE_TYPE (arg1)))
2759 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2760 sanity check once the issue is solved. */
2762 /* Addresses of conversions and SSA_NAMEs (and many other things)
2763 are not defined. Check that we did not forget to drop the
2764 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2765 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2766 && TREE_CODE (arg0) != SSA_NAME);
2769 /* In case both args are comparisons but with different comparison
2770 code, try to swap the comparison operands of one arg to produce
2771 a match and compare that variant. */
2772 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2773 && COMPARISON_CLASS_P (arg0)
2774 && COMPARISON_CLASS_P (arg1))
2776 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2778 if (TREE_CODE (arg0) == swap_code)
2779 return operand_equal_p (TREE_OPERAND (arg0, 0),
2780 TREE_OPERAND (arg1, 1), flags)
2781 && operand_equal_p (TREE_OPERAND (arg0, 1),
2782 TREE_OPERAND (arg1, 0), flags);
2785 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2787 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2788 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2790 else if (flags & OEP_ADDRESS_OF)
2792 /* If we are interested in comparing addresses ignore
2793 MEM_REF wrappings of the base that can appear just for
2795 if (TREE_CODE (arg0) == MEM_REF
2797 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2798 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2799 && integer_zerop (TREE_OPERAND (arg0, 1)))
2801 else if (TREE_CODE (arg1) == MEM_REF
2803 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2804 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2805 && integer_zerop (TREE_OPERAND (arg1, 1)))
2813 /* This is needed for conversions and for COMPONENT_REF.
2814 Might as well play it safe and always test this. */
2815 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2816 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2817 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2820 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2821 We don't care about side effects in that case because the SAVE_EXPR
2822 takes care of that for us. In all other cases, two expressions are
2823 equal if they have no side effects. If we have two identical
2824 expressions with side effects that should be treated the same due
2825 to the only side effects being identical SAVE_EXPR's, that will
2826 be detected in the recursive calls below.
2827 If we are taking an invariant address of two identical objects
2828 they are necessarily equal as well. */
2829 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2830 && (TREE_CODE (arg0) == SAVE_EXPR
2831 || (flags & OEP_CONSTANT_ADDRESS_OF)
2832 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2835 /* Next handle constant cases, those for which we can return 1 even
2836 if ONLY_CONST is set. */
2837 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2838 switch (TREE_CODE (arg0))
2841 return tree_int_cst_equal (arg0, arg1);
2844 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2845 TREE_FIXED_CST (arg1));
2848 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2852 if (!HONOR_SIGNED_ZEROS (arg0))
2854 /* If we do not distinguish between signed and unsigned zero,
2855 consider them equal. */
2856 if (real_zerop (arg0) && real_zerop (arg1))
2865 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2868 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2870 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2871 VECTOR_CST_ELT (arg1, i), flags))
2878 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2880 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2884 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2885 && ! memcmp (TREE_STRING_POINTER (arg0),
2886 TREE_STRING_POINTER (arg1),
2887 TREE_STRING_LENGTH (arg0)));
2890 gcc_checking_assert (!(flags
2891 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2892 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2893 flags | OEP_ADDRESS_OF
2894 | OEP_CONSTANT_ADDRESS_OF);
2899 if (flags & OEP_ONLY_CONST)
2902 /* Define macros to test an operand from arg0 and arg1 for equality and a
2903 variant that allows null and views null as being different from any
2904 non-null value. In the latter case, if either is null, the both
2905 must be; otherwise, do the normal comparison. */
2906 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2907 TREE_OPERAND (arg1, N), flags)
2909 #define OP_SAME_WITH_NULL(N) \
2910 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2911 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2913 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2916 /* Two conversions are equal only if signedness and modes match. */
2917 switch (TREE_CODE (arg0))
2920 case FIX_TRUNC_EXPR:
2921 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2922 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2932 case tcc_comparison:
2934 if (OP_SAME (0) && OP_SAME (1))
2937 /* For commutative ops, allow the other order. */
2938 return (commutative_tree_code (TREE_CODE (arg0))
2939 && operand_equal_p (TREE_OPERAND (arg0, 0),
2940 TREE_OPERAND (arg1, 1), flags)
2941 && operand_equal_p (TREE_OPERAND (arg0, 1),
2942 TREE_OPERAND (arg1, 0), flags));
2945 /* If either of the pointer (or reference) expressions we are
2946 dereferencing contain a side effect, these cannot be equal,
2947 but their addresses can be. */
2948 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2949 && (TREE_SIDE_EFFECTS (arg0)
2950 || TREE_SIDE_EFFECTS (arg1)))
2953 switch (TREE_CODE (arg0))
2956 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF))
2957 && (TYPE_ALIGN (TREE_TYPE (arg0))
2958 != TYPE_ALIGN (TREE_TYPE (arg1))))
2960 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2967 case TARGET_MEM_REF:
2969 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)))
2971 /* Require equal access sizes */
2972 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2973 && (!TYPE_SIZE (TREE_TYPE (arg0))
2974 || !TYPE_SIZE (TREE_TYPE (arg1))
2975 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2976 TYPE_SIZE (TREE_TYPE (arg1)),
2979 /* Verify that access happens in similar types. */
2980 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2982 /* Verify that accesses are TBAA compatible. */
2983 if (flag_strict_aliasing
2984 && (!alias_ptr_types_compatible_p
2985 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2986 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2987 || (MR_DEPENDENCE_CLIQUE (arg0)
2988 != MR_DEPENDENCE_CLIQUE (arg1))
2989 || (MR_DEPENDENCE_BASE (arg0)
2990 != MR_DEPENDENCE_BASE (arg1))))
2992 /* Verify that alignment is compatible. */
2993 if (TYPE_ALIGN (TREE_TYPE (arg0))
2994 != TYPE_ALIGN (TREE_TYPE (arg1)))
2997 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2998 return (OP_SAME (0) && OP_SAME (1)
2999 /* TARGET_MEM_REF require equal extra operands. */
3000 && (TREE_CODE (arg0) != TARGET_MEM_REF
3001 || (OP_SAME_WITH_NULL (2)
3002 && OP_SAME_WITH_NULL (3)
3003 && OP_SAME_WITH_NULL (4))));
3006 case ARRAY_RANGE_REF:
3007 /* Operands 2 and 3 may be null.
3008 Compare the array index by value if it is constant first as we
3009 may have different types but same value here. */
3012 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3013 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3014 TREE_OPERAND (arg1, 1))
3016 && OP_SAME_WITH_NULL (2)
3017 && OP_SAME_WITH_NULL (3));
3020 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3021 may be NULL when we're called to compare MEM_EXPRs. */
3022 if (!OP_SAME_WITH_NULL (0)
3025 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3026 return OP_SAME_WITH_NULL (2);
3031 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3032 return OP_SAME (1) && OP_SAME (2);
3038 case tcc_expression:
3039 switch (TREE_CODE (arg0))
3042 /* Be sure we pass right ADDRESS_OF flag. */
3043 gcc_checking_assert (!(flags
3045 | OEP_CONSTANT_ADDRESS_OF)));
3046 return operand_equal_p (TREE_OPERAND (arg0, 0),
3047 TREE_OPERAND (arg1, 0),
3048 flags | OEP_ADDRESS_OF);
3050 case TRUTH_NOT_EXPR:
3053 case TRUTH_ANDIF_EXPR:
3054 case TRUTH_ORIF_EXPR:
3055 return OP_SAME (0) && OP_SAME (1);
3058 case WIDEN_MULT_PLUS_EXPR:
3059 case WIDEN_MULT_MINUS_EXPR:
3062 /* The multiplcation operands are commutative. */
3065 case TRUTH_AND_EXPR:
3067 case TRUTH_XOR_EXPR:
3068 if (OP_SAME (0) && OP_SAME (1))
3071 /* Otherwise take into account this is a commutative operation. */
3072 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3073 TREE_OPERAND (arg1, 1), flags)
3074 && operand_equal_p (TREE_OPERAND (arg0, 1),
3075 TREE_OPERAND (arg1, 0), flags));
3080 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3087 switch (TREE_CODE (arg0))
3090 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3091 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3092 /* If not both CALL_EXPRs are either internal or normal function
3093 functions, then they are not equal. */
3095 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3097 /* If the CALL_EXPRs call different internal functions, then they
3099 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3104 /* If the CALL_EXPRs call different functions, then they are not
3106 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3112 unsigned int cef = call_expr_flags (arg0);
3113 if (flags & OEP_PURE_SAME)
3114 cef &= ECF_CONST | ECF_PURE;
3121 /* Now see if all the arguments are the same. */
3123 const_call_expr_arg_iterator iter0, iter1;
3125 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3126 a1 = first_const_call_expr_arg (arg1, &iter1);
3128 a0 = next_const_call_expr_arg (&iter0),
3129 a1 = next_const_call_expr_arg (&iter1))
3130 if (! operand_equal_p (a0, a1, flags))
3133 /* If we get here and both argument lists are exhausted
3134 then the CALL_EXPRs are equal. */
3135 return ! (a0 || a1);
3141 case tcc_declaration:
3142 /* Consider __builtin_sqrt equal to sqrt. */
3143 return (TREE_CODE (arg0) == FUNCTION_DECL
3144 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3145 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3146 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3153 #undef OP_SAME_WITH_NULL
3156 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3157 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3159 When in doubt, return 0. */
3162 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3164 int unsignedp1, unsignedpo;
3165 tree primarg0, primarg1, primother;
3166 unsigned int correct_width;
/* Directly-equal operands need no further work. */
3168 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare transformation only applies to integral types. */
3171 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3172 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3175 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3176 and see if the inner values are the same. This removes any
3177 signedness comparison, which doesn't matter here. */
3178 primarg0 = arg0, primarg1 = arg1;
3179 STRIP_NOPS (primarg0);
3180 STRIP_NOPS (primarg1);
3181 if (operand_equal_p (primarg0, primarg1, 0))
3184 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3185 actual comparison operand, ARG0.
3187 First throw away any conversions to wider types
3188 already present in the operands. */
3190 primarg1 = get_narrower (arg1, &unsignedp1);
3191 primother = get_narrower (other, &unsignedpo);
3193 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3194 if (unsignedp1 == unsignedpo
3195 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3196 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3198 tree type = TREE_TYPE (arg0);
3200 /* Make sure shorter operand is extended the right way
3201 to match the longer operand. */
3202 primarg1 = fold_convert (signed_or_unsigned_type_for
3203 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3205 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3212 /* See if ARG is an expression that is either a comparison or is performing
3213 arithmetic on comparisons. The comparisons must only be comparing
3214 two different values, which will be stored in *CVAL1 and *CVAL2; if
3215 they are nonzero it means that some operands have already been found.
3216 No variables may be used anywhere else in the expression except in the
3217 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3218 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3220 If this is true, return 1. Otherwise, return zero. */
3223 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3225 enum tree_code code = TREE_CODE (arg);
3226 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3228 /* We can handle some of the tcc_expression cases here. */
3229 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3231 else if (tclass == tcc_expression
3232 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3233 || code == COMPOUND_EXPR))
3234 tclass = tcc_binary;
3236 else if (tclass == tcc_expression && code == SAVE_EXPR
3237 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3239 /* If we've already found a CVAL1 or CVAL2, this expression is
3240 too complex to handle. */
3241 if (*cval1 || *cval2)
/* Unary: the single operand must itself qualify. */
3251 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary: both operands must qualify against the same CVAL pair. */
3254 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3255 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3256 cval1, cval2, save_p));
3261 case tcc_expression:
3262 if (code == COND_EXPR)
3263 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3264 cval1, cval2, save_p)
3265 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3266 cval1, cval2, save_p)
3267 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3268 cval1, cval2, save_p));
3271 case tcc_comparison:
3272 /* First see if we can handle the first operand, then the second. For
3273 the second operand, we know *CVAL1 can't be zero. It must be that
3274 one side of the comparison is each of the values; test for the
3275 case where this isn't true by failing if the two operands
3278 if (operand_equal_p (TREE_OPERAND (arg, 0),
3279 TREE_OPERAND (arg, 1), 0))
3283 *cval1 = TREE_OPERAND (arg, 0);
3284 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3286 else if (*cval2 == 0)
3287 *cval2 = TREE_OPERAND (arg, 0);
3288 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now match the second comparison operand against the recorded values. */
3293 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3295 else if (*cval2 == 0)
3296 *cval2 = TREE_OPERAND (arg, 1);
3297 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3309 /* ARG is a tree that is known to contain just arithmetic operations and
3310 comparisons. Evaluate the operations in the tree substituting NEW0 for
3311 any occurrence of OLD0 as an operand of a comparison and likewise for
3315 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3316 tree old1, tree new1)
3318 tree type = TREE_TYPE (arg);
3319 enum tree_code code = TREE_CODE (arg);
3320 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3322 /* We can handle some of the tcc_expression cases here. */
3323 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3325 else if (tclass == tcc_expression
3326 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3327 tclass = tcc_binary;
/* Unary: rebuild with the substituted operand, re-folding as we go. */
3332 return fold_build1_loc (loc, code, type,
3333 eval_subst (loc, TREE_OPERAND (arg, 0),
3334 old0, new0, old1, new1));
/* Binary: substitute into both operands. */
3337 return fold_build2_loc (loc, code, type,
3338 eval_subst (loc, TREE_OPERAND (arg, 0),
3339 old0, new0, old1, new1),
3340 eval_subst (loc, TREE_OPERAND (arg, 1),
3341 old0, new0, old1, new1));
3343 case tcc_expression:
3347 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3351 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3355 return fold_build3_loc (loc, code, type,
3356 eval_subst (loc, TREE_OPERAND (arg, 0),
3357 old0, new0, old1, new1),
3358 eval_subst (loc, TREE_OPERAND (arg, 1),
3359 old0, new0, old1, new1),
3360 eval_subst (loc, TREE_OPERAND (arg, 2),
3361 old0, new0, old1, new1));
3365 /* Fall through - ??? */
3367 case tcc_comparison:
3369 tree arg0 = TREE_OPERAND (arg, 0);
3370 tree arg1 = TREE_OPERAND (arg, 1);
3372 /* We need to check both for exact equality and tree equality. The
3373 former will be true if the operand has a side-effect. In that
3374 case, we know the operand occurred exactly once. */
3376 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3378 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3381 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3383 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3386 return fold_build2_loc (loc, code, type, arg0, arg1);
3394 /* Return a tree for the case when the result of an expression is RESULT
3395 converted to TYPE and OMITTED was previously an operand of the expression
3396 but is now not needed (e.g., we folded OMITTED * 0).
3398 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3399 the conversion of RESULT to TYPE. */
3402 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3404 tree t = fold_convert_loc (loc, type, result);
3406 /* If the resulting operand is an empty statement, just return the omitted
3407 statement casted to void. */
3408 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3409 return build1_loc (loc, NOP_EXPR, void_type_node,
3410 fold_ignored_result (omitted));
/* Preserve OMITTED's side effects by sequencing it before the result
   with a COMPOUND_EXPR. */
3412 if (TREE_SIDE_EFFECTS (omitted))
3413 return build2_loc (loc, COMPOUND_EXPR, type,
3414 fold_ignored_result (omitted), t);
3416 return non_lvalue_loc (loc, t);
3419 /* Return a tree for the case when the result of an expression is RESULT
3420 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3421 of the expression but are now not needed.
3423 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3424 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3425 evaluated before OMITTED2. Otherwise, if neither has side effects,
3426 just do the conversion of RESULT to TYPE. */
3429 omit_two_operands_loc (location_t loc, tree type, tree result,
3430 tree omitted1, tree omitted2)
3432 tree t = fold_convert_loc (loc, type, result);
/* Wrap OMITTED2 first and OMITTED1 outermost, so the resulting tree is
   (OMITTED1, (OMITTED2, RESULT)) and OMITTED1 is evaluated first, as the
   function contract requires. */
3434 if (TREE_SIDE_EFFECTS (omitted2))
3435 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3436 if (TREE_SIDE_EFFECTS (omitted1))
3437 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
/* Only mark the result as a non-lvalue if we added no COMPOUND_EXPR. */
3439 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3443 /* Return a simplified tree node for the truth-negation of ARG. This
3444 never alters ARG itself. We assume that ARG is an operation that
3445 returns a truth value (0 or 1).
3447 FIXME: one would think we would fold the result, but it causes
3448 problems with the dominator optimizer. */
3451 fold_truth_not_expr (location_t loc, tree arg)
3453 tree type = TREE_TYPE (arg);
3454 enum tree_code code = TREE_CODE (arg);
3455 location_t loc1, loc2;
3457 /* If this is a comparison, we can simply invert it, except for
3458 floating-point non-equality comparisons, in which case we just
3459 enclose a TRUTH_NOT_EXPR around what we have. */
3461 if (TREE_CODE_CLASS (code) == tcc_comparison)
3463 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3464 if (FLOAT_TYPE_P (op_type)
3465 && flag_trapping_math
3466 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3467 && code != NE_EXPR && code != EQ_EXPR)
3470 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3471 if (code == ERROR_MARK)
3474 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3475 TREE_OPERAND (arg, 1));
/* A constant truth value negates directly: zero -> true, nonzero -> false. */
3481 return constant_boolean_node (integer_zerop (arg), type);
3483 case TRUTH_AND_EXPR:
3484 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3485 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3486 return build2_loc (loc, TRUTH_OR_EXPR, type,
3487 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3488 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3491 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3492 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3493 return build2_loc (loc, TRUTH_AND_EXPR, type,
3494 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3495 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3497 case TRUTH_XOR_EXPR:
3498 /* Here we can invert either operand. We invert the first operand
3499 unless the second operand is a TRUTH_NOT_EXPR in which case our
3500 result is the XOR of the first operand with the inside of the
3501 negation of the second operand. */
3503 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3504 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3505 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3507 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3508 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3509 TREE_OPERAND (arg, 1));
3511 case TRUTH_ANDIF_EXPR:
3512 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3513 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3514 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3515 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3516 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3518 case TRUTH_ORIF_EXPR:
3519 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3520 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3521 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3522 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3523 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3525 case TRUTH_NOT_EXPR:
3526 return TREE_OPERAND (arg, 0);
3530 tree arg1 = TREE_OPERAND (arg, 1);
3531 tree arg2 = TREE_OPERAND (arg, 2);
3533 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3534 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3536 /* A COND_EXPR may have a throw as one operand, which
3537 then has void type. Just leave void operands
3539 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3540 VOID_TYPE_P (TREE_TYPE (arg1))
3541 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3542 VOID_TYPE_P (TREE_TYPE (arg2))
3543 ? arg2 : invert_truthvalue_loc (loc2, arg2));
/* For a COMPOUND_EXPR, keep the first (side-effect) operand and only
   invert the value-producing second operand. */
3547 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3548 return build2_loc (loc, COMPOUND_EXPR, type,
3549 TREE_OPERAND (arg, 0),
3550 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3552 case NON_LVALUE_EXPR:
3553 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3554 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3557 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3558 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3560 /* ... fall through ... */
3563 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3564 return build1_loc (loc, TREE_CODE (arg), type,
3565 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
/* (X & 1) can be inverted as X == 0; other masks are left alone. */
3568 if (!integer_onep (TREE_OPERAND (arg, 1)))
3570 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3573 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3575 case CLEANUP_POINT_EXPR:
3576 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3577 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3578 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3585 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3586 assume that ARG is an operation that returns a truth value (0 or 1
3587 for scalars, 0 or -1 for vectors). Return the folded expression if
3588 folding is successful. Otherwise, return NULL_TREE. */
3591 fold_invert_truthvalue (location_t loc, tree arg)
3593 tree type = TREE_TYPE (arg);
/* NOTE(review): presumably the VECTOR_TYPE_P test selects a bitwise NOT
   code for vector truth values (0/-1 per the header comment) versus
   TRUTH_NOT_EXPR for scalars — confirm against the full source. */
3594 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3600 /* Return a simplified tree node for the truth-negation of ARG. This
3601 never alters ARG itself. We assume that ARG is an operation that
3602 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3605 invert_truthvalue_loc (location_t loc, tree arg)
/* Inverting an erroneous tree is a no-op. */
3607 if (TREE_CODE (arg) == ERROR_MARK)
3610 tree type = TREE_TYPE (arg);
/* NOTE(review): like fold_invert_truthvalue, this presumably picks a
   vector-aware NOT code based on VECTOR_TYPE_P — confirm against the
   full source. */
3611 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3617 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3618 with code CODE. This optimization is unsafe. */
3620 distribute_real_division (location_t loc, enum tree_code code, tree type,
3621 tree arg0, tree arg1)
3623 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3624 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3626 /* (A / C) +- (B / C) -> (A +- B) / C. */
3628 && operand_equal_p (TREE_OPERAND (arg0, 1),
3629 TREE_OPERAND (arg1, 1), 0))
3630 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3631 fold_build2_loc (loc, code, type,
3632 TREE_OPERAND (arg0, 0),
3633 TREE_OPERAND (arg1, 0)),
3634 TREE_OPERAND (arg0, 1));
3636 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3637 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3638 TREE_OPERAND (arg1, 0), 0)
3639 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3640 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3642 REAL_VALUE_TYPE r0, r1;
3643 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3644 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Fold the reciprocals 1/C1 and 1/C2 at compile time, then combine them
   with CODE to get the single constant multiplier. */
3646 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3648 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3649 real_arithmetic (&r0, code, &r0, &r1);
3650 return fold_build2_loc (loc, MULT_EXPR, type,
3651 TREE_OPERAND (arg0, 0),
3652 build_real (type, r0));
3658 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3659 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3662 make_bit_field_ref (location_t loc, tree inner, tree type,
3663 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3665 tree result, bftype;
/* If INNER is an integral or pointer object whose full size is exactly
   BITSIZE bits, no bit-field extraction is needed: a conversion does. */
3669 tree size = TYPE_SIZE (TREE_TYPE (inner));
3670 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3671 || POINTER_TYPE_P (TREE_TYPE (inner)))
3672 && tree_fits_shwi_p (size)
3673 && tree_to_shwi (size) == bitsize)
3674 return fold_convert_loc (loc, type, inner);
/* Otherwise make sure the field type has the requested precision and
   signedness before building the BIT_FIELD_REF. */
3678 if (TYPE_PRECISION (bftype) != bitsize
3679 || TYPE_UNSIGNED (bftype) == !unsignedp)
3680 bftype = build_nonstandard_integer_type (bitsize, 0);
3682 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3683 size_int (bitsize), bitsize_int (bitpos));
3686 result = fold_convert_loc (loc, type, result);
3691 /* Optimize a bit-field compare.
3693 There are two cases: First is a compare against a constant and the
3694 second is a comparison of two items where the fields are at the same
3695 bit position relative to the start of a chunk (byte, halfword, word)
3696 large enough to contain it. In these cases we can avoid the shift
3697 implicit in bitfield extractions.
3699 For constants, we emit a compare of the shifted constant with the
3700 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3701 compared. For two fields at the same position, we do the ANDs with the
3702 similar mask and compare the result of the ANDs.
3704 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3705 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3706 are the left and right operands of the comparison, respectively.
3708 If the optimization described above can be done, we return the resulting
3709 tree. Otherwise we return zero. */
3712 optimize_bit_field_compare (location_t loc, enum tree_code code,
3713 tree compare_type, tree lhs, tree rhs)
3715 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3716 tree type = TREE_TYPE (lhs);
3718 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3719 machine_mode lmode, rmode, nmode;
3720 int lunsignedp, runsignedp;
3721 int lvolatilep = 0, rvolatilep = 0;
3722 tree linner, rinner = NULL_TREE;
3726 /* Get all the information about the extractions being done. If the bit size
3727 is the same as the size of the underlying object, we aren't doing an
3728 extraction at all and so can do nothing. We also don't want to
3729 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3730 then will no longer be able to replace it. */
3731 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3732 &lunsignedp, &lvolatilep, false);
3733 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3734 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3739 /* If this is not a constant, we can only do something if bit positions,
3740 sizes, and signedness are the same. */
3741 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3742 &runsignedp, &rvolatilep, false);
3744 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3745 || lunsignedp != runsignedp || offset != 0
3746 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3750 /* See if we can find a mode to refer to this field. We should be able to,
3751 but fail if we can't. */
3752 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3753 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3754 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3755 TYPE_ALIGN (TREE_TYPE (rinner))),
3757 if (nmode == VOIDmode)
3760 /* Set signed and unsigned types of the precision of this mode for the
3762 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3764 /* Compute the bit position and size for the new reference and our offset
3765 within it. If the new reference is the same size as the original, we
3766 won't optimize anything, so return zero. */
3767 nbitsize = GET_MODE_BITSIZE (nmode);
3768 nbitpos = lbitpos & ~ (nbitsize - 1);
3770 if (nbitsize == lbitsize)
/* Adjust the bit position for big-endian bit numbering. */
3773 if (BYTES_BIG_ENDIAN)
3774 lbitpos = nbitsize - lbitsize - lbitpos;
3776 /* Make the mask to be used against the extracted field. */
3777 mask = build_int_cst_type (unsigned_type, -1);
3778 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3779 mask = const_binop (RSHIFT_EXPR, mask,
3780 size_int (nbitsize - lbitsize - lbitpos));
3783 /* If not comparing with constant, just rework the comparison
3785 return fold_build2_loc (loc, code, compare_type,
3786 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3787 make_bit_field_ref (loc, linner,
3792 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3793 make_bit_field_ref (loc, rinner,
3799 /* Otherwise, we are handling the constant case. See if the constant is too
3800 big for the field. Warn and return a tree for 0 (false) if so. We do
3801 this not only for its own sake, but to avoid having to test for this
3802 error case below. If we didn't, we might generate wrong code.
3804 For unsigned fields, the constant shifted right by the field length should
3805 be all zero. For signed fields, the high-order bits should agree with
3810 if (wi::lrshift (rhs, lbitsize) != 0)
3812 warning (0, "comparison is always %d due to width of bit-field",
3814 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: all bits above the field must match the sign bit,
   i.e. the arithmetic shift must yield all-zeros or all-ones. */
3819 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3820 if (tem != 0 && tem != -1)
3822 warning (0, "comparison is always %d due to width of bit-field",
3824 return constant_boolean_node (code == NE_EXPR, compare_type);
3828 /* Single-bit compares should always be against zero. */
3829 if (lbitsize == 1 && ! integer_zerop (rhs))
3831 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3832 rhs = build_int_cst (type, 0);
3835 /* Make a new bitfield reference, shift the constant over the
3836 appropriate number of bits and mask it with the computed mask
3837 (in case this was a signed field). If we changed it, make a new one. */
3838 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3840 rhs = const_binop (BIT_AND_EXPR,
3841 const_binop (LSHIFT_EXPR,
3842 fold_convert_loc (loc, unsigned_type, rhs),
3843 size_int (lbitpos)),
3846 lhs = build2_loc (loc, code, compare_type,
3847 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3851 /* Subroutine for fold_truth_andor_1: decode a field reference.
3853 If EXP is a comparison reference, we return the innermost reference.
3855 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3856 set to the starting bit number.
3858 If the innermost field can be completely contained in a mode-sized
3859 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3861 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3862 otherwise it is not changed.
3864 *PUNSIGNEDP is set to the signedness of the field.
3866 *PMASK is set to the mask used. This is either contained in a
3867 BIT_AND_EXPR or derived from the width of the field.
3869 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3871 Return 0 if this is not a component reference or is one that we can't
3872 do anything with. */
3875 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3876 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3877 int *punsignedp, int *pvolatilep,
3878 tree *pmask, tree *pand_mask)
3880 tree outer_type = 0;
3882 tree mask, inner, offset;
3884 unsigned int precision;
3886 /* All the optimizations using this function assume integer fields.
3887 There are problems with FP fields since the type_for_size call
3888 below can fail for, e.g., XFmode. */
3889 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3892 /* We are interested in the bare arrangement of bits, so strip everything
3893 that doesn't affect the machine mode. However, record the type of the
3894 outermost expression if it may matter below. */
3895 if (CONVERT_EXPR_P (exp)
3896 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3897 outer_type = TREE_TYPE (exp);
3900 if (TREE_CODE (exp) == BIT_AND_EXPR)
3902 and_mask = TREE_OPERAND (exp, 1);
3903 exp = TREE_OPERAND (exp, 0);
3904 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3905 if (TREE_CODE (and_mask) != INTEGER_CST)
3909 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3910 punsignedp, pvolatilep, false);
3911 if ((inner == exp && and_mask == 0)
3912 || *pbitsize < 0 || offset != 0
3913 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3916 /* If the number of bits in the reference is the same as the bitsize of
3917 the outer type, then the outer type gives the signedness. Otherwise
3918 (in case of a small bitfield) the signedness is unchanged. */
3919 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3920 *punsignedp = TYPE_UNSIGNED (outer_type);
3922 /* Compute the mask to access the bitfield. */
3923 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3924 precision = TYPE_PRECISION (unsigned_type);
3926 mask = build_int_cst_type (unsigned_type, -1);
3928 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3929 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3931 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3933 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3934 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3937 *pand_mask = and_mask;
3941 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3942 bit positions and MASK is SIGNED. */
/* Return nonzero iff MASK is a constant of SIZE low-order one bits and
   its type is signed (see the in-body comment for why unsigned types are
   rejected).  NOTE(review): some lines are elided in this extraction.  */
3945 all_ones_mask_p (const_tree mask, unsigned int size)
3947 tree type = TREE_TYPE (mask);
3948 unsigned int precision = TYPE_PRECISION (type);
3950 /* If this function returns true when the type of the mask is
3951 UNSIGNED, then there will be errors. In particular see
3952 gcc.c-torture/execute/990326-1.c. There does not appear to be
3953 any documentation paper trail as to why this is so. But the pre
3954 wide-int worked with that restriction and it has been preserved
3956 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
/* Compare MASK against SIZE low-order ones at this precision.  */
3959 return wi::mask (size, false, precision) == mask;
3962 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3963 represents the sign bit of EXP's type. If EXP represents a sign
3964 or zero extension, also test VAL against the unextended type.
3965 The return value is the (sub)expression whose sign bit is VAL,
3966 or NULL_TREE otherwise. */
/* Determine whether VAL is the integer constant with only the sign bit
   of EXP's type set; returns the (sub)expression whose sign bit is VAL,
   or NULL_TREE (the NULL returns are elided in this extraction).  */
3969 sign_bit_p (tree exp, const_tree val)
3974 /* Tree EXP must have an integral type. */
3975 t = TREE_TYPE (exp);
3976 if (! INTEGRAL_TYPE_P (t))
3979 /* Tree VAL must be an integer constant. */
3980 if (TREE_CODE (val) != INTEGER_CST
3981 || TREE_OVERFLOW (val))
/* Check whether VAL is exactly the sign bit at EXP's precision.  */
3984 width = TYPE_PRECISION (t);
3985 if (wi::only_sign_bit_p (val, width))
3988 /* Handle extension from a narrower type. */
3989 if (TREE_CODE (exp) == NOP_EXPR
3990 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
/* Recurse on the unextended operand so VAL can be matched against the
   narrower type's sign bit as well.  */
3991 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3996 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3997 to be evaluated unconditionally. */
/* Return nonzero when EXP is cheap and safe to evaluate unconditionally:
   a constant, an SSA name, or (per the visible predicate) a non-volatile,
   non-addressable local declaration.  NOTE(review): lines are elided in
   this extraction; the DECL test that opens the big conjunction is not
   visible here.  */
4000 simple_operand_p (const_tree exp)
4002 /* Strip any conversions that don't change the machine mode. */
4005 return (CONSTANT_CLASS_P (exp)
4006 || TREE_CODE (exp) == SSA_NAME
4008 && ! TREE_ADDRESSABLE (exp)
4009 && ! TREE_THIS_VOLATILE (exp)
4010 && ! DECL_NONLOCAL (exp)
4011 /* Don't regard global variables as simple. They may be
4012 allocated in ways unknown to the compiler (shared memory,
4013 #pragma weak, etc). */
4014 && ! TREE_PUBLIC (exp)
4015 && ! DECL_EXTERNAL (exp)
4016 /* Weakrefs are not safe to be read, since they can be NULL.
4017 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4018 have DECL_WEAK flag set. */
4019 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4020 /* Loading a static variable is unduly expensive, but global
4021 registers aren't expensive. */
4022 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4025 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4026 to be evaluated unconditionally.
4027 In addition to simple_operand_p, we assume that comparisons, conversions,
4028 and logic-not operations are simple, if their operands are simple, too. */
/* Like simple_operand_p, but additionally treats comparisons, conversions
   and TRUTH_NOT_EXPR as simple when their operands are simple.  Rejects
   anything with side effects or that could trap.  */
4031 simple_operand_p_2 (tree exp)
4033 enum tree_code code;
/* Side effects or possible traps disqualify EXP outright (the early
   return is elided in this extraction).  */
4035 if (TREE_SIDE_EFFECTS (exp)
4036 || tree_could_trap_p (exp))
/* Strip conversions before classifying.  */
4039 while (CONVERT_EXPR_P (exp))
4040 exp = TREE_OPERAND (exp, 0);
4042 code = TREE_CODE (exp);
/* A comparison is simple iff both of its operands are.  */
4044 if (TREE_CODE_CLASS (code) == tcc_comparison)
4045 return (simple_operand_p (TREE_OPERAND (exp, 0))
4046 && simple_operand_p (TREE_OPERAND (exp, 1)));
/* Logical not: recurse on the operand.  */
4048 if (code == TRUTH_NOT_EXPR)
4049 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
/* Otherwise defer to the plain-operand predicate.  */
4051 return simple_operand_p (exp);
4055 /* The following functions are subroutines to fold_range_test and allow it to
4056 try to change a logical combination of comparisons into a range test.
4059 X == 2 || X == 3 || X == 4 || X == 5
4063 (unsigned) (X - 2) <= 3
4065 We describe each set of comparisons as being either inside or outside
4066 a range, using a variable named like IN_P, and then describe the
4067 range with a lower and upper bound. If one of the bounds is omitted,
4068 it represents either the highest or lowest value of the type.
4070 In the comments below, we represent a range by two numbers in brackets
4071 preceded by a "+" to designate being inside that range, or a "-" to
4072 designate being outside that range, so the condition can be inverted by
4073 flipping the prefix. An omitted bound is represented by a "-". For
4074 example, "- [-, 10]" means being outside the range starting at the lowest
4075 possible value and ending at 10, in other words, being greater than 10.
4076 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4079 We set up things so that the missing bounds are handled in a consistent
4080 manner so neither a missing bound nor "true" and "false" need to be
4081 handled using a special case. */
4083 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4084 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4085 and UPPER1_P are nonzero if the respective argument is an upper bound
4086 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4087 must be specified for a comparison. ARG1 will be converted to ARG0's
4088 type if both are specified. */
/* Apply CODE to ARG0/ARG1 where either may be 0, meaning an unlimited
   bound; see the block comment above for the UPPER*_P conventions.
   NOTE(review): this extraction elides several lines (the switch header,
   `break`s and a default gcc_unreachable-style arm are not visible).  */
4091 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4092 tree arg1, int upper1_p)
4098 /* If neither arg represents infinity, do the normal operation.
4099 Else, if not a comparison, return infinity. Else handle the special
4100 comparison rules. Note that most of the cases below won't occur, but
4101 are handled for consistency. */
4103 if (arg0 != 0 && arg1 != 0)
4105 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4106 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a constant fold result is useful; otherwise report failure.  */
4108 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
/* Non-comparison with an infinite operand: result is infinite (the
   return for this path is elided here).  */
4111 if (TREE_CODE_CLASS (code) != tcc_comparison)
4114 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4115 for neither. In real maths, we cannot assume open ended ranges are
4116 the same. But, this is computer arithmetic, where numbers are finite.
4117 We can therefore make the transformation of any unbounded range with
4118 the value Z, Z being greater than any representable number. This permits
4119 us to treat unbounded ranges as equal. */
4120 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4121 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the signed sentinels according to CODE (switch arms; the
   `case` labels are elided in this extraction).  */
4125 result = sgn0 == sgn1;
4128 result = sgn0 != sgn1;
4131 result = sgn0 < sgn1;
4134 result = sgn0 <= sgn1;
4137 result = sgn0 > sgn1;
4140 result = sgn0 >= sgn1;
/* Package the boolean outcome as a tree constant of TYPE.  */
4146 return constant_boolean_node (result, type);
4149 /* Helper routine for make_range. Perform one step for it, return
4150 new expression if the loop should continue or NULL_TREE if it should
/* One step of make_range's loop: refine the (*p_low, *p_high, *p_in_p)
   range description according to CODE applied to ARG0/ARG1, returning the
   next expression to examine or NULL_TREE to stop.  NOTE(review): this
   extraction elides many lines of the function (the switch header, most
   `break`/`return` statements and several case labels); comments below
   only annotate the visible statements.  */
4154 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4155 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4156 bool *strict_overflow_p)
4158 tree arg0_type = TREE_TYPE (arg0);
4159 tree n_low, n_high, low = *p_low, high = *p_high;
4160 int in_p = *p_in_p, n_in_p;
4164 case TRUTH_NOT_EXPR:
4165 /* We can only do something if the range is testing for zero. */
4166 if (low == NULL_TREE || high == NULL_TREE
4167 || ! integer_zerop (low) || ! integer_zerop (high))
4172 case EQ_EXPR: case NE_EXPR:
4173 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4174 /* We can only do something if the range is testing for zero
4175 and if the second operand is an integer constant. Note that
4176 saying something is "in" the range we make is done by
4177 complementing IN_P since it will set in the initial case of
4178 being not equal to zero; "out" is leaving it alone. */
4179 if (low == NULL_TREE || high == NULL_TREE
4180 || ! integer_zerop (low) || ! integer_zerop (high)
4181 || TREE_CODE (arg1) != INTEGER_CST)
/* Translate each comparison code into a bounded range on ARG1.  */
4186 case NE_EXPR: /* - [c, c] */
4189 case EQ_EXPR: /* + [c, c] */
4190 in_p = ! in_p, low = high = arg1;
4192 case GT_EXPR: /* - [-, c] */
4193 low = 0, high = arg1;
4195 case GE_EXPR: /* + [c, -] */
4196 in_p = ! in_p, low = arg1, high = 0;
4198 case LT_EXPR: /* - [c, -] */
4199 low = arg1, high = 0;
4201 case LE_EXPR: /* + [-, c] */
4202 in_p = ! in_p, low = 0, high = arg1;
4208 /* If this is an unsigned comparison, we also know that EXP is
4209 greater than or equal to zero. We base the range tests we make
4210 on that fact, so we record it here so we can parse existing
4211 range tests. We test arg0_type since often the return type
4212 of, e.g. EQ_EXPR, is boolean. */
4213 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4215 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4217 build_int_cst (arg0_type, 0),
4221 in_p = n_in_p, low = n_low, high = n_high;
4223 /* If the high bound is missing, but we have a nonzero low
4224 bound, reverse the range so it goes from zero to the low bound
4226 if (high == 0 && low && ! integer_zerop (low))
4229 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4230 build_int_cst (TREE_TYPE (low), 1), 0);
4231 low = build_int_cst (arg0_type, 0);
/* NEGATE_EXPR handling (case label elided in this extraction).  */
4241 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4242 low and high are non-NULL, then normalize will DTRT. */
4243 if (!TYPE_UNSIGNED (arg0_type)
4244 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4246 if (low == NULL_TREE)
4247 low = TYPE_MIN_VALUE (arg0_type);
4248 if (high == NULL_TREE)
4249 high = TYPE_MAX_VALUE (arg0_type);
4252 /* (-x) IN [a,b] -> x in [-b, -a] */
4253 n_low = range_binop (MINUS_EXPR, exp_type,
4254 build_int_cst (exp_type, 0),
4256 n_high = range_binop (MINUS_EXPR, exp_type,
4257 build_int_cst (exp_type, 0),
4259 if (n_high != 0 && TREE_OVERFLOW (n_high))
/* BIT_NOT is rewritten as -x - 1 so the NEGATE logic can apply.  */
4265 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4266 build_int_cst (exp_type, 1));
/* PLUS/MINUS with a constant: shift the bounds by the constant.  */
4270 if (TREE_CODE (arg1) != INTEGER_CST)
4273 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4274 move a constant to the other side. */
4275 if (!TYPE_UNSIGNED (arg0_type)
4276 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4279 /* If EXP is signed, any overflow in the computation is undefined,
4280 so we don't worry about it so long as our computations on
4281 the bounds don't overflow. For unsigned, overflow is defined
4282 and this is exactly the right thing. */
4283 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4284 arg0_type, low, 0, arg1, 0);
4285 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4286 arg0_type, high, 1, arg1, 0);
4287 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4288 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4291 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4292 *strict_overflow_p = true;
4295 /* Check for an unsigned range which has wrapped around the maximum
4296 value thus making n_high < n_low, and normalize it. */
4297 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4299 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4300 build_int_cst (TREE_TYPE (n_high), 1), 0);
4301 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4302 build_int_cst (TREE_TYPE (n_low), 1), 0);
4304 /* If the range is of the form +/- [ x+1, x ], we won't
4305 be able to normalize it. But then, it represents the
4306 whole range or the empty set, so make it
4308 if (tree_int_cst_equal (n_low, low)
4309 && tree_int_cst_equal (n_high, high))
4315 low = n_low, high = n_high;
/* Conversions: translate the bounds into ARG0's type.  */
4323 case NON_LVALUE_EXPR:
4324 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4327 if (! INTEGRAL_TYPE_P (arg0_type)
4328 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4329 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4332 n_low = low, n_high = high;
4335 n_low = fold_convert_loc (loc, arg0_type, n_low);
4338 n_high = fold_convert_loc (loc, arg0_type, n_high);
4340 /* If we're converting arg0 from an unsigned type, to exp,
4341 a signed type, we will be doing the comparison as unsigned.
4342 The tests above have already verified that LOW and HIGH
4345 So we have to ensure that we will handle large unsigned
4346 values the same way that the current signed bounds treat
4349 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4353 /* For fixed-point modes, we need to pass the saturating flag
4354 as the 2nd parameter. */
4355 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4357 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4358 TYPE_SATURATING (arg0_type));
4361 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4363 /* A range without an upper bound is, naturally, unbounded.
4364 Since convert would have cropped a very large value, use
4365 the max value for the destination type. */
4367 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4368 : TYPE_MAX_VALUE (arg0_type);
4370 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4371 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4372 fold_convert_loc (loc, arg0_type,
4374 build_int_cst (arg0_type, 1));
4376 /* If the low bound is specified, "and" the range with the
4377 range for which the original unsigned value will be
4381 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4382 1, fold_convert_loc (loc, arg0_type,
4387 in_p = (n_in_p == in_p);
4391 /* Otherwise, "or" the range with the range of the input
4392 that will be interpreted as negative. */
4393 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4394 1, fold_convert_loc (loc, arg0_type,
4399 in_p = (in_p != n_in_p);
4413 /* Given EXP, a logical expression, set the range it is testing into
4414 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4415 actually being tested. *PLOW and *PHIGH will be made of the same
4416 type as the returned expression. If EXP is not a comparison, we
4417 will most likely not be returning a useful value and range. Set
4418 *STRICT_OVERFLOW_P to true if the return value is only valid
4419 because signed overflow is undefined; otherwise, do not change
4420 *STRICT_OVERFLOW_P. */
/* Driver for make_range_step: repeatedly refine the range that EXP is
   testing, storing the final description through PIN_P/PLOW/PHIGH and
   returning the expression actually being tested.  NOTE(review): the
   loop construct and several returns are elided in this extraction.  */
4423 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4424 bool *strict_overflow_p)
4426 enum tree_code code;
4427 tree arg0, arg1 = NULL_TREE;
4428 tree exp_type, nexp;
4431 location_t loc = EXPR_LOCATION (exp);
4433 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4434 and see if we can refine the range. Some of the cases below may not
4435 happen, but it doesn't seem worth worrying about this. We "continue"
4436 the outer loop when we've changed something; otherwise we "break"
4437 the switch, which will "break" the while. */
4440 low = high = build_int_cst (TREE_TYPE (exp), 0);
/* Each iteration re-reads the current expression's code and type.  */
4444 code = TREE_CODE (exp);
4445 exp_type = TREE_TYPE (exp);
/* Pick up operands ARG0 and (for binary-like codes) ARG1.  */
4448 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4450 if (TREE_OPERAND_LENGTH (exp) > 0)
4451 arg0 = TREE_OPERAND (exp, 0);
4452 if (TREE_CODE_CLASS (code) == tcc_binary
4453 || TREE_CODE_CLASS (code) == tcc_comparison
4454 || (TREE_CODE_CLASS (code) == tcc_expression
4455 && TREE_OPERAND_LENGTH (exp) > 1))
4456 arg1 = TREE_OPERAND (exp, 1);
4458 if (arg0 == NULL_TREE)
/* One refinement step; NULL_TREE means no further progress.  */
4461 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4462 &high, &in_p, strict_overflow_p);
4463 if (nexp == NULL_TREE)
4468 /* If EXP is a constant, we can evaluate whether this is true or false. */
4469 if (TREE_CODE (exp) == INTEGER_CST)
4471 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4473 && integer_onep (range_binop (LE_EXPR, integer_type_node,
/* Publish the computed range to the caller.  */
4479 *pin_p = in_p, *plow = low, *phigh = high;
4483 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4484 type, TYPE, return an expression to test if EXP is in (or out of, depending
4485 on IN_P) the range. Return 0 if the test couldn't be created. */
/* Build an expression testing whether EXP lies in (IN_P) or out of the
   range [LOW, HIGH]; a null bound means unbounded.  Returns 0 when no
   test can be built.  NOTE(review): several returns, `if` headers and
   declarations are elided in this extraction; comments below annotate
   only the visible statements.  */
4488 build_range_check (location_t loc, tree type, tree exp, int in_p,
4489 tree low, tree high)
4491 tree etype = TREE_TYPE (exp), value;
4493 /* Disable this optimization for function pointer expressions
4494 on targets that require function pointer canonicalization. */
4495 if (targetm.have_canonicalize_funcptr_for_compare ()
4496 && TREE_CODE (etype) == POINTER_TYPE
4497 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is the inversion of the "in range" test.  */
4502 value = build_range_check (loc, type, exp, 1, low, high);
4504 return invert_truthvalue_loc (loc, value);
/* Unbounded both ways: always true; EXP is kept for side effects.  */
4509 if (low == 0 && high == 0)
4510 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
/* Only an upper bound: EXP <= HIGH.  */
4513 return fold_build2_loc (loc, LE_EXPR, type, exp,
4514 fold_convert_loc (loc, etype, high));
/* Only a lower bound: EXP >= LOW.  */
4517 return fold_build2_loc (loc, GE_EXPR, type, exp,
4518 fold_convert_loc (loc, etype, low));
/* Degenerate single-value range: EXP == LOW.  */
4520 if (operand_equal_p (low, high, 0))
4521 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4522 fold_convert_loc (loc, etype, low));
/* [0, HIGH]: do the comparison unsigned so it becomes EXP <= HIGH.  */
4524 if (integer_zerop (low))
4526 if (! TYPE_UNSIGNED (etype))
4528 etype = unsigned_type_for (etype);
4529 high = fold_convert_loc (loc, etype, high);
4530 exp = fold_convert_loc (loc, etype, exp);
4532 return build_range_check (loc, type, exp, 1, 0, high);
4535 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4536 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4538 int prec = TYPE_PRECISION (etype);
/* HIGH must be the type's signed max (all ones below the sign bit).  */
4540 if (wi::mask (prec - 1, false, prec) == high)
4542 if (TYPE_UNSIGNED (etype))
4544 tree signed_etype = signed_type_for (etype);
4545 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4547 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4549 etype = signed_etype;
4550 exp = fold_convert_loc (loc, etype, exp);
/* (signed)EXP > 0 is equivalent to 1 <= EXP <= signed-max.  */
4552 return fold_build2_loc (loc, GT_EXPR, type, exp,
4553 build_int_cst (etype, 0));
4557 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4558 This requires wrap-around arithmetics for the type of the expression.
4559 First make sure that arithmetics in this type is valid, then make sure
4560 that it wraps around. */
4561 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4562 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4563 TYPE_UNSIGNED (etype));
4565 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4567 tree utype, minv, maxv;
4569 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4570 for the type in question, as we rely on this here. */
4571 utype = unsigned_type_for (etype);
4572 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4573 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4574 build_int_cst (TREE_TYPE (maxv), 1), 1);
4575 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4577 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Rewrite the range test in the wrapping type ETYPE.  */
4584 high = fold_convert_loc (loc, etype, high);
4585 low = fold_convert_loc (loc, etype, low);
4586 exp = fold_convert_loc (loc, etype, exp);
4588 value = const_binop (MINUS_EXPR, high, low);
/* Pointers need POINTER_PLUS with a negated offset instead of MINUS.  */
4591 if (POINTER_TYPE_P (etype))
4593 if (value != 0 && !TREE_OVERFLOW (value))
4595 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4596 return build_range_check (loc, type,
4597 fold_build_pointer_plus_loc (loc, exp, low),
4598 1, build_int_cst (etype, 0), value);
/* Integer case: test EXP - LOW against [0, HIGH - LOW].  */
4603 if (value != 0 && !TREE_OVERFLOW (value))
4604 return build_range_check (loc, type,
4605 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4606 1, build_int_cst (etype, 0), value);
4611 /* Return the predecessor of VAL in its type, handling the infinite case. */
/* Return VAL - 1 in VAL's type; the infinite case (VAL is the type's
   minimum) is handled by an early return that is elided here.  */
4614 range_predecessor (tree val)
4616 tree type = TREE_TYPE (val);
4618 if (INTEGRAL_TYPE_P (type)
4619 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
/* Otherwise compute VAL - 1 via range_binop.  */
4622 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4623 build_int_cst (TREE_TYPE (val), 1), 0);
4626 /* Return the successor of VAL in its type, handling the infinite case. */
/* Return VAL + 1 in VAL's type; the infinite case (VAL is the type's
   maximum) is handled by an early return that is elided here.  */
4629 range_successor (tree val)
4631 tree type = TREE_TYPE (val);
4633 if (INTEGRAL_TYPE_P (type)
4634 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
/* Otherwise compute VAL + 1 via range_binop.  */
4637 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4638 build_int_cst (TREE_TYPE (val), 1), 0);
4641 /* Given two ranges, see if we can merge them into one. Return 1 if we
4642 can, 0 if we can't. Set the output range into the specified parameters. */
/* Try to merge ranges (IN0_P, LOW0, HIGH0) and (IN1_P, LOW1, HIGH1)
   into a single range written through PIN_P/PLOW/PHIGH; returns 1 on
   success, 0 when the pair cannot be described as one range.
   NOTE(review): many lines are elided in this extraction (declarations,
   `if` headers, `else` arms, returns); comments below only annotate
   visible statements.  */
4645 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4646 tree high0, int in1_p, tree low1, tree high1)
/* Equality of the two lower and the two upper bounds (a null bound only
   equals another null bound).  */
4654 int lowequal = ((low0 == 0 && low1 == 0)
4655 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4656 low0, 0, low1, 0)));
4657 int highequal = ((high0 == 0 && high1 == 0)
4658 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4659 high0, 1, high1, 1)));
4661 /* Make range 0 be the range that starts first, or ends last if they
4662 start at the same value. Swap them if it isn't. */
4663 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4666 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4667 high1, 1, high0, 1))))
4669 temp = in0_p, in0_p = in1_p, in1_p = temp;
4670 tem = low0, low0 = low1, low1 = tem;
4671 tem = high0, high0 = high1, high1 = tem;
4674 /* Now flag two cases, whether the ranges are disjoint or whether the
4675 second range is totally subsumed in the first. Note that the tests
4676 below are simplified by the ones above. */
4677 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4678 high0, 1, low1, 0));
4679 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4680 high1, 1, high0, 1));
4682 /* We now have four cases, depending on whether we are including or
4683 excluding the two ranges. */
/* Case 1: both ranges included (the guarding `if` is elided).  */
4686 /* If they don't overlap, the result is false. If the second range
4687 is a subset it is the result. Otherwise, the range is from the start
4688 of the second to the end of the first. */
4690 in_p = 0, low = high = 0;
4692 in_p = 1, low = low1, high = high1;
4694 in_p = 1, low = low1, high = high0;
/* Case 2: first included, second excluded.  */
4697 else if (in0_p && ! in1_p)
4699 /* If they don't overlap, the result is the first range. If they are
4700 equal, the result is false. If the second range is a subset of the
4701 first, and the ranges begin at the same place, we go from just after
4702 the end of the second range to the end of the first. If the second
4703 range is not a subset of the first, or if it is a subset and both
4704 ranges end at the same place, the range starts at the start of the
4705 first range and ends just before the second range.
4706 Otherwise, we can't describe this as a single range. */
4708 in_p = 1, low = low0, high = high0;
4709 else if (lowequal && highequal)
4710 in_p = 0, low = high = 0;
4711 else if (subset && lowequal)
4713 low = range_successor (high1);
4718 /* We are in the weird situation where high0 > high1 but
4719 high1 has no successor. Punt. */
4723 else if (! subset || highequal)
4726 high = range_predecessor (low1);
4730 /* low0 < low1 but low1 has no predecessor. Punt. */
/* Case 3: first excluded, second included.  */
4738 else if (! in0_p && in1_p)
4740 /* If they don't overlap, the result is the second range. If the second
4741 is a subset of the first, the result is false. Otherwise,
4742 the range starts just after the first range and ends at the
4743 end of the second. */
4745 in_p = 1, low = low1, high = high1;
4746 else if (subset || highequal)
4747 in_p = 0, low = high = 0;
4750 low = range_successor (high0);
4755 /* high1 > high0 but high0 has no successor. Punt. */
/* Case 4: both ranges excluded.  */
4763 /* The case where we are excluding both ranges. Here the complex case
4764 is if they don't overlap. In that case, the only time we have a
4765 range is if they are adjacent. If the second is a subset of the
4766 first, the result is the first. Otherwise, the range to exclude
4767 starts at the beginning of the first range and ends at the end of the
/* Adjacency check: successor of HIGH0 equal to LOW1 merges the pair.  */
4771 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4772 range_successor (high0),
4774 in_p = 0, low = low0, high = high1;
4777 /* Canonicalize - [min, x] into - [-, x]. */
4778 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4779 switch (TREE_CODE (TREE_TYPE (low0)))
4782 if (TYPE_PRECISION (TREE_TYPE (low0))
4783 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4787 if (tree_int_cst_equal (low0,
4788 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4792 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4793 && integer_zerop (low0))
4800 /* Canonicalize - [x, max] into - [x, -]. */
4801 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4802 switch (TREE_CODE (TREE_TYPE (high1)))
4805 if (TYPE_PRECISION (TREE_TYPE (high1))
4806 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4810 if (tree_int_cst_equal (high1,
4811 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4815 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4816 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4818 build_int_cst (TREE_TYPE (high1), 1),
4826 /* The ranges might be also adjacent between the maximum and
4827 minimum values of the given type. For
4828 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4829 return + [x + 1, y - 1]. */
4830 if (low0 == 0 && high1 == 0)
4832 low = range_successor (high0);
4833 high = range_predecessor (low1);
4834 if (low == 0 || high == 0)
/* Fallback: exclusion of the subsuming/combined span.  */
4844 in_p = 0, low = low0, high = high0;
4846 in_p = 0, low = low0, high = high1;
/* Publish the merged range.  */
4849 *pin_p = in_p, *plow = low, *phigh = high;
4854 /* Subroutine of fold, looking inside expressions of the form
4855 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4856 of the COND_EXPR. This function is being used also to optimize
4857 A op B ? C : A, by reversing the comparison first.
4859 Return a folded expression whose code is not a COND_EXPR
4860 anymore, or NULL_TREE if no folding opportunity is found. */
4863 fold_cond_expr_with_comparison (location_t loc, tree type,
4864 tree arg0, tree arg1, tree arg2)
4866 enum tree_code comp_code = TREE_CODE (arg0);
4867 tree arg00 = TREE_OPERAND (arg0, 0);
4868 tree arg01 = TREE_OPERAND (arg0, 1);
4869 tree arg1_type = TREE_TYPE (arg1);
4875 /* If we have A op 0 ? A : -A, consider applying the following
4878 A == 0? A : -A same as -A
4879 A != 0? A : -A same as A
4880 A >= 0? A : -A same as abs (A)
4881 A > 0? A : -A same as abs (A)
4882 A <= 0? A : -A same as -abs (A)
4883 A < 0? A : -A same as -abs (A)
4885 None of these transformations work for modes with signed
4886 zeros. If A is +/-0, the first two transformations will
4887 change the sign of the result (from +0 to -0, or vice
4888 versa). The last four will fix the sign of the result,
4889 even though the original expressions could be positive or
4890 negative, depending on the sign of A.
4892 Note that all these transformations are correct if A is
4893 NaN, since the two alternatives (A and -A) are also NaNs. */
4894 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4895 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4896 ? real_zerop (arg01)
4897 : integer_zerop (arg01))
4898 && ((TREE_CODE (arg2) == NEGATE_EXPR
4899 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4900 /* In the case that A is of the form X-Y, '-A' (arg2) may
4901 have already been folded to Y-X, check for that. */
4902 || (TREE_CODE (arg1) == MINUS_EXPR
4903 && TREE_CODE (arg2) == MINUS_EXPR
4904 && operand_equal_p (TREE_OPERAND (arg1, 0),
4905 TREE_OPERAND (arg2, 1), 0)
4906 && operand_equal_p (TREE_OPERAND (arg1, 1),
4907 TREE_OPERAND (arg2, 0), 0))))
4912 tem = fold_convert_loc (loc, arg1_type, arg1);
4913 return pedantic_non_lvalue_loc (loc,
4914 fold_convert_loc (loc, type,
4915 negate_expr (tem)));
4918 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4921 if (flag_trapping_math)
4926 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4927 arg1 = fold_convert_loc (loc, signed_type_for
4928 (TREE_TYPE (arg1)), arg1);
4929 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4930 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4933 if (flag_trapping_math)
4937 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4938 arg1 = fold_convert_loc (loc, signed_type_for
4939 (TREE_TYPE (arg1)), arg1);
4940 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4941 return negate_expr (fold_convert_loc (loc, type, tem));
4943 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4947 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4948 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4949 both transformations are correct when A is NaN: A != 0
4950 is then true, and A == 0 is false. */
4952 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4953 && integer_zerop (arg01) && integer_zerop (arg2))
4955 if (comp_code == NE_EXPR)
4956 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4957 else if (comp_code == EQ_EXPR)
4958 return build_zero_cst (type);
4961 /* Try some transformations of A op B ? A : B.
4963 A == B? A : B same as B
4964 A != B? A : B same as A
4965 A >= B? A : B same as max (A, B)
4966 A > B? A : B same as max (B, A)
4967 A <= B? A : B same as min (A, B)
4968 A < B? A : B same as min (B, A)
4970 As above, these transformations don't work in the presence
4971 of signed zeros. For example, if A and B are zeros of
4972 opposite sign, the first two transformations will change
4973 the sign of the result. In the last four, the original
4974 expressions give different results for (A=+0, B=-0) and
4975 (A=-0, B=+0), but the transformed expressions do not.
4977 The first two transformations are correct if either A or B
4978 is a NaN. In the first transformation, the condition will
4979 be false, and B will indeed be chosen. In the case of the
4980 second transformation, the condition A != B will be true,
4981 and A will be chosen.
4983 The conversions to max() and min() are not correct if B is
4984 a number and A is not. The conditions in the original
4985 expressions will be false, so all four give B. The min()
4986 and max() versions would give a NaN instead. */
4987 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4988 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4989 /* Avoid these transformations if the COND_EXPR may be used
4990 as an lvalue in the C++ front-end. PR c++/19199. */
4992 || VECTOR_TYPE_P (type)
4993 || (! lang_GNU_CXX ()
4994 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4995 || ! maybe_lvalue_p (arg1)
4996 || ! maybe_lvalue_p (arg2)))
4998 tree comp_op0 = arg00;
4999 tree comp_op1 = arg01;
5000 tree comp_type = TREE_TYPE (comp_op0);
5002 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5003 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5013 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5015 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5020 /* In C++ a ?: expression can be an lvalue, so put the
5021 operand which will be used if they are equal first
5022 so that we can convert this back to the
5023 corresponding COND_EXPR. */
5024 if (!HONOR_NANS (arg1))
5026 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5027 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5028 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5029 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5030 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5031 comp_op1, comp_op0);
5032 return pedantic_non_lvalue_loc (loc,
5033 fold_convert_loc (loc, type, tem));
5040 if (!HONOR_NANS (arg1))
5042 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5043 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5044 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5045 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5046 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5047 comp_op1, comp_op0);
5048 return pedantic_non_lvalue_loc (loc,
5049 fold_convert_loc (loc, type, tem));
5053 if (!HONOR_NANS (arg1))
5054 return pedantic_non_lvalue_loc (loc,
5055 fold_convert_loc (loc, type, arg2));
5058 if (!HONOR_NANS (arg1))
5059 return pedantic_non_lvalue_loc (loc,
5060 fold_convert_loc (loc, type, arg1));
5063 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5068 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5069 we might still be able to simplify this. For example,
5070 if C1 is one less or one more than C2, this might have started
5071 out as a MIN or MAX and been transformed by this function.
5072 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5074 if (INTEGRAL_TYPE_P (type)
5075 && TREE_CODE (arg01) == INTEGER_CST
5076 && TREE_CODE (arg2) == INTEGER_CST)
5080 if (TREE_CODE (arg1) == INTEGER_CST)
5082 /* We can replace A with C1 in this case. */
5083 arg1 = fold_convert_loc (loc, type, arg01);
5084 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5087 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5088 MIN_EXPR, to preserve the signedness of the comparison. */
5089 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5091 && operand_equal_p (arg01,
5092 const_binop (PLUS_EXPR, arg2,
5093 build_int_cst (type, 1)),
5096 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5097 fold_convert_loc (loc, TREE_TYPE (arg00),
5099 return pedantic_non_lvalue_loc (loc,
5100 fold_convert_loc (loc, type, tem));
5105 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5107 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5109 && operand_equal_p (arg01,
5110 const_binop (MINUS_EXPR, arg2,
5111 build_int_cst (type, 1)),
5114 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5115 fold_convert_loc (loc, TREE_TYPE (arg00),
5117 return pedantic_non_lvalue_loc (loc,
5118 fold_convert_loc (loc, type, tem));
5123 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5124 MAX_EXPR, to preserve the signedness of the comparison. */
5125 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5127 && operand_equal_p (arg01,
5128 const_binop (MINUS_EXPR, arg2,
5129 build_int_cst (type, 1)),
5132 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5133 fold_convert_loc (loc, TREE_TYPE (arg00),
5135 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5140 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5141 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5143 && operand_equal_p (arg01,
5144 const_binop (PLUS_EXPR, arg2,
5145 build_int_cst (type, 1)),
5148 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5149 fold_convert_loc (loc, TREE_TYPE (arg00),
5151 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5165 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5166 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5167 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5171 /* EXP is some logical combination of boolean tests. See if we can
5172 merge it into some range test. Return the new tree if so. */
5175 fold_range_test (location_t loc, enum tree_code code, tree type,
/* Whether the outer operation is an OR (as opposed to an AND) variant.  */
5178 int or_op = (code == TRUTH_ORIF_EXPR
5179 || code == TRUTH_OR_EXPR);
5180 int in0_p, in1_p, in_p;
5181 tree low0, low1, low, high0, high1, high;
5182 bool strict_overflow_p = false;
5184 const char * const warnmsg = G_("assuming signed overflow does not occur "
5185 "when simplifying range test");
/* Range tests only make sense for integral result types.  */
5187 if (!INTEGRAL_TYPE_P (type))
/* Decompose each operand into an (in_p, low, high) range description;
   LHS/RHS presumably come back null when no range can be derived — the
   null checks below rely on that.  */
5190 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5191 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5193 /* If this is an OR operation, invert both sides; we will invert
5194 again at the end. */
5196 in0_p = ! in0_p, in1_p = ! in1_p;
5198 /* If both expressions are the same, if we can merge the ranges, and we
5199 can build the range test, return it or it inverted. If one of the
5200 ranges is always true or always false, consider it to be the same
5201 expression as the other. */
5202 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5203 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5205 && 0 != (tem = (build_range_check (loc, type,
5207 : rhs != 0 ? rhs : integer_zero_node,
5210 if (strict_overflow_p)
5211 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
/* Undo the inversion done above for the OR case.  */
5212 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5215 /* On machines where the branch cost is expensive, if this is a
5216 short-circuited branch and the underlying object on both sides
5217 is the same, make a non-short-circuit operation. */
5218 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5219 && lhs != 0 && rhs != 0
5220 && (code == TRUTH_ANDIF_EXPR
5221 || code == TRUTH_ORIF_EXPR)
5222 && operand_equal_p (lhs, rhs, 0))
5224 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5225 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5226 which cases we can't do this. */
5227 if (simple_operand_p (lhs))
5228 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5229 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5232 else if (!lang_hooks.decls.global_bindings_p ()
5233 && !CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand once via SAVE_EXPR and build a
   range check against the saved value for each side.  */
5235 tree common = save_expr (lhs);
5237 if (0 != (lhs = build_range_check (loc, type, common,
5238 or_op ? ! in0_p : in0_p,
5240 && (0 != (rhs = build_range_check (loc, type, common,
5241 or_op ? ! in1_p : in1_p,
5244 if (strict_overflow_p)
5245 fold_overflow_warning (warnmsg,
5246 WARN_STRICT_OVERFLOW_COMPARISON);
5247 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5248 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5257 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5258 bit value. Arrange things so the extra bits will be set to zero if and
5259 only if C is signed-extended to its full width. If MASK is nonzero,
5260 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5263 unextend (tree c, int p, int unsignedp, tree mask)
5265 tree type = TREE_TYPE (c);
5266 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* If C already occupies the full mode width, or the field is unsigned,
   there are no extension bits to adjust (the elided branch presumably
   just returns C after masking — confirm against the full source).  */
5269 if (p == modesize || unsignedp)
5272 /* We work by getting just the sign bit into the low-order bit, then
5273 into the high-order bit, then sign-extend. We then XOR that value
/* Extract bit P-1 of C (its sign bit as a P-bit value) into TEMP.  */
5275 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5277 /* We must use a signed type in order to get an arithmetic right shift.
5278 However, we must also avoid introducing accidental overflows, so that
5279 a subsequent call to integer_zerop will work. Hence we must
5280 do the type conversion here. At this point, the constant is either
5281 zero or one, and the conversion to a signed type can never overflow.
5282 We could get an overflow if this conversion is done anywhere else. */
5283 if (TYPE_UNSIGNED (type))
5284 temp = fold_convert (signed_type_for (type), temp);
/* Move the bit to the top of the mode, then arithmetic-shift right so it
   is replicated through bits P-1 .. modesize-1.  */
5286 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5287 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
/* If a MASK was supplied, restrict the extension bits to it.  */
5289 temp = const_binop (BIT_AND_EXPR, temp,
5290 fold_convert (TREE_TYPE (c), mask));
5291 /* If necessary, convert the type back to match the type of C. */
5292 if (TYPE_UNSIGNED (type))
5293 temp = fold_convert (type, temp);
/* XOR flips exactly the extension bits of C, per the comment above.  */
5295 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5298 /* For an expression that has the form
5302 we can drop one of the inner expressions and simplify to
5306 LOC is the location of the resulting expression. OP is the inner
5307 logical operation; the left-hand side in the examples above, while CMPOP
5308 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5309 removing a condition that guards another, as in
5310 (A != NULL && A->...) || A == NULL
5311 which we must not transform. If RHS_ONLY is true, only eliminate the
5312 right-most operand of the inner logical operation. */
5315 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5318 tree type = TREE_TYPE (cmpop);
5319 enum tree_code code = TREE_CODE (cmpop);
5320 enum tree_code truthop_code = TREE_CODE (op);
5321 tree lhs = TREE_OPERAND (op, 0);
5322 tree rhs = TREE_OPERAND (op, 1);
/* Remember the original arms so we only rebuild OP if something changed.  */
5323 tree orig_lhs = lhs, orig_rhs = rhs;
5324 enum tree_code rhs_code = TREE_CODE (rhs);
5325 enum tree_code lhs_code = TREE_CODE (lhs);
5326 enum tree_code inv_code;
/* Give up if either tree has side effects: we may duplicate or drop
   evaluations below.  */
5328 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
/* CMPOP must itself be a comparison for an inverse to exist.  */
5331 if (TREE_CODE_CLASS (code) != tcc_comparison)
/* Recurse into a right arm that is the same logical operation.  */
5334 if (rhs_code == truthop_code)
5336 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5337 if (newrhs != NULL_TREE)
5340 rhs_code = TREE_CODE (rhs);
/* Likewise the left arm, but only when RHS_ONLY does not forbid it
   (see the function comment about guarding conditions).  */
5343 if (lhs_code == truthop_code && !rhs_only)
5345 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5346 if (newlhs != NULL_TREE)
5349 lhs_code = TREE_CODE (lhs);
/* An arm that is exactly the inverse comparison of CMPOP, on the same
   operands, is redundant; the elided lines presumably replace it with
   the appropriate neutral value — confirm against the full source.  */
5353 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5354 if (inv_code == rhs_code
5355 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5356 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5358 if (!rhs_only && inv_code == lhs_code
5359 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5360 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
/* Rebuild OP only when one of its arms was actually simplified.  */
5362 if (rhs != orig_rhs || lhs != orig_lhs)
5363 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5368 /* Find ways of folding logical expressions of LHS and RHS:
5369 Try to merge two comparisons to the same innermost item.
5370 Look for range tests like "ch >= '0' && ch <= '9'".
5371 Look for combinations of simple terms on machines with expensive branches
5372 and evaluate the RHS unconditionally.
5374 For example, if we have p->a == 2 && p->b == 4 and we can make an
5375 object large enough to span both A and B, we can do this with a comparison
5376 against the object ANDed with the a mask.
5378 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5379 operations to do this with one comparison.
5381 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5382 function and the one above.
5384 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5385 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5387 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5390 We return the simplified tree or 0 if no optimization is possible. */
5393 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5396 /* If this is the "or" of two comparisons, we can do something if
5397 the comparisons are NE_EXPR. If this is the "and", we can do something
5398 if the comparisons are EQ_EXPR. I.e.,
5399 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5401 WANTED_CODE is this operation code. For single bit fields, we can
5402 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5403 comparison for one-bit fields. */
5405 enum tree_code wanted_code;
5406 enum tree_code lcode, rcode;
5407 tree ll_arg, lr_arg, rl_arg, rr_arg;
5408 tree ll_inner, lr_inner, rl_inner, rr_inner;
5409 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5410 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5411 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5412 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5413 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5414 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5415 machine_mode lnmode, rnmode;
5416 tree ll_mask, lr_mask, rl_mask, rr_mask;
5417 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5418 tree l_const, r_const;
5419 tree lntype, rntype, result;
5420 HOST_WIDE_INT first_bit, end_bit;
5423 /* Start by getting the comparison codes. Fail if anything is volatile.
5424 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5425 it were surrounded with a NE_EXPR. */
5427 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5430 lcode = TREE_CODE (lhs);
5431 rcode = TREE_CODE (rhs);
/* Normalize (x & 1) into (x & 1) != 0 so it looks like a comparison.  */
5433 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5435 lhs = build2 (NE_EXPR, truth_type, lhs,
5436 build_int_cst (TREE_TYPE (lhs), 0));
5440 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5442 rhs = build2 (NE_EXPR, truth_type, rhs,
5443 build_int_cst (TREE_TYPE (rhs), 0));
5447 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5448 || TREE_CODE_CLASS (rcode) != tcc_comparison)
/* Naming scheme: {l,r}{l,r}_* = {left,right} comparison's
   {left,right} operand.  */
5451 ll_arg = TREE_OPERAND (lhs, 0);
5452 lr_arg = TREE_OPERAND (lhs, 1);
5453 rl_arg = TREE_OPERAND (rhs, 0);
5454 rr_arg = TREE_OPERAND (rhs, 1);
5456 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5457 if (simple_operand_p (ll_arg)
5458 && simple_operand_p (lr_arg))
5460 if (operand_equal_p (ll_arg, rl_arg, 0)
5461 && operand_equal_p (lr_arg, rr_arg, 0))
5463 result = combine_comparisons (loc, code, lcode, rcode,
5464 truth_type, ll_arg, lr_arg);
/* Same, but with the right comparison's operands swapped.  */
5468 else if (operand_equal_p (ll_arg, rr_arg, 0)
5469 && operand_equal_p (lr_arg, rl_arg, 0))
5471 result = combine_comparisons (loc, code, lcode,
5472 swap_tree_comparison (rcode),
5473 truth_type, ll_arg, lr_arg);
/* From here on, treat the short-circuit and plain forms alike.  */
5479 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5480 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5482 /* If the RHS can be evaluated unconditionally and its operands are
5483 simple, it wins to evaluate the RHS unconditionally on machines
5484 with expensive branches. In this case, this isn't a comparison
5485 that can be merged. */
5487 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5489 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5490 && simple_operand_p (rl_arg)
5491 && simple_operand_p (rr_arg))
5493 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5494 if (code == TRUTH_OR_EXPR
5495 && lcode == NE_EXPR && integer_zerop (lr_arg)
5496 && rcode == NE_EXPR && integer_zerop (rr_arg)
5497 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5498 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5499 return build2_loc (loc, NE_EXPR, truth_type,
5500 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5502 build_int_cst (TREE_TYPE (ll_arg), 0));
5504 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5505 if (code == TRUTH_AND_EXPR
5506 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5507 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5508 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5509 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5510 return build2_loc (loc, EQ_EXPR, truth_type,
5511 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5513 build_int_cst (TREE_TYPE (ll_arg), 0));
5516 /* See if the comparisons can be merged. Then get all the parameters for
5519 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5520 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decode each comparison operand as a bit-field reference:
   innermost object, bit size/position, mode, signedness, and mask.  */
5524 ll_inner = decode_field_reference (loc, ll_arg,
5525 &ll_bitsize, &ll_bitpos, &ll_mode,
5526 &ll_unsignedp, &volatilep, &ll_mask,
5528 lr_inner = decode_field_reference (loc, lr_arg,
5529 &lr_bitsize, &lr_bitpos, &lr_mode,
5530 &lr_unsignedp, &volatilep, &lr_mask,
5532 rl_inner = decode_field_reference (loc, rl_arg,
5533 &rl_bitsize, &rl_bitpos, &rl_mode,
5534 &rl_unsignedp, &volatilep, &rl_mask,
5536 rr_inner = decode_field_reference (loc, rr_arg,
5537 &rr_bitsize, &rr_bitpos, &rr_mode,
5538 &rr_unsignedp, &volatilep, &rr_mask,
5541 /* It must be true that the inner operation on the lhs of each
5542 comparison must be the same if we are to be able to do anything.
5543 Then see if we have constants. If not, the same must be true for
5545 if (volatilep || ll_inner == 0 || rl_inner == 0
5546 || ! operand_equal_p (ll_inner, rl_inner, 0))
5549 if (TREE_CODE (lr_arg) == INTEGER_CST
5550 && TREE_CODE (rr_arg) == INTEGER_CST)
5551 l_const = lr_arg, r_const = rr_arg;
5552 else if (lr_inner == 0 || rr_inner == 0
5553 || ! operand_equal_p (lr_inner, rr_inner, 0))
5556 l_const = r_const = 0;
5558 /* If either comparison code is not correct for our logical operation,
5559 fail. However, we can convert a one-bit comparison against zero into
5560 the opposite comparison against that bit being set in the field. */
5562 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5563 if (lcode != wanted_code)
5565 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5567 /* Make the left operand unsigned, since we are only interested
5568 in the value of one bit. Otherwise we are doing the wrong
5577 /* This is analogous to the code for l_const above. */
5578 if (rcode != wanted_code)
5580 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5589 /* See if we can find a mode that contains both fields being compared on
5590 the left. If we can't, fail. Otherwise, update all constants and masks
5591 to be relative to a field of that size. */
5592 first_bit = MIN (ll_bitpos, rl_bitpos);
5593 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5594 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5595 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5597 if (lnmode == VOIDmode)
5600 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the combined field's start to the new mode's size.  */
5601 lnbitpos = first_bit & ~ (lnbitsize - 1);
5602 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
/* x*_bitpos are positions relative to the new combined field.  */
5603 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5605 if (BYTES_BIG_ENDIAN)
5607 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5608 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5611 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5612 size_int (xll_bitpos));
5613 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5614 size_int (xrl_bitpos));
/* Re-position the constants inside the wider field, and detect
   comparisons that can never (or always) succeed.  */
5618 l_const = fold_convert_loc (loc, lntype, l_const);
5619 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5620 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5621 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5622 fold_build1_loc (loc, BIT_NOT_EXPR,
5625 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5627 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5632 r_const = fold_convert_loc (loc, lntype, r_const);
5633 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5634 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5635 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5636 fold_build1_loc (loc, BIT_NOT_EXPR,
5639 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5641 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5645 /* If the right sides are not constant, do the same for it. Also,
5646 disallow this optimization if a size or signedness mismatch occurs
5647 between the left and right sides. */
5650 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5651 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5652 /* Make sure the two fields on the right
5653 correspond to the left without being swapped. */
5654 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5657 first_bit = MIN (lr_bitpos, rr_bitpos);
5658 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5659 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5660 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5662 if (rnmode == VOIDmode)
5665 rnbitsize = GET_MODE_BITSIZE (rnmode);
5666 rnbitpos = first_bit & ~ (rnbitsize - 1);
5667 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5668 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5670 if (BYTES_BIG_ENDIAN)
5672 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5673 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5676 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5678 size_int (xlr_bitpos));
5679 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5681 size_int (xrr_bitpos));
5683 /* Make a mask that corresponds to both fields being compared.
5684 Do this for both items being compared. If the operands are the
5685 same size and the bits being compared are in the same position
5686 then we can do this by masking both and comparing the masked
5688 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5689 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5690 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5692 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5693 ll_unsignedp || rl_unsignedp);
5694 if (! all_ones_mask_p (ll_mask, lnbitsize))
5695 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5697 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5698 lr_unsignedp || rr_unsignedp);
5699 if (! all_ones_mask_p (lr_mask, rnbitsize))
5700 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5702 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5705 /* There is still another way we can do something: If both pairs of
5706 fields being compared are adjacent, we may be able to make a wider
5707 field containing them both.
5709 Note that we still must mask the lhs/rhs expressions. Furthermore,
5710 the mask must be shifted to account for the shift done by
5711 make_bit_field_ref. */
5712 if ((ll_bitsize + ll_bitpos == rl_bitpos
5713 && lr_bitsize + lr_bitpos == rr_bitpos)
5714 || (ll_bitpos == rl_bitpos + rl_bitsize
5715 && lr_bitpos == rr_bitpos + rr_bitsize))
5719 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5720 ll_bitsize + rl_bitsize,
5721 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5722 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5723 lr_bitsize + rr_bitsize,
5724 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5726 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5727 size_int (MIN (xll_bitpos, xrl_bitpos)));
5728 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5729 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5731 /* Convert to the smaller type before masking out unwanted bits. */
5733 if (lntype != rntype)
5735 if (lnbitsize > rnbitsize)
5737 lhs = fold_convert_loc (loc, rntype, lhs);
5738 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5741 else if (lnbitsize < rnbitsize)
5743 rhs = fold_convert_loc (loc, lntype, rhs);
5744 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5749 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5750 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5752 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5753 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5755 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5761 /* Handle the case of comparisons with constants. If there is something in
5762 common between the masks, those bits of the constants must be the same.
5763 If not, the condition is always false. Test for this to avoid generating
5764 incorrect code below. */
5765 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5766 if (! integer_zerop (result)
5767 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5768 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5770 if (wanted_code == NE_EXPR)
5772 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5773 return constant_boolean_node (true, truth_type);
5777 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5778 return constant_boolean_node (false, truth_type);
5782 /* Construct the expression we will return. First get the component
5783 reference we will make. Unless the mask is all ones the width of
5784 that field, perform the mask operation. Then compare with the
5786 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5787 ll_unsignedp || rl_unsignedp);
5789 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5790 if (! all_ones_mask_p (ll_mask, lnbitsize))
5791 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5793 return build2_loc (loc, wanted_code, truth_type, result,
5794 const_binop (BIT_IOR_EXPR, l_const, r_const));
5797 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5801 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5805 enum tree_code op_code;
5808 int consts_equal, consts_lt;
5811 STRIP_SIGN_NOPS (arg0);
5813 op_code = TREE_CODE (arg0);
/* ARG0 is expected to be MIN_EXPR/MAX_EXPR (checked below); its second
   operand is the min/max constant, its first the variable part.  */
5814 minmax_const = TREE_OPERAND (arg0, 1);
5815 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5816 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5817 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5818 inner = TREE_OPERAND (arg0, 0);
5820 /* If something does not permit us to optimize, return the original tree. */
5821 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5822 || TREE_CODE (comp_const) != INTEGER_CST
5823 || TREE_OVERFLOW (comp_const)
5824 || TREE_CODE (minmax_const) != INTEGER_CST
5825 || TREE_OVERFLOW (minmax_const))
5828 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5829 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE: handle as the inverted comparison, then invert the result.  */
5833 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5836 = optimize_minmax_comparison (loc,
5837 invert_tree_comparison (code, false),
5840 return invert_truthvalue_loc (loc, tem);
/* GE: decompose into (== const) || (> const), each handled recursively.  */
5846 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5847 optimize_minmax_comparison
5848 (loc, EQ_EXPR, type, arg0, comp_const),
5849 optimize_minmax_comparison
5850 (loc, GT_EXPR, type, arg0, comp_const));
5853 if (op_code == MAX_EXPR && consts_equal)
5854 /* MAX (X, 0) == 0 -> X <= 0 */
5855 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5857 else if (op_code == MAX_EXPR && consts_lt)
5858 /* MAX (X, 0) == 5 -> X == 5 */
5859 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5861 else if (op_code == MAX_EXPR)
5862 /* MAX (X, 0) == -1 -> false */
5863 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5865 else if (consts_equal)
5866 /* MIN (X, 0) == 0 -> X >= 0 */
5867 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5870 /* MIN (X, 0) == 5 -> false */
5871 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5874 /* MIN (X, 0) == -1 -> X == -1 */
5875 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5878 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5879 /* MAX (X, 0) > 0 -> X > 0
5880 MAX (X, 0) > 5 -> X > 5 */
5881 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5883 else if (op_code == MAX_EXPR)
5884 /* MAX (X, 0) > -1 -> true */
5885 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5887 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5888 /* MIN (X, 0) > 0 -> false
5889 MIN (X, 0) > 5 -> false */
5890 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5893 /* MIN (X, 0) > -1 -> X > -1 */
5894 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5901 /* T is an integer expression that is being multiplied, divided, or taken a
5902 modulus (CODE says which and what kind of divide or modulus) by a
5903 constant C. See if we can eliminate that operation by folding it with
5904 other operations already in T. WIDE_TYPE, if non-null, is a type that
5905 should be used for the computation if wider than our type.
5907 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5908 (X * 2) + (Y * 4). We must, however, be assured that either the original
5909 expression would not overflow or that overflow is undefined for the type
5910 in the language in question.
5912 If we return a non-null expression, it is an equivalent form of the
5913 original computation, but need not be in the original type.
5915 We set *STRICT_OVERFLOW_P to true if the return values depends on
5916 signed overflow being undefined. Otherwise we do not change
5917 *STRICT_OVERFLOW_P. */
/* Depth-limiting wrapper around extract_muldiv_1: it bounds the mutual
   recursion and delegates all real work.  The depth counter maintenance
   around the call is elided in this excerpt — confirm against the full
   source.  */
5920 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5921 bool *strict_overflow_p)
5923 /* To avoid exponential search depth, refuse to allow recursion past
5924 three levels. Beyond that (1) it's highly unlikely that we'll find
5925 something interesting and (2) we've probably processed it before
5926 when we built the inner expression. */
5935 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
/* Worker for extract_muldiv: dispatches on the tree code of T and tries
   to distribute the multiply/divide/modulus by C into T's operands.
   Returns an equivalent expression (possibly in the wider type CTYPE)
   or a null tree when no simplification applies.
   NOTE(review): many intermediate lines (braces, some case labels,
   early returns) are absent from this extraction; tokens below are
   kept byte-identical to what is visible.  */
5942 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5943 bool *strict_overflow_p)
5945 tree type = TREE_TYPE (t);
5946 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE only when it is genuinely wider than TYPE.  */
5947 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5948 > GET_MODE_SIZE (TYPE_MODE (type)))
5949 ? wide_type : type);
5951 int same_p = tcode == code;
5952 tree op0 = NULL_TREE, op1 = NULL_TREE;
5953 bool sub_strict_overflow_p;
5955 /* Don't deal with constants of zero here; they confuse the code below. */
5956 if (integer_zerop (c))
5959 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5960 op0 = TREE_OPERAND (t, 0);
5962 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5963 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5965 /* Note that we need not handle conditional operations here since fold
5966 already handles those cases. So just do arithmetic here. */
5970 /* For a constant, we can always simplify if we are a multiply
5971 or (for divide and modulus) if it is a multiple of our constant. */
5972 if (code == MULT_EXPR
5973 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5974 return const_binop (code, fold_convert (ctype, t),
5975 fold_convert (ctype, c));
5978 CASE_CONVERT: case NON_LVALUE_EXPR:
5979 /* If op0 is an expression ... */
5980 if ((COMPARISON_CLASS_P (op0)
5981 || UNARY_CLASS_P (op0)
5982 || BINARY_CLASS_P (op0)
5983 || VL_EXP_CLASS_P (op0)
5984 || EXPRESSION_CLASS_P (op0))
5985 /* ... and has wrapping overflow, and its type is smaller
5986 than ctype, then we cannot pass through as widening. */
5987 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5988 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5989 && (TYPE_PRECISION (ctype)
5990 > TYPE_PRECISION (TREE_TYPE (op0))))
5991 /* ... or this is a truncation (t is narrower than op0),
5992 then we cannot pass through this narrowing. */
5993 || (TYPE_PRECISION (type)
5994 < TYPE_PRECISION (TREE_TYPE (op0)))
5995 /* ... or signedness changes for division or modulus,
5996 then we cannot pass through this conversion. */
5997 || (code != MULT_EXPR
5998 && (TYPE_UNSIGNED (ctype)
5999 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6000 /* ... or has undefined overflow while the converted to
6001 type has not, we cannot do the operation in the inner type
6002 as that would introduce undefined overflow. */
6003 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6004 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6005 && !TYPE_OVERFLOW_UNDEFINED (type))))
6008 /* Pass the constant down and see if we can make a simplification. If
6009 we can, replace this expression with the inner simplification for
6010 possible later conversion to our or some other type. */
6011 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6012 && TREE_CODE (t2) == INTEGER_CST
6013 && !TREE_OVERFLOW (t2)
6014 && (0 != (t1 = extract_muldiv (op0, t2, code,
6016 ? ctype : NULL_TREE,
6017 strict_overflow_p))))
/* ABS_EXPR handling (case label not visible in this extraction).  */
6022 /* If widening the type changes it from signed to unsigned, then we
6023 must avoid building ABS_EXPR itself as unsigned. */
6024 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6026 tree cstype = (*signed_type_for) (ctype);
6027 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6030 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6031 return fold_convert (ctype, t1);
6035 /* If the constant is negative, we cannot simplify this. */
6036 if (tree_int_cst_sgn (c) == -1)
/* NEGATE_EXPR handling (case label not visible in this extraction).  */
6040 /* For division and modulus, type can't be unsigned, as e.g.
6041 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6042 For signed types, even with wrapping overflow, this is fine. */
6043 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6045 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6047 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6050 case MIN_EXPR: case MAX_EXPR:
6051 /* If widening the type changes the signedness, then we can't perform
6052 this optimization as that changes the result. */
6053 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6056 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6057 sub_strict_overflow_p = false;
6058 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6059 &sub_strict_overflow_p)) != 0
6060 && (t2 = extract_muldiv (op1, c, code, wide_type,
6061 &sub_strict_overflow_p)) != 0)
/* Dividing by a negative constant swaps MIN and MAX.  */
6063 if (tree_int_cst_sgn (c) < 0)
6064 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6065 if (sub_strict_overflow_p)
6066 *strict_overflow_p = true;
6067 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6068 fold_convert (ctype, t2));
6072 case LSHIFT_EXPR: case RSHIFT_EXPR:
6073 /* If the second operand is constant, this is a multiplication
6074 or floor division, by a power of two, so we can treat it that
6075 way unless the multiplier or divisor overflows. Signed
6076 left-shift overflow is implementation-defined rather than
6077 undefined in C90, so do not convert signed left shift into
6079 if (TREE_CODE (op1) == INTEGER_CST
6080 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6081 /* const_binop may not detect overflow correctly,
6082 so check for it explicitly here. */
6083 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6084 && 0 != (t1 = fold_convert (ctype,
6085 const_binop (LSHIFT_EXPR,
6088 && !TREE_OVERFLOW (t1))
/* Rewrite the shift as an equivalent multiply/floor-divide and
   recurse on that form.  */
6089 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6090 ? MULT_EXPR : FLOOR_DIV_EXPR,
6092 fold_convert (ctype, op0),
6094 c, code, wide_type, strict_overflow_p);
6097 case PLUS_EXPR: case MINUS_EXPR:
6098 /* See if we can eliminate the operation on both sides. If we can, we
6099 can return a new PLUS or MINUS. If we can't, the only remaining
6100 cases where we can do anything are if the second operand is a
6102 sub_strict_overflow_p = false;
6103 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6104 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6105 if (t1 != 0 && t2 != 0
6106 && (code == MULT_EXPR
6107 /* If not multiplication, we can only do this if both operands
6108 are divisible by c. */
6109 || (multiple_of_p (ctype, op0, c)
6110 && multiple_of_p (ctype, op1, c))))
6112 if (sub_strict_overflow_p)
6113 *strict_overflow_p = true;
6114 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6115 fold_convert (ctype, t2));
6118 /* If this was a subtraction, negate OP1 and set it to be an addition.
6119 This simplifies the logic below. */
6120 if (tcode == MINUS_EXPR)
6122 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6123 /* If OP1 was not easily negatable, the constant may be OP0. */
6124 if (TREE_CODE (op0) == INTEGER_CST)
6126 std::swap (op0, op1);
6131 if (TREE_CODE (op1) != INTEGER_CST)
6134 /* If either OP1 or C are negative, this optimization is not safe for
6135 some of the division and remainder types while for others we need
6136 to change the code. */
6137 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6139 if (code == CEIL_DIV_EXPR)
6140 code = FLOOR_DIV_EXPR;
6141 else if (code == FLOOR_DIV_EXPR)
6142 code = CEIL_DIV_EXPR;
6143 else if (code != MULT_EXPR
6144 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6148 /* If it's a multiply or a division/modulus operation of a multiple
6149 of our constant, do the operation and verify it doesn't overflow. */
6150 if (code == MULT_EXPR
6151 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6153 op1 = const_binop (code, fold_convert (ctype, op1),
6154 fold_convert (ctype, c));
6155 /* We allow the constant to overflow with wrapping semantics. */
6157 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6163 /* If we have an unsigned type, we cannot widen the operation since it
6164 will change the result if the original computation overflowed. */
6165 if (TYPE_UNSIGNED (ctype) && ctype != type)
6168 /* If we were able to eliminate our operation from the first side,
6169 apply our operation to the second side and reform the PLUS. */
6170 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6171 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6173 /* The last case is if we are a multiply. In that case, we can
6174 apply the distributive law to commute the multiply and addition
6175 if the multiplication of the constants doesn't overflow
6176 and overflow is defined. With undefined overflow
6177 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6178 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6179 return fold_build2 (tcode, ctype,
6180 fold_build2 (code, ctype,
6181 fold_convert (ctype, op0),
6182 fold_convert (ctype, c)),
/* MULT_EXPR handling (case label not visible in this extraction).  */
6188 /* We have a special case here if we are doing something like
6189 (C * 8) % 4 since we know that's zero. */
6190 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6191 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6192 /* If the multiplication can overflow we cannot optimize this. */
6193 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6194 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6195 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6197 *strict_overflow_p = true;
6198 return omit_one_operand (type, integer_zero_node, op0);
6201 /* ... fall through ... */
6203 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6204 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6205 /* If we can extract our operation from the LHS, do so and return a
6206 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6207 do something only if the second operand is a constant. */
6209 && (t1 = extract_muldiv (op0, c, code, wide_type,
6210 strict_overflow_p)) != 0)
6211 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6212 fold_convert (ctype, op1));
6213 else if (tcode == MULT_EXPR && code == MULT_EXPR
6214 && (t1 = extract_muldiv (op1, c, code, wide_type,
6215 strict_overflow_p)) != 0)
6216 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6217 fold_convert (ctype, t1));
6218 else if (TREE_CODE (op1) != INTEGER_CST)
6221 /* If these are the same operation types, we can associate them
6222 assuming no overflow. */
6225 bool overflow_p = false;
6226 bool overflow_mul_p;
6227 signop sign = TYPE_SIGN (ctype);
6228 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6229 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6231 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6235 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6236 TYPE_SIGN (TREE_TYPE (op1)));
6237 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6238 wide_int_to_tree (ctype, mul));
6242 /* If these operations "cancel" each other, we have the main
6243 optimizations of this pass, which occur when either constant is a
6244 multiple of the other, in which case we replace this with either an
6245 operation or CODE or TCODE.
6247 If we have an unsigned type, we cannot do this since it will change
6248 the result if the original computation overflowed. */
6249 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6250 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6251 || (tcode == MULT_EXPR
6252 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6253 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6254 && code != MULT_EXPR)))
6256 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6258 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6259 *strict_overflow_p = true;
6260 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6261 fold_convert (ctype,
6262 const_binop (TRUNC_DIV_EXPR,
6265 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6267 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6268 *strict_overflow_p = true;
6269 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6270 fold_convert (ctype,
6271 const_binop (TRUNC_DIV_EXPR,
6284 /* Return a node which has the indicated constant VALUE (either 0 or
6285 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6286 and is of the indicated TYPE. */
/* Fast paths for the two common scalar types, a splat for vectors,
   and a conversion of 0/1 for everything else.  */
6289 constant_boolean_node (bool value, tree type)
6291 if (type == integer_type_node)
6292 return value ? integer_one_node : integer_zero_node;
6293 else if (type == boolean_type_node)
6294 return value ? boolean_true_node : boolean_false_node;
6295 else if (TREE_CODE (type) == VECTOR_TYPE)
/* Build a vector whose every element is the per-element truth value
   (elements built from the type's element type).  */
6296 return build_vector_from_val (type,
6297 build_int_cst (TREE_TYPE (type),
6300 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6304 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6305 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6306 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6307 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6308 COND is the first argument to CODE; otherwise (as in the example
6309 given here), it is the second argument. TYPE is the type of the
6310 original expression. Return NULL_TREE if no simplification is
6314 fold_binary_op_with_conditional_arg (location_t loc,
6315 enum tree_code code,
6316 tree type, tree op0, tree op1,
6317 tree cond, tree arg, int cond_first_p)
/* Operand types on each side of CODE, oriented by COND_FIRST_P.  */
6319 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6320 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6321 tree test, true_value, false_value;
6322 tree lhs = NULL_TREE;
6323 tree rhs = NULL_TREE;
6324 enum tree_code cond_code = COND_EXPR;
/* Split COND into its test and both arms.  A bare comparison is
   treated as (cmp ? true : false).  */
6326 if (TREE_CODE (cond) == COND_EXPR
6327 || TREE_CODE (cond) == VEC_COND_EXPR)
6329 test = TREE_OPERAND (cond, 0);
6330 true_value = TREE_OPERAND (cond, 1);
6331 false_value = TREE_OPERAND (cond, 2);
6332 /* If this operand throws an expression, then it does not make
6333 sense to try to perform a logical or arithmetic operation
6335 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6337 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6342 tree testtype = TREE_TYPE (cond);
6344 true_value = constant_boolean_node (true, testtype);
6345 false_value = constant_boolean_node (false, testtype);
6348 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6349 cond_code = VEC_COND_EXPR;
6351 /* This transformation is only worthwhile if we don't have to wrap ARG
6352 in a SAVE_EXPR and the operation can be simplified without recursing
6353 on at least one of the branches once its pushed inside the COND_EXPR. */
6354 if (!TREE_CONSTANT (arg)
6355 && (TREE_SIDE_EFFECTS (arg)
6356 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6357 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6360 arg = fold_convert_loc (loc, arg_type, arg);
/* Fold CODE into each arm, preserving operand order per COND_FIRST_P.  */
6363 true_value = fold_convert_loc (loc, cond_type, true_value);
6365 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6367 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6371 false_value = fold_convert_loc (loc, cond_type, false_value);
6373 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6375 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6378 /* Check that we have simplified at least one of the branches. */
6379 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6382 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6386 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6388 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6389 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6390 ADDEND is the same as X.
6392 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6393 and finite. The problematic cases are when X is zero, and its mode
6394 has signed zeros. In the case of rounding towards -infinity,
6395 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6396 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6399 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6401 if (!real_zerop (addend))
6404 /* Don't allow the fold with -fsignaling-nans. */
6405 if (HONOR_SNANS (element_mode (type)))
6408 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6409 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6412 /* In a vector or complex, we would need to check the sign of all zeros. */
6413 if (TREE_CODE (addend) != REAL_CST)
6416 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6417 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6420 /* The mode has signed zeros, and we have to honor their sign.
6421 In this situation, there is only one case we can return true for.
6422 X - 0 is the same as X unless rounding towards -infinity is
6424 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6427 /* Subroutine of fold() that optimizes comparisons of a division by
6428 a nonzero integer constant against an integer constant, i.e.
6431 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6432 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6433 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6435 The function returns the constant folded tree if a simplification
6436 can be made, and NULL_TREE otherwise. */
/* Strategy: X/C1 op C2 is rewritten as a range test LO <= X <= HI
   (or its complement), where LO/HI bracket the values of X that
   divide to C2; overflow of LO/HI collapses one or both bounds.  */
6439 fold_div_compare (location_t loc,
6440 enum tree_code code, tree type, tree arg0, tree arg1)
6442 tree prod, tmp, hi, lo;
6443 tree arg00 = TREE_OPERAND (arg0, 0);
6444 tree arg01 = TREE_OPERAND (arg0, 1);
6445 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6446 bool neg_overflow = false;
6449 /* We have to do this the hard way to detect unsigned overflow.
6450 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6451 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6452 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6453 neg_overflow = false;
6455 if (sign == UNSIGNED)
/* tmp = divisor - 1, the half-open width of each quotient bucket.  */
6457 tmp = int_const_binop (MINUS_EXPR, arg01,
6458 build_int_cst (TREE_TYPE (arg01), 1));
6461 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6462 val = wi::add (prod, tmp, sign, &overflow);
6463 hi = force_fit_type (TREE_TYPE (arg00), val,
6464 -1, overflow | TREE_OVERFLOW (prod));
6466 else if (tree_int_cst_sgn (arg01) >= 0)
6468 tmp = int_const_binop (MINUS_EXPR, arg01,
6469 build_int_cst (TREE_TYPE (arg01), 1));
/* Signed positive divisor: bounds depend on the sign of ARG1.  */
6470 switch (tree_int_cst_sgn (arg1))
6473 neg_overflow = true;
6474 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6479 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6484 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6494 /* A negative divisor reverses the relational operators. */
6495 code = swap_tree_comparison (code);
6497 tmp = int_const_binop (PLUS_EXPR, arg01,
6498 build_int_cst (TREE_TYPE (arg01), 1));
6499 switch (tree_int_cst_sgn (arg1))
6502 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6507 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6512 neg_overflow = true;
6513 lo = int_const_binop (PLUS_EXPR, prod, tmp);
/* EQ_EXPR: inside-the-range test; overflow trims a bound.  */
6525 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6526 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6527 if (TREE_OVERFLOW (hi))
6528 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6529 if (TREE_OVERFLOW (lo))
6530 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6531 return build_range_check (loc, type, arg00, 1, lo, hi);
/* NE_EXPR: outside-the-range test, mirror of the above.  */
6534 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6535 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6536 if (TREE_OVERFLOW (hi))
6537 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6538 if (TREE_OVERFLOW (lo))
6539 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6540 return build_range_check (loc, type, arg00, 0, lo, hi);
/* Ordered comparisons: one bound each; overflow makes the result
   constant, with NEG_OVERFLOW selecting which constant.  */
6543 if (TREE_OVERFLOW (lo))
6545 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6546 return omit_one_operand_loc (loc, type, tmp, arg00);
6548 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6551 if (TREE_OVERFLOW (hi))
6553 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6554 return omit_one_operand_loc (loc, type, tmp, arg00);
6556 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6559 if (TREE_OVERFLOW (hi))
6561 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6562 return omit_one_operand_loc (loc, type, tmp, arg00);
6564 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6567 if (TREE_OVERFLOW (lo))
6569 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6570 return omit_one_operand_loc (loc, type, tmp, arg00);
6572 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6582 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6583 equality/inequality test, then return a simplified form of the test
6584 using a sign testing. Otherwise return NULL. TYPE is the desired
6588 fold_single_bit_test_into_sign_test (location_t loc,
6589 enum tree_code code, tree arg0, tree arg1,
6592 /* If this is testing a single bit, we can optimize the test. */
6593 if ((code == NE_EXPR || code == EQ_EXPR)
6594 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6595 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6597 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6598 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6599 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6601 if (arg00 != NULL_TREE
6602 /* This is only a win if casting to a signed type is cheap,
6603 i.e. when arg00's type is not a partial mode. */
6604 && TYPE_PRECISION (TREE_TYPE (arg00))
6605 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
/* Compare the signed view of ARG00 against zero: == becomes >= 0,
   != becomes < 0.  */
6607 tree stype = signed_type_for (TREE_TYPE (arg00));
6608 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6610 fold_convert_loc (loc, stype, arg00),
6611 build_int_cst (stype, 0));
6618 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6619 equality/inequality test, then return a simplified form of
6620 the test using shifts and logical operations. Otherwise return
6621 NULL. TYPE is the desired result type. */
6624 fold_single_bit_test (location_t loc, enum tree_code code,
6625 tree arg0, tree arg1, tree result_type)
6627 /* If this is testing a single bit, we can optimize the test. */
6628 if ((code == NE_EXPR || code == EQ_EXPR)
6629 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6630 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6632 tree inner = TREE_OPERAND (arg0, 0);
6633 tree type = TREE_TYPE (arg0);
/* BITNUM is log2 of the power-of-two mask, i.e. the bit tested.  */
6634 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6635 machine_mode operand_mode = TYPE_MODE (type);
6637 tree signed_type, unsigned_type, intermediate_type;
6640 /* First, see if we can fold the single bit test into a sign-bit
6642 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6647 /* Otherwise we have (A & C) != 0 where C is a single bit,
6648 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6649 Similarly for (A & C) == 0. */
6651 /* If INNER is a right shift of a constant and it plus BITNUM does
6652 not overflow, adjust BITNUM and INNER. */
6653 if (TREE_CODE (inner) == RSHIFT_EXPR
6654 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6655 && bitnum < TYPE_PRECISION (type)
6656 && wi::ltu_p (TREE_OPERAND (inner, 1),
6657 TYPE_PRECISION (type) - bitnum)
6659 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6660 inner = TREE_OPERAND (inner, 0);
6663 /* If we are going to be able to omit the AND below, we must do our
6664 operations as unsigned. If we must use the AND, we have a choice.
6665 Normally unsigned is faster, but for some machines signed is. */
6666 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6667 && !flag_syntax_only) ? 0 : 1;
6669 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6670 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6671 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6672 inner = fold_convert_loc (loc, intermediate_type, inner);
/* Shift the tested bit down to bit 0.  */
6675 inner = build2 (RSHIFT_EXPR, intermediate_type,
6676 inner, size_int (bitnum));
6678 one = build_int_cst (intermediate_type, 1);
/* For == 0 invert the bit with XOR so the final value is 0/1.  */
6680 if (code == EQ_EXPR)
6681 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6683 /* Put the AND last so it can combine with more things. */
6684 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6686 /* Make sure to return the proper type. */
6687 inner = fold_convert_loc (loc, result_type, inner);
6694 /* Check whether we are allowed to reorder operands arg0 and arg1,
6695 such that the evaluation of arg1 occurs before arg0. */
6698 reorder_operands_p (const_tree arg0, const_tree arg1)
/* With -fno-evaluation-order (the default) reordering is always OK;
   otherwise only when at least one side is constant or neither has
   side effects.  */
6700 if (! flag_evaluation_order)
6702 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6704 return ! TREE_SIDE_EFFECTS (arg0)
6705 && ! TREE_SIDE_EFFECTS (arg1);
6708 /* Test whether it is preferable two swap two operands, ARG0 and
6709 ARG1, for example because ARG0 is an integer constant and ARG1
6710 isn't. If REORDER is true, only recommend swapping if we can
6711 evaluate the operands in reverse order. */
/* Canonical order: constants first to the right, then SSA_NAMEs,
   then variables; the checks below run from strongest to weakest.  */
6714 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6716 if (CONSTANT_CLASS_P (arg1))
6718 if (CONSTANT_CLASS_P (arg0))
6724 if (TREE_CONSTANT (arg1))
6726 if (TREE_CONSTANT (arg0))
/* Refuse to swap when REORDER demands order preservation and either
   operand has side effects.  */
6729 if (reorder && flag_evaluation_order
6730 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6733 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6734 for commutative and comparison operators. Ensuring a canonical
6735 form allows the optimizers to find additional redundancies without
6736 having to explicitly check for both orderings. */
6737 if (TREE_CODE (arg0) == SSA_NAME
6738 && TREE_CODE (arg1) == SSA_NAME
6739 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6742 /* Put SSA_NAMEs last. */
6743 if (TREE_CODE (arg1) == SSA_NAME)
6745 if (TREE_CODE (arg0) == SSA_NAME)
6748 /* Put variables last. */
6758 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6759 means A >= Y && A != MAX, but in this case we know that
6760 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6763 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6765 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, which must be a strict comparison with A
   on its "smaller" side.  */
6767 if (TREE_CODE (bound) == LT_EXPR)
6768 a = TREE_OPERAND (bound, 0);
6769 else if (TREE_CODE (bound) == GT_EXPR)
6770 a = TREE_OPERAND (bound, 1);
6774 typea = TREE_TYPE (a);
6775 if (!INTEGRAL_TYPE_P (typea)
6776 && !POINTER_TYPE_P (typea))
/* Extract A1 (the candidate A + 1) and Y from the inequality.  */
6779 if (TREE_CODE (ineq) == LT_EXPR)
6781 a1 = TREE_OPERAND (ineq, 1);
6782 y = TREE_OPERAND (ineq, 0);
6784 else if (TREE_CODE (ineq) == GT_EXPR)
6786 a1 = TREE_OPERAND (ineq, 0);
6787 y = TREE_OPERAND (ineq, 1);
6792 if (TREE_TYPE (a1) != typea)
6795 if (POINTER_TYPE_P (typea))
6797 /* Convert the pointer types into integer before taking the difference. */
6798 tree ta = fold_convert_loc (loc, ssizetype, a);
6799 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6800 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6803 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
/* Only the A1 == A + 1 case is handled.  */
6805 if (!diff || !integer_onep (diff))
6808 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6811 /* Fold a sum or difference of at least one multiplication.
6812 Returns the folded tree or NULL if no simplification could be made. */
6815 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6816 tree arg0, tree arg1)
6818 tree arg00, arg01, arg10, arg11;
6819 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6821 /* (A * C) +- (B * C) -> (A+-B) * C.
6822 (A * C) +- A -> A * (C+-1).
6823 We are most concerned about the case where C is a constant,
6824 but other combinations show up during loop reduction. Since
6825 it is not difficult, try all four possibilities. */
/* Decompose ARG0 into arg00 * arg01, treating a non-MULT operand
   as "operand * 1".  */
6827 if (TREE_CODE (arg0) == MULT_EXPR)
6829 arg00 = TREE_OPERAND (arg0, 0);
6830 arg01 = TREE_OPERAND (arg0, 1);
6832 else if (TREE_CODE (arg0) == INTEGER_CST)
6834 arg00 = build_one_cst (type);
6839 /* We cannot generate constant 1 for fract. */
6840 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6843 arg01 = build_one_cst (type);
/* Likewise decompose ARG1 into arg10 * arg11.  */
6845 if (TREE_CODE (arg1) == MULT_EXPR)
6847 arg10 = TREE_OPERAND (arg1, 0);
6848 arg11 = TREE_OPERAND (arg1, 1);
6850 else if (TREE_CODE (arg1) == INTEGER_CST)
6852 arg10 = build_one_cst (type);
6853 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6854 the purpose of this canonicalization. */
6855 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6856 && negate_expr_p (arg1)
6857 && code == PLUS_EXPR)
6859 arg11 = negate_expr (arg1);
6867 /* We cannot generate constant 1 for fract. */
6868 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6871 arg11 = build_one_cst (type);
/* Look for a factor shared between the two products.  */
6875 if (operand_equal_p (arg01, arg11, 0))
6876 same = arg01, alt0 = arg00, alt1 = arg10;
6877 else if (operand_equal_p (arg00, arg10, 0))
6878 same = arg00, alt0 = arg01, alt1 = arg11;
6879 else if (operand_equal_p (arg00, arg11, 0))
6880 same = arg00, alt0 = arg01, alt1 = arg10;
6881 else if (operand_equal_p (arg01, arg10, 0))
6882 same = arg01, alt0 = arg00, alt1 = arg11;
6884 /* No identical multiplicands; see if we can find a common
6885 power-of-two factor in non-power-of-two multiplies. This
6886 can help in multi-dimensional array access. */
6887 else if (tree_fits_shwi_p (arg01)
6888 && tree_fits_shwi_p (arg11))
6890 HOST_WIDE_INT int01, int11, tmp;
6893 int01 = tree_to_shwi (arg01);
6894 int11 = tree_to_shwi (arg11);
6896 /* Move min of absolute values to int11. */
6897 if (absu_hwi (int01) < absu_hwi (int11))
6899 tmp = int01, int01 = int11, int11 = tmp;
6900 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6907 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6908 /* The remainder should not be a constant, otherwise we
6909 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6910 increased the number of multiplications necessary. */
6911 && TREE_CODE (arg10) != INTEGER_CST)
6913 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6914 build_int_cst (TREE_TYPE (arg00),
6919 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rebuild as (alt0 +- alt1) * same.  */
6924 return fold_build2_loc (loc, MULT_EXPR, type,
6925 fold_build2_loc (loc, code, type,
6926 fold_convert_loc (loc, type, alt0),
6927 fold_convert_loc (loc, type, alt1)),
6928 fold_convert_loc (loc, type, same));
6933 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6934 specified by EXPR into the buffer PTR of length LEN bytes.
6935 Return the number of bytes placed in the buffer, or zero
/* OFF is the starting byte offset within the value's target
   representation, or -1 to require encoding the whole value.  */
6939 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6941 tree type = TREE_TYPE (expr);
6942 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6943 int byte, offset, word, words;
6944 unsigned char value;
6946 if ((off == -1 && total_bytes > len)
6947 || off >= total_bytes)
6951 words = total_bytes / UNITS_PER_WORD;
6953 for (byte = 0; byte < total_bytes; byte++)
6955 int bitpos = byte * BITS_PER_UNIT;
6956 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6958 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
/* Map host byte index to target byte order, honoring both word and
   byte endianness for multi-word values.  */
6960 if (total_bytes > UNITS_PER_WORD)
6962 word = byte / UNITS_PER_WORD;
6963 if (WORDS_BIG_ENDIAN)
6964 word = (words - 1) - word;
6965 offset = word * UNITS_PER_WORD;
6966 if (BYTES_BIG_ENDIAN)
6967 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6969 offset += byte % UNITS_PER_WORD;
6972 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6974 && offset - off < len)
6975 ptr[offset - off] = value;
6977 return MIN (len, total_bytes - off);
6981 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6982 specified by EXPR into the buffer PTR of length LEN bytes.
6983 Return the number of bytes placed in the buffer, or zero
/* Encodes by viewing the fixed-point payload as an unsigned integer
   of the same mode and delegating to native_encode_int.  */
6987 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
6989 tree type = TREE_TYPE (expr);
6990 machine_mode mode = TYPE_MODE (type);
6991 int total_bytes = GET_MODE_SIZE (mode);
6992 FIXED_VALUE_TYPE value;
6993 tree i_value, i_type;
6995 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
6998 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7000 if (NULL_TREE == i_type
7001 || TYPE_PRECISION (i_type) != total_bytes)
7004 value = TREE_FIXED_CST (expr);
7005 i_value = double_int_to_tree (i_type, value.data);
7007 return native_encode_int (i_value, ptr, len, off);
7011 /* Subroutine of native_encode_expr. Encode the REAL_CST
7012 specified by EXPR into the buffer PTR of length LEN bytes.
7013 Return the number of bytes placed in the buffer, or zero
7017 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7019 tree type = TREE_TYPE (expr);
7020 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7021 int byte, offset, word, words, bitpos;
7022 unsigned char value;
7024 /* There are always 32 bits in each long, no matter the size of
7025 the hosts long. We handle floating point representations with
7029 if ((off == -1 && total_bytes > len)
7030 || off >= total_bytes)
7034 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
/* real_to_target fills TMP with the value in 32-bit target chunks.  */
7036 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7038 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7039 bitpos += BITS_PER_UNIT)
7041 byte = (bitpos / BITS_PER_UNIT) & 3;
7042 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Map each byte within its 32-bit group to target byte order.  */
7044 if (UNITS_PER_WORD < 4)
7046 word = byte / UNITS_PER_WORD;
7047 if (WORDS_BIG_ENDIAN)
7048 word = (words - 1) - word;
7049 offset = word * UNITS_PER_WORD;
7050 if (BYTES_BIG_ENDIAN)
7051 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 offset += byte % UNITS_PER_WORD;
7056 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7057 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7059 && offset - off < len)
7060 ptr[offset - off] = value;
7062 return MIN (len, total_bytes - off);
7065 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7066 specified by EXPR into the buffer PTR of length LEN bytes.
7067 Return the number of bytes placed in the buffer, or zero
7071 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
/* Encode the real part first ...  */
7076 part = TREE_REALPART (expr);
7077 rsize = native_encode_expr (part, ptr, len, off);
/* ... then the imaginary part immediately after it.  Shift the offset
   window past the real part's bytes, clamping at zero.  */
7081 part = TREE_IMAGPART (expr);
7083 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7084 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7088 return rsize + isize;
7092 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7093 specified by EXPR into the buffer PTR of length LEN bytes.
7094 Return the number of bytes placed in the buffer, or zero
7098 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7105 count = VECTOR_CST_NELTS (expr);
7106 itype = TREE_TYPE (TREE_TYPE (expr));
/* SIZE is the byte size of one vector element.  */
7107 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode element by element, each at its own position in PTR.  */
7108 for (i = 0; i < count; i++)
7115 elem = VECTOR_CST_ELT (expr, i);
7116 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
/* With no offset window (OFF == -1), every element must encode
   completely or the whole encoding fails.  */
7117 if ((off == -1 && res != size)
7130 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7131 specified by EXPR into the buffer PTR of length LEN bytes.
7132 Return the number of bytes placed in the buffer, or zero
7136 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7138 tree type = TREE_TYPE (expr);
7139 HOST_WIDE_INT total_bytes;
/* Only handle plain byte-sized character arrays whose size is a
   compile-time constant.  */
7141 if (TREE_CODE (type) != ARRAY_TYPE
7142 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7143 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7144 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7146 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7147 if ((off == -1 && total_bytes > len)
7148 || off >= total_bytes)
/* The array type may be larger than the string payload; the tail is
   implicitly zero-filled.  */
7152 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7155 if (off < TREE_STRING_LENGTH (expr))
7157 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7158 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
/* Zero-fill whatever part of the requested window lies past the
   actual string data.  */
7160 memset (ptr + written, 0,
7161 MIN (total_bytes - written, len - written));
7164 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7165 return MIN (total_bytes - off, len);
7169 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7170 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7171 buffer PTR of length LEN bytes.  If OFF is not -1 then start
7172 the encoding at byte offset OFF and encode at most LEN bytes.
7173 Return the number of bytes placed in the buffer, or zero upon failure. */
7176 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7178 /* We don't support starting at negative offset and -1 is special. */
/* Dispatch on the constant's tree code to the matching encoder.  */
7182 switch (TREE_CODE (expr))
7185 return native_encode_int (expr, ptr, len, off);
7188 return native_encode_real (expr, ptr, len, off);
7191 return native_encode_fixed (expr, ptr, len, off);
7194 return native_encode_complex (expr, ptr, len, off);
7197 return native_encode_vector (expr, ptr, len, off);
7200 return native_encode_string (expr, ptr, len, off);
7208 /* Subroutine of native_interpret_expr.  Interpret the contents of
7209 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7210 If the buffer cannot be interpreted, return NULL_TREE. */
7213 native_interpret_int (tree type, const unsigned char *ptr, int len)
7215 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* Fail if the buffer is too short, or the value is wider than what we
   can represent (bit count measured against HOST_BITS_PER_DOUBLE_INT).  */
7217 if (total_bytes > len
7218 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
/* Read the target-order bytes into a wide_int and wrap it in an
   INTEGER_CST of TYPE.  */
7221 wide_int result = wi::from_buffer (ptr, total_bytes);
7223 return wide_int_to_tree (type, result);
7227 /* Subroutine of native_interpret_expr.  Interpret the contents of
7228 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7229 If the buffer cannot be interpreted, return NULL_TREE. */
7232 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7234 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7236 FIXED_VALUE_TYPE fixed_value;
/* Same width limits as the integer path, since we go through a
   double_int.  */
7238 if (total_bytes > len
7239 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
/* Decode raw bytes into a double_int, then reinterpret that bit
   pattern as a fixed-point value in TYPE's mode.  */
7242 result = double_int::from_buffer (ptr, total_bytes);
7243 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7245 return build_fixed (type, fixed_value);
7249 /* Subroutine of native_interpret_expr.  Interpret the contents of
7250 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7251 If the buffer cannot be interpreted, return NULL_TREE. */
7254 native_interpret_real (tree type, const unsigned char *ptr, int len)
7256 machine_mode mode = TYPE_MODE (type);
7257 int total_bytes = GET_MODE_SIZE (mode);
7258 unsigned char value;
7259 /* There are always 32 bits in each long, no matter the size of
7260 the hosts long.  We handle floating point representations with
7265 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes = 192 bits is the widest float representation handled.  */
7266 if (total_bytes > len || total_bytes > 24)
7268 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7270 memset (tmp, 0, sizeof (tmp));
/* Inverse of native_encode_real: gather bytes from target memory order
   back into 32-bit chunks.  */
7271 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7272 bitpos += BITS_PER_UNIT)
7274 /* Both OFFSET and BYTE index within a long;
7275 bitpos indexes the whole float. */
7276 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7277 if (UNITS_PER_WORD < 4)
7279 int word = byte / UNITS_PER_WORD;
7280 if (WORDS_BIG_ENDIAN)
7281 word = (words - 1) - word;
7282 offset = word * UNITS_PER_WORD;
7283 if (BYTES_BIG_ENDIAN)
7284 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7286 offset += byte % UNITS_PER_WORD;
7291 if (BYTES_BIG_ENDIAN)
7293 /* Reverse bytes within each long, or within the entire float
7294 if it's smaller than a long (for HFmode). */
7295 offset = MIN (3, total_bytes - 1) - offset;
7296 gcc_assert (offset >= 0);
7299 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7301 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Convert the raw target image back to a REAL_VALUE_TYPE constant.  */
7304 real_from_target (&r, tmp, mode);
7305 return build_real (type, r);
7309 /* Subroutine of native_interpret_expr.  Interpret the contents of
7310 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7311 If the buffer cannot be interpreted, return NULL_TREE. */
7314 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7316 tree etype, rpart, ipart;
7319 etype = TREE_TYPE (type);
7320 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* The real part occupies the first SIZE bytes, the imaginary part the
   next SIZE bytes.  */
7323 rpart = native_interpret_expr (etype, ptr, size);
7326 ipart = native_interpret_expr (etype, ptr+size, size);
7329 return build_complex (type, rpart, ipart);
7333 /* Subroutine of native_interpret_expr.  Interpret the contents of
7334 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7335 If the buffer cannot be interpreted, return NULL_TREE. */
7338 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7344 etype = TREE_TYPE (type);
7345 size = GET_MODE_SIZE (TYPE_MODE (etype));
7346 count = TYPE_VECTOR_SUBPARTS (type);
/* The buffer must hold every element in full.  */
7347 if (size * count > len)
/* Decode each element at its byte position; iteration order (reverse)
   does not matter since positions are independent.  */
7350 elements = XALLOCAVEC (tree, count);
7351 for (i = count - 1; i >= 0; i--)
7353 elem = native_interpret_expr (etype, ptr+(i*size), size);
7358 return build_vector (type, elements);
7362 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7363 the buffer PTR of length LEN as a constant of type TYPE.  For
7364 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7365 we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7366 return NULL_TREE. */
7369 native_interpret_expr (tree type, const unsigned char *ptr, int len)
/* Dispatch on the requested type's tree code to the matching
   interpreter.  */
7371 switch (TREE_CODE (type))
7377 case REFERENCE_TYPE:
7378 return native_interpret_int (type, ptr, len);
7381 return native_interpret_real (type, ptr, len);
7383 case FIXED_POINT_TYPE:
7384 return native_interpret_fixed (type, ptr, len);
7387 return native_interpret_complex (type, ptr, len);
7390 return native_interpret_vector (type, ptr, len);
7397 /* Returns true if we can interpret the contents of a native encoding
7401 can_native_interpret_type_p (tree type)
/* Mirrors the dispatch in native_interpret_expr: only the type codes
   listed here have an interpreter.  */
7403 switch (TREE_CODE (type))
7409 case REFERENCE_TYPE:
7410 case FIXED_POINT_TYPE:
7420 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7421 TYPE at compile-time.  If we're unable to perform the conversion
7422 return NULL_TREE. */
7425 fold_view_convert_expr (tree type, tree expr)
7427 /* We support up to 512-bit values (for V8DFmode). */
7428 unsigned char buffer[64];
7431 /* Check that the host and target are sane. */
7432 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR into target byte order, then reparse the
   bytes as a constant of TYPE.  */
7435 len = native_encode_expr (expr, buffer, sizeof (buffer));
7439 return native_interpret_expr (type, buffer, len);
7442 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7443 to avoid confusing the gimplify process. */
7446 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7448 /* The size of the object is not relevant when talking about its address. */
7449 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7450 t = TREE_OPERAND (t, 0);
/* &*p folds to p (with a cast if the pointer types differ).  */
7452 if (TREE_CODE (t) == INDIRECT_REF)
7454 t = TREE_OPERAND (t, 0);
7456 if (TREE_TYPE (t) != ptrtype)
7457 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
/* &MEM[p, 0] is just p.  */
7459 else if (TREE_CODE (t) == MEM_REF
7460 && integer_zerop (TREE_OPERAND (t, 1)))
7461 return TREE_OPERAND (t, 0);
/* &MEM[constant, offset] folds to constant p+ offset.  */
7462 else if (TREE_CODE (t) == MEM_REF
7463 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7464 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7465 TREE_OPERAND (t, 0),
7466 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
/* The address of a view-convert is the address of its operand.  */
7467 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7469 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7471 if (TREE_TYPE (t) != ptrtype)
7472 t = fold_convert_loc (loc, ptrtype, t);
7475 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7480 /* Build an expression for the address of T. */
7483 build_fold_addr_expr_loc (location_t loc, tree t)
/* Use the natural pointer-to-TREE_TYPE(T) type and delegate to the
   typed variant.  */
7485 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7487 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7490 /* Fold a unary expression of code CODE and type TYPE with operand
7491 OP0.  Return the folded expression if folding is successful.
7492 Otherwise, return NULL_TREE. */
7495 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7499 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine unary expression codes are accepted here.  */
7501 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7502 && TREE_CODE_LENGTH (code) == 1);
7507 if (CONVERT_EXPR_CODE_P (code)
7508 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7510 /* Don't use STRIP_NOPS, because signedness of argument type
7512 STRIP_SIGN_NOPS (arg0);
7516 /* Strip any conversions that don't change the mode.  This
7517 is safe for every expression, except for a comparison
7518 expression because its signedness is derived from its
7521 Note that this is done as an internal manipulation within
7522 the constant folder, in order to find the simplest
7523 representation of the arguments so that their form can be
7524 studied.  In any cases, the appropriate type conversions
7525 should be put back in the tree that will get out of the
/* A constant operand may fold directly through const_unop.  */
7530 if (CONSTANT_CLASS_P (arg0))
7532 tree tem = const_unop (code, type, arg0);
7535 if (TREE_TYPE (tem) != type)
7536 tem = fold_convert_loc (loc, type, tem);
/* Give the generated match-and-simplify machinery a chance first.  */
7542 tem = generic_simplify (loc, code, type, op0);
/* Distribute the unary operation into COMPOUND_EXPR and COND_EXPR
   operands so the interesting sub-expressions get folded.  */
7546 if (TREE_CODE_CLASS (code) == tcc_unary)
7548 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7549 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7550 fold_build1_loc (loc, code, type,
7551 fold_convert_loc (loc, TREE_TYPE (op0),
7552 TREE_OPERAND (arg0, 1))));
7553 else if (TREE_CODE (arg0) == COND_EXPR)
7555 tree arg01 = TREE_OPERAND (arg0, 1);
7556 tree arg02 = TREE_OPERAND (arg0, 2);
7557 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7558 arg01 = fold_build1_loc (loc, code, type,
7559 fold_convert_loc (loc,
7560 TREE_TYPE (op0), arg01));
7561 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7562 arg02 = fold_build1_loc (loc, code, type,
7563 fold_convert_loc (loc,
7564 TREE_TYPE (op0), arg02));
7565 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7568 /* If this was a conversion, and all we did was to move into
7569 inside the COND_EXPR, bring it back out.  But leave it if
7570 it is a conversion from integer to integer and the
7571 result precision is no wider than a word since such a
7572 conversion is cheap and may be optimized away by combine,
7573 while it couldn't if it were outside the COND_EXPR.  Then return
7574 so we don't get into an infinite recursion loop taking the
7575 conversion out and then back in. */
7577 if ((CONVERT_EXPR_CODE_P (code)
7578 || code == NON_LVALUE_EXPR)
7579 && TREE_CODE (tem) == COND_EXPR
7580 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7581 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7582 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7583 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7584 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7585 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7586 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7588 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7589 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7590 || flag_syntax_only))
7591 tem = build1_loc (loc, code, type,
7593 TREE_TYPE (TREE_OPERAND
7594 (TREE_OPERAND (tem, 1), 0)),
7595 TREE_OPERAND (tem, 0),
7596 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7597 TREE_OPERAND (TREE_OPERAND (tem, 2),
/* A NON_LVALUE_EXPR wrapper is only needed around actual lvalues.  */
7605 case NON_LVALUE_EXPR:
7606 if (!maybe_lvalue_p (op0))
7607 return fold_convert_loc (loc, type, op0);
7612 case FIX_TRUNC_EXPR:
7613 if (COMPARISON_CLASS_P (op0))
7615 /* If we have (type) (a CMP b) and type is an integral type, return
7616 new expression involving the new type.  Canonicalize
7617 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7619 Do not fold the result as that would not simplify further, also
7620 folding again results in recursions. */
7621 if (TREE_CODE (type) == BOOLEAN_TYPE)
7622 return build2_loc (loc, TREE_CODE (op0), type,
7623 TREE_OPERAND (op0, 0),
7624 TREE_OPERAND (op0, 1));
7625 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7626 && TREE_CODE (type) != VECTOR_TYPE)
7627 return build3_loc (loc, COND_EXPR, type, op0,
7628 constant_boolean_node (true, type),
7629 constant_boolean_node (false, type));
7632 /* Handle (T *)&A.B.C for A being of type T and B and C
7633 living at offset zero.  This occurs frequently in
7634 C++ upcasting and then accessing the base. */
7635 if (TREE_CODE (op0) == ADDR_EXPR
7636 && POINTER_TYPE_P (type)
7637 && handled_component_p (TREE_OPERAND (op0, 0)))
7639 HOST_WIDE_INT bitsize, bitpos;
7642 int unsignedp, volatilep;
7643 tree base = TREE_OPERAND (op0, 0);
7644 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7645 &mode, &unsignedp, &volatilep, false);
7646 /* If the reference was to a (constant) zero offset, we can use
7647 the address of the base if it has the same base type
7648 as the result type and the pointer type is unqualified. */
7649 if (! offset && bitpos == 0
7650 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7651 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7652 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7653 return fold_convert_loc (loc, type,
7654 build_fold_addr_expr_loc (loc, base));
7657 if (TREE_CODE (op0) == MODIFY_EXPR
7658 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7659 /* Detect assigning a bitfield. */
7660 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7662 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7664 /* Don't leave an assignment inside a conversion
7665 unless assigning a bitfield. */
7666 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7667 /* First do the assignment, then return converted constant. */
7668 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7669 TREE_NO_WARNING (tem) = 1;
7670 TREE_USED (tem) = 1;
7674 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7675 constants (if x has signed type, the sign bit cannot be set
7676 in c).  This folds extension into the BIT_AND_EXPR.
7677 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7678 very likely don't have maximal range for their precision and this
7679 transformation effectively doesn't preserve non-maximal ranges. */
7680 if (TREE_CODE (type) == INTEGER_TYPE
7681 && TREE_CODE (op0) == BIT_AND_EXPR
7682 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7684 tree and_expr = op0;
7685 tree and0 = TREE_OPERAND (and_expr, 0);
7686 tree and1 = TREE_OPERAND (and_expr, 1);
7689 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7690 || (TYPE_PRECISION (type)
7691 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7693 else if (TYPE_PRECISION (TREE_TYPE (and1))
7694 <= HOST_BITS_PER_WIDE_INT
7695 && tree_fits_uhwi_p (and1))
7697 unsigned HOST_WIDE_INT cst;
/* The sign bit of AND1 must be clear for the narrowing fold to be
   safe when the operand is signed.  */
7699 cst = tree_to_uhwi (and1);
7700 cst &= HOST_WIDE_INT_M1U
7701 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7702 change = (cst == 0);
7704 && !flag_syntax_only
7705 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7708 tree uns = unsigned_type_for (TREE_TYPE (and0));
7709 and0 = fold_convert_loc (loc, uns, and0);
7710 and1 = fold_convert_loc (loc, uns, and1);
7715 tem = force_fit_type (type, wi::to_widest (and1), 0,
7716 TREE_OVERFLOW (and1));
7717 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7718 fold_convert_loc (loc, type, and0), tem);
7722 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7723 when one of the new casts will fold away.  Conservatively we assume
7724 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7725 if (POINTER_TYPE_P (type)
7726 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7727 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7728 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7729 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7730 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7732 tree arg00 = TREE_OPERAND (arg0, 0);
7733 tree arg01 = TREE_OPERAND (arg0, 1);
7735 return fold_build_pointer_plus_loc
7736 (loc, fold_convert_loc (loc, type, arg00), arg01);
7739 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7740 of the same precision, and X is an integer type not narrower than
7741 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7742 if (INTEGRAL_TYPE_P (type)
7743 && TREE_CODE (op0) == BIT_NOT_EXPR
7744 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7745 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7746 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7748 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7749 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7750 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7751 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7752 fold_convert_loc (loc, type, tem));
7755 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7756 type of X and Y (integer types only). */
7757 if (INTEGRAL_TYPE_P (type)
7758 && TREE_CODE (op0) == MULT_EXPR
7759 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7760 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7762 /* Be careful not to introduce new overflows. */
7764 if (TYPE_OVERFLOW_WRAPS (type))
7767 mult_type = unsigned_type_for (type);
7769 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7771 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7772 fold_convert_loc (loc, mult_type,
7773 TREE_OPERAND (op0, 0)),
7774 fold_convert_loc (loc, mult_type,
7775 TREE_OPERAND (op0, 1)));
7776 return fold_convert_loc (loc, type, tem);
/* A view-convert of a memory reference is just a retyped MEM_REF.  */
7782 case VIEW_CONVERT_EXPR:
7783 if (TREE_CODE (op0) == MEM_REF)
7784 return fold_build2_loc (loc, MEM_REF, type,
7785 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7790 tem = fold_negate_expr (loc, arg0);
7792 return fold_convert_loc (loc, type, tem);
7796 /* Convert fabs((double)float) into (double)fabsf(float). */
7797 if (TREE_CODE (arg0) == NOP_EXPR
7798 && TREE_CODE (type) == REAL_TYPE)
7800 tree targ0 = strip_float_extensions (arg0);
7802 return fold_convert_loc (loc, type,
7803 fold_build1_loc (loc, ABS_EXPR,
7808 /* Strip sign ops from argument. */
7809 if (TREE_CODE (type) == REAL_TYPE)
7811 tem = fold_strip_sign_ops (arg0);
7813 return fold_build1_loc (loc, ABS_EXPR, type,
7814 fold_convert_loc (loc, type, tem));
7819 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7820 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7821 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7822 fold_convert_loc (loc, type,
7823 TREE_OPERAND (arg0, 0)))))
7824 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7825 fold_convert_loc (loc, type,
7826 TREE_OPERAND (arg0, 1)));
7827 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7828 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7829 fold_convert_loc (loc, type,
7830 TREE_OPERAND (arg0, 1)))))
7831 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7832 fold_convert_loc (loc, type,
7833 TREE_OPERAND (arg0, 0)), tem);
7837 case TRUTH_NOT_EXPR:
7838 /* Note that the operand of this must be an int
7839 and its values must be 0 or 1.
7840 ("true" is a fixed value perhaps depending on the language,
7841 but we don't handle values other than 1 correctly yet.) */
7842 tem = fold_truth_not_expr (loc, arg0);
7845 return fold_convert_loc (loc, type, tem);
7848 /* Fold *&X to X if X is an lvalue. */
7849 if (TREE_CODE (op0) == ADDR_EXPR)
7851 tree op00 = TREE_OPERAND (op0, 0);
7852 if ((TREE_CODE (op00) == VAR_DECL
7853 || TREE_CODE (op00) == PARM_DECL
7854 || TREE_CODE (op00) == RESULT_DECL)
7855 && !TREE_READONLY (op00))
7862 } /* switch (code) */
7866 /* If the operation was a conversion do _not_ mark a resulting constant
7867 with TREE_OVERFLOW if the original constant was not.  These conversions
7868 have implementation defined behavior and retaining the TREE_OVERFLOW
7869 flag here would confuse later passes such as VRP. */
7871 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7872 tree type, tree op0)
7874 tree res = fold_unary_loc (loc, code, type, op0);
/* For conversions of integer constants, copy the operand's overflow
   flag onto the result, discarding any flag the fold introduced.  */
7876 && TREE_CODE (res) == INTEGER_CST
7877 && TREE_CODE (op0) == INTEGER_CST
7878 && CONVERT_EXPR_CODE_P (code))
7879 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7884 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7885 operands OP0 and OP1.  LOC is the location of the resulting expression.
7886 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7887 Return the folded expression if folding is successful.  Otherwise,
7888 return NULL_TREE. */
7890 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7891 tree arg0, tree arg1, tree op0, tree op1)
7895 /* We only do these simplifications if we are optimizing. */
7899 /* Check for things like (A || B) && (A || C).  We can convert this
7900 to A || (B && C).  Note that either operator can be any of the four
7901 truth and/or operations and the transformation will still be
7902 valid.  Also note that we only care about order for the
7903 ANDIF and ORIF operators.  If B contains side effects, this
7904 might change the truth-value of A. */
7905 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7906 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7907 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7908 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7909 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7910 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7912 tree a00 = TREE_OPERAND (arg0, 0);
7913 tree a01 = TREE_OPERAND (arg0, 1);
7914 tree a10 = TREE_OPERAND (arg1, 0);
7915 tree a11 = TREE_OPERAND (arg1, 1);
/* Operand order may only be exchanged for the non-short-circuit
   operator combinations.  */
7916 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7917 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7918 && (code == TRUTH_AND_EXPR
7919 || code == TRUTH_OR_EXPR));
7921 if (operand_equal_p (a00, a10, 0))
7922 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7923 fold_build2_loc (loc, code, type, a01, a11));
7924 else if (commutative && operand_equal_p (a00, a11, 0))
7925 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7926 fold_build2_loc (loc, code, type, a01, a10));
7927 else if (commutative && operand_equal_p (a01, a10, 0))
7928 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7929 fold_build2_loc (loc, code, type, a00, a11));
7931 /* This case if tricky because we must either have commutative
7932 operators or else A10 must not have side-effects. */
7934 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7935 && operand_equal_p (a01, a11, 0))
7936 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7937 fold_build2_loc (loc, code, type, a00, a10),
7941 /* See if we can build a range comparison. */
7942 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
/* (A || B) && C where C contradicts an arm of the LHS: try merging the
   opposite arm.  */
7945 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7946 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7948 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7950 return fold_build2_loc (loc, code, type, tem, arg1);
7953 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7954 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7956 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7958 return fold_build2_loc (loc, code, type, arg0, tem);
7961 /* Check for the possibility of merging component references.  If our
7962 lhs is another similar operation, try to merge its rhs with our
7963 rhs.  Then try to merge our lhs and rhs. */
7964 if (TREE_CODE (arg0) == code
7965 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7966 TREE_OPERAND (arg0, 1), arg1)))
7967 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7969 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
/* On targets where branches are expensive, convert short-circuit
   AND-IF/OR-IF chains into their non-short-circuit forms when the
   operands are simple and cannot trap.  */
7972 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7973 && (code == TRUTH_AND_EXPR
7974 || code == TRUTH_ANDIF_EXPR
7975 || code == TRUTH_OR_EXPR
7976 || code == TRUTH_ORIF_EXPR))
7978 enum tree_code ncode, icode;
7980 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
7981 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
7982 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
7984 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
7985 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
7986 We don't want to pack more than two leafs to a non-IF AND/OR
7988 If tree-code of left-hand operand isn't an AND/OR-IF code and not
7989 equal to IF-CODE, then we don't want to add right-hand operand.
7990 If the inner right-hand side of left-hand operand has
7991 side-effects, or isn't simple, then we can't add to it,
7992 as otherwise we might destroy if-sequence. */
7993 if (TREE_CODE (arg0) == icode
7994 && simple_operand_p_2 (arg1)
7995 /* Needed for sequence points to handle trappings, and
7997 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
7999 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8001 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8004 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8005 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8006 else if (TREE_CODE (arg1) == icode
8007 && simple_operand_p_2 (arg0)
8008 /* Needed for sequence points to handle trappings, and
8010 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8012 tem = fold_build2_loc (loc, ncode, type,
8013 arg0, TREE_OPERAND (arg1, 0));
8014 return fold_build2_loc (loc, icode, type, tem,
8015 TREE_OPERAND (arg1, 1));
8017 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8019 For sequence point consistancy, we need to check for trapping,
8020 and side-effects. */
8021 else if (code == icode && simple_operand_p_2 (arg0)
8022 && simple_operand_p_2 (arg1))
8023 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8029 /* Fold a binary expression of code CODE and type TYPE with operands
8030 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8031 Return the folded expression if folding is successful.  Otherwise,
8032 return NULL_TREE. */
8035 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8037 enum tree_code compl_code;
/* COMPL_CODE is the dual of CODE; only MIN/MAX pairs are handled.  */
8039 if (code == MIN_EXPR)
8040 compl_code = MAX_EXPR;
8041 else if (code == MAX_EXPR)
8042 compl_code = MIN_EXPR;
8046 /* MIN (MAX (a, b), b) == b.  */
8047 if (TREE_CODE (op0) == compl_code
8048 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8049 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0))
8051 /* MIN (MAX (b, a), b) == b.  */
8052 if (TREE_CODE (op0) == compl_code
8053 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8054 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8055 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8057 /* MIN (a, MAX (a, b)) == a.  */
8058 if (TREE_CODE (op1) == compl_code
8059 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8060 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8061 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8063 /* MIN (a, MAX (b, a)) == a.  */
8064 if (TREE_CODE (op1) == compl_code
8065 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8066 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8067 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8072 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8073 by changing CODE to reduce the magnitude of constants involved in
8074 ARG0 of the comparison.
8075 Returns a canonicalized comparison tree if a simplification was
8076 possible, otherwise returns NULL_TREE.
8077 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8078 valid if signed overflow is undefined. */
8081 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8082 tree arg0, tree arg1,
8083 bool *strict_overflow_p)
8085 enum tree_code code0 = TREE_CODE (arg0);
8086 tree t, cst0 = NULL_TREE;
8089 /* Match A +- CST code arg1.  We can change this only if overflow
/* The transformation is only sound for types with undefined signed
   overflow (not pointers).  */
8091 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8092 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8093 /* In principle pointers also have undefined overflow behavior,
8094 but that causes problems elsewhere. */
8095 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8096 && (code0 == MINUS_EXPR
8097 || code0 == PLUS_EXPR)
8098 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8101 /* Identify the constant in arg0 and its sign. */
8102 cst0 = TREE_OPERAND (arg0, 1);
8103 sgn0 = tree_int_cst_sgn (cst0);
8105 /* Overflowed constants and zero will cause problems. */
8106 if (integer_zerop (cst0)
8107 || TREE_OVERFLOW (cst0))
8110 /* See if we can reduce the magnitude of the constant in
8111 arg0 by changing the comparison code. */
8112 /* A - CST < arg1  ->  A - CST-1 <= arg1. */
8114 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8116 /* A + CST > arg1  ->  A + CST-1 >= arg1. */
8117 else if (code == GT_EXPR
8118 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8120 /* A + CST <= arg1  ->  A + CST-1 < arg1. */
8121 else if (code == LE_EXPR
8122 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8124 /* A - CST >= arg1  ->  A - CST-1 > arg1. */
8125 else if (code == GE_EXPR
8126 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* Signal the caller that this rewrite assumed undefined signed
   overflow, so a -Wstrict-overflow note can be emitted.  */
8130 *strict_overflow_p = true;
8132 /* Now build the constant reduced in magnitude.  But not if that
8133 would produce one outside of its types range. */
8134 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8136 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8137 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8139 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8140 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
/* Rebuild A +- (CST-1) and the adjusted comparison.  */
8143 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8144 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8145 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8146 t = fold_convert (TREE_TYPE (arg1), t);
8148 return fold_build2_loc (loc, code, type, t, arg1);
8151 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8152 overflow further. Try to decrease the magnitude of constants involved
8153 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8154 and put sole constants at the second argument position.
8155 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* Driver: try the _1 helper first on ARG0, then (with the comparison
   swapped) on ARG1, emitting the -Wstrict-overflow warning when the helper
   reports it assumed undefined signed overflow.
   NOTE(review): elided listing -- the final return of T is among the
   missing lines; verify against the full source.  */
8158 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8159 tree arg0, tree arg1)
8162 bool strict_overflow_p;
8163 const char * const warnmsg = G_("assuming signed overflow does not occur "
8164 "when reducing constant in comparison");
8166 /* Try canonicalization by simplifying arg0. */
8167 strict_overflow_p = false;
8168 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8169 &strict_overflow_p);
8172 if (strict_overflow_p)
8173 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
/* Second attempt: swap the comparison and canonicalize arg1 instead.  */
8177 /* Try canonicalization by simplifying arg1 using the swapped
8179 code = swap_tree_comparison (code);
8180 strict_overflow_p = false;
8181 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8182 &strict_overflow_p);
8183 if (t && strict_overflow_p)
8184 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8188 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8189 space. This is used to avoid issuing overflow warnings for
8190 expressions like &p->x which can not wrap. */
/* Return whether BASE + OFFSET + BITPOS may wrap around the address space
   (see the comment above).  Computes the byte total with wide_int in the
   pointer's precision and compares it against the pointed-to object's size.
   NOTE(review): elided listing -- early-return bodies and the overflow
   check on TOTAL are among the missing lines.  */
8193 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8195 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8202 int precision = TYPE_PRECISION (TREE_TYPE (base));
/* A missing offset contributes zero; a non-constant or overflowed
   offset makes the answer unknowable here.  */
8203 if (offset == NULL_TREE)
8204 wi_offset = wi::zero (precision);
8205 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8211 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8212 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8216 if (!wi::fits_uhwi_p (total))
8219 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8223 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
/* Prefer the size of the actual object behind an ADDR_EXPR when larger.  */
8225 if (TREE_CODE (base) == ADDR_EXPR)
8227 HOST_WIDE_INT base_size;
8229 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8230 if (base_size > 0 && size < base_size)
8234 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8237 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8238 kind INTEGER_CST. This makes sure to properly sign-extend the
/* Return the low HOST_WIDE_INT of sizetype constant T, sign-extended from
   T's precision when that is narrower than HOST_WIDE_INT.
   NOTE(review): elided listing -- the fallthrough `return w;` for the
   full-width case is among the missing lines.  */
8241 static HOST_WIDE_INT
8242 size_low_cst (const_tree t)
8244 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8245 int prec = TYPE_PRECISION (TREE_TYPE (t));
8246 if (prec < HOST_BITS_PER_WIDE_INT)
8247 return sext_hwi (w, prec);
8251 /* Subroutine of fold_binary. This routine performs all of the
8252 transformations that are common to the equality/inequality
8253 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8254 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8255 fold_binary should call fold_binary. Fold a comparison with
8256 tree code CODE and type TYPE with operands OP0 and OP1. Return
8257 the folded comparison or NULL_TREE. */
/* Fold comparison OP0 CODE OP1 of type TYPE (see the comment above).
   Applies, in order: X +- C1 CMP C2 rewriting, pointer base/offset
   decomposition, combining constants on both sides, canonicalization,
   MIN/MAX-vs-constant, two-value comparison analysis, and X/C1 op C2
   range-test folding.
   NOTE(review): heavily elided listing -- many original lines (variable
   declarations, braces, returns) are missing between entries; the
   section comments below describe only what the visible lines show.  */
8260 fold_comparison (location_t loc, enum tree_code code, tree type,
8263 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8264 tree arg0, arg1, tem;
8269 STRIP_SIGN_NOPS (arg0);
8270 STRIP_SIGN_NOPS (arg1);
/* --- Section 1: move a constant from the LHS sum/difference to the RHS,
   handling the overflowed-constant case as INT_MIN/INT_MAX comparison.  */
8272 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8273 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8275 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8276 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8278 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8279 && TREE_CODE (arg1) == INTEGER_CST
8280 && !TREE_OVERFLOW (arg1))
8282 const enum tree_code
8283 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8284 tree const1 = TREE_OPERAND (arg0, 1);
8285 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8286 tree variable = TREE_OPERAND (arg0, 0);
8287 tree new_const = int_const_binop (reverse_op, const2, const1);
8289 /* If the constant operation overflowed this can be
8290 simplified as a comparison against INT_MAX/INT_MIN. */
8291 if (TREE_OVERFLOW (new_const)
8292 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8294 int const1_sgn = tree_int_cst_sgn (const1);
8295 enum tree_code code2 = code;
8297 /* Get the sign of the constant on the lhs if the
8298 operation were VARIABLE + CONST1. */
8299 if (TREE_CODE (arg0) == MINUS_EXPR)
8300 const1_sgn = -const1_sgn;
8302 /* The sign of the constant determines if we overflowed
8303 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8304 Canonicalize to the INT_MIN overflow by swapping the comparison
8306 if (const1_sgn == -1)
8307 code2 = swap_tree_comparison (code);
8309 /* We now can look at the canonicalized case
8310 VARIABLE + 1 CODE2 INT_MIN
8311 and decide on the result. */
8318 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8324 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8333 fold_overflow_warning ("assuming signed overflow does not occur "
8334 "when changing X +- C1 cmp C2 to "
8336 WARN_STRICT_OVERFLOW_COMPARISON);
8337 return fold_build2_loc (loc, code, type, variable, new_const);
/* --- Section 2: decompose pointer comparisons into (base, byte offset,
   bit position) triples via get_inner_reference / POINTER_PLUS_EXPR.  */
8341 /* For comparisons of pointers we can decompose it to a compile time
8342 comparison of the base objects and the offsets into the object.
8343 This requires at least one operand being an ADDR_EXPR or a
8344 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8345 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8346 && (TREE_CODE (arg0) == ADDR_EXPR
8347 || TREE_CODE (arg1) == ADDR_EXPR
8348 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8349 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8351 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8352 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8354 int volatilep, unsignedp;
8355 bool indirect_base0 = false, indirect_base1 = false;
8357 /* Get base and offset for the access. Strip ADDR_EXPR for
8358 get_inner_reference, but put it back by stripping INDIRECT_REF
8359 off the base object if possible. indirect_baseN will be true
8360 if baseN is not an address but refers to the object itself. */
8362 if (TREE_CODE (arg0) == ADDR_EXPR)
8364 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8365 &bitsize, &bitpos0, &offset0, &mode,
8366 &unsignedp, &volatilep, false);
8367 if (TREE_CODE (base0) == INDIRECT_REF)
8368 base0 = TREE_OPERAND (base0, 0);
8370 indirect_base0 = true;
8372 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8374 base0 = TREE_OPERAND (arg0, 0);
8375 STRIP_SIGN_NOPS (base0);
8376 if (TREE_CODE (base0) == ADDR_EXPR)
8378 base0 = TREE_OPERAND (base0, 0);
8379 indirect_base0 = true;
8381 offset0 = TREE_OPERAND (arg0, 1);
/* Fold a small constant byte offset into bitpos0 when it fits.  */
8382 if (tree_fits_shwi_p (offset0))
8384 HOST_WIDE_INT off = size_low_cst (offset0);
8385 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8387 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8389 bitpos0 = off * BITS_PER_UNIT;
8390 offset0 = NULL_TREE;
/* Mirror of the above for arg1.  */
8396 if (TREE_CODE (arg1) == ADDR_EXPR)
8398 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8399 &bitsize, &bitpos1, &offset1, &mode,
8400 &unsignedp, &volatilep, false);
8401 if (TREE_CODE (base1) == INDIRECT_REF)
8402 base1 = TREE_OPERAND (base1, 0);
8404 indirect_base1 = true;
8406 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8408 base1 = TREE_OPERAND (arg1, 0);
8409 STRIP_SIGN_NOPS (base1);
8410 if (TREE_CODE (base1) == ADDR_EXPR)
8412 base1 = TREE_OPERAND (base1, 0);
8413 indirect_base1 = true;
8415 offset1 = TREE_OPERAND (arg1, 1);
8416 if (tree_fits_shwi_p (offset1))
8418 HOST_WIDE_INT off = size_low_cst (offset1);
8419 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8421 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8423 bitpos1 = off * BITS_PER_UNIT;
8424 offset1 = NULL_TREE;
8429 /* If we have equivalent bases we might be able to simplify. */
8430 if (indirect_base0 == indirect_base1
8431 && operand_equal_p (base0, base1,
8432 indirect_base0 ? OEP_ADDRESS_OF : 0))
8434 /* We can fold this expression to a constant if the non-constant
8435 offset parts are equal. */
8436 if ((offset0 == offset1
8437 || (offset0 && offset1
8438 && operand_equal_p (offset0, offset1, 0)))
8441 || (indirect_base0 && DECL_P (base0))
8442 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8446 && bitpos0 != bitpos1
8447 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8448 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8449 fold_overflow_warning (("assuming pointer wraparound does not "
8450 "occur when comparing P +- C1 with "
8452 WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Same base, constant bit positions: fold to a compile-time boolean
   (one return per comparison code; the switch lines are elided).  */
8457 return constant_boolean_node (bitpos0 == bitpos1, type);
8459 return constant_boolean_node (bitpos0 != bitpos1, type);
8461 return constant_boolean_node (bitpos0 < bitpos1, type);
8463 return constant_boolean_node (bitpos0 <= bitpos1, type);
8465 return constant_boolean_node (bitpos0 >= bitpos1, type);
8467 return constant_boolean_node (bitpos0 > bitpos1, type);
8471 /* We can simplify the comparison to a comparison of the variable
8472 offset parts if the constant offset parts are equal.
8473 Be careful to use signed sizetype here because otherwise we
8474 mess with array offsets in the wrong way. This is possible
8475 because pointer arithmetic is restricted to retain within an
8476 object and overflow on pointer differences is undefined as of
8477 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8478 else if (bitpos0 == bitpos1
8480 || (indirect_base0 && DECL_P (base0))
8481 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8483 /* By converting to signed sizetype we cover middle-end pointer
8484 arithmetic which operates on unsigned pointer types of size
8485 type size and ARRAY_REF offsets which are properly sign or
8486 zero extended from their type in case it is narrower than
8488 if (offset0 == NULL_TREE)
8489 offset0 = build_int_cst (ssizetype, 0);
8491 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8492 if (offset1 == NULL_TREE)
8493 offset1 = build_int_cst (ssizetype, 0);
8495 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8498 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8499 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8500 fold_overflow_warning (("assuming pointer wraparound does not "
8501 "occur when comparing P +- C1 with "
8503 WARN_STRICT_OVERFLOW_COMPARISON);
8505 return fold_build2_loc (loc, code, type, offset0, offset1);
8508 /* For equal offsets we can simplify to a comparison of the
8510 else if (bitpos0 == bitpos1
8512 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8514 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8515 && ((offset0 == offset1)
8516 || (offset0 && offset1
8517 && operand_equal_p (offset0, offset1, 0))))
8520 base0 = build_fold_addr_expr_loc (loc, base0);
8522 base1 = build_fold_addr_expr_loc (loc, base1);
8523 return fold_build2_loc (loc, code, type, base0, base1);
/* --- Section 3: combine constants from both sides when overflow is
   undefined, keeping the residual constant smaller and same-signed.  */
8527 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8528 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8529 the resulting offset is smaller in absolute value than the
8530 original one and has the same sign. */
8531 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8532 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8533 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8534 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8535 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8536 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8537 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8538 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8540 tree const1 = TREE_OPERAND (arg0, 1);
8541 tree const2 = TREE_OPERAND (arg1, 1);
8542 tree variable1 = TREE_OPERAND (arg0, 0);
8543 tree variable2 = TREE_OPERAND (arg1, 0);
8545 const char * const warnmsg = G_("assuming signed overflow does not "
8546 "occur when combining constants around "
8549 /* Put the constant on the side where it doesn't overflow and is
8550 of lower absolute value and of same sign than before. */
8551 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8552 ? MINUS_EXPR : PLUS_EXPR,
8554 if (!TREE_OVERFLOW (cst)
8555 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8556 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8558 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8559 return fold_build2_loc (loc, code, type,
8561 fold_build2_loc (loc, TREE_CODE (arg1),
/* Symmetric attempt with the roles of const1/const2 swapped.  */
8566 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8567 ? MINUS_EXPR : PLUS_EXPR,
8569 if (!TREE_OVERFLOW (cst)
8570 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8571 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8573 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8574 return fold_build2_loc (loc, code, type,
8575 fold_build2_loc (loc, TREE_CODE (arg0),
/* --- Section 4: generic canonicalization and special-case folders.  */
8582 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8586 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8587 constant, we can simplify it. */
8588 if (TREE_CODE (arg1) == INTEGER_CST
8589 && (TREE_CODE (arg0) == MIN_EXPR
8590 || TREE_CODE (arg0) == MAX_EXPR)
8591 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8593 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8598 /* If we are comparing an expression that just has comparisons
8599 of two integer values, arithmetic expressions of those comparisons,
8600 and constants, we can simplify it. There are only three cases
8601 to check: the two values can either be equal, the first can be
8602 greater, or the second can be greater. Fold the expression for
8603 those three values. Since each value must be 0 or 1, we have
8604 eight possibilities, each of which corresponds to the constant 0
8605 or 1 or one of the six possible comparisons.
8607 This handles common cases like (a > b) == 0 but also handles
8608 expressions like ((x > y) - (y > x)) > 0, which supposedly
8609 occur in macroized code. */
8611 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8613 tree cval1 = 0, cval2 = 0;
8616 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8617 /* Don't handle degenerate cases here; they should already
8618 have been handled anyway. */
8619 && cval1 != 0 && cval2 != 0
8620 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8621 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8622 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8623 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8624 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8625 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8626 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8628 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8629 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8631 /* We can't just pass T to eval_subst in case cval1 or cval2
8632 was the same as ARG1. */
/* Evaluate the comparison for cval1>cval2, ==, and < by substituting
   (max,min), (max,max) and (min,max) -- assignment targets elided.  */
8635 = fold_build2_loc (loc, code, type,
8636 eval_subst (loc, arg0, cval1, maxval,
8640 = fold_build2_loc (loc, code, type,
8641 eval_subst (loc, arg0, cval1, maxval,
8645 = fold_build2_loc (loc, code, type,
8646 eval_subst (loc, arg0, cval1, minval,
8650 /* All three of these results should be 0 or 1. Confirm they are.
8651 Then use those values to select the proper code to use. */
8653 if (TREE_CODE (high_result) == INTEGER_CST
8654 && TREE_CODE (equal_result) == INTEGER_CST
8655 && TREE_CODE (low_result) == INTEGER_CST)
8657 /* Make a 3-bit mask with the high-order bit being the
8658 value for `>', the next for '=', and the low for '<'. */
8659 switch ((integer_onep (high_result) * 4)
8660 + (integer_onep (equal_result) * 2)
8661 + integer_onep (low_result))
8665 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8686 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8691 tem = save_expr (build2 (code, type, cval1, cval2));
8692 SET_EXPR_LOCATION (tem, loc);
8695 return fold_build2_loc (loc, code, type, cval1, cval2);
/* --- Section 5: X/C1 op C2 folded into a range test.  */
8700 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8701 into a single range test. */
8702 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8703 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8704 && TREE_CODE (arg1) == INTEGER_CST
8705 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8706 && !integer_zerop (TREE_OPERAND (arg0, 1))
8707 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8708 && !TREE_OVERFLOW (arg1))
8710 tem = fold_div_compare (loc, code, type, arg0, arg1);
8711 if (tem != NULL_TREE)
8719 /* Subroutine of fold_binary. Optimize complex multiplications of the
8720 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8721 argument EXPR represents the expression "z" of type TYPE. */
/* Fold z * conj(z) to COMPLEX (re*re + im*im, 0) -- see the comment above.
   Extracts the real/imaginary parts directly from a COMPLEX_EXPR or
   COMPLEX_CST, otherwise via REALPART_EXPR/IMAGPART_EXPR on a save_expr'd
   EXPR so EXPR is evaluated only once.
   NOTE(review): elided listing -- braces/else lines are missing.  */
8724 fold_mult_zconjz (location_t loc, tree type, tree expr)
8726 tree itype = TREE_TYPE (type);
8727 tree rpart, ipart, tem;
8729 if (TREE_CODE (expr) == COMPLEX_EXPR)
8731 rpart = TREE_OPERAND (expr, 0);
8732 ipart = TREE_OPERAND (expr, 1);
8734 else if (TREE_CODE (expr) == COMPLEX_CST)
8736 rpart = TREE_REALPART (expr);
8737 ipart = TREE_IMAGPART (expr);
/* Fallback: materialize the parts from EXPR itself.  */
8741 expr = save_expr (expr);
8742 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8743 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
/* save_expr so each part is computed once despite being squared.  */
8746 rpart = save_expr (rpart);
8747 ipart = save_expr (ipart);
8748 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8749 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8750 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8751 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8752 build_zero_cst (itype));
8756 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8757 CONSTRUCTOR ARG into array ELTS and return true if successful. */
/* Flatten VECTOR_CST or CONSTRUCTOR ARG into ELTS (see comment above),
   zero-filling any trailing elements a CONSTRUCTOR leaves unspecified.
   NOTE(review): elided listing -- the final `return true;`/failure
   returns are among the missing lines.  */
8760 vec_cst_ctor_to_array (tree arg, tree *elts)
8762 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8764 if (TREE_CODE (arg) == VECTOR_CST)
8766 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8767 elts[i] = VECTOR_CST_ELT (arg, i);
8769 else if (TREE_CODE (arg) == CONSTRUCTOR)
8771 constructor_elt *elt;
/* Reject out-of-range indices and nested vector values.  */
8773 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8774 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8777 elts[i] = elt->value;
/* Pad the remainder with zero of the element type.  */
8781 for (; i < nelts; i++)
8783 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8787 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8788 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8789 NULL_TREE otherwise. */
/* Fold a constant VEC_PERM of ARG0/ARG1 under selector SEL (see comment
   above).  Works in a 3*nelts scratch array: inputs in the first two
   thirds, selected output in the last third; returns a CONSTRUCTOR when
   any selected element is not a constant, else a VECTOR_CST.
   NOTE(review): elided listing -- need_ctor update and some braces are
   among the missing lines.  */
8792 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8794 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8796 bool need_ctor = false;
8798 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8799 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8800 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8801 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8804 elts = XALLOCAVEC (tree, nelts * 3);
8805 if (!vec_cst_ctor_to_array (arg0, elts)
8806 || !vec_cst_ctor_to_array (arg1, elts + nelts))
/* Gather selected elements; unshare since they may be reused.  */
8809 for (i = 0; i < nelts; i++)
8811 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8813 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
/* Non-constant element present: build a CONSTRUCTOR instead.  */
8818 vec<constructor_elt, va_gc> *v;
8819 vec_alloc (v, nelts);
8820 for (i = 0; i < nelts; i++)
8821 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8822 return build_constructor (type, v);
8825 return build_vector (type, &elts[2 * nelts]);
8828 /* Try to fold a pointer difference of type TYPE two address expressions of
8829 array references AREF0 and AREF1 using location LOC. Return a
8830 simplified expression for the difference or NULL_TREE. */
/* Fold &AREF0 - &AREF1 for ARRAY_REFs (see comment above): recurse on
   array-ref bases, subtract pointer bases for indirections, or accept
   equal bases; result is base_offset + (idx0 - idx1) * element_size.
   NOTE(review): elided listing -- the final NULL_TREE return and part
   of the PLUS_EXPR operands are among the missing lines.  */
8833 fold_addr_of_array_ref_difference (location_t loc, tree type,
8834 tree aref0, tree aref1)
8836 tree base0 = TREE_OPERAND (aref0, 0);
8837 tree base1 = TREE_OPERAND (aref1, 0);
8838 tree base_offset = build_int_cst (type, 0);
8840 /* If the bases are array references as well, recurse. If the bases
8841 are pointer indirections compute the difference of the pointers.
8842 If the bases are equal, we are set. */
8843 if ((TREE_CODE (base0) == ARRAY_REF
8844 && TREE_CODE (base1) == ARRAY_REF
8846 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8847 || (INDIRECT_REF_P (base0)
8848 && INDIRECT_REF_P (base1)
8849 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
8850 TREE_OPERAND (base0, 0),
8851 TREE_OPERAND (base1, 0))))
8852 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
/* Indices (operand 1 of ARRAY_REF) and element size, all in TYPE.  */
8854 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8855 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8856 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8857 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8858 return fold_build2_loc (loc, PLUS_EXPR, type,
8860 fold_build2_loc (loc, MULT_EXPR, type,
8866 /* If the real or vector real constant CST of type TYPE has an exact
8867 inverse, return it, else return NULL. */
/* Return the exact reciprocal of real or real-vector constant CST, or
   NULL (see comment above).  Uses exact_real_inverse per element; a
   single inexact element aborts the vector case.
   NOTE(review): elided listing -- case labels, default, and failure
   returns are among the missing lines.  */
8870 exact_inverse (tree type, tree cst)
8873 tree unit_type, *elts;
8875 unsigned vec_nelts, i;
8877 switch (TREE_CODE (cst))
/* REAL_CST case: invert the scalar in TYPE's mode.  */
8880 r = TREE_REAL_CST (cst);
8882 if (exact_real_inverse (TYPE_MODE (type), &r))
8883 return build_real (type, r);
/* VECTOR_CST case: invert every element in the element mode.  */
8888 vec_nelts = VECTOR_CST_NELTS (cst);
8889 elts = XALLOCAVEC (tree, vec_nelts);
8890 unit_type = TREE_TYPE (type);
8891 mode = TYPE_MODE (unit_type);
8893 for (i = 0; i < vec_nelts; i++)
8895 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8896 if (!exact_real_inverse (mode, &r))
8898 elts[i] = build_real (unit_type, r);
8901 return build_vector (type, elts);
8908 /* Mask out the tz least significant bits of X of type TYPE where
8909 tz is the number of trailing zeroes in Y. */
/* Mask out of X the wi::ctz (Y) least significant bits (see comment
   above): builds an inverted low mask of that width in TYPE's precision
   and ANDs it with X.  */
8911 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8913 int tz = wi::ctz (y);
8915 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8919 /* Return true when T is an address and is known to be nonzero.
8920 For floating point we further ensure that T is not denormal.
8921 Similar logic is present in nonzero_address in rtlanal.h.
8923 If the return value is based on the assumption that signed overflow
8924 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8925 change *STRICT_OVERFLOW_P. */
/* Return true when T is known nonzero (see comment above).  Dispatches
   on T's code class to the unary/binary/single helpers, recurses through
   COMPOUND-style wrappers, and special-cases calls to operator new and
   returns_nonnull / alloca functions.
   NOTE(review): elided listing -- several case labels, the outer switch
   structure, and the final return are among the missing lines.  */
8928 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8930 tree type = TREE_TYPE (t);
8931 enum tree_code code;
8933 /* Doing something useful for floating point would need more work. */
8934 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8937 code = TREE_CODE (t);
8938 switch (TREE_CODE_CLASS (code))
8941 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8944 case tcc_comparison:
8945 return tree_binary_nonzero_warnv_p (code, type,
8946 TREE_OPERAND (t, 0),
8947 TREE_OPERAND (t, 1),
8950 case tcc_declaration:
8952 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* Expression codes not covered by the class dispatch above.  */
8960 case TRUTH_NOT_EXPR:
8961 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8964 case TRUTH_AND_EXPR:
8966 case TRUTH_XOR_EXPR:
8967 return tree_binary_nonzero_warnv_p (code, type,
8968 TREE_OPERAND (t, 0),
8969 TREE_OPERAND (t, 1),
8977 case WITH_SIZE_EXPR:
8979 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* Recurse into the value operand of compound-like nodes.  */
8984 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8988 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
/* CALL_EXPR: non-throwing operator new and returns_nonnull callees
   cannot return null when null-pointer checks may be deleted.  */
8993 tree fndecl = get_callee_fndecl (t);
8994 if (!fndecl) return false;
8995 if (flag_delete_null_pointer_checks && !flag_check_new
8996 && DECL_IS_OPERATOR_NEW (fndecl)
8997 && !TREE_NOTHROW (fndecl))
8999 if (flag_delete_null_pointer_checks
9000 && lookup_attribute ("returns_nonnull",
9001 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9003 return alloca_call_p (t);
9012 /* Return true when T is an address and is known to be nonzero.
9013 Handle warnings about undefined signed overflow. */
/* Wrapper around tree_expr_nonzero_warnv_p that emits the
   -Wstrict-overflow warning when the result relied on undefined signed
   overflow (see comment above).
   NOTE(review): elided listing -- the final `return ret;` is among the
   missing lines.  */
9016 tree_expr_nonzero_p (tree t)
9018 bool ret, strict_overflow_p;
9020 strict_overflow_p = false;
9021 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9022 if (strict_overflow_p)
9023 fold_overflow_warning (("assuming signed overflow does not occur when "
9024 "determining that expression is always "
9026 WARN_STRICT_OVERFLOW_MISC);
9030 /* Fold a binary expression of code CODE and type TYPE with operands
9031 OP0 and OP1. LOC is the location of the resulting expression.
9032 Return the folded expression if folding is successful. Otherwise,
9033 return NULL_TREE. */
9036 fold_binary_loc (location_t loc,
9037 enum tree_code code, tree type, tree op0, tree op1)
9039 enum tree_code_class kind = TREE_CODE_CLASS (code);
9040 tree arg0, arg1, tem;
9041 tree t1 = NULL_TREE;
9042 bool strict_overflow_p;
9045 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9046 && TREE_CODE_LENGTH (code) == 2
9048 && op1 != NULL_TREE);
9053 /* Strip any conversions that don't change the mode. This is
9054 safe for every expression, except for a comparison expression
9055 because its signedness is derived from its operands. So, in
9056 the latter case, only strip conversions that don't change the
9057 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9060 Note that this is done as an internal manipulation within the
9061 constant folder, in order to find the simplest representation
9062 of the arguments so that their form can be studied. In any
9063 cases, the appropriate type conversions should be put back in
9064 the tree that will get out of the constant folder. */
9066 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9068 STRIP_SIGN_NOPS (arg0);
9069 STRIP_SIGN_NOPS (arg1);
9077 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9078 constant but we can't do arithmetic on them. */
9079 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9081 tem = const_binop (code, type, arg0, arg1);
9082 if (tem != NULL_TREE)
9084 if (TREE_TYPE (tem) != type)
9085 tem = fold_convert_loc (loc, type, tem);
9090 /* If this is a commutative operation, and ARG0 is a constant, move it
9091 to ARG1 to reduce the number of tests below. */
9092 if (commutative_tree_code (code)
9093 && tree_swap_operands_p (arg0, arg1, true))
9094 return fold_build2_loc (loc, code, type, op1, op0);
9096 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9097 to ARG1 to reduce the number of tests below. */
9098 if (kind == tcc_comparison
9099 && tree_swap_operands_p (arg0, arg1, true))
9100 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9102 tem = generic_simplify (loc, code, type, op0, op1);
9106 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9108 First check for cases where an arithmetic operation is applied to a
9109 compound, conditional, or comparison operation. Push the arithmetic
9110 operation inside the compound or conditional to see if any folding
9111 can then be done. Convert comparison to conditional for this purpose.
9112 The also optimizes non-constant cases that used to be done in
9115 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9116 one of the operands is a comparison and the other is a comparison, a
9117 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9118 code below would make the expression more complex. Change it to a
9119 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9120 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9122 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9123 || code == EQ_EXPR || code == NE_EXPR)
9124 && TREE_CODE (type) != VECTOR_TYPE
9125 && ((truth_value_p (TREE_CODE (arg0))
9126 && (truth_value_p (TREE_CODE (arg1))
9127 || (TREE_CODE (arg1) == BIT_AND_EXPR
9128 && integer_onep (TREE_OPERAND (arg1, 1)))))
9129 || (truth_value_p (TREE_CODE (arg1))
9130 && (truth_value_p (TREE_CODE (arg0))
9131 || (TREE_CODE (arg0) == BIT_AND_EXPR
9132 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9134 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9135 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9138 fold_convert_loc (loc, boolean_type_node, arg0),
9139 fold_convert_loc (loc, boolean_type_node, arg1));
9141 if (code == EQ_EXPR)
9142 tem = invert_truthvalue_loc (loc, tem);
9144 return fold_convert_loc (loc, type, tem);
9147 if (TREE_CODE_CLASS (code) == tcc_binary
9148 || TREE_CODE_CLASS (code) == tcc_comparison)
9150 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9152 tem = fold_build2_loc (loc, code, type,
9153 fold_convert_loc (loc, TREE_TYPE (op0),
9154 TREE_OPERAND (arg0, 1)), op1);
9155 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9158 if (TREE_CODE (arg1) == COMPOUND_EXPR
9159 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9161 tem = fold_build2_loc (loc, code, type, op0,
9162 fold_convert_loc (loc, TREE_TYPE (op1),
9163 TREE_OPERAND (arg1, 1)));
9164 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9168 if (TREE_CODE (arg0) == COND_EXPR
9169 || TREE_CODE (arg0) == VEC_COND_EXPR
9170 || COMPARISON_CLASS_P (arg0))
9172 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9174 /*cond_first_p=*/1);
9175 if (tem != NULL_TREE)
9179 if (TREE_CODE (arg1) == COND_EXPR
9180 || TREE_CODE (arg1) == VEC_COND_EXPR
9181 || COMPARISON_CLASS_P (arg1))
9183 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9185 /*cond_first_p=*/0);
9186 if (tem != NULL_TREE)
9194 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9195 if (TREE_CODE (arg0) == ADDR_EXPR
9196 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9198 tree iref = TREE_OPERAND (arg0, 0);
9199 return fold_build2 (MEM_REF, type,
9200 TREE_OPERAND (iref, 0),
9201 int_const_binop (PLUS_EXPR, arg1,
9202 TREE_OPERAND (iref, 1)));
9205 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9206 if (TREE_CODE (arg0) == ADDR_EXPR
9207 && handled_component_p (TREE_OPERAND (arg0, 0)))
9210 HOST_WIDE_INT coffset;
9211 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9215 return fold_build2 (MEM_REF, type,
9216 build_fold_addr_expr (base),
9217 int_const_binop (PLUS_EXPR, arg1,
9218 size_int (coffset)));
9223 case POINTER_PLUS_EXPR:
9224 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9225 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9226 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9227 return fold_convert_loc (loc, type,
9228 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9229 fold_convert_loc (loc, sizetype,
9231 fold_convert_loc (loc, sizetype,
9237 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9239 /* X + (X / CST) * -CST is X % CST. */
9240 if (TREE_CODE (arg1) == MULT_EXPR
9241 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9242 && operand_equal_p (arg0,
9243 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9245 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9246 tree cst1 = TREE_OPERAND (arg1, 1);
9247 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9249 if (sum && integer_zerop (sum))
9250 return fold_convert_loc (loc, type,
9251 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9252 TREE_TYPE (arg0), arg0,
9257 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9258 one. Make sure the type is not saturating and has the signedness of
9259 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9260 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9261 if ((TREE_CODE (arg0) == MULT_EXPR
9262 || TREE_CODE (arg1) == MULT_EXPR)
9263 && !TYPE_SATURATING (type)
9264 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9265 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9266 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9268 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9273 if (! FLOAT_TYPE_P (type))
9275 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9276 (plus (plus (mult) (mult)) (foo)) so that we can
9277 take advantage of the factoring cases below. */
9278 if (ANY_INTEGRAL_TYPE_P (type)
9279 && TYPE_OVERFLOW_WRAPS (type)
9280 && (((TREE_CODE (arg0) == PLUS_EXPR
9281 || TREE_CODE (arg0) == MINUS_EXPR)
9282 && TREE_CODE (arg1) == MULT_EXPR)
9283 || ((TREE_CODE (arg1) == PLUS_EXPR
9284 || TREE_CODE (arg1) == MINUS_EXPR)
9285 && TREE_CODE (arg0) == MULT_EXPR)))
9287 tree parg0, parg1, parg, marg;
9288 enum tree_code pcode;
9290 if (TREE_CODE (arg1) == MULT_EXPR)
9291 parg = arg0, marg = arg1;
9293 parg = arg1, marg = arg0;
9294 pcode = TREE_CODE (parg);
9295 parg0 = TREE_OPERAND (parg, 0);
9296 parg1 = TREE_OPERAND (parg, 1);
9300 if (TREE_CODE (parg0) == MULT_EXPR
9301 && TREE_CODE (parg1) != MULT_EXPR)
9302 return fold_build2_loc (loc, pcode, type,
9303 fold_build2_loc (loc, PLUS_EXPR, type,
9304 fold_convert_loc (loc, type,
9306 fold_convert_loc (loc, type,
9308 fold_convert_loc (loc, type, parg1));
9309 if (TREE_CODE (parg0) != MULT_EXPR
9310 && TREE_CODE (parg1) == MULT_EXPR)
9312 fold_build2_loc (loc, PLUS_EXPR, type,
9313 fold_convert_loc (loc, type, parg0),
9314 fold_build2_loc (loc, pcode, type,
9315 fold_convert_loc (loc, type, marg),
9316 fold_convert_loc (loc, type,
9322 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9323 to __complex__ ( x, y ). This is not the same for SNaNs or
9324 if signed zeros are involved. */
9325 if (!HONOR_SNANS (element_mode (arg0))
9326 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9327 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9329 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9330 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9331 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9332 bool arg0rz = false, arg0iz = false;
9333 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9334 || (arg0i && (arg0iz = real_zerop (arg0i))))
9336 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9337 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9338 if (arg0rz && arg1i && real_zerop (arg1i))
9340 tree rp = arg1r ? arg1r
9341 : build1 (REALPART_EXPR, rtype, arg1);
9342 tree ip = arg0i ? arg0i
9343 : build1 (IMAGPART_EXPR, rtype, arg0);
9344 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9346 else if (arg0iz && arg1r && real_zerop (arg1r))
9348 tree rp = arg0r ? arg0r
9349 : build1 (REALPART_EXPR, rtype, arg0);
9350 tree ip = arg1i ? arg1i
9351 : build1 (IMAGPART_EXPR, rtype, arg1);
9352 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9357 if (flag_unsafe_math_optimizations
9358 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9359 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9360 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9363 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9364 We associate floats only if the user has specified
9365 -fassociative-math. */
9366 if (flag_associative_math
9367 && TREE_CODE (arg1) == PLUS_EXPR
9368 && TREE_CODE (arg0) != MULT_EXPR)
9370 tree tree10 = TREE_OPERAND (arg1, 0);
9371 tree tree11 = TREE_OPERAND (arg1, 1);
9372 if (TREE_CODE (tree11) == MULT_EXPR
9373 && TREE_CODE (tree10) == MULT_EXPR)
9376 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9377 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9380 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9381 We associate floats only if the user has specified
9382 -fassociative-math. */
9383 if (flag_associative_math
9384 && TREE_CODE (arg0) == PLUS_EXPR
9385 && TREE_CODE (arg1) != MULT_EXPR)
9387 tree tree00 = TREE_OPERAND (arg0, 0);
9388 tree tree01 = TREE_OPERAND (arg0, 1);
9389 if (TREE_CODE (tree01) == MULT_EXPR
9390 && TREE_CODE (tree00) == MULT_EXPR)
9393 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9394 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9400 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9401 is a rotate of A by C1 bits. */
9402 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9403 is a rotate of A by B bits. */
9405 enum tree_code code0, code1;
9407 code0 = TREE_CODE (arg0);
9408 code1 = TREE_CODE (arg1);
9409 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9410 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9411 && operand_equal_p (TREE_OPERAND (arg0, 0),
9412 TREE_OPERAND (arg1, 0), 0)
9413 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9414 TYPE_UNSIGNED (rtype))
9415 /* Only create rotates in complete modes. Other cases are not
9416 expanded properly. */
9417 && (element_precision (rtype)
9418 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9420 tree tree01, tree11;
9421 enum tree_code code01, code11;
9423 tree01 = TREE_OPERAND (arg0, 1);
9424 tree11 = TREE_OPERAND (arg1, 1);
9425 STRIP_NOPS (tree01);
9426 STRIP_NOPS (tree11);
9427 code01 = TREE_CODE (tree01);
9428 code11 = TREE_CODE (tree11);
9429 if (code01 == INTEGER_CST
9430 && code11 == INTEGER_CST
9431 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9432 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9434 tem = build2_loc (loc, LROTATE_EXPR,
9435 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9436 TREE_OPERAND (arg0, 0),
9437 code0 == LSHIFT_EXPR
9438 ? TREE_OPERAND (arg0, 1)
9439 : TREE_OPERAND (arg1, 1));
9440 return fold_convert_loc (loc, type, tem);
9442 else if (code11 == MINUS_EXPR)
9444 tree tree110, tree111;
9445 tree110 = TREE_OPERAND (tree11, 0);
9446 tree111 = TREE_OPERAND (tree11, 1);
9447 STRIP_NOPS (tree110);
9448 STRIP_NOPS (tree111);
9449 if (TREE_CODE (tree110) == INTEGER_CST
9450 && 0 == compare_tree_int (tree110,
9452 (TREE_TYPE (TREE_OPERAND
9454 && operand_equal_p (tree01, tree111, 0))
9456 fold_convert_loc (loc, type,
9457 build2 ((code0 == LSHIFT_EXPR
9460 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9461 TREE_OPERAND (arg0, 0),
9462 TREE_OPERAND (arg0, 1)));
9464 else if (code01 == MINUS_EXPR)
9466 tree tree010, tree011;
9467 tree010 = TREE_OPERAND (tree01, 0);
9468 tree011 = TREE_OPERAND (tree01, 1);
9469 STRIP_NOPS (tree010);
9470 STRIP_NOPS (tree011);
9471 if (TREE_CODE (tree010) == INTEGER_CST
9472 && 0 == compare_tree_int (tree010,
9474 (TREE_TYPE (TREE_OPERAND
9476 && operand_equal_p (tree11, tree011, 0))
9477 return fold_convert_loc
9479 build2 ((code0 != LSHIFT_EXPR
9482 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9483 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9489 /* In most languages, can't associate operations on floats through
9490 parentheses. Rather than remember where the parentheses were, we
9491 don't associate floats at all, unless the user has specified
9493 And, we need to make sure type is not saturating. */
9495 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9496 && !TYPE_SATURATING (type))
9498 tree var0, con0, lit0, minus_lit0;
9499 tree var1, con1, lit1, minus_lit1;
9503 /* Split both trees into variables, constants, and literals. Then
9504 associate each group together, the constants with literals,
9505 then the result with variables. This increases the chances of
9506 literals being recombined later and of generating relocatable
9507 expressions for the sum of a constant and literal. */
9508 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9509 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9510 code == MINUS_EXPR);
9512 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9513 if (code == MINUS_EXPR)
9516 /* With undefined overflow prefer doing association in a type
9517 which wraps on overflow, if that is one of the operand types. */
9518 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9519 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9521 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9522 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9523 atype = TREE_TYPE (arg0);
9524 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9525 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9526 atype = TREE_TYPE (arg1);
9527 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9530 /* With undefined overflow we can only associate constants with one
9531 variable, and constants whose association doesn't overflow. */
9532 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9533 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9539 bool one_neg = false;
9541 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9543 tmp0 = TREE_OPERAND (tmp0, 0);
9546 if (CONVERT_EXPR_P (tmp0)
9547 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9548 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9549 <= TYPE_PRECISION (atype)))
9550 tmp0 = TREE_OPERAND (tmp0, 0);
9551 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9553 tmp1 = TREE_OPERAND (tmp1, 0);
9556 if (CONVERT_EXPR_P (tmp1)
9557 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9558 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9559 <= TYPE_PRECISION (atype)))
9560 tmp1 = TREE_OPERAND (tmp1, 0);
9561 /* The only case we can still associate with two variables
9562 is if they cancel out. */
9564 || !operand_equal_p (tmp0, tmp1, 0))
9569 /* Only do something if we found more than two objects. Otherwise,
9570 nothing has changed and we risk infinite recursion. */
9572 && (2 < ((var0 != 0) + (var1 != 0)
9573 + (con0 != 0) + (con1 != 0)
9574 + (lit0 != 0) + (lit1 != 0)
9575 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9577 bool any_overflows = false;
9578 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9579 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9580 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9581 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9582 var0 = associate_trees (loc, var0, var1, code, atype);
9583 con0 = associate_trees (loc, con0, con1, code, atype);
9584 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9585 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9588 /* Preserve the MINUS_EXPR if the negative part of the literal is
9589 greater than the positive part. Otherwise, the multiplicative
9590 folding code (i.e extract_muldiv) may be fooled in case
9591 unsigned constants are subtracted, like in the following
9592 example: ((X*2 + 4) - 8U)/2. */
9593 if (minus_lit0 && lit0)
9595 if (TREE_CODE (lit0) == INTEGER_CST
9596 && TREE_CODE (minus_lit0) == INTEGER_CST
9597 && tree_int_cst_lt (lit0, minus_lit0))
9599 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9605 lit0 = associate_trees (loc, lit0, minus_lit0,
9611 /* Don't introduce overflows through reassociation. */
9613 && ((lit0 && TREE_OVERFLOW_P (lit0))
9614 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9621 fold_convert_loc (loc, type,
9622 associate_trees (loc, var0, minus_lit0,
9623 MINUS_EXPR, atype));
9626 con0 = associate_trees (loc, con0, minus_lit0,
9629 fold_convert_loc (loc, type,
9630 associate_trees (loc, var0, con0,
9635 con0 = associate_trees (loc, con0, lit0, code, atype);
9637 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9645 /* Pointer simplifications for subtraction, simple reassociations. */
9646 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9648 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9649 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9650 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9652 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9653 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9654 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9655 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9656 return fold_build2_loc (loc, PLUS_EXPR, type,
9657 fold_build2_loc (loc, MINUS_EXPR, type,
9659 fold_build2_loc (loc, MINUS_EXPR, type,
9662 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9663 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9665 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9666 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9667 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9668 fold_convert_loc (loc, type, arg1));
9670 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9672 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9674 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9676 tree arg10 = fold_convert_loc (loc, type,
9677 TREE_OPERAND (arg1, 0));
9678 tree arg11 = fold_convert_loc (loc, type,
9679 TREE_OPERAND (arg1, 1));
9680 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
9681 fold_convert_loc (loc, type, arg0),
9684 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
9687 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9688 if (TREE_CODE (arg0) == NEGATE_EXPR
9689 && negate_expr_p (arg1)
9690 && reorder_operands_p (arg0, arg1))
9691 return fold_build2_loc (loc, MINUS_EXPR, type,
9692 fold_convert_loc (loc, type,
9693 negate_expr (arg1)),
9694 fold_convert_loc (loc, type,
9695 TREE_OPERAND (arg0, 0)));
9697 if (! FLOAT_TYPE_P (type))
9699 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9700 any power of 2 minus 1. */
9701 if (TREE_CODE (arg0) == BIT_AND_EXPR
9702 && TREE_CODE (arg1) == BIT_AND_EXPR
9703 && operand_equal_p (TREE_OPERAND (arg0, 0),
9704 TREE_OPERAND (arg1, 0), 0))
9706 tree mask0 = TREE_OPERAND (arg0, 1);
9707 tree mask1 = TREE_OPERAND (arg1, 1);
9708 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
9710 if (operand_equal_p (tem, mask1, 0))
9712 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
9713 TREE_OPERAND (arg0, 0), mask1);
9714 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
9719 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9720 __complex__ ( x, -y ). This is not the same for SNaNs or if
9721 signed zeros are involved. */
9722 if (!HONOR_SNANS (element_mode (arg0))
9723 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9724 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9726 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9727 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9728 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9729 bool arg0rz = false, arg0iz = false;
9730 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9731 || (arg0i && (arg0iz = real_zerop (arg0i))))
9733 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9734 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9735 if (arg0rz && arg1i && real_zerop (arg1i))
9737 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9739 : build1 (REALPART_EXPR, rtype, arg1));
9740 tree ip = arg0i ? arg0i
9741 : build1 (IMAGPART_EXPR, rtype, arg0);
9742 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9744 else if (arg0iz && arg1r && real_zerop (arg1r))
9746 tree rp = arg0r ? arg0r
9747 : build1 (REALPART_EXPR, rtype, arg0);
9748 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9750 : build1 (IMAGPART_EXPR, rtype, arg1));
9751 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9756 /* A - B -> A + (-B) if B is easily negatable. */
9757 if (negate_expr_p (arg1)
9758 && !TYPE_OVERFLOW_SANITIZED (type)
9759 && ((FLOAT_TYPE_P (type)
9760 /* Avoid this transformation if B is a positive REAL_CST. */
9761 && (TREE_CODE (arg1) != REAL_CST
9762 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9763 || INTEGRAL_TYPE_P (type)))
9764 return fold_build2_loc (loc, PLUS_EXPR, type,
9765 fold_convert_loc (loc, type, arg0),
9766 fold_convert_loc (loc, type,
9767 negate_expr (arg1)));
9769 /* Fold &a[i] - &a[j] to i-j. */
9770 if (TREE_CODE (arg0) == ADDR_EXPR
9771 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9772 && TREE_CODE (arg1) == ADDR_EXPR
9773 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9775 tree tem = fold_addr_of_array_ref_difference (loc, type,
9776 TREE_OPERAND (arg0, 0),
9777 TREE_OPERAND (arg1, 0));
9782 if (FLOAT_TYPE_P (type)
9783 && flag_unsafe_math_optimizations
9784 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9785 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9786 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9789 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9790 one. Make sure the type is not saturating and has the signedness of
9791 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9792 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9793 if ((TREE_CODE (arg0) == MULT_EXPR
9794 || TREE_CODE (arg1) == MULT_EXPR)
9795 && !TYPE_SATURATING (type)
9796 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9797 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9798 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9800 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9808 if (! FLOAT_TYPE_P (type))
9810 /* Transform x * -C into -x * C if x is easily negatable. */
9811 if (TREE_CODE (arg1) == INTEGER_CST
9812 && tree_int_cst_sgn (arg1) == -1
9813 && negate_expr_p (arg0)
9814 && (tem = negate_expr (arg1)) != arg1
9815 && !TREE_OVERFLOW (tem))
9816 return fold_build2_loc (loc, MULT_EXPR, type,
9817 fold_convert_loc (loc, type,
9818 negate_expr (arg0)),
9821 /* (A + A) * C -> A * 2 * C */
9822 if (TREE_CODE (arg0) == PLUS_EXPR
9823 && TREE_CODE (arg1) == INTEGER_CST
9824 && operand_equal_p (TREE_OPERAND (arg0, 0),
9825 TREE_OPERAND (arg0, 1), 0))
9826 return fold_build2_loc (loc, MULT_EXPR, type,
9827 omit_one_operand_loc (loc, type,
9828 TREE_OPERAND (arg0, 0),
9829 TREE_OPERAND (arg0, 1)),
9830 fold_build2_loc (loc, MULT_EXPR, type,
9831 build_int_cst (type, 2) , arg1));
9833 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9834 sign-changing only. */
9835 if (TREE_CODE (arg1) == INTEGER_CST
9836 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9837 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9838 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9840 strict_overflow_p = false;
9841 if (TREE_CODE (arg1) == INTEGER_CST
9842 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9843 &strict_overflow_p)))
9845 if (strict_overflow_p)
9846 fold_overflow_warning (("assuming signed overflow does not "
9847 "occur when simplifying "
9849 WARN_STRICT_OVERFLOW_MISC);
9850 return fold_convert_loc (loc, type, tem);
9853 /* Optimize z * conj(z) for integer complex numbers. */
9854 if (TREE_CODE (arg0) == CONJ_EXPR
9855 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9856 return fold_mult_zconjz (loc, type, arg1);
9857 if (TREE_CODE (arg1) == CONJ_EXPR
9858 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9859 return fold_mult_zconjz (loc, type, arg0);
9863 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9864 if (operand_equal_p (arg0, arg1, 0))
9866 tree tem = fold_strip_sign_ops (arg0);
9867 if (tem != NULL_TREE)
9869 tem = fold_convert_loc (loc, type, tem);
9870 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
9874 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9875 This is not the same for NaNs or if signed zeros are
9877 if (!HONOR_NANS (arg0)
9878 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9879 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9880 && TREE_CODE (arg1) == COMPLEX_CST
9881 && real_zerop (TREE_REALPART (arg1)))
9883 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9884 if (real_onep (TREE_IMAGPART (arg1)))
9886 fold_build2_loc (loc, COMPLEX_EXPR, type,
9887 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9889 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9890 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9892 fold_build2_loc (loc, COMPLEX_EXPR, type,
9893 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9894 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9898 /* Optimize z * conj(z) for floating point complex numbers.
9899 Guarded by flag_unsafe_math_optimizations as non-finite
9900 imaginary components don't produce scalar results. */
9901 if (flag_unsafe_math_optimizations
9902 && TREE_CODE (arg0) == CONJ_EXPR
9903 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9904 return fold_mult_zconjz (loc, type, arg1);
9905 if (flag_unsafe_math_optimizations
9906 && TREE_CODE (arg1) == CONJ_EXPR
9907 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9908 return fold_mult_zconjz (loc, type, arg0);
9910 if (flag_unsafe_math_optimizations)
9913 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9916 && operand_equal_p (arg0, arg1, 0))
9918 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9922 tree arg = build_real (type, dconst2);
9923 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9931 /* Canonicalize (X & C1) | C2. */
9932 if (TREE_CODE (arg0) == BIT_AND_EXPR
9933 && TREE_CODE (arg1) == INTEGER_CST
9934 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9936 int width = TYPE_PRECISION (type), w;
9937 wide_int c1 = TREE_OPERAND (arg0, 1);
9940 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9941 if ((c1 & c2) == c1)
9942 return omit_one_operand_loc (loc, type, arg1,
9943 TREE_OPERAND (arg0, 0));
9945 wide_int msk = wi::mask (width, false,
9946 TYPE_PRECISION (TREE_TYPE (arg1)));
9948 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9949 if (msk.and_not (c1 | c2) == 0)
9950 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9951 TREE_OPERAND (arg0, 0), arg1);
9953 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9954 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9955 mode which allows further optimizations. */
9958 wide_int c3 = c1.and_not (c2);
9959 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9961 wide_int mask = wi::mask (w, false,
9962 TYPE_PRECISION (type));
9963 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9971 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9972 fold_build2_loc (loc, BIT_AND_EXPR, type,
9973 TREE_OPERAND (arg0, 0),
9974 wide_int_to_tree (type,
9979 /* (X & ~Y) | (~X & Y) is X ^ Y */
9980 if (TREE_CODE (arg0) == BIT_AND_EXPR
9981 && TREE_CODE (arg1) == BIT_AND_EXPR)
9983 tree a0, a1, l0, l1, n0, n1;
9985 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9986 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9988 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9989 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9991 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
9992 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
9994 if ((operand_equal_p (n0, a0, 0)
9995 && operand_equal_p (n1, a1, 0))
9996 || (operand_equal_p (n0, a1, 0)
9997 && operand_equal_p (n1, a0, 0)))
9998 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10001 /* See if this can be simplified into a rotate first. If that
10002 is unsuccessful continue in the association code. */
10006 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10007 if (TREE_CODE (arg0) == BIT_AND_EXPR
10008 && INTEGRAL_TYPE_P (type)
10009 && integer_onep (TREE_OPERAND (arg0, 1))
10010 && integer_onep (arg1))
10011 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10012 build_zero_cst (TREE_TYPE (arg0)));
10014 /* See if this can be simplified into a rotate first. If that
10015 is unsuccessful continue in the association code. */
10019 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10020 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10021 && INTEGRAL_TYPE_P (type)
10022 && integer_onep (TREE_OPERAND (arg0, 1))
10023 && integer_onep (arg1))
10026 tem = TREE_OPERAND (arg0, 0);
10027 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10028 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10030 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10031 build_zero_cst (TREE_TYPE (tem)));
10033 /* Fold ~X & 1 as (X & 1) == 0. */
10034 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10035 && INTEGRAL_TYPE_P (type)
10036 && integer_onep (arg1))
10039 tem = TREE_OPERAND (arg0, 0);
10040 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10041 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10043 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10044 build_zero_cst (TREE_TYPE (tem)));
10046 /* Fold !X & 1 as X == 0. */
10047 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10048 && integer_onep (arg1))
10050 tem = TREE_OPERAND (arg0, 0);
10051 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10052 build_zero_cst (TREE_TYPE (tem)));
10055 /* Fold (X ^ Y) & Y as ~X & Y. */
10056 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10057 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10059 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10060 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10061 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10062 fold_convert_loc (loc, type, arg1));
10064 /* Fold (X ^ Y) & X as ~Y & X. */
10065 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10066 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10067 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10069 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10070 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10071 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10072 fold_convert_loc (loc, type, arg1));
10074 /* Fold X & (X ^ Y) as X & ~Y. */
10075 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10076 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10078 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10079 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10080 fold_convert_loc (loc, type, arg0),
10081 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10083 /* Fold X & (Y ^ X) as ~Y & X. */
10084 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10085 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10086 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10088 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10089 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10090 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10091 fold_convert_loc (loc, type, arg0));
10094 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10095 multiple of 1 << CST. */
10096 if (TREE_CODE (arg1) == INTEGER_CST)
10098 wide_int cst1 = arg1;
10099 wide_int ncst1 = -cst1;
10100 if ((cst1 & ncst1) == ncst1
10101 && multiple_of_p (type, arg0,
10102 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10103 return fold_convert_loc (loc, type, arg0);
10106 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10108 if (TREE_CODE (arg1) == INTEGER_CST
10109 && TREE_CODE (arg0) == MULT_EXPR
10110 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10112 wide_int warg1 = arg1;
10113 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10116 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10118 else if (masked != warg1)
10120 /* Avoid the transform if arg1 is a mask of some
10121 mode which allows further optimizations. */
10122 int pop = wi::popcount (warg1);
10123 if (!(pop >= BITS_PER_UNIT
10124 && exact_log2 (pop) != -1
10125 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10126 return fold_build2_loc (loc, code, type, op0,
10127 wide_int_to_tree (type, masked));
10131 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10132 ((A & N) + B) & M -> (A + B) & M
10133 Similarly if (N & M) == 0,
10134 ((A | N) + B) & M -> (A + B) & M
10135 and for - instead of + (or unary - instead of +)
10136 and/or ^ instead of |.
10137 If B is constant and (B & M) == 0, fold into A & M. */
10138 if (TREE_CODE (arg1) == INTEGER_CST)
10140 wide_int cst1 = arg1;
10141 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10142 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10143 && (TREE_CODE (arg0) == PLUS_EXPR
10144 || TREE_CODE (arg0) == MINUS_EXPR
10145 || TREE_CODE (arg0) == NEGATE_EXPR)
10146 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10147 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10153 /* Now we know that arg0 is (C + D) or (C - D) or
10154 -C and arg1 (M) is == (1LL << cst) - 1.
10155 Store C into PMOP[0] and D into PMOP[1]. */
10156 pmop[0] = TREE_OPERAND (arg0, 0);
10158 if (TREE_CODE (arg0) != NEGATE_EXPR)
10160 pmop[1] = TREE_OPERAND (arg0, 1);
10164 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10167 for (; which >= 0; which--)
10168 switch (TREE_CODE (pmop[which]))
10173 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10176 cst0 = TREE_OPERAND (pmop[which], 1);
10178 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10183 else if (cst0 != 0)
10185 /* If C or D is of the form (A & N) where
10186 (N & M) == M, or of the form (A | N) or
10187 (A ^ N) where (N & M) == 0, replace it with A. */
10188 pmop[which] = TREE_OPERAND (pmop[which], 0);
10191 /* If C or D is a N where (N & M) == 0, it can be
10192 omitted (assumed 0). */
10193 if ((TREE_CODE (arg0) == PLUS_EXPR
10194 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10195 && (cst1 & pmop[which]) == 0)
10196 pmop[which] = NULL;
10202 /* Only build anything new if we optimized one or both arguments
10204 if (pmop[0] != TREE_OPERAND (arg0, 0)
10205 || (TREE_CODE (arg0) != NEGATE_EXPR
10206 && pmop[1] != TREE_OPERAND (arg0, 1)))
10208 tree utype = TREE_TYPE (arg0);
10209 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10211 /* Perform the operations in a type that has defined
10212 overflow behavior. */
10213 utype = unsigned_type_for (TREE_TYPE (arg0));
10214 if (pmop[0] != NULL)
10215 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10216 if (pmop[1] != NULL)
10217 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10220 if (TREE_CODE (arg0) == NEGATE_EXPR)
10221 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10222 else if (TREE_CODE (arg0) == PLUS_EXPR)
10224 if (pmop[0] != NULL && pmop[1] != NULL)
10225 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10227 else if (pmop[0] != NULL)
10229 else if (pmop[1] != NULL)
10232 return build_int_cst (type, 0);
10234 else if (pmop[0] == NULL)
10235 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10237 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10239 /* TEM is now the new binary +, - or unary - replacement. */
10240 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10241 fold_convert_loc (loc, utype, arg1));
10242 return fold_convert_loc (loc, type, tem);
10247 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10248 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10249 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10251 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10253 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10256 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10262 /* Don't touch a floating-point divide by zero unless the mode
10263 of the constant can represent infinity. */
10264 if (TREE_CODE (arg1) == REAL_CST
10265 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10266 && real_zerop (arg1))
10269 /* (-A) / (-B) -> A / B */
10270 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10271 return fold_build2_loc (loc, RDIV_EXPR, type,
10272 TREE_OPERAND (arg0, 0),
10273 negate_expr (arg1));
10274 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10275 return fold_build2_loc (loc, RDIV_EXPR, type,
10276 negate_expr (arg0),
10277 TREE_OPERAND (arg1, 0));
10279 /* Convert A/B/C to A/(B*C). */
10280 if (flag_reciprocal_math
10281 && TREE_CODE (arg0) == RDIV_EXPR)
10282 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10283 fold_build2_loc (loc, MULT_EXPR, type,
10284 TREE_OPERAND (arg0, 1), arg1));
10286 /* Convert A/(B/C) to (A/B)*C. */
10287 if (flag_reciprocal_math
10288 && TREE_CODE (arg1) == RDIV_EXPR)
10289 return fold_build2_loc (loc, MULT_EXPR, type,
10290 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10291 TREE_OPERAND (arg1, 0)),
10292 TREE_OPERAND (arg1, 1));
10294 /* Convert C1/(X*C2) into (C1/C2)/X. */
10295 if (flag_reciprocal_math
10296 && TREE_CODE (arg1) == MULT_EXPR
10297 && TREE_CODE (arg0) == REAL_CST
10298 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10300 tree tem = const_binop (RDIV_EXPR, arg0,
10301 TREE_OPERAND (arg1, 1));
10303 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10304 TREE_OPERAND (arg1, 0));
10309 case TRUNC_DIV_EXPR:
10310 /* Optimize (X & (-A)) / A where A is a power of 2,
10312 if (TREE_CODE (arg0) == BIT_AND_EXPR
10313 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10314 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10316 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10317 arg1, TREE_OPERAND (arg0, 1));
10318 if (sum && integer_zerop (sum)) {
10319 tree pow2 = build_int_cst (integer_type_node,
10320 wi::exact_log2 (arg1));
10321 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10322 TREE_OPERAND (arg0, 0), pow2);
10328 case FLOOR_DIV_EXPR:
10329 /* Simplify A / (B << N) where A and B are positive and B is
10330 a power of 2, to A >> (N + log2(B)). */
10331 strict_overflow_p = false;
10332 if (TREE_CODE (arg1) == LSHIFT_EXPR
10333 && (TYPE_UNSIGNED (type)
10334 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10336 tree sval = TREE_OPERAND (arg1, 0);
10337 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10339 tree sh_cnt = TREE_OPERAND (arg1, 1);
10340 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10341 wi::exact_log2 (sval));
10343 if (strict_overflow_p)
10344 fold_overflow_warning (("assuming signed overflow does not "
10345 "occur when simplifying A / (B << N)"),
10346 WARN_STRICT_OVERFLOW_MISC);
10348 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10350 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10351 fold_convert_loc (loc, type, arg0), sh_cnt);
10357 case ROUND_DIV_EXPR:
10358 case CEIL_DIV_EXPR:
10359 case EXACT_DIV_EXPR:
10360 if (integer_zerop (arg1))
10363 /* Convert -A / -B to A / B when the type is signed and overflow is
10365 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10366 && TREE_CODE (arg0) == NEGATE_EXPR
10367 && negate_expr_p (arg1))
10369 if (INTEGRAL_TYPE_P (type))
10370 fold_overflow_warning (("assuming signed overflow does not occur "
10371 "when distributing negation across "
10373 WARN_STRICT_OVERFLOW_MISC);
10374 return fold_build2_loc (loc, code, type,
10375 fold_convert_loc (loc, type,
10376 TREE_OPERAND (arg0, 0)),
10377 fold_convert_loc (loc, type,
10378 negate_expr (arg1)));
10380 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10381 && TREE_CODE (arg1) == NEGATE_EXPR
10382 && negate_expr_p (arg0))
10384 if (INTEGRAL_TYPE_P (type))
10385 fold_overflow_warning (("assuming signed overflow does not occur "
10386 "when distributing negation across "
10388 WARN_STRICT_OVERFLOW_MISC);
10389 return fold_build2_loc (loc, code, type,
10390 fold_convert_loc (loc, type,
10391 negate_expr (arg0)),
10392 fold_convert_loc (loc, type,
10393 TREE_OPERAND (arg1, 0)));
10396 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10397 operation, EXACT_DIV_EXPR.
10399 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10400 At one time others generated faster code, it's not clear if they do
10401 after the last round to changes to the DIV code in expmed.c. */
10402 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10403 && multiple_of_p (type, arg0, arg1))
10404 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10405 fold_convert (type, arg0),
10406 fold_convert (type, arg1));
10408 strict_overflow_p = false;
10409 if (TREE_CODE (arg1) == INTEGER_CST
10410 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10411 &strict_overflow_p)))
10413 if (strict_overflow_p)
10414 fold_overflow_warning (("assuming signed overflow does not occur "
10415 "when simplifying division"),
10416 WARN_STRICT_OVERFLOW_MISC);
10417 return fold_convert_loc (loc, type, tem);
10422 case CEIL_MOD_EXPR:
10423 case FLOOR_MOD_EXPR:
10424 case ROUND_MOD_EXPR:
10425 case TRUNC_MOD_EXPR:
10426 strict_overflow_p = false;
10427 if (TREE_CODE (arg1) == INTEGER_CST
10428 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10429 &strict_overflow_p)))
10431 if (strict_overflow_p)
10432 fold_overflow_warning (("assuming signed overflow does not occur "
10433 "when simplifying modulus"),
10434 WARN_STRICT_OVERFLOW_MISC);
10435 return fold_convert_loc (loc, type, tem);
10444 /* Since negative shift count is not well-defined,
10445 don't try to compute it in the compiler. */
10446 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10449 prec = element_precision (type);
10451 /* If we have a rotate of a bit operation with the rotate count and
10452 the second operand of the bit operation both constant,
10453 permute the two operations. */
10454 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10455 && (TREE_CODE (arg0) == BIT_AND_EXPR
10456 || TREE_CODE (arg0) == BIT_IOR_EXPR
10457 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10458 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10459 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10460 fold_build2_loc (loc, code, type,
10461 TREE_OPERAND (arg0, 0), arg1),
10462 fold_build2_loc (loc, code, type,
10463 TREE_OPERAND (arg0, 1), arg1));
10465 /* Two consecutive rotates adding up to some integer
10466 multiple of the precision of the type can be ignored. */
10467 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10468 && TREE_CODE (arg0) == RROTATE_EXPR
10469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10470 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10472 return TREE_OPERAND (arg0, 0);
10477 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
10483 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
10488 case TRUTH_ANDIF_EXPR:
10489 /* Note that the operands of this must be ints
10490 and their values must be 0 or 1.
10491 ("true" is a fixed value perhaps depending on the language.) */
10492 /* If first arg is constant zero, return it. */
10493 if (integer_zerop (arg0))
10494 return fold_convert_loc (loc, type, arg0);
10495 case TRUTH_AND_EXPR:
10496 /* If either arg is constant true, drop it. */
10497 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10498 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10499 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10500 /* Preserve sequence points. */
10501 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10502 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10503 /* If second arg is constant zero, result is zero, but first arg
10504 must be evaluated. */
10505 if (integer_zerop (arg1))
10506 return omit_one_operand_loc (loc, type, arg1, arg0);
10507 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10508 case will be handled here. */
10509 if (integer_zerop (arg0))
10510 return omit_one_operand_loc (loc, type, arg0, arg1);
10512 /* !X && X is always false. */
10513 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10514 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10515 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10516 /* X && !X is always false. */
10517 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10519 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10521 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10522 means A >= Y && A != MAX, but in this case we know that
10525 if (!TREE_SIDE_EFFECTS (arg0)
10526 && !TREE_SIDE_EFFECTS (arg1))
10528 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10529 if (tem && !operand_equal_p (tem, arg0, 0))
10530 return fold_build2_loc (loc, code, type, tem, arg1);
10532 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10533 if (tem && !operand_equal_p (tem, arg1, 0))
10534 return fold_build2_loc (loc, code, type, arg0, tem);
10537 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10543 case TRUTH_ORIF_EXPR:
10544 /* Note that the operands of this must be ints
10545 and their values must be 0 or true.
10546 ("true" is a fixed value perhaps depending on the language.) */
10547 /* If first arg is constant true, return it. */
10548 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10549 return fold_convert_loc (loc, type, arg0);
10550 case TRUTH_OR_EXPR:
10551 /* If either arg is constant zero, drop it. */
10552 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10553 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10554 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10555 /* Preserve sequence points. */
10556 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10558 /* If second arg is constant true, result is true, but we must
10559 evaluate first arg. */
10560 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10561 return omit_one_operand_loc (loc, type, arg1, arg0);
10562 /* Likewise for first arg, but note this only occurs here for
10564 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10565 return omit_one_operand_loc (loc, type, arg0, arg1);
10567 /* !X || X is always true. */
10568 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10569 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10570 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10571 /* X || !X is always true. */
10572 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10573 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10574 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10576 /* (X && !Y) || (!X && Y) is X ^ Y */
10577 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10578 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10580 tree a0, a1, l0, l1, n0, n1;
10582 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10583 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10585 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10586 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10588 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10589 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10591 if ((operand_equal_p (n0, a0, 0)
10592 && operand_equal_p (n1, a1, 0))
10593 || (operand_equal_p (n0, a1, 0)
10594 && operand_equal_p (n1, a0, 0)))
10595 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10598 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10604 case TRUTH_XOR_EXPR:
10605 /* If the second arg is constant zero, drop it. */
10606 if (integer_zerop (arg1))
10607 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10608 /* If the second arg is constant true, this is a logical inversion. */
10609 if (integer_onep (arg1))
10611 tem = invert_truthvalue_loc (loc, arg0);
10612 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10614 /* Identical arguments cancel to zero. */
10615 if (operand_equal_p (arg0, arg1, 0))
10616 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10618 /* !X ^ X is always true. */
10619 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10620 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10621 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10623 /* X ^ !X is always true. */
10624 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10625 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10626 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10635 tem = fold_comparison (loc, code, type, op0, op1);
10636 if (tem != NULL_TREE)
10639 /* bool_var != 1 becomes !bool_var. */
10640 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10641 && code == NE_EXPR)
10642 return fold_convert_loc (loc, type,
10643 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10644 TREE_TYPE (arg0), arg0));
10646 /* bool_var == 0 becomes !bool_var. */
10647 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10648 && code == EQ_EXPR)
10649 return fold_convert_loc (loc, type,
10650 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10651 TREE_TYPE (arg0), arg0));
10653 /* !exp != 0 becomes !exp */
10654 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10655 && code == NE_EXPR)
10656 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10658 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10659 if ((TREE_CODE (arg0) == PLUS_EXPR
10660 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10661 || TREE_CODE (arg0) == MINUS_EXPR)
10662 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10665 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10666 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10668 tree val = TREE_OPERAND (arg0, 1);
10669 return omit_two_operands_loc (loc, type,
10670 fold_build2_loc (loc, code, type,
10672 build_int_cst (TREE_TYPE (val),
10674 TREE_OPERAND (arg0, 0), arg1);
10677 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10678 if (TREE_CODE (arg0) == MINUS_EXPR
10679 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10680 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10683 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10685 return omit_two_operands_loc (loc, type,
10687 ? boolean_true_node : boolean_false_node,
10688 TREE_OPERAND (arg0, 1), arg1);
10691 /* If this is an EQ or NE comparison with zero and ARG0 is
10692 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10693 two operations, but the latter can be done in one less insn
10694 on machines that have only two-operand insns or on which a
10695 constant cannot be the first operand. */
10696 if (TREE_CODE (arg0) == BIT_AND_EXPR
10697 && integer_zerop (arg1))
10699 tree arg00 = TREE_OPERAND (arg0, 0);
10700 tree arg01 = TREE_OPERAND (arg0, 1);
10701 if (TREE_CODE (arg00) == LSHIFT_EXPR
10702 && integer_onep (TREE_OPERAND (arg00, 0)))
10704 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10705 arg01, TREE_OPERAND (arg00, 1));
10706 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10707 build_int_cst (TREE_TYPE (arg0), 1));
10708 return fold_build2_loc (loc, code, type,
10709 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10712 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10713 && integer_onep (TREE_OPERAND (arg01, 0)))
10715 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10716 arg00, TREE_OPERAND (arg01, 1));
10717 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10718 build_int_cst (TREE_TYPE (arg0), 1));
10719 return fold_build2_loc (loc, code, type,
10720 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10725 /* If this is an NE or EQ comparison of zero against the result of a
10726 signed MOD operation whose second operand is a power of 2, make
10727 the MOD operation unsigned since it is simpler and equivalent. */
10728 if (integer_zerop (arg1)
10729 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10730 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10731 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10732 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10733 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10734 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10736 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10737 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10738 fold_convert_loc (loc, newtype,
10739 TREE_OPERAND (arg0, 0)),
10740 fold_convert_loc (loc, newtype,
10741 TREE_OPERAND (arg0, 1)));
10743 return fold_build2_loc (loc, code, type, newmod,
10744 fold_convert_loc (loc, newtype, arg1));
10747 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10748 C1 is a valid shift constant, and C2 is a power of two, i.e.
10750 if (TREE_CODE (arg0) == BIT_AND_EXPR
10751 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10754 && integer_pow2p (TREE_OPERAND (arg0, 1))
10755 && integer_zerop (arg1))
10757 tree itype = TREE_TYPE (arg0);
10758 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10759 prec = TYPE_PRECISION (itype);
10761 /* Check for a valid shift count. */
10762 if (wi::ltu_p (arg001, prec))
10764 tree arg01 = TREE_OPERAND (arg0, 1);
10765 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10766 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10767 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10768 can be rewritten as (X & (C2 << C1)) != 0. */
10769 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10771 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10772 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10773 return fold_build2_loc (loc, code, type, tem,
10774 fold_convert_loc (loc, itype, arg1));
10776 /* Otherwise, for signed (arithmetic) shifts,
10777 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10778 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10779 else if (!TYPE_UNSIGNED (itype))
10780 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10781 arg000, build_int_cst (itype, 0));
10782 /* Otherwise, for unsigned (logical) shifts,
10783 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10784 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10786 return omit_one_operand_loc (loc, type,
10787 code == EQ_EXPR ? integer_one_node
10788 : integer_zero_node,
10793 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10794 Similarly for NE_EXPR. */
10795 if (TREE_CODE (arg0) == BIT_AND_EXPR
10796 && TREE_CODE (arg1) == INTEGER_CST
10797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10799 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10800 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10801 TREE_OPERAND (arg0, 1));
10803 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10804 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10806 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10807 if (integer_nonzerop (dandnotc))
10808 return omit_one_operand_loc (loc, type, rslt, arg0);
10811 /* If this is a comparison of a field, we may be able to simplify it. */
10812 if ((TREE_CODE (arg0) == COMPONENT_REF
10813 || TREE_CODE (arg0) == BIT_FIELD_REF)
10814 /* Handle the constant case even without -O
10815 to make sure the warnings are given. */
10816 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10818 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10823 /* Optimize comparisons of strlen vs zero to a compare of the
10824 first character of the string vs zero. To wit,
10825 strlen(ptr) == 0 => *ptr == 0
10826 strlen(ptr) != 0 => *ptr != 0
10827 Other cases should reduce to one of these two (or a constant)
10828 due to the return value of strlen being unsigned. */
10829 if (TREE_CODE (arg0) == CALL_EXPR
10830 && integer_zerop (arg1))
10832 tree fndecl = get_callee_fndecl (arg0);
10835 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10836 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10837 && call_expr_nargs (arg0) == 1
10838 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10840 tree iref = build_fold_indirect_ref_loc (loc,
10841 CALL_EXPR_ARG (arg0, 0));
10842 return fold_build2_loc (loc, code, type, iref,
10843 build_int_cst (TREE_TYPE (iref), 0));
10847 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10848 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10849 if (TREE_CODE (arg0) == RSHIFT_EXPR
10850 && integer_zerop (arg1)
10851 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10853 tree arg00 = TREE_OPERAND (arg0, 0);
10854 tree arg01 = TREE_OPERAND (arg0, 1);
10855 tree itype = TREE_TYPE (arg00);
10856 if (wi::eq_p (arg01, element_precision (itype) - 1))
10858 if (TYPE_UNSIGNED (itype))
10860 itype = signed_type_for (itype);
10861 arg00 = fold_convert_loc (loc, itype, arg00);
10863 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10864 type, arg00, build_zero_cst (itype));
10868 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10869 (X & C) == 0 when C is a single bit. */
10870 if (TREE_CODE (arg0) == BIT_AND_EXPR
10871 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10872 && integer_zerop (arg1)
10873 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10875 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10876 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10877 TREE_OPERAND (arg0, 1));
10878 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10880 fold_convert_loc (loc, TREE_TYPE (arg0),
10884 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10885 constant C is a power of two, i.e. a single bit. */
10886 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10888 && integer_zerop (arg1)
10889 && integer_pow2p (TREE_OPERAND (arg0, 1))
10890 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10891 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10893 tree arg00 = TREE_OPERAND (arg0, 0);
10894 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10895 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10898 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10899 when C is a power of two, i.e. a single bit. */
10900 if (TREE_CODE (arg0) == BIT_AND_EXPR
10901 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10902 && integer_zerop (arg1)
10903 && integer_pow2p (TREE_OPERAND (arg0, 1))
10904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10905 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10907 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10908 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10909 arg000, TREE_OPERAND (arg0, 1));
10910 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10911 tem, build_int_cst (TREE_TYPE (tem), 0));
10914 if (integer_zerop (arg1)
10915 && tree_expr_nonzero_p (arg0))
10917 tree res = constant_boolean_node (code==NE_EXPR, type);
10918 return omit_one_operand_loc (loc, type, res, arg0);
10921 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10922 if (TREE_CODE (arg0) == BIT_AND_EXPR
10923 && TREE_CODE (arg1) == BIT_AND_EXPR)
10925 tree arg00 = TREE_OPERAND (arg0, 0);
10926 tree arg01 = TREE_OPERAND (arg0, 1);
10927 tree arg10 = TREE_OPERAND (arg1, 0);
10928 tree arg11 = TREE_OPERAND (arg1, 1);
10929 tree itype = TREE_TYPE (arg0);
10931 if (operand_equal_p (arg01, arg11, 0))
10932 return fold_build2_loc (loc, code, type,
10933 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10934 fold_build2_loc (loc,
10935 BIT_XOR_EXPR, itype,
10938 build_zero_cst (itype));
10940 if (operand_equal_p (arg01, arg10, 0))
10941 return fold_build2_loc (loc, code, type,
10942 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10943 fold_build2_loc (loc,
10944 BIT_XOR_EXPR, itype,
10947 build_zero_cst (itype));
10949 if (operand_equal_p (arg00, arg11, 0))
10950 return fold_build2_loc (loc, code, type,
10951 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10952 fold_build2_loc (loc,
10953 BIT_XOR_EXPR, itype,
10956 build_zero_cst (itype));
10958 if (operand_equal_p (arg00, arg10, 0))
10959 return fold_build2_loc (loc, code, type,
10960 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10961 fold_build2_loc (loc,
10962 BIT_XOR_EXPR, itype,
10965 build_zero_cst (itype));
10968 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10969 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10971 tree arg00 = TREE_OPERAND (arg0, 0);
10972 tree arg01 = TREE_OPERAND (arg0, 1);
10973 tree arg10 = TREE_OPERAND (arg1, 0);
10974 tree arg11 = TREE_OPERAND (arg1, 1);
10975 tree itype = TREE_TYPE (arg0);
10977 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10978 operand_equal_p guarantees no side-effects so we don't need
10979 to use omit_one_operand on Z. */
10980 if (operand_equal_p (arg01, arg11, 0))
10981 return fold_build2_loc (loc, code, type, arg00,
10982 fold_convert_loc (loc, TREE_TYPE (arg00),
10984 if (operand_equal_p (arg01, arg10, 0))
10985 return fold_build2_loc (loc, code, type, arg00,
10986 fold_convert_loc (loc, TREE_TYPE (arg00),
10988 if (operand_equal_p (arg00, arg11, 0))
10989 return fold_build2_loc (loc, code, type, arg01,
10990 fold_convert_loc (loc, TREE_TYPE (arg01),
10992 if (operand_equal_p (arg00, arg10, 0))
10993 return fold_build2_loc (loc, code, type, arg01,
10994 fold_convert_loc (loc, TREE_TYPE (arg01),
10997 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10998 if (TREE_CODE (arg01) == INTEGER_CST
10999 && TREE_CODE (arg11) == INTEGER_CST)
11001 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11002 fold_convert_loc (loc, itype, arg11));
11003 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11004 return fold_build2_loc (loc, code, type, tem,
11005 fold_convert_loc (loc, itype, arg10));
11009 /* Attempt to simplify equality/inequality comparisons of complex
11010 values. Only lower the comparison if the result is known or
11011 can be simplified to a single scalar comparison. */
11012 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11013 || TREE_CODE (arg0) == COMPLEX_CST)
11014 && (TREE_CODE (arg1) == COMPLEX_EXPR
11015 || TREE_CODE (arg1) == COMPLEX_CST))
11017 tree real0, imag0, real1, imag1;
11020 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11022 real0 = TREE_OPERAND (arg0, 0);
11023 imag0 = TREE_OPERAND (arg0, 1);
11027 real0 = TREE_REALPART (arg0);
11028 imag0 = TREE_IMAGPART (arg0);
11031 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11033 real1 = TREE_OPERAND (arg1, 0);
11034 imag1 = TREE_OPERAND (arg1, 1);
11038 real1 = TREE_REALPART (arg1);
11039 imag1 = TREE_IMAGPART (arg1);
11042 rcond = fold_binary_loc (loc, code, type, real0, real1);
11043 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11045 if (integer_zerop (rcond))
11047 if (code == EQ_EXPR)
11048 return omit_two_operands_loc (loc, type, boolean_false_node,
11050 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11054 if (code == NE_EXPR)
11055 return omit_two_operands_loc (loc, type, boolean_true_node,
11057 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11061 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11062 if (icond && TREE_CODE (icond) == INTEGER_CST)
11064 if (integer_zerop (icond))
11066 if (code == EQ_EXPR)
11067 return omit_two_operands_loc (loc, type, boolean_false_node,
11069 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11073 if (code == NE_EXPR)
11074 return omit_two_operands_loc (loc, type, boolean_true_node,
11076 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11087 tem = fold_comparison (loc, code, type, op0, op1);
11088 if (tem != NULL_TREE)
11091 /* Transform comparisons of the form X +- C CMP X. */
11092 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11093 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11094 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11095 && !HONOR_SNANS (arg0))
11096 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11097 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11099 tree arg01 = TREE_OPERAND (arg0, 1);
11100 enum tree_code code0 = TREE_CODE (arg0);
11103 if (TREE_CODE (arg01) == REAL_CST)
11104 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11106 is_positive = tree_int_cst_sgn (arg01);
11108 /* (X - c) > X becomes false. */
11109 if (code == GT_EXPR
11110 && ((code0 == MINUS_EXPR && is_positive >= 0)
11111 || (code0 == PLUS_EXPR && is_positive <= 0)))
11113 if (TREE_CODE (arg01) == INTEGER_CST
11114 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11115 fold_overflow_warning (("assuming signed overflow does not "
11116 "occur when assuming that (X - c) > X "
11117 "is always false"),
11118 WARN_STRICT_OVERFLOW_ALL);
11119 return constant_boolean_node (0, type);
11122 /* Likewise (X + c) < X becomes false. */
11123 if (code == LT_EXPR
11124 && ((code0 == PLUS_EXPR && is_positive >= 0)
11125 || (code0 == MINUS_EXPR && is_positive <= 0)))
11127 if (TREE_CODE (arg01) == INTEGER_CST
11128 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11129 fold_overflow_warning (("assuming signed overflow does not "
11130 "occur when assuming that "
11131 "(X + c) < X is always false"),
11132 WARN_STRICT_OVERFLOW_ALL);
11133 return constant_boolean_node (0, type);
11136 /* Convert (X - c) <= X to true. */
11137 if (!HONOR_NANS (arg1)
11139 && ((code0 == MINUS_EXPR && is_positive >= 0)
11140 || (code0 == PLUS_EXPR && is_positive <= 0)))
11142 if (TREE_CODE (arg01) == INTEGER_CST
11143 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11144 fold_overflow_warning (("assuming signed overflow does not "
11145 "occur when assuming that "
11146 "(X - c) <= X is always true"),
11147 WARN_STRICT_OVERFLOW_ALL);
11148 return constant_boolean_node (1, type);
11151 /* Convert (X + c) >= X to true. */
11152 if (!HONOR_NANS (arg1)
11154 && ((code0 == PLUS_EXPR && is_positive >= 0)
11155 || (code0 == MINUS_EXPR && is_positive <= 0)))
11157 if (TREE_CODE (arg01) == INTEGER_CST
11158 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11159 fold_overflow_warning (("assuming signed overflow does not "
11160 "occur when assuming that "
11161 "(X + c) >= X is always true"),
11162 WARN_STRICT_OVERFLOW_ALL);
11163 return constant_boolean_node (1, type);
11166 if (TREE_CODE (arg01) == INTEGER_CST)
11168 /* Convert X + c > X and X - c < X to true for integers. */
11169 if (code == GT_EXPR
11170 && ((code0 == PLUS_EXPR && is_positive > 0)
11171 || (code0 == MINUS_EXPR && is_positive < 0)))
11173 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11174 fold_overflow_warning (("assuming signed overflow does "
11175 "not occur when assuming that "
11176 "(X + c) > X is always true"),
11177 WARN_STRICT_OVERFLOW_ALL);
11178 return constant_boolean_node (1, type);
11181 if (code == LT_EXPR
11182 && ((code0 == MINUS_EXPR && is_positive > 0)
11183 || (code0 == PLUS_EXPR && is_positive < 0)))
11185 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11186 fold_overflow_warning (("assuming signed overflow does "
11187 "not occur when assuming that "
11188 "(X - c) < X is always true"),
11189 WARN_STRICT_OVERFLOW_ALL);
11190 return constant_boolean_node (1, type);
11193 /* Convert X + c <= X and X - c >= X to false for integers. */
11194 if (code == LE_EXPR
11195 && ((code0 == PLUS_EXPR && is_positive > 0)
11196 || (code0 == MINUS_EXPR && is_positive < 0)))
11198 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11199 fold_overflow_warning (("assuming signed overflow does "
11200 "not occur when assuming that "
11201 "(X + c) <= X is always false"),
11202 WARN_STRICT_OVERFLOW_ALL);
11203 return constant_boolean_node (0, type);
11206 if (code == GE_EXPR
11207 && ((code0 == MINUS_EXPR && is_positive > 0)
11208 || (code0 == PLUS_EXPR && is_positive < 0)))
11210 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11211 fold_overflow_warning (("assuming signed overflow does "
11212 "not occur when assuming that "
11213 "(X - c) >= X is always false"),
11214 WARN_STRICT_OVERFLOW_ALL);
11215 return constant_boolean_node (0, type);
11220 /* If we are comparing an ABS_EXPR with a constant, we can
11221 convert all the cases into explicit comparisons, but they may
11222 well not be faster than doing the ABS and one comparison.
11223 But ABS (X) <= C is a range comparison, which becomes a subtraction
11224 and a comparison, and is probably faster. */
11225 if (code == LE_EXPR
11226 && TREE_CODE (arg1) == INTEGER_CST
11227 && TREE_CODE (arg0) == ABS_EXPR
11228 && ! TREE_SIDE_EFFECTS (arg0)
11229 && (0 != (tem = negate_expr (arg1)))
11230 && TREE_CODE (tem) == INTEGER_CST
11231 && !TREE_OVERFLOW (tem))
11232 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11233 build2 (GE_EXPR, type,
11234 TREE_OPERAND (arg0, 0), tem),
11235 build2 (LE_EXPR, type,
11236 TREE_OPERAND (arg0, 0), arg1));
11238 /* Convert ABS_EXPR<x> >= 0 to true. */
11239 strict_overflow_p = false;
11240 if (code == GE_EXPR
11241 && (integer_zerop (arg1)
11242 || (! HONOR_NANS (arg0)
11243 && real_zerop (arg1)))
11244 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11246 if (strict_overflow_p)
11247 fold_overflow_warning (("assuming signed overflow does not occur "
11248 "when simplifying comparison of "
11249 "absolute value and zero"),
11250 WARN_STRICT_OVERFLOW_CONDITIONAL);
11251 return omit_one_operand_loc (loc, type,
11252 constant_boolean_node (true, type),
11256 /* Convert ABS_EXPR<x> < 0 to false. */
11257 strict_overflow_p = false;
11258 if (code == LT_EXPR
11259 && (integer_zerop (arg1) || real_zerop (arg1))
11260 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11262 if (strict_overflow_p)
11263 fold_overflow_warning (("assuming signed overflow does not occur "
11264 "when simplifying comparison of "
11265 "absolute value and zero"),
11266 WARN_STRICT_OVERFLOW_CONDITIONAL);
11267 return omit_one_operand_loc (loc, type,
11268 constant_boolean_node (false, type),
11272 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11273 and similarly for >= into !=. */
11274 if ((code == LT_EXPR || code == GE_EXPR)
11275 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11276 && TREE_CODE (arg1) == LSHIFT_EXPR
11277 && integer_onep (TREE_OPERAND (arg1, 0)))
11278 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11279 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11280 TREE_OPERAND (arg1, 1)),
11281 build_zero_cst (TREE_TYPE (arg0)));
11283 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11284 otherwise Y might be >= # of bits in X's type and thus e.g.
11285 (unsigned char) (1 << Y) for Y 15 might be 0.
11286 If the cast is widening, then 1 << Y should have unsigned type,
11287 otherwise if Y is number of bits in the signed shift type minus 1,
11288 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11289 31 might be 0xffffffff80000000. */
11290 if ((code == LT_EXPR || code == GE_EXPR)
11291 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11292 && CONVERT_EXPR_P (arg1)
11293 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11294 && (element_precision (TREE_TYPE (arg1))
11295 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11296 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11297 || (element_precision (TREE_TYPE (arg1))
11298 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11299 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11301 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11302 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11303 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11304 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11305 build_zero_cst (TREE_TYPE (arg0)));
11310 case UNORDERED_EXPR:
11318 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11320 tree targ0 = strip_float_extensions (arg0);
11321 tree targ1 = strip_float_extensions (arg1);
11322 tree newtype = TREE_TYPE (targ0);
11324 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11325 newtype = TREE_TYPE (targ1);
11327 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11328 return fold_build2_loc (loc, code, type,
11329 fold_convert_loc (loc, newtype, targ0),
11330 fold_convert_loc (loc, newtype, targ1));
11335 case COMPOUND_EXPR:
11336 /* When pedantic, a compound expression can be neither an lvalue
11337 nor an integer constant expression. */
11338 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11340 /* Don't let (0, 0) be null pointer constant. */
11341 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11342 : fold_convert_loc (loc, type, arg1);
11343 return pedantic_non_lvalue_loc (loc, tem);
11346 /* An ASSERT_EXPR should never be passed to fold_binary. */
11347 gcc_unreachable ();
11351 } /* switch (code) */
11354 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11355 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
/* NOTE(review): this excerpt is sampled -- the return-type line, braces and
   the case labels of the switch below are not visible here; consult the
   full file before editing.  */
11359 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11361 switch (TREE_CODE (*tp))
/* For the matching tree codes (presumably constructs whose labels are not
   reachable from outside -- confirm against the elided case labels) the
   walk does not descend into the subtree.  */
11367 *walk_subtrees = 0;
11369 /* ... fall through ... */
11376 /* Return whether the sub-tree ST contains a label which is accessible from
11377 outside the sub-tree. */
/* Implemented as a single de-duplicated walk of ST with contains_label_1;
   a non-NULL walk result means a LABEL_EXPR was found.  The return-type
   line and braces are elided in this excerpt.  */
11380 contains_label_p (tree st)
11383 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11386 /* Fold a ternary expression of code CODE and type TYPE with operands
11387 OP0, OP1, and OP2. Return the folded expression if folding is
11388 successful. Otherwise, return NULL_TREE. */
/* NOTE(review): sampled excerpt -- many interior lines (braces, several
   case labels, some statements) are elided; code lines below are kept
   byte-identical to the original.  */
11391 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11392 tree op0, tree op1, tree op2)
11395 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11396 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Sanity: only genuine 3-operand expression codes reach this function.  */
11398 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11399 && TREE_CODE_LENGTH (code) == 3);
11401 /* If this is a commutative operation, and OP0 is a constant, move it
11402 to OP1 to reduce the number of tests below. */
11403 if (commutative_ternary_tree_code (code)
11404 && tree_swap_operands_p (op0, op1, true))
11405 return fold_build3_loc (loc, code, type, op1, op0, op2);
/* First try the machine-generated match.pd simplifications.  */
11407 tem = generic_simplify (loc, code, type, op0, op1, op2);
11411 /* Strip any conversions that don't change the mode. This is safe
11412 for every expression, except for a comparison expression because
11413 its signedness is derived from its operands. So, in the latter
11414 case, only strip conversions that don't change the signedness.
11416 Note that this is done as an internal manipulation within the
11417 constant folder, in order to find the simplest representation of
11418 the arguments so that their form can be studied. In any cases,
11419 the appropriate type conversions should be put back in the tree
11420 that will get out of the constant folder. */
11441 case COMPONENT_REF:
/* Fold a field read out of a constant CONSTRUCTOR by scanning its
   elements for the matching FIELD_DECL.  */
11442 if (TREE_CODE (arg0) == CONSTRUCTOR
11443 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11445 unsigned HOST_WIDE_INT idx;
11447 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11454 case VEC_COND_EXPR:
11455 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11456 so all simple results must be passed through pedantic_non_lvalue. */
11457 if (TREE_CODE (arg0) == INTEGER_CST)
11459 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11460 tem = integer_zerop (arg0) ? op2 : op1;
11461 /* Only optimize constant conditions when the selected branch
11462 has the same type as the COND_EXPR. This avoids optimizing
11463 away "c ? x : throw", where the throw has a void type.
11464 Avoid throwing away that operand which contains label. */
11465 if ((!TREE_SIDE_EFFECTS (unused_op)
11466 || !contains_label_p (unused_op))
11467 && (! VOID_TYPE_P (TREE_TYPE (tem))
11468 || VOID_TYPE_P (type)))
11469 return pedantic_non_lvalue_loc (loc, tem);
/* A constant-mask VEC_COND_EXPR over constant vectors is a permutation:
   all-ones mask elements select from arg1, zero elements from arg2.  */
11472 else if (TREE_CODE (arg0) == VECTOR_CST)
11474 if ((TREE_CODE (arg1) == VECTOR_CST
11475 || TREE_CODE (arg1) == CONSTRUCTOR)
11476 && (TREE_CODE (arg2) == VECTOR_CST
11477 || TREE_CODE (arg2) == CONSTRUCTOR))
11479 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11480 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11481 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11482 for (i = 0; i < nelts; i++)
11484 tree val = VECTOR_CST_ELT (arg0, i);
11485 if (integer_all_onesp (val))
11487 else if (integer_zerop (val))
11488 sel[i] = nelts + i;
11489 else /* Currently unreachable. */
11492 tree t = fold_vec_perm (type, arg1, arg2, sel);
11493 if (t != NULL_TREE)
11498 /* If we have A op B ? A : C, we may be able to convert this to a
11499 simpler expression, depending on the operation and the values
11500 of B and C. Signed zeros prevent all of these transformations,
11501 for reasons given above each one.
11503 Also try swapping the arguments and inverting the conditional. */
11504 if (COMPARISON_CLASS_P (arg0)
11505 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11506 arg1, TREE_OPERAND (arg0, 1))
11507 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11509 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
/* Same transform with the condition inverted and the arms swapped.  */
11514 if (COMPARISON_CLASS_P (arg0)
11515 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11517 TREE_OPERAND (arg0, 1))
11518 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11520 location_t loc0 = expr_location_or (arg0, loc);
11521 tem = fold_invert_truthvalue (loc0, arg0);
11522 if (tem && COMPARISON_CLASS_P (tem))
11524 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11530 /* If the second operand is simpler than the third, swap them
11531 since that produces better jump optimization results. */
11532 if (truth_value_p (TREE_CODE (arg0))
11533 && tree_swap_operands_p (op1, op2, false))
11535 location_t loc0 = expr_location_or (arg0, loc);
11536 /* See if this can be inverted. If it can't, possibly because
11537 it was a floating-point inequality comparison, don't do
11539 tem = fold_invert_truthvalue (loc0, arg0);
11541 return fold_build3_loc (loc, code, type, tem, op2, op1);
11544 /* Convert A ? 1 : 0 to simply A. */
11545 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11546 : (integer_onep (op1)
11547 && !VECTOR_TYPE_P (type)))
11548 && integer_zerop (op2)
11549 /* If we try to convert OP0 to our type, the
11550 call to fold will try to move the conversion inside
11551 a COND, which will recurse. In that case, the COND_EXPR
11552 is probably the best choice, so leave it alone. */
11553 && type == TREE_TYPE (arg0))
11554 return pedantic_non_lvalue_loc (loc, arg0);
11556 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11557 over COND_EXPR in cases such as floating point comparisons. */
11558 if (integer_zerop (op1)
11559 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11560 : (integer_onep (op2)
11561 && !VECTOR_TYPE_P (type)))
11562 && truth_value_p (TREE_CODE (arg0)))
11563 return pedantic_non_lvalue_loc (loc,
11564 fold_convert_loc (loc, type,
11565 invert_truthvalue_loc (loc,
11568 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11569 if (TREE_CODE (arg0) == LT_EXPR
11570 && integer_zerop (TREE_OPERAND (arg0, 1))
11571 && integer_zerop (op2)
11572 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11574 /* sign_bit_p looks through both zero and sign extensions,
11575 but for this optimization only sign extensions are
11577 tree tem2 = TREE_OPERAND (arg0, 0);
/* Walk down the chain of conversions from the comparison operand to
   TEM, bailing out (lines elided) on any zero-extending step.  */
11578 while (tem != tem2)
11580 if (TREE_CODE (tem2) != NOP_EXPR
11581 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11586 tem2 = TREE_OPERAND (tem2, 0);
11588 /* sign_bit_p only checks ARG1 bits within A's precision.
11589 If <sign bit of A> has wider type than A, bits outside
11590 of A's precision in <sign bit of A> need to be checked.
11591 If they are all 0, this optimization needs to be done
11592 in unsigned A's type, if they are all 1 in signed A's type,
11593 otherwise this can't be done. */
11595 && TYPE_PRECISION (TREE_TYPE (tem))
11596 < TYPE_PRECISION (TREE_TYPE (arg1))
11597 && TYPE_PRECISION (TREE_TYPE (tem))
11598 < TYPE_PRECISION (type))
11600 int inner_width, outer_width;
11603 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11604 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11605 if (outer_width > TYPE_PRECISION (type))
11606 outer_width = TYPE_PRECISION (type);
/* Mask covering exactly the bits of ARG1 above TEM's precision.  */
11608 wide_int mask = wi::shifted_mask
11609 (inner_width, outer_width - inner_width, false,
11610 TYPE_PRECISION (TREE_TYPE (arg1)));
11612 wide_int common = mask & arg1;
11613 if (common == mask)
11615 tem_type = signed_type_for (TREE_TYPE (tem));
11616 tem = fold_convert_loc (loc, tem_type, tem);
11618 else if (common == 0)
11620 tem_type = unsigned_type_for (TREE_TYPE (tem));
11621 tem = fold_convert_loc (loc, tem_type, tem);
11629 fold_convert_loc (loc, type,
11630 fold_build2_loc (loc, BIT_AND_EXPR,
11631 TREE_TYPE (tem), tem,
11632 fold_convert_loc (loc,
11637 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11638 already handled above. */
11639 if (TREE_CODE (arg0) == BIT_AND_EXPR
11640 && integer_onep (TREE_OPERAND (arg0, 1))
11641 && integer_zerop (op2)
11642 && integer_pow2p (arg1))
11644 tree tem = TREE_OPERAND (arg0, 0);
11646 if (TREE_CODE (tem) == RSHIFT_EXPR
11647 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11648 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11649 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11650 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11651 TREE_OPERAND (tem, 0), arg1);
11654 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11655 is probably obsolete because the first operand should be a
11656 truth value (that's why we have the two cases above), but let's
11657 leave it in until we can confirm this for all front-ends. */
11658 if (integer_zerop (op2)
11659 && TREE_CODE (arg0) == NE_EXPR
11660 && integer_zerop (TREE_OPERAND (arg0, 1))
11661 && integer_pow2p (arg1)
11662 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11663 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11664 arg1, OEP_ONLY_CONST))
11665 return pedantic_non_lvalue_loc (loc,
11666 fold_convert_loc (loc, type,
11667 TREE_OPERAND (arg0, 0)));
11669 /* Disable the transformations below for vectors, since
11670 fold_binary_op_with_conditional_arg may undo them immediately,
11671 yielding an infinite loop. */
11672 if (code == VEC_COND_EXPR)
11675 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11676 if (integer_zerop (op2)
11677 && truth_value_p (TREE_CODE (arg0))
11678 && truth_value_p (TREE_CODE (arg1))
11679 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11680 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11681 : TRUTH_ANDIF_EXPR,
11682 type, fold_convert_loc (loc, type, arg0), arg1);
11684 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11685 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11686 && truth_value_p (TREE_CODE (arg0))
11687 && truth_value_p (TREE_CODE (arg1))
11688 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11690 location_t loc0 = expr_location_or (arg0, loc);
11691 /* Only perform transformation if ARG0 is easily inverted. */
11692 tem = fold_invert_truthvalue (loc0, arg0);
11694 return fold_build2_loc (loc, code == VEC_COND_EXPR
11697 type, fold_convert_loc (loc, type, tem),
11701 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11702 if (integer_zerop (arg1)
11703 && truth_value_p (TREE_CODE (arg0))
11704 && truth_value_p (TREE_CODE (op2))
11705 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11707 location_t loc0 = expr_location_or (arg0, loc);
11708 /* Only perform transformation if ARG0 is easily inverted. */
11709 tem = fold_invert_truthvalue (loc0, arg0);
11711 return fold_build2_loc (loc, code == VEC_COND_EXPR
11712 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11713 type, fold_convert_loc (loc, type, tem),
11717 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11718 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11719 && truth_value_p (TREE_CODE (arg0))
11720 && truth_value_p (TREE_CODE (op2))
11721 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11722 return fold_build2_loc (loc, code == VEC_COND_EXPR
11723 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11724 type, fold_convert_loc (loc, type, arg0), op2);
11729 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11730 of fold_ternary on them. */
11731 gcc_unreachable ();
11733 case BIT_FIELD_REF:
/* Extract an element or sub-vector from a constant vector when the
   requested bit range is element-aligned.  */
11734 if ((TREE_CODE (arg0) == VECTOR_CST
11735 || (TREE_CODE (arg0) == CONSTRUCTOR
11736 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11737 && (type == TREE_TYPE (TREE_TYPE (arg0))
11738 || (TREE_CODE (type) == VECTOR_TYPE
11739 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11741 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11742 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11743 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11744 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11747 && (idx % width) == 0
11748 && (n % width) == 0
11749 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11754 if (TREE_CODE (arg0) == VECTOR_CST)
11757 return VECTOR_CST_ELT (arg0, idx);
11759 tree *vals = XALLOCAVEC (tree, n);
11760 for (unsigned i = 0; i < n; ++i)
11761 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11762 return build_vector (type, vals);
11765 /* Constructor elements can be subvectors. */
11766 unsigned HOST_WIDE_INT k = 1;
11767 if (CONSTRUCTOR_NELTS (arg0) != 0)
11769 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11770 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11771 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11774 /* We keep an exact subset of the constructor elements. */
11775 if ((idx % k) == 0 && (n % k) == 0)
11777 if (CONSTRUCTOR_NELTS (arg0) == 0)
11778 return build_constructor (type, NULL);
/* Elements past the explicit constructor entries are implicitly zero.  */
11783 if (idx < CONSTRUCTOR_NELTS (arg0))
11784 return CONSTRUCTOR_ELT (arg0, idx)->value;
11785 return build_zero_cst (type);
11788 vec<constructor_elt, va_gc> *vals;
11789 vec_alloc (vals, n);
11790 for (unsigned i = 0;
11791 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11793 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11795 (arg0, idx + i)->value);
11796 return build_constructor (type, vals);
11798 /* The bitfield references a single constructor element. */
11799 else if (idx + n <= (idx / k + 1) * k)
11801 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11802 return build_zero_cst (type);
11804 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11806 return fold_build3_loc (loc, code, type,
11807 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11808 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11813 /* A bit-field-ref that referenced the full argument can be stripped. */
11814 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11815 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11816 && integer_zerop (op2))
11817 return fold_convert_loc (loc, type, arg0);
11819 /* On constants we can use native encode/interpret to constant
11820 fold (nearly) all BIT_FIELD_REFs. */
11821 if (CONSTANT_CLASS_P (arg0)
11822 && can_native_interpret_type_p (type)
11823 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11824 /* This limitation should not be necessary, we just need to
11825 round this up to mode size. */
11826 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11827 /* Need bit-shifting of the buffer to relax the following. */
11828 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11830 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11831 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11832 unsigned HOST_WIDE_INT clen;
11833 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11834 /* ??? We cannot tell native_encode_expr to start at
11835 some random byte only. So limit us to a reasonable amount
11839 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11840 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11842 && len * BITS_PER_UNIT >= bitpos + bitsize)
11844 tree v = native_interpret_expr (type,
11845 b + bitpos / BITS_PER_UNIT,
11846 bitsize / BITS_PER_UNIT);
11856 /* For integers we can decompose the FMA if possible. */
11857 if (TREE_CODE (arg0) == INTEGER_CST
11858 && TREE_CODE (arg1) == INTEGER_CST)
11859 return fold_build2_loc (loc, PLUS_EXPR, type,
11860 const_binop (MULT_EXPR, arg0, arg1), arg2);
11861 if (integer_zerop (arg2))
11862 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11864 return fold_fma (loc, type, arg0, arg1, arg2);
11866 case VEC_PERM_EXPR:
/* Canonicalize and, when possible, constant-fold a permutation with a
   constant selector.  SEL holds selector values masked to the
   single-argument range, SEL2 masked to the two-argument range.  */
11867 if (TREE_CODE (arg2) == VECTOR_CST)
11869 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11870 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11871 unsigned char *sel2 = sel + nelts;
11872 bool need_mask_canon = false;
11873 bool need_mask_canon2 = false;
11874 bool all_in_vec0 = true;
11875 bool all_in_vec1 = true;
11876 bool maybe_identity = true;
11877 bool single_arg = (op0 == op1);
11878 bool changed = false;
11880 mask2 = 2 * nelts - 1;
11881 mask = single_arg ? (nelts - 1) : mask2;
11882 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11883 for (i = 0; i < nelts; i++)
11885 tree val = VECTOR_CST_ELT (arg2, i);
11886 if (TREE_CODE (val) != INTEGER_CST)
11889 /* Make sure that the perm value is in an acceptable
11892 need_mask_canon |= wi::gtu_p (t, mask);
11893 need_mask_canon2 |= wi::gtu_p (t, mask2);
11894 sel[i] = t.to_uhwi () & mask;
11895 sel2[i] = t.to_uhwi () & mask2;
11897 if (sel[i] < nelts)
11898 all_in_vec1 = false;
11900 all_in_vec0 = false;
11902 if ((sel[i] & (nelts-1)) != i)
11903 maybe_identity = false;
11906 if (maybe_identity)
11916 else if (all_in_vec1)
11919 for (i = 0; i < nelts; i++)
11921 need_mask_canon = true;
11924 if ((TREE_CODE (op0) == VECTOR_CST
11925 || TREE_CODE (op0) == CONSTRUCTOR)
11926 && (TREE_CODE (op1) == VECTOR_CST
11927 || TREE_CODE (op1) == CONSTRUCTOR))
11929 tree t = fold_vec_perm (type, op0, op1, sel);
11930 if (t != NULL_TREE)
11934 if (op0 == op1 && !single_arg)
11937 /* Some targets are deficient and fail to expand a single
11938 argument permutation while still allowing an equivalent
11939 2-argument version. */
11940 if (need_mask_canon && arg2 == op2
11941 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11942 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11944 need_mask_canon = need_mask_canon2;
11948 if (need_mask_canon && arg2 == op2)
/* Rebuild the selector with the out-of-range indices wrapped.  */
11950 tree *tsel = XALLOCAVEC (tree, nelts);
11951 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11952 for (i = 0; i < nelts; i++)
11953 tsel[i] = build_int_cst (eltype, sel[i]);
11954 op2 = build_vector (TREE_TYPE (arg2), tsel);
11959 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11965 } /* switch (code) */
11968 /* Perform constant folding and related simplification of EXPR.
11969 The related simplifications include x*1 => x, x*0 => 0, etc.,
11970 and application of the associative law.
11971 NOP_EXPR conversions may be removed freely (as long as we
11972 are careful not to change the type of the overall expression).
11973 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11974 but we can constant-fold them if they have constant operands. */
11976 #ifdef ENABLE_FOLD_CHECKING
/* Under fold checking the real worker is renamed fold_1 and wrapped by a
   checksumming fold () defined further below.  */
11977 # define fold(x) fold_1 (x)
11978 static tree fold_1 (tree);
/* NOTE(review): sampled excerpt -- the function header, braces and several
   case labels between the lines below are elided.  */
11984 const tree t = expr;
11985 enum tree_code code = TREE_CODE (t);
11986 enum tree_code_class kind = TREE_CODE_CLASS (code);
11988 location_t loc = EXPR_LOCATION (expr);
11990 /* Return right away if a constant. */
11991 if (kind == tcc_constant)
11994 /* CALL_EXPR-like objects with variable numbers of operands are
11995 treated specially. */
11996 if (kind == tcc_vl_exp)
11998 if (code == CALL_EXPR)
12000 tem = fold_call_expr (loc, expr, false);
12001 return tem ? tem : expr;
/* Dispatch to the fixed-arity folders by operand count.  */
12006 if (IS_EXPR_CODE_CLASS (kind))
12008 tree type = TREE_TYPE (t);
12009 tree op0, op1, op2;
12011 switch (TREE_CODE_LENGTH (code))
12014 op0 = TREE_OPERAND (t, 0);
12015 tem = fold_unary_loc (loc, code, type, op0);
12016 return tem ? tem : expr;
12018 op0 = TREE_OPERAND (t, 0);
12019 op1 = TREE_OPERAND (t, 1);
12020 tem = fold_binary_loc (loc, code, type, op0, op1);
12021 return tem ? tem : expr;
12023 op0 = TREE_OPERAND (t, 0);
12024 op1 = TREE_OPERAND (t, 1);
12025 op2 = TREE_OPERAND (t, 2);
12026 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12027 return tem ? tem : expr;
/* ARRAY_REF of a constant CONSTRUCTOR: look up the element whose index
   (or RANGE_EXPR bounds) matches OP1 by binary search.  */
12037 tree op0 = TREE_OPERAND (t, 0);
12038 tree op1 = TREE_OPERAND (t, 1);
12040 if (TREE_CODE (op1) == INTEGER_CST
12041 && TREE_CODE (op0) == CONSTRUCTOR
12042 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12044 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12045 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12046 unsigned HOST_WIDE_INT begin = 0;
12048 /* Find a matching index by means of a binary search. */
12049 while (begin != end)
12051 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12052 tree index = (*elts)[middle].index;
12054 if (TREE_CODE (index) == INTEGER_CST
12055 && tree_int_cst_lt (index, op1))
12056 begin = middle + 1;
12057 else if (TREE_CODE (index) == INTEGER_CST
12058 && tree_int_cst_lt (op1, index))
12060 else if (TREE_CODE (index) == RANGE_EXPR
12061 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12062 begin = middle + 1;
12063 else if (TREE_CODE (index) == RANGE_EXPR
12064 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
12067 return (*elts)[middle].value;
12074 /* Return a VECTOR_CST if possible. */
12077 tree type = TREE_TYPE (t);
12078 if (TREE_CODE (type) != VECTOR_TYPE)
12081 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
12082 unsigned HOST_WIDE_INT idx, pos = 0;
/* Flatten the constructor's constant values (sub-vectors included)
   into a flat element array; trailing elements default to zero.  */
12085 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12087 if (!CONSTANT_CLASS_P (value))
12089 if (TREE_CODE (value) == VECTOR_CST)
12091 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12092 vec[pos++] = VECTOR_CST_ELT (value, i);
12095 vec[pos++] = value;
12097 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12098 vec[pos] = build_zero_cst (TREE_TYPE (type));
12100 return build_vector (type, vec);
/* Presumably the CONST_DECL case -- fold its initializer; confirm
   against the elided case label.  */
12104 return fold (DECL_INITIAL (t));
12108 } /* switch (code) */
12111 #ifdef ENABLE_FOLD_CHECKING
12114 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12115 hash_table<nofree_ptr_hash<const tree_node> > *);
12116 static void fold_check_failed (const_tree, const_tree);
12117 void print_fold_checksum (const_tree);
12119 /* When --enable-checking=fold, compute a digest of expr before
12120 and after actual fold call to see if fold did not accidentally
12121 change original expr. */
/* Checking wrapper for fold: MD5 EXPR before and after calling the real
   worker fold_1 and abort via fold_check_failed if the input tree was
   mutated.  (Function header and braces elided in this excerpt.)  */
12127 struct md5_ctx ctx;
12128 unsigned char checksum_before[16], checksum_after[16];
12129 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12131 md5_init_ctx (&ctx);
12132 fold_checksum_tree (expr, &ctx, &ht);
12133 md5_finish_ctx (&ctx, checksum_before);
12136 ret = fold_1 (expr);
12138 md5_init_ctx (&ctx);
12139 fold_checksum_tree (expr, &ctx, &ht);
12140 md5_finish_ctx (&ctx, checksum_after);
12142 if (memcmp (checksum_before, checksum_after, 16))
12143 fold_check_failed (expr, ret);
/* Print the MD5 checksum of EXPR to stderr as 32 lowercase hex digits
   followed by a newline.  Debugging aid for fold checking.  */
12149 print_fold_checksum (const_tree expr)
12151 struct md5_ctx ctx;
12152 unsigned char checksum[16], cnt;
12153 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12155 md5_init_ctx (&ctx);
12156 fold_checksum_tree (expr, &ctx, &ht);
12157 md5_finish_ctx (&ctx, checksum);
12158 for (cnt = 0; cnt < 16; ++cnt)
12159 fprintf (stderr, "%02x", checksum[cnt]);
12160 putc ('\n', stderr);
/* Report (fatally, via internal_error) that fold modified its input tree.
   Both parameters exist only for inspection in a debugger.  */
12164 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12166 internal_error ("fold check: original tree changed by fold");
/* Feed a structural digest of EXPR into MD5 context CTX, recursing over
   its type, chain, operands and relevant decl/type fields.  HT records
   already-visited nodes so shared subtrees are hashed only once.  Fields
   that fold is allowed to touch (assembler names, type caches, variant
   chains) are masked out by hashing a scrubbed stack copy instead.
   (Several braces/case labels are elided in this excerpt.)  */
12170 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12171 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12173 const tree_node **slot;
12174 enum tree_code code;
12175 union tree_node buf;
12181 slot = ht->find_slot (expr, INSERT);
12185 code = TREE_CODE (expr);
12186 if (TREE_CODE_CLASS (code) == tcc_declaration
12187 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12189 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12190 memcpy ((char *) &buf, expr, tree_size (expr));
12191 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12192 buf.decl_with_vis.symtab_node = NULL;
12193 expr = (tree) &buf;
12195 else if (TREE_CODE_CLASS (code) == tcc_type
12196 && (TYPE_POINTER_TO (expr)
12197 || TYPE_REFERENCE_TO (expr)
12198 || TYPE_CACHED_VALUES_P (expr)
12199 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12200 || TYPE_NEXT_VARIANT (expr)))
12202 /* Allow these fields to be modified. */
12204 memcpy ((char *) &buf, expr, tree_size (expr));
12205 expr = tmp = (tree) &buf;
12206 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12207 TYPE_POINTER_TO (tmp) = NULL;
12208 TYPE_REFERENCE_TO (tmp) = NULL;
12209 TYPE_NEXT_VARIANT (tmp) = NULL;
12210 if (TYPE_CACHED_VALUES_P (tmp))
12212 TYPE_CACHED_VALUES_P (tmp) = 0;
12213 TYPE_CACHED_VALUES (tmp) = NULL;
/* Hash the raw node bytes, then recurse into referenced trees.  */
12216 md5_process_bytes (expr, tree_size (expr), ctx);
12217 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12218 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12219 if (TREE_CODE_CLASS (code) != tcc_type
12220 && TREE_CODE_CLASS (code) != tcc_declaration
12221 && code != TREE_LIST
12222 && code != SSA_NAME
12223 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12224 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12225 switch (TREE_CODE_CLASS (code))
12231 md5_process_bytes (TREE_STRING_POINTER (expr),
12232 TREE_STRING_LENGTH (expr), ctx);
12235 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12236 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12239 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12240 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12246 case tcc_exceptional:
/* TREE_LIST is walked iteratively via goto to avoid deep recursion
   along the chain.  */
12250 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12251 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12252 expr = TREE_CHAIN (expr);
12253 goto recursive_label;
12256 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12257 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12263 case tcc_expression:
12264 case tcc_reference:
12265 case tcc_comparison:
12268 case tcc_statement:
12270 len = TREE_OPERAND_LENGTH (expr);
12271 for (i = 0; i < len; ++i)
12272 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12274 case tcc_declaration:
12275 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12276 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12277 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12279 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12280 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12281 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12282 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12283 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12286 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12288 if (TREE_CODE (expr) == FUNCTION_DECL)
12290 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12291 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12293 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
/* tcc_type case (label elided): hash the salient type fields.  */
12297 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12298 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12299 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12300 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12301 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12302 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12303 if (INTEGRAL_TYPE_P (expr)
12304 || SCALAR_FLOAT_TYPE_P (expr))
12306 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12307 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12309 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12310 if (TREE_CODE (expr) == RECORD_TYPE
12311 || TREE_CODE (expr) == UNION_TYPE
12312 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12313 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12314 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12321 /* Helper function for outputting the checksum of a tree T. When
12322 debugging with gdb, you can "define mynext" to be "next" followed
12323 by "call debug_fold_checksum (op0)", then just trace down till the
/* Prints the 16 checksum bytes of T to stderr as space-separated decimal
   values (unlike print_fold_checksum, which prints hex).  */
12326 DEBUG_FUNCTION void
12327 debug_fold_checksum (const_tree t)
12330 unsigned char checksum[16];
12331 struct md5_ctx ctx;
12332 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (t, &ctx, &ht);
12336 md5_finish_ctx (&ctx, checksum);
12339 for (i = 0; i < 16; i++)
12340 fprintf (stderr, "%d ", checksum[i]);
12342 fprintf (stderr, "\n");
12347 /* Fold a unary tree expression with code CODE of type TYPE with an
12348 operand OP0. LOC is the location of the resulting expression.
12349 Return a folded expression if successful. Otherwise, return a tree
12350 expression with code CODE of type TYPE with an operand OP0. */
12353 fold_build1_stat_loc (location_t loc,
12354 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12357 #ifdef ENABLE_FOLD_CHECKING
/* With fold checking, digest OP0 before and after folding and fail hard
   if fold_unary_loc modified it in place.  */
12358 unsigned char checksum_before[16], checksum_after[16];
12359 struct md5_ctx ctx;
12360 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12362 md5_init_ctx (&ctx);
12363 fold_checksum_tree (op0, &ctx, &ht);
12364 md5_finish_ctx (&ctx, checksum_before);
12368 tem = fold_unary_loc (loc, code, type, op0);
/* Folding failed (lines elided): fall back to building the raw node.  */
12370 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12372 #ifdef ENABLE_FOLD_CHECKING
12373 md5_init_ctx (&ctx);
12374 fold_checksum_tree (op0, &ctx, &ht);
12375 md5_finish_ctx (&ctx, checksum_after);
12377 if (memcmp (checksum_before, checksum_after, 16))
12378 fold_check_failed (op0, tem);
12383 /* Fold a binary tree expression with code CODE of type TYPE with
12384 operands OP0 and OP1. LOC is the location of the resulting
12385 expression. Return a folded expression if successful. Otherwise,
12386 return a tree expression with code CODE of type TYPE with operands
12390 fold_build2_stat_loc (location_t loc,
12391 enum tree_code code, tree type, tree op0, tree op1
12395 #ifdef ENABLE_FOLD_CHECKING
/* With fold checking, digest both operands before and after folding and
   fail hard if fold_binary_loc modified either in place.  */
12396 unsigned char checksum_before_op0[16],
12397 checksum_before_op1[16],
12398 checksum_after_op0[16],
12399 checksum_after_op1[16];
12400 struct md5_ctx ctx;
12401 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12403 md5_init_ctx (&ctx);
12404 fold_checksum_tree (op0, &ctx, &ht);
12405 md5_finish_ctx (&ctx, checksum_before_op0);
12408 md5_init_ctx (&ctx);
12409 fold_checksum_tree (op1, &ctx, &ht);
12410 md5_finish_ctx (&ctx, checksum_before_op1);
12414 tem = fold_binary_loc (loc, code, type, op0, op1);
/* Folding failed (lines elided): fall back to building the raw node.  */
12416 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12418 #ifdef ENABLE_FOLD_CHECKING
12419 md5_init_ctx (&ctx);
12420 fold_checksum_tree (op0, &ctx, &ht);
12421 md5_finish_ctx (&ctx, checksum_after_op0);
12424 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12425 fold_check_failed (op0, tem);
12427 md5_init_ctx (&ctx);
12428 fold_checksum_tree (op1, &ctx, &ht);
12429 md5_finish_ctx (&ctx, checksum_after_op1);
12431 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12432 fold_check_failed (op1, tem);
12437 /* Fold a ternary tree expression with code CODE of type TYPE with
12438 operands OP0, OP1, and OP2. Return a folded expression if
12439 successful. Otherwise, return a tree expression with code CODE of
12440 type TYPE with operands OP0, OP1, and OP2. */
12443 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12444 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12447 #ifdef ENABLE_FOLD_CHECKING
12448 unsigned char checksum_before_op0[16],
12449 checksum_before_op1[16],
12450 checksum_before_op2[16],
12451 checksum_after_op0[16],
12452 checksum_after_op1[16],
12453 checksum_after_op2[16];
12454 struct md5_ctx ctx;
12455 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
/* Checksum all three operands before folding; compared below to catch
   in-place modification, which fold must never do.  */
12457 md5_init_ctx (&ctx);
12458 fold_checksum_tree (op0, &ctx, &ht);
12459 md5_finish_ctx (&ctx, checksum_before_op0);
12462 md5_init_ctx (&ctx);
12463 fold_checksum_tree (op1, &ctx, &ht);
12464 md5_finish_ctx (&ctx, checksum_before_op1);
12467 md5_init_ctx (&ctx);
12468 fold_checksum_tree (op2, &ctx, &ht);
12469 md5_finish_ctx (&ctx, checksum_before_op2);
/* CALL_EXPRs are variable-length and must go through
   fold_build_call_array_loc instead of this 3-operand path.  */
12473 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12474 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12476 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12478 #ifdef ENABLE_FOLD_CHECKING
12479 md5_init_ctx (&ctx);
12480 fold_checksum_tree (op0, &ctx, &ht);
12481 md5_finish_ctx (&ctx, checksum_after_op0);
12484 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12485 fold_check_failed (op0, tem);
12487 md5_init_ctx (&ctx);
12488 fold_checksum_tree (op1, &ctx, &ht);
12489 md5_finish_ctx (&ctx, checksum_after_op1);
12492 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12493 fold_check_failed (op1, tem);
12495 md5_init_ctx (&ctx);
12496 fold_checksum_tree (op2, &ctx, &ht);
12497 md5_finish_ctx (&ctx, checksum_after_op2);
12499 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12500 fold_check_failed (op2, tem);
12505 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12506 arguments in ARGARRAY, and a null static chain.
12507 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12508 of type TYPE from the given operands as constructed by build_call_array. */
12511 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12512 int nargs, tree *argarray)
12515 #ifdef ENABLE_FOLD_CHECKING
12516 unsigned char checksum_before_fn[16],
12517 checksum_before_arglist[16],
12518 checksum_after_fn[16],
12519 checksum_after_arglist[16];
12520 struct md5_ctx ctx;
12521 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
/* Checksum the callee and, separately, the whole argument list (one
   combined digest) before folding.  */
12524 md5_init_ctx (&ctx);
12525 fold_checksum_tree (fn, &ctx, &ht);
12526 md5_finish_ctx (&ctx, checksum_before_fn);
12529 md5_init_ctx (&ctx);
12530 for (i = 0; i < nargs; i++)
12531 fold_checksum_tree (argarray[i], &ctx, &ht);
12532 md5_finish_ctx (&ctx, checksum_before_arglist);
12536 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12538 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12540 #ifdef ENABLE_FOLD_CHECKING
12541 md5_init_ctx (&ctx);
12542 fold_checksum_tree (fn, &ctx, &ht);
12543 md5_finish_ctx (&ctx, checksum_after_fn);
12546 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12547 fold_check_failed (fn, tem);
12549 md5_init_ctx (&ctx);
12550 for (i = 0; i < nargs; i++)
12551 fold_checksum_tree (argarray[i], &ctx, &ht);
12552 md5_finish_ctx (&ctx, checksum_after_arglist);
/* The arglist digest covers all arguments, so no single culprit tree
   can be reported -- pass NULL_TREE.  */
12554 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12555 fold_check_failed (NULL_TREE, tem);
12560 /* Perform constant folding and related simplification of initializer
12561 expression EXPR. These behave identically to "fold_buildN" but ignore
12562 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap-sensitive flags, then clear them for the duration of
   initializer folding: initializers are evaluated at compile time, so
   run-time trapping semantics need not be preserved.  */
12564 #define START_FOLD_INIT \
12565 int saved_signaling_nans = flag_signaling_nans;\
12566 int saved_trapping_math = flag_trapping_math;\
12567 int saved_rounding_math = flag_rounding_math;\
12568 int saved_trapv = flag_trapv;\
12569 int saved_folding_initializer = folding_initializer;\
12570 flag_signaling_nans = 0;\
12571 flag_trapping_math = 0;\
12572 flag_rounding_math = 0;\
/* NOTE(review): the line clearing flag_trapv (original line 12573) is
   absent from this extract -- confirm against the full source.  */
12574 folding_initializer = 1;
/* Restore every flag saved by START_FOLD_INIT.  */
12576 #define END_FOLD_INIT \
12577 flag_signaling_nans = saved_signaling_nans;\
12578 flag_trapping_math = saved_trapping_math;\
12579 flag_rounding_math = saved_rounding_math;\
12580 flag_trapv = saved_trapv;\
12581 folding_initializer = saved_folding_initializer;
/* fold_build1 within a START_FOLD_INIT/END_FOLD_INIT bracket.  */
12584 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12585 tree type, tree op)
12590 result = fold_build1_loc (loc, code, type, op);
/* Likewise for binary expressions.  */
12597 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12598 tree type, tree op0, tree op1)
12603 result = fold_build2_loc (loc, code, type, op0, op1);
/* Likewise for calls built from an argument array.  */
12610 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12611 int nargs, tree *argarray)
12616 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
/* The macros are only meaningful inside this file section.  */
12622 #undef START_FOLD_INIT
12623 #undef END_FOLD_INIT
12625 /* Determine if first argument is a multiple of second argument. Return 0 if
12626 it is not, or we cannot easily determined it to be.
12628 An example of the sort of thing we care about (at this point; this routine
12629 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12630 fold cases do now) is discovering that
12632 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12638 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12640 This code also handles discovering that
12642 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12644 is a multiple of 8 so we don't have to worry about dealing with a
12645 possible remainder.
12647 Note that we *look* inside a SAVE_EXPR only to determine how it was
12648 calculated; it is not safe for fold to do much of anything else with the
12649 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12650 at run time. For example, the latter example above *cannot* be implemented
12651 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12652 evaluation time of the original SAVE_EXPR is not necessarily the same at
12653 the time the new expression is evaluated. The only optimization of this
12654 sort that would be valid is changing
12656 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12660 SAVE_EXPR (I) * SAVE_EXPR (J)
12662 (where the same SAVE_EXPR (J) is used in the original and the
12663 transformed version). */
12666 multiple_of_p (tree type, const_tree top, const_tree bottom)
/* Anything is trivially a multiple of itself.  */
12668 if (operand_equal_p (top, bottom, 0))
12671 if (TREE_CODE (type) != INTEGER_TYPE)
/* Dispatch on the structure of TOP.  NOTE(review): the case labels
   themselves are among the lines missing from this extract; which
   branch each fragment below belongs to must be confirmed against the
   full source.  */
12674 switch (TREE_CODE (top))
12677 /* Bitwise and provides a power of two multiple. If the mask is
12678 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12679 if (!integer_pow2p (bottom))
/* For a product, either factor being a multiple suffices.  */
12684 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12685 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* For a sum/difference, both operands must be multiples.  */
12689 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12690 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as multiplication by 1 << N and
   recurse, provided the shift count does not overflow.  */
12693 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12697 op1 = TREE_OPERAND (top, 1);
12698 /* const_binop may not detect overflow correctly,
12699 so check for it explicitly here. */
12700 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12701 && 0 != (t1 = fold_convert (type,
12702 const_binop (LSHIFT_EXPR,
12705 && !TREE_OVERFLOW (t1))
12706 return multiple_of_p (type, t1, bottom);
12711 /* Can't handle conversions from non-integral or wider integral type. */
12712 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12713 || (TYPE_PRECISION (type)
12714 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12717 /* .. fall through ... */
12720 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
/* For a conditional, both arms must be multiples.  */
12723 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12724 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
/* Constant case: refuse zero divisors and, for unsigned types,
   negative constants whose wide-int interpretation would mislead.  */
12727 if (TREE_CODE (bottom) != INTEGER_CST
12728 || integer_zerop (bottom)
12729 || (TYPE_UNSIGNED (type)
12730 && (tree_int_cst_sgn (top) < 0
12731 || tree_int_cst_sgn (bottom) < 0)))
12733 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
/* Poison direct calls to tree_expr_nonnegative_warnv_p inside this
   section: recursion must go through RECURSE so the DEPTH counter is
   incremented, bounding the recursion.  */
12741 #define tree_expr_nonnegative_warnv_p(X, Y) \
12742 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
/* The parenthesized call bypasses the function-like macro above.  */
12744 #define RECURSE(X) \
12745 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12747 /* Return true if CODE or TYPE is known to be non-negative. */
12750 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12752 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12753 && truth_value_p (code))
12754 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12755 have a signed:1 type (where the value is -1 and 0). */
12760 /* Return true if (CODE OP0) is known to be non-negative. If the return
12761 value is based on the assumption that signed overflow is undefined,
12762 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12763 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12766 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12767 bool *strict_overflow_p, int depth)
/* Unsigned values are trivially non-negative.  */
12769 if (TYPE_UNSIGNED (type))
12775 /* We can't return 1 if flag_wrapv is set because
12776 ABS_EXPR<INT_MIN> = INT_MIN. */
12777 if (!ANY_INTEGRAL_TYPE_P (type))
/* Relying on ABS being non-negative assumes no signed wrap-around.  */
12779 if (TYPE_OVERFLOW_UNDEFINED (type))
12781 *strict_overflow_p = true;
12786 case NON_LVALUE_EXPR:
12788 case FIX_TRUNC_EXPR:
12789 return RECURSE (op0);
12793 tree inner_type = TREE_TYPE (op0);
12794 tree outer_type = type;
/* Conversions: case-split on real vs. integral inner/outer types.  */
12796 if (TREE_CODE (outer_type) == REAL_TYPE)
12798 if (TREE_CODE (inner_type) == REAL_TYPE)
12799 return RECURSE (op0);
12800 if (INTEGRAL_TYPE_P (inner_type))
12802 if (TYPE_UNSIGNED (inner_type))
12804 return RECURSE (op0);
12807 else if (INTEGRAL_TYPE_P (outer_type))
12809 if (TREE_CODE (inner_type) == REAL_TYPE)
12810 return RECURSE (op0);
12811 if (INTEGRAL_TYPE_P (inner_type))
/* A zero-extension (narrower unsigned to wider) never produces a
   negative result.  */
12812 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12813 && TYPE_UNSIGNED (inner_type);
12819 return tree_simple_nonnegative_warnv_p (code, type);
12822 /* We don't know sign of `t', so be conservative and return false. */
12826 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12827 value is based on the assumption that signed overflow is undefined,
12828 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12829 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12832 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12833 tree op1, bool *strict_overflow_p,
12836 if (TYPE_UNSIGNED (type))
12841 case POINTER_PLUS_EXPR:
12843 if (FLOAT_TYPE_P (type))
12844 return RECURSE (op0) && RECURSE (op1);
12846 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12847 both unsigned and at least 2 bits shorter than the result. */
12848 if (TREE_CODE (type) == INTEGER_TYPE
12849 && TREE_CODE (op0) == NOP_EXPR
12850 && TREE_CODE (op1) == NOP_EXPR)
12852 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12853 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12854 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12855 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* +1 accounts for the carry bit of the addition.  */
12857 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12858 TYPE_PRECISION (inner2)) + 1;
12859 return prec < TYPE_PRECISION (type);
12865 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12867 /* x * x is always non-negative for floating point x
12868 or without overflow. */
12869 if (operand_equal_p (op0, op1, 0)
12870 || (RECURSE (op0) && RECURSE (op1)))
/* Record that the answer leans on undefined signed overflow.  */
12872 if (ANY_INTEGRAL_TYPE_P (type)
12873 && TYPE_OVERFLOW_UNDEFINED (type))
12874 *strict_overflow_p = true;
12879 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12880 both unsigned and their total bits is shorter than the result. */
12881 if (TREE_CODE (type) == INTEGER_TYPE
12882 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12883 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12885 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12886 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12888 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12889 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12892 bool unsigned0 = TYPE_UNSIGNED (inner0);
12893 bool unsigned1 = TYPE_UNSIGNED (inner1);
/* A non-negative constant behaves like an unsigned operand here.  */
12895 if (TREE_CODE (op0) == INTEGER_CST)
12896 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12898 if (TREE_CODE (op1) == INTEGER_CST)
12899 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12901 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12902 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
/* For constants use the minimal precision actually needed.  */
12904 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12905 ? tree_int_cst_min_precision (op0, UNSIGNED)
12906 : TYPE_PRECISION (inner0);
12908 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12909 ? tree_int_cst_min_precision (op1, UNSIGNED)
12910 : TYPE_PRECISION (inner1);
12912 return precision0 + precision1 < TYPE_PRECISION (type);
12919 return RECURSE (op0) || RECURSE (op1);
12925 case TRUNC_DIV_EXPR:
12926 case CEIL_DIV_EXPR:
12927 case FLOOR_DIV_EXPR:
12928 case ROUND_DIV_EXPR:
12929 return RECURSE (op0) && RECURSE (op1);
/* A modulus result has the sign of the first operand.  */
12931 case TRUNC_MOD_EXPR:
12932 case CEIL_MOD_EXPR:
12933 case FLOOR_MOD_EXPR:
12934 case ROUND_MOD_EXPR:
12935 return RECURSE (op0);
12938 return tree_simple_nonnegative_warnv_p (code, type);
12941 /* We don't know sign of `t', so be conservative and return false. */
12945 /* Return true if T is known to be non-negative. If the return
12946 value is based on the assumption that signed overflow is undefined,
12947 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12948 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12951 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
/* SSA names pending an SSA update have unreliable def stmts; give up.  */
12953 if (TREE_CODE (t) == SSA_NAME
12954 && name_registered_for_update_p (t))
12957 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12960 switch (TREE_CODE (t))
12963 return tree_int_cst_sgn (t) >= 0;
12966 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12969 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
/* COND_EXPR: both arms must be non-negative.  */
12972 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12975 /* Limit the depth of recursion to avoid quadratic behavior.
12976 This is expected to catch almost all occurrences in practice.
12977 If this code misses important cases that unbounded recursion
12978 would not, passes that need this information could be revised
12979 to provide it through dataflow propagation. */
12980 if (depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH))
12981 return gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12982 strict_overflow_p, depth);
12986 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12990 /* Return true if T is known to be non-negative. If the return
12991 value is based on the assumption that signed overflow is undefined,
12992 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12993 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12996 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
12997 bool *strict_overflow_p, int depth)
12999 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13000 switch (DECL_FUNCTION_CODE (fndecl))
/* Builtins whose result is always non-negative regardless of the
   arguments (absolute values, exponentials, bit counts, ...).  */
13002 CASE_FLT_FN (BUILT_IN_ACOS):
13003 CASE_FLT_FN (BUILT_IN_ACOSH):
13004 CASE_FLT_FN (BUILT_IN_CABS):
13005 CASE_FLT_FN (BUILT_IN_COSH):
13006 CASE_FLT_FN (BUILT_IN_ERFC):
13007 CASE_FLT_FN (BUILT_IN_EXP):
13008 CASE_FLT_FN (BUILT_IN_EXP10):
13009 CASE_FLT_FN (BUILT_IN_EXP2):
13010 CASE_FLT_FN (BUILT_IN_FABS):
13011 CASE_FLT_FN (BUILT_IN_FDIM):
13012 CASE_FLT_FN (BUILT_IN_HYPOT):
13013 CASE_FLT_FN (BUILT_IN_POW10):
13014 CASE_INT_FN (BUILT_IN_FFS):
13015 CASE_INT_FN (BUILT_IN_PARITY):
13016 CASE_INT_FN (BUILT_IN_POPCOUNT):
13017 CASE_INT_FN (BUILT_IN_CLZ):
13018 CASE_INT_FN (BUILT_IN_CLRSB):
13019 case BUILT_IN_BSWAP32:
13020 case BUILT_IN_BSWAP64:
13024 CASE_FLT_FN (BUILT_IN_SQRT):
13025 /* sqrt(-0.0) is -0.0. */
13026 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13028 return RECURSE (arg0);
/* Builtins that preserve the sign of their first argument.  */
13030 CASE_FLT_FN (BUILT_IN_ASINH):
13031 CASE_FLT_FN (BUILT_IN_ATAN):
13032 CASE_FLT_FN (BUILT_IN_ATANH):
13033 CASE_FLT_FN (BUILT_IN_CBRT):
13034 CASE_FLT_FN (BUILT_IN_CEIL):
13035 CASE_FLT_FN (BUILT_IN_ERF):
13036 CASE_FLT_FN (BUILT_IN_EXPM1):
13037 CASE_FLT_FN (BUILT_IN_FLOOR):
13038 CASE_FLT_FN (BUILT_IN_FMOD):
13039 CASE_FLT_FN (BUILT_IN_FREXP):
13040 CASE_FLT_FN (BUILT_IN_ICEIL):
13041 CASE_FLT_FN (BUILT_IN_IFLOOR):
13042 CASE_FLT_FN (BUILT_IN_IRINT):
13043 CASE_FLT_FN (BUILT_IN_IROUND):
13044 CASE_FLT_FN (BUILT_IN_LCEIL):
13045 CASE_FLT_FN (BUILT_IN_LDEXP):
13046 CASE_FLT_FN (BUILT_IN_LFLOOR):
13047 CASE_FLT_FN (BUILT_IN_LLCEIL):
13048 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13049 CASE_FLT_FN (BUILT_IN_LLRINT):
13050 CASE_FLT_FN (BUILT_IN_LLROUND):
13051 CASE_FLT_FN (BUILT_IN_LRINT):
13052 CASE_FLT_FN (BUILT_IN_LROUND):
13053 CASE_FLT_FN (BUILT_IN_MODF):
13054 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13055 CASE_FLT_FN (BUILT_IN_RINT):
13056 CASE_FLT_FN (BUILT_IN_ROUND):
13057 CASE_FLT_FN (BUILT_IN_SCALB):
13058 CASE_FLT_FN (BUILT_IN_SCALBLN):
13059 CASE_FLT_FN (BUILT_IN_SCALBN):
13060 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13061 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13062 CASE_FLT_FN (BUILT_IN_SINH):
13063 CASE_FLT_FN (BUILT_IN_TANH):
13064 CASE_FLT_FN (BUILT_IN_TRUNC):
13065 /* True if the 1st argument is nonnegative. */
13066 return RECURSE (arg0);
13068 CASE_FLT_FN (BUILT_IN_FMAX):
13069 /* True if the 1st OR 2nd arguments are nonnegative. */
13070 return RECURSE (arg0) || RECURSE (arg1);
13072 CASE_FLT_FN (BUILT_IN_FMIN):
13073 /* True if the 1st AND 2nd arguments are nonnegative. */
13074 return RECURSE (arg0) && RECURSE (arg1);
13076 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13077 /* True if the 2nd argument is nonnegative. */
13078 return RECURSE (arg1);
13080 CASE_FLT_FN (BUILT_IN_POWI):
13081 /* True if the 1st argument is nonnegative or the second
13082 argument is an even integer. */
13083 if (TREE_CODE (arg1) == INTEGER_CST
13084 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13086 return RECURSE (arg0);
13088 CASE_FLT_FN (BUILT_IN_POW):
13089 /* True if the 1st argument is nonnegative or the second
13090 argument is an even integer valued real. */
13091 if (TREE_CODE (arg1) == REAL_CST)
13096 c = TREE_REAL_CST (arg1);
13097 n = real_to_integer (&c);
/* Round-trip through an integer to verify ARG1 is integral.  */
13100 REAL_VALUE_TYPE cint;
13101 real_from_integer (&cint, VOIDmode, n, SIGNED);
13102 if (real_identical (&c, &cint))
13106 return RECURSE (arg0);
/* Not a recognized builtin: fall back to the code/type heuristic.  */
13111 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13114 /* Return true if T is known to be non-negative. If the return
13115 value is based on the assumption that signed overflow is undefined,
13116 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13117 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13120 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13122 enum tree_code code = TREE_CODE (t);
13123 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* TARGET_EXPR: examine the initializer that fills the slot.  */
13130 tree temp = TARGET_EXPR_SLOT (t);
13131 t = TARGET_EXPR_INITIAL (t);
13133 /* If the initializer is non-void, then it's a normal expression
13134 that will be assigned to the slot. */
13135 if (!VOID_TYPE_P (t))
13136 return RECURSE (t);
13138 /* Otherwise, the initializer sets the slot in some way. One common
13139 way is an assignment statement at the end of the initializer. */
13142 if (TREE_CODE (t) == BIND_EXPR)
13143 t = expr_last (BIND_EXPR_BODY (t));
13144 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13145 || TREE_CODE (t) == TRY_CATCH_EXPR)
13146 t = expr_last (TREE_OPERAND (t, 0));
13147 else if (TREE_CODE (t) == STATEMENT_LIST)
/* The final statement assigns to the slot: test its RHS.  */
13152 if (TREE_CODE (t) == MODIFY_EXPR
13153 && TREE_OPERAND (t, 0) == temp)
13154 return RECURSE (TREE_OPERAND (t, 1));
/* CALL_EXPR: delegate to the builtin-aware helper with up to two
   arguments (NULL_TREE when absent).  */
13161 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13162 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13164 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13165 get_callee_fndecl (t),
13168 strict_overflow_p, depth);
/* For a compound expression only the last operand's value matters.  */
13170 case COMPOUND_EXPR:
13172 return RECURSE (TREE_OPERAND (t, 1));
13175 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13178 return RECURSE (TREE_OPERAND (t, 0));
13181 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
/* End of the section where direct recursion was poisoned; the real
   function can now be defined.  */
13186 #undef tree_expr_nonnegative_warnv_p
13188 /* Return true if T is known to be non-negative. If the return
13189 value is based on the assumption that signed overflow is undefined,
13190 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13191 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13194 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13196 enum tree_code code;
13197 if (t == error_mark_node)
/* Dispatch on the tree-code class to the unary/binary/single helpers
   defined above.  */
13200 code = TREE_CODE (t);
13201 switch (TREE_CODE_CLASS (code))
13204 case tcc_comparison:
13205 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13207 TREE_OPERAND (t, 0),
13208 TREE_OPERAND (t, 1),
13209 strict_overflow_p, depth);
13212 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13214 TREE_OPERAND (t, 0),
13215 strict_overflow_p, depth);
13218 case tcc_declaration:
13219 case tcc_reference:
13220 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
/* tcc_expression codes that are really binary/unary in substance.  */
13228 case TRUTH_AND_EXPR:
13229 case TRUTH_OR_EXPR:
13230 case TRUTH_XOR_EXPR:
13231 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13233 TREE_OPERAND (t, 0),
13234 TREE_OPERAND (t, 1),
13235 strict_overflow_p, depth);
13236 case TRUTH_NOT_EXPR:
13237 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13239 TREE_OPERAND (t, 0),
13240 strict_overflow_p, depth);
13247 case WITH_SIZE_EXPR:
13249 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13252 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13256 /* Return true if `t' is known to be non-negative. Handle warnings
13257 about undefined signed overflow. */
13260 tree_expr_nonnegative_p (tree t)
13262 bool ret, strict_overflow_p;
/* Public entry point: query at depth 0, then issue the
   -Wstrict-overflow diagnostic if the answer relied on undefined
   signed overflow.  */
13264 strict_overflow_p = false;
13265 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13266 if (strict_overflow_p)
13267 fold_overflow_warning (("assuming signed overflow does not occur when "
13268 "determining that expression is always "
13270 WARN_STRICT_OVERFLOW_MISC);
13275 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13276 For floating point we further ensure that T is not denormal.
13277 Similar logic is present in nonzero_address in rtlanal.h.
13279 If the return value is based on the assumption that signed overflow
13280 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13281 change *STRICT_OVERFLOW_P. */
13284 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13285 bool *strict_overflow_p)
13290 return tree_expr_nonzero_warnv_p (op0,
13291 strict_overflow_p);
13295 tree inner_type = TREE_TYPE (op0);
13296 tree outer_type = type;
/* A widening (or same-size) conversion cannot turn a nonzero value
   into zero; narrowing could truncate to zero, so require >=.  */
13298 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13299 && tree_expr_nonzero_warnv_p (op0,
13300 strict_overflow_p));
13304 case NON_LVALUE_EXPR:
13305 return tree_expr_nonzero_warnv_p (op0,
13306 strict_overflow_p);
13315 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13316 For floating point we further ensure that T is not denormal.
13317 Similar logic is present in nonzero_address in rtlanal.h.
13319 If the return value is based on the assumption that signed overflow
13320 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13321 change *STRICT_OVERFLOW_P. */
13324 tree_binary_nonzero_warnv_p (enum tree_code code,
13327 tree op1, bool *strict_overflow_p)
13329 bool sub_strict_overflow_p;
13332 case POINTER_PLUS_EXPR:
13334 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13336 /* With the presence of negative values it is hard
13337 to say something. */
13338 sub_strict_overflow_p = false;
13339 if (!tree_expr_nonnegative_warnv_p (op0,
13340 &sub_strict_overflow_p)
13341 || !tree_expr_nonnegative_warnv_p (op1,
13342 &sub_strict_overflow_p))
13344 /* One of operands must be positive and the other non-negative. */
13345 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13346 overflows, on a twos-complement machine the sum of two
13347 nonnegative numbers can never be zero. */
13348 return (tree_expr_nonzero_warnv_p (op0,
13350 || tree_expr_nonzero_warnv_p (op1,
13351 strict_overflow_p));
/* MULT_EXPR: a product of nonzero values is nonzero only when signed
   overflow is undefined (else it can wrap to zero).  */
13356 if (TYPE_OVERFLOW_UNDEFINED (type))
13358 if (tree_expr_nonzero_warnv_p (op0,
13360 && tree_expr_nonzero_warnv_p (op1,
13361 strict_overflow_p))
13363 *strict_overflow_p = true;
/* MIN_EXPR (per the structure below): both operands nonzero.
   NOTE(review): the case labels are missing from this extract;
   confirm branch assignment against the full source.  */
13370 sub_strict_overflow_p = false;
13371 if (tree_expr_nonzero_warnv_p (op0,
13372 &sub_strict_overflow_p)
13373 && tree_expr_nonzero_warnv_p (op1,
13374 &sub_strict_overflow_p))
13376 if (sub_strict_overflow_p)
13377 *strict_overflow_p = true;
13382 sub_strict_overflow_p = false;
13383 if (tree_expr_nonzero_warnv_p (op0,
13384 &sub_strict_overflow_p))
13386 if (sub_strict_overflow_p)
13387 *strict_overflow_p = true;
13389 /* When both operands are nonzero, then MAX must be too. */
13390 if (tree_expr_nonzero_warnv_p (op1,
13391 strict_overflow_p))
13394 /* MAX where operand 0 is positive is positive. */
13395 return tree_expr_nonnegative_warnv_p (op0,
13396 strict_overflow_p);
13398 /* MAX where operand 1 is positive is positive. */
13399 else if (tree_expr_nonzero_warnv_p (op1,
13400 &sub_strict_overflow_p)
13401 && tree_expr_nonnegative_warnv_p (op1,
13402 &sub_strict_overflow_p))
13404 if (sub_strict_overflow_p)
13405 *strict_overflow_p = true;
/* COMPOUND_EXPR-like tail: value comes from one operand.  */
13411 return (tree_expr_nonzero_warnv_p (op1,
13413 || tree_expr_nonzero_warnv_p (op0,
13414 strict_overflow_p));
13423 /* Return true when T is an address and is known to be nonzero.
13424 For floating point we further ensure that T is not denormal.
13425 Similar logic is present in nonzero_address in rtlanal.h.
13427 If the return value is based on the assumption that signed overflow
13428 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13429 change *STRICT_OVERFLOW_P. */
13432 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13434 bool sub_strict_overflow_p;
13435 switch (TREE_CODE (t))
13438 return !integer_zerop (t);
/* ADDR_EXPR: decide from the base of the addressed object.  */
13442 tree base = TREE_OPERAND (t, 0);
13444 if (!DECL_P (base))
13445 base = get_base_address (base);
13450 /* For objects in symbol table check if we know they are non-zero.
13451 Don't do anything for variables and functions before symtab is built;
13452 it is quite possible that they will be declared weak later. */
13453 if (DECL_P (base) && decl_in_symtab_p (base))
13455 struct symtab_node *symbol;
13457 symbol = symtab_node::get_create (base);
13459 return symbol->nonzero_address ();
13464 /* Function local objects are never NULL. */
13466 && (DECL_CONTEXT (base)
13467 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13468 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13471 /* Constants are never weak. */
13472 if (CONSTANT_CLASS_P (base))
/* COND_EXPR: nonzero only if both arms are nonzero; propagate the
   strict-overflow flag only in that case.  */
13479 sub_strict_overflow_p = false;
13480 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13481 &sub_strict_overflow_p)
13482 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13483 &sub_strict_overflow_p))
13485 if (sub_strict_overflow_p)
13486 *strict_overflow_p = true;
13497 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13498 attempt to fold the expression to a constant without modifying TYPE,
13501 If the expression could be simplified to a constant, then return
13502 the constant. If the expression would not be simplified to a
13503 constant, then return NULL_TREE. */
13506 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
/* Fold, then keep the result only if it is a constant.  */
13508 tree tem = fold_binary (code, type, op0, op1);
13509 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13512 /* Given the components of a unary expression CODE, TYPE and OP0,
13513 attempt to fold the expression to a constant without modifying
13516 If the expression could be simplified to a constant, then return
13517 the constant. If the expression would not be simplified to a
13518 constant, then return NULL_TREE. */
13521 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
/* Fold, then keep the result only if it is a constant.  */
13523 tree tem = fold_unary (code, type, op0);
13524 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13527 /* If EXP represents referencing an element in a constant string
13528 (either via pointer arithmetic or array indexing), return the
13529 tree representing the value accessed, otherwise return NULL. */
13532 fold_read_from_constant_string (tree exp)
13534 if ((TREE_CODE (exp) == INDIRECT_REF
13535 || TREE_CODE (exp) == ARRAY_REF)
13536 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13538 tree exp1 = TREE_OPERAND (exp, 0);
13541 location_t loc = EXPR_LOCATION (exp);
/* INDIRECT_REF: let string_constant decompose pointer arithmetic
   into the string and the byte index.  */
13543 if (TREE_CODE (exp) == INDIRECT_REF)
13544 string = string_constant (exp1, &index);
/* ARRAY_REF: index is the subscript adjusted by the low bound.  */
13547 tree low_bound = array_ref_low_bound (exp);
13548 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13550 /* Optimize the special-case of a zero lower bound.
13552 We convert the low_bound to sizetype to avoid some problems
13553 with constant folding. (E.g. suppose the lower bound is 1,
13554 and its mode is QI. Without the conversion,l (ARRAY
13555 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13556 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13557 if (! integer_zerop (low_bound))
13558 index = size_diffop_loc (loc, index,
13559 fold_convert_loc (loc, sizetype, low_bound));
/* Only fold an in-bounds, constant index into a single-byte-element
   STRING_CST whose element mode matches the access.  */
13565 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13566 && TREE_CODE (string) == STRING_CST
13567 && TREE_CODE (index) == INTEGER_CST
13568 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13569 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13571 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13572 return build_int_cst_type (TREE_TYPE (exp),
13573 (TREE_STRING_POINTER (string)
13574 [TREE_INT_CST_LOW (index)]));
13579 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13580 an integer constant, real, or fixed-point constant.
13582 TYPE is the type of the result. */
13585 fold_negate_const (tree arg0, tree type)
13587 tree t = NULL_TREE;
13589 switch (TREE_CODE (arg0))
/* INTEGER_CST: wide-int negation; propagate overflow only for
   signed types.  */
13594 wide_int val = wi::neg (arg0, &overflow);
13595 t = force_fit_type (type, val, 1,
13596 (overflow | TREE_OVERFLOW (arg0))
13597 && !TYPE_UNSIGNED (type));
/* REAL_CST: flip the sign of the real value.  */
13602 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
/* FIXED_CST: fixed-point negation, honoring saturation.  */
13607 FIXED_VALUE_TYPE f;
13608 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13609 &(TREE_FIXED_CST (arg0)), NULL,
13610 TYPE_SATURATING (type));
13611 t = build_fixed (type, f);
13612 /* Propagate overflow flags. */
13613 if (overflow_p | TREE_OVERFLOW (arg0))
13614 TREE_OVERFLOW (t) = 1;
13619 gcc_unreachable ();
13625 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13626 an integer constant or real constant.
13628 TYPE is the type of the result. */
13631 fold_abs_const (tree arg0, tree type)
13633 tree t = NULL_TREE;
13635 switch (TREE_CODE (arg0))
13639 /* If the value is unsigned or non-negative, then the absolute value
13640 is the same as the ordinary value. */
13641 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13644 /* If the value is negative, then the absolute value is
/* abs(INT_MIN) overflows; force_fit_type records it.  */
13649 wide_int val = wi::neg (arg0, &overflow);
13650 t = force_fit_type (type, val, -1,
13651 overflow | TREE_OVERFLOW (arg0));
/* REAL_CST: negate only when the constant is negative.  */
13657 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13658 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13664 gcc_unreachable ();
13670 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13671 constant. TYPE is the type of the result. */
13674 fold_not_const (const_tree arg0, tree type)
13676 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Bitwise-complement in wide-int space and refit into TYPE, preserving
   the incoming overflow flag.  */
13678 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13681 /* Given CODE, a relational operator, the target type, TYPE and two
13682 constant operands OP0 and OP1, return the result of the
13683 relational operation. If the result is not a compile time
13684 constant, then return NULL_TREE. */
13687 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13689 int result, invert;
13691 /* From here on, the only cases we handle are when the result is
13692 known to be a constant. */
13694 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13696 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13697 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13699 /* Handle the cases where either operand is a NaN. */
13700 if (real_isnan (c0) || real_isnan (c1))
/* NOTE(review): most of the switch over CODE for the NaN case is
   missing from this excerpt; only a few of its lines are visible.  */
13710 case UNORDERED_EXPR:
/* With -ftrapping-math, folding away an ordered comparison of a NaN
   would lose a potential trap, so refuse to fold.  */
13724 if (flag_trapping_math)
13730 gcc_unreachable ();
13733 return constant_boolean_node (result, type);
/* Neither operand is a NaN: defer to the real-number comparison.  */
13736 return constant_boolean_node (real_compare (code, c0, c1), type);
13739 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13741 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13742 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13743 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13746 /* Handle equality/inequality of complex constants. */
13747 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
/* Fold each part independently, then combine: complex values are equal
   iff both parts are equal, unequal iff either part differs.  */
13749 tree rcond = fold_relational_const (code, type,
13750 TREE_REALPART (op0),
13751 TREE_REALPART (op1));
13752 tree icond = fold_relational_const (code, type,
13753 TREE_IMAGPART (op0),
13754 TREE_IMAGPART (op1));
13755 if (code == EQ_EXPR)
13756 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13757 else if (code == NE_EXPR)
13758 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
/* Vector constants fold element-wise into a mask vector of 0 / -1.  */
13763 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13765 unsigned count = VECTOR_CST_NELTS (op0);
13766 tree *elts = XALLOCAVEC (tree, count);
13767 gcc_assert (VECTOR_CST_NELTS (op1) == count
13768 && TYPE_VECTOR_SUBPARTS (type) == count);
13770 for (unsigned i = 0; i < count; i++)
13772 tree elem_type = TREE_TYPE (type);
13773 tree elem0 = VECTOR_CST_ELT (op0, i);
13774 tree elem1 = VECTOR_CST_ELT (op1, i);
13776 tree tem = fold_relational_const (code, elem_type,
/* An element that did not fold blocks folding the whole vector
   (the early return is not visible in this excerpt).  */
13779 if (tem == NULL_TREE)
13782 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13785 return build_vector (type, elts);
13788 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13790 To compute GT, swap the arguments and do LT.
13791 To compute GE, do LT and invert the result.
13792 To compute LE, swap the arguments, do LT and invert the result.
13793 To compute NE, do EQ and invert the result.
13795 Therefore, the code below must handle only EQ and LT. */
13797 if (code == LE_EXPR || code == GT_EXPR)
13799 std::swap (op0, op1);
13800 code = swap_tree_comparison (code);
13803 /* Note that it is safe to invert for real values here because we
13804 have already handled the one case that it matters. */
13807 if (code == NE_EXPR || code == GE_EXPR)
13810 code = invert_tree_comparison (code, false);
13813 /* Compute a result for LT or EQ if args permit;
13814 Otherwise return T. */
13815 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13817 if (code == EQ_EXPR)
13818 result = tree_int_cst_equal (op0, op1);
13820 result = tree_int_cst_lt (op0, op1);
/* NOTE(review): the application of INVERT to RESULT is missing from
   this excerpt.  */
13827 return constant_boolean_node (result, type);
13830 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13831 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
itself. */
13835 fold_build_cleanup_point_expr (tree type, tree expr)
13837 /* If the expression does not have side effects then we don't have to wrap
13838 it with a cleanup point expression. */
13839 if (!TREE_SIDE_EFFECTS (expr))
13842 /* If the expression is a return, check to see if the expression inside the
13843 return has no side effects or the right hand side of the modify expression
13844 inside the return. If either don't have side effects set we don't need to
13845 wrap the expression in a cleanup point expression. Note we don't check the
13846 left hand side of the modify because it should always be a return decl. */
13847 if (TREE_CODE (expr) == RETURN_EXPR)
13849 tree op = TREE_OPERAND (expr, 0);
13850 if (!op || !TREE_SIDE_EFFECTS (op))
/* OP is the MODIFY_EXPR inside the return; inspect only its RHS.  */
13852 op = TREE_OPERAND (op, 1);
13853 if (!TREE_SIDE_EFFECTS (op))
/* Side effects are present: wrap EXPR so temporaries are cleaned up.  */
13857 return build1 (CLEANUP_POINT_EXPR, type, expr);
13860 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13861 of an indirection through OP0, or NULL_TREE if no simplification is
possible. */
13865 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
/* NOTE(review): the declarations of SUB/SUBTYPE and the stripping of
   no-op conversions from OP0 are missing from this excerpt.  */
13871 subtype = TREE_TYPE (sub);
13872 if (!POINTER_TYPE_P (subtype))
13875 if (TREE_CODE (sub) == ADDR_EXPR)
13877 tree op = TREE_OPERAND (sub, 0);
13878 tree optype = TREE_TYPE (op);
13879 /* *&CONST_DECL -> to the value of the const decl. */
13880 if (TREE_CODE (op) == CONST_DECL)
13881 return DECL_INITIAL (op);
13882 /* *&p => p; make sure to handle *&"str"[cst] here. */
13883 if (type == optype)
13885 tree fop = fold_read_from_constant_string (op);
13891 /* *(foo *)&fooarray => fooarray[0] */
13892 else if (TREE_CODE (optype) == ARRAY_TYPE
13893 && type == TREE_TYPE (optype)
13894 && (!in_gimple_form
13895 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13897 tree type_domain = TYPE_DOMAIN (optype);
13898 tree min_val = size_zero_node;
13899 if (type_domain && TYPE_MIN_VALUE (type_domain))
13900 min_val = TYPE_MIN_VALUE (type_domain);
/* In GIMPLE a non-constant lower bound cannot be used directly as an
   ARRAY_REF index, so give up in that case.  */
13902 && TREE_CODE (min_val) != INTEGER_CST)
13904 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13905 NULL_TREE, NULL_TREE);
13907 /* *(foo *)&complexfoo => __real__ complexfoo */
13908 else if (TREE_CODE (optype) == COMPLEX_TYPE
13909 && type == TREE_TYPE (optype))
13910 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13911 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13912 else if (TREE_CODE (optype) == VECTOR_TYPE
13913 && type == TREE_TYPE (optype))
13915 tree part_width = TYPE_SIZE (type);
13916 tree index = bitsize_int (0);
13917 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
/* Pointer-plus of an address with a constant offset: try to fold the
   whole indirection into a direct reference at that offset.  */
13921 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13922 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13924 tree op00 = TREE_OPERAND (sub, 0);
13925 tree op01 = TREE_OPERAND (sub, 1);
13928 if (TREE_CODE (op00) == ADDR_EXPR)
13931 op00 = TREE_OPERAND (op00, 0);
13932 op00type = TREE_TYPE (op00);
13934 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13935 if (TREE_CODE (op00type) == VECTOR_TYPE
13936 && type == TREE_TYPE (op00type))
13938 HOST_WIDE_INT offset = tree_to_shwi (op01);
13939 tree part_width = TYPE_SIZE (type);
13940 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
13941 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13942 tree index = bitsize_int (indexi);
/* Only fold when the byte offset stays inside the vector.  */
13944 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
13945 return fold_build3_loc (loc,
13946 BIT_FIELD_REF, type, op00,
13947 part_width, index);
13950 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13951 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13952 && type == TREE_TYPE (op00type))
/* The imaginary part sits exactly one element size past the base.  */
13954 tree size = TYPE_SIZE_UNIT (type);
13955 if (tree_int_cst_equal (size, op01))
13956 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13958 /* ((foo *)&fooarray)[1] => fooarray[1] */
13959 else if (TREE_CODE (op00type) == ARRAY_TYPE
13960 && type == TREE_TYPE (op00type))
13962 tree type_domain = TYPE_DOMAIN (op00type);
13963 tree min_val = size_zero_node;
13964 if (type_domain && TYPE_MIN_VALUE (type_domain))
13965 min_val = TYPE_MIN_VALUE (type_domain);
/* Convert the byte offset to an element index, then rebase it by the
   array's lower bound.  */
13966 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
13967 TYPE_SIZE_UNIT (type));
13968 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
13969 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13970 NULL_TREE, NULL_TREE);
13975 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13976 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13977 && type == TREE_TYPE (TREE_TYPE (subtype))
13978 && (!in_gimple_form
13979 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13982 tree min_val = size_zero_node;
13983 sub = build_fold_indirect_ref_loc (loc, sub);
13984 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13985 if (type_domain && TYPE_MIN_VALUE (type_domain))
13986 min_val = TYPE_MIN_VALUE (type_domain);
/* As above, GIMPLE requires a constant lower bound.  */
13988 && TREE_CODE (min_val) != INTEGER_CST)
13990 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
13997 /* Builds an expression for an indirection through T, simplifying some
cases when possible. */
14001 build_fold_indirect_ref_loc (location_t loc, tree t)
14003 tree type = TREE_TYPE (TREE_TYPE (t));
14004 tree sub = fold_indirect_ref_1 (loc, type, t);
/* No simplification applied: build a plain INDIRECT_REF.  (The early
   return of SUB is not visible in this excerpt.)  */
14009 return build1_loc (loc, INDIRECT_REF, type, t);
14012 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14015 fold_indirect_ref_loc (location_t loc, tree t)
/* Try the shared simplifier on T's operand; fall back to T itself.
   (The tail of this function is not visible in this excerpt.)  */
14017 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14025 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14026 whose result is ignored. The type of the returned tree need not be
14027 the same as the original expression. */
14030 fold_ignored_result (tree t)
/* An expression with no side effects can be dropped entirely.  */
14032 if (!TREE_SIDE_EFFECTS (t))
14033 return integer_zero_node;
/* Peel wrappers until only the side-effecting core remains.
   NOTE(review): several case labels and the loop construct around this
   switch are missing from this excerpt.  */
14036 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14039 t = TREE_OPERAND (t, 0);
14043 case tcc_comparison:
/* Keep whichever operand still carries side effects.  */
14044 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14045 t = TREE_OPERAND (t, 0);
14046 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14047 t = TREE_OPERAND (t, 1);
14052 case tcc_expression:
14053 switch (TREE_CODE (t))
14055 case COMPOUND_EXPR:
/* If the second operand has side effects we must keep the whole
   COMPOUND_EXPR; otherwise only the first operand matters.  */
14056 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14058 t = TREE_OPERAND (t, 0);
/* COND_EXPR: keep it when either arm has side effects; otherwise only
   the condition matters.  */
14062 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14063 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14065 t = TREE_OPERAND (t, 0);
14078 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14081 round_up_loc (location_t loc, tree value, unsigned int divisor)
14083 tree div = NULL_TREE;
/* NOTE(review): the assertion that DIVISOR > 0 and the final return are
   missing from this excerpt.  */
14088 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14089 have to do anything. Only do this when we are not given a const,
14090 because in that case, this check is more expensive than just
doing it. */
14092 if (TREE_CODE (value) != INTEGER_CST)
14094 div = build_int_cst (TREE_TYPE (value), divisor);
14096 if (multiple_of_p (TREE_TYPE (value), value, div))
14100 /* If divisor is a power of two, simplify this to bit manipulation. */
14101 if (divisor == (divisor & -divisor))
14103 if (TREE_CODE (value) == INTEGER_CST)
/* Constant case: do the add-and-mask in wide-int arithmetic.  */
14105 wide_int val = value;
14108 if ((val & (divisor - 1)) == 0)
14111 overflow_p = TREE_OVERFLOW (value);
14112 val += divisor - 1;
14113 val &= - (int) divisor;
14117 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
/* Non-constant case: (VALUE + (DIVISOR-1)) & -DIVISOR.  */
14123 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14124 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14125 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14126 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
14132 div = build_int_cst (TREE_TYPE (value), divisor);
14133 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14134 value = size_binop_loc (loc, MULT_EXPR, value, div);
14140 /* Likewise, but round down. */
14143 round_down_loc (location_t loc, tree value, int divisor)
14145 tree div = NULL_TREE;
14147 gcc_assert (divisor > 0);
14151 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14152 have to do anything. Only do this when we are not given a const,
14153 because in that case, this check is more expensive than just
doing it. */
14155 if (TREE_CODE (value) != INTEGER_CST)
14157 div = build_int_cst (TREE_TYPE (value), divisor);
14159 if (multiple_of_p (TREE_TYPE (value), value, div))
14163 /* If divisor is a power of two, simplify this to bit manipulation. */
14164 if (divisor == (divisor & -divisor))
/* Power of two: just mask off the low bits, VALUE & -DIVISOR.  */
14168 t = build_int_cst (TREE_TYPE (value), -divisor);
14169 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
14174 div = build_int_cst (TREE_TYPE (value), divisor);
14175 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14176 value = size_binop_loc (loc, MULT_EXPR, value, div);
14182 /* Returns the pointer to the base of the object addressed by EXP and
14183 extracts the information about the offset of the access, storing it
14184 to PBITPOS and POFFSET. */
14187 split_address_to_core_and_offset (tree exp,
14188 HOST_WIDE_INT *pbitpos, tree *poffset)
/* Scratch outputs for get_inner_reference; only the core and the offsets
   are propagated to the caller.  */
14192 int unsignedp, volatilep;
14193 HOST_WIDE_INT bitsize;
14194 location_t loc = EXPR_LOCATION (exp);
14196 if (TREE_CODE (exp) == ADDR_EXPR)
/* &obj: peel the reference apart and re-take the address of its base.  */
14198 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14199 poffset, &mode, &unsignedp, &volatilep,
14201 core = build_fold_addr_expr_loc (loc, core);
/* Otherwise EXP itself is the core with no variable offset.
   NOTE(review): the assignments to CORE, *PBITPOS and the return are
   missing from this excerpt.  */
14207 *poffset = NULL_TREE;
14213 /* Returns true if addresses of E1 and E2 differ by a constant, false
14214 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14217 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14220 HOST_WIDE_INT bitpos1, bitpos2;
14221 tree toffset1, toffset2, tdiff, type;
14223 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14224 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Bail out unless both addresses are byte-aligned accesses off the very
   same base object.  */
14226 if (bitpos1 % BITS_PER_UNIT != 0
14227 || bitpos2 % BITS_PER_UNIT != 0
14228 || !operand_equal_p (core1, core2, 0))
14231 if (toffset1 && toffset2)
/* Both variable offsets present: their difference must itself fold to a
   constant that fits in a HOST_WIDE_INT.  */
14233 type = TREE_TYPE (toffset1);
14234 if (type != TREE_TYPE (toffset2))
14235 toffset2 = fold_convert (type, toffset2);
14237 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14238 if (!cst_and_fits_in_hwi (tdiff))
14241 *diff = int_cst_value (tdiff);
14243 else if (toffset1 || toffset2)
14245 /* If only one of the offsets is non-constant, the difference cannot
be a constant. */
/* Fold the constant bit positions into the byte difference.  */
14252 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14256 /* Simplify the floating point expression EXP when the sign of the
14257 result is not significant. Return NULL_TREE if no simplification
was possible. */
14261 fold_strip_sign_ops (tree exp)
14264 location_t loc = EXPR_LOCATION (exp);
/* NOTE(review): the case labels (presumably ABS_EXPR/NEGATE_EXPR,
   MULT_EXPR/RDIV_EXPR, COND_EXPR, CALL_EXPR) and the declarations of
   ARG0/ARG1 are missing from this excerpt.  */
14266 switch (TREE_CODE (exp))
/* abs/negate: the operation itself only affects the sign, so drop it
   and recurse into the operand.  */
14270 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14271 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
/* Multiplication/division: sign-dependent rounding modes make the sign
   of intermediate results significant, so don't touch them then.  */
14275 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
14277 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14278 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14279 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14280 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
14281 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14282 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14285 case COMPOUND_EXPR:
/* Only the second operand's sign matters; the first is kept for its
   side effects.  */
14286 arg0 = TREE_OPERAND (exp, 0);
14287 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14289 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
/* COND_EXPR: strip sign ops from both arms, keep the condition.  */
14293 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14294 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14296 return fold_build3_loc (loc,
14297 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14298 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14299 arg1 ? arg1 : TREE_OPERAND (exp, 2));
/* Calls to recognized math built-ins.  */
14304 const enum built_in_function fcode = builtin_mathfn_code (exp);
14307 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14308 /* Strip copysign function call, return the 1st argument. */
14309 arg0 = CALL_EXPR_ARG (exp, 0);
14310 arg1 = CALL_EXPR_ARG (exp, 1);
/* omit_one_operand keeps ARG1 for its side effects while yielding ARG0.  */
14311 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
14314 /* Strip sign ops from the argument of "odd" math functions. */
14315 if (negate_mathfn_p (fcode))
14317 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14319 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
14332 /* Return OFF converted to a pointer offset type suitable as offset for
14333 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14335 convert_to_ptrofftype_loc (location_t loc, tree off)
/* POINTER_PLUS_EXPR requires its offset operand to be sizetype.  */
14337 return fold_convert_loc (loc, sizetype, off);
14340 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14342 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
/* Coerce OFF to sizetype first, as POINTER_PLUS_EXPR demands.  */
14344 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14345 ptr, convert_to_ptrofftype_loc (loc, off));
14348 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.
Variant taking the offset as a host integer rather than a tree. */
14350 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
/* size_int builds a sizetype constant, the required offset type.  */
14352 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14353 ptr, size_int (off));