1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
56 #include "tree-iterator.h"
58 #include "insn-config.h"
68 #include "diagnostic-core.h"
70 #include "langhooks.h"
72 #include "internal-fn.h"
78 #include "generic-match.h"
79 #include "optabs-query.h"
81 #ifndef LOAD_EXTEND_OP
82 #define LOAD_EXTEND_OP(M) UNKNOWN
85 /* Nonzero if we are folding constants inside an initializer; zero
87 int folding_initializer = 0;
89 /* The following constants represent a bit based encoding of GCC's
90 comparison operators. This encoding simplifies transformations
91 on relational comparison operators, such as AND and OR. */
92 enum comparison_code {
111 static bool negate_mathfn_p (enum built_in_function);
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
116 static enum comparison_code comparison_to_compcode (enum tree_code);
117 static enum tree_code compcode_to_comparison (enum comparison_code);
118 static int operand_equal_for_comparison_p (tree, tree, tree);
119 static int twoval_comparison_p (tree, tree *, tree *, int *);
120 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
121 static tree make_bit_field_ref (location_t, tree, tree,
122 HOST_WIDE_INT, HOST_WIDE_INT, int);
123 static tree optimize_bit_field_compare (location_t, enum tree_code,
125 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
127 machine_mode *, int *, int *,
129 static int simple_operand_p (const_tree);
130 static bool simple_operand_p_2 (tree);
131 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
132 static tree range_predecessor (tree);
133 static tree range_successor (tree);
134 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
135 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
136 static tree unextend (tree, int, int, tree);
137 static tree optimize_minmax_comparison (location_t, enum tree_code,
139 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
140 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
141 static tree fold_binary_op_with_conditional_arg (location_t,
142 enum tree_code, tree,
145 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
146 static bool reorder_operands_p (const_tree, const_tree);
147 static tree fold_negate_const (tree, tree);
148 static tree fold_not_const (const_tree, tree);
149 static tree fold_relational_const (enum tree_code, tree, tree, tree);
150 static tree fold_convert_const (enum tree_code, tree, tree);
151 static tree fold_view_convert_expr (tree, tree);
152 static bool vec_cst_ctor_to_array (tree, tree *);
155 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
156 Otherwise, return LOC. */
159 expr_location_or (tree t, location_t loc)
161 location_t tloc = EXPR_LOCATION (t);
162 return tloc == UNKNOWN_LOCATION ? loc : tloc;
165 /* Similar to protected_set_expr_location, but never modify x in place,
166 if location can and needs to be set, unshare it. */
169 protected_set_expr_location_unshare (tree x, location_t loc)
171 if (CAN_HAVE_LOCATION_P (x)
172 && EXPR_LOCATION (x) != loc
173 && !(TREE_CODE (x) == SAVE_EXPR
174 || TREE_CODE (x) == TARGET_EXPR
175 || TREE_CODE (x) == BIND_EXPR))
178 SET_EXPR_LOCATION (x, loc);
183 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
184 division and returns the quotient. Otherwise returns
188 div_if_zero_remainder (const_tree arg1, const_tree arg2)
192 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
194 return wide_int_to_tree (TREE_TYPE (arg1), quo);
199 /* This is nonzero if we should defer warnings about undefined
200 overflow. This facility exists because these warnings are a
201 special case. The code to estimate loop iterations does not want
202 to issue any warnings, since it works with expressions which do not
203 occur in user code. Various bits of cleanup code call fold(), but
204 only use the result if it has certain characteristics (e.g., is a
205 constant); that code only wants to issue a warning if the result is
208 static int fold_deferring_overflow_warnings;
210 /* If a warning about undefined overflow is deferred, this is the
211 warning. Note that this may cause us to turn two warnings into
212 one, but that is fine since it is sufficient to only give one
213 warning per expression. */
215 static const char* fold_deferred_overflow_warning;
217 /* If a warning about undefined overflow is deferred, this is the
218 level at which the warning should be emitted. */
220 static enum warn_strict_overflow_code fold_deferred_overflow_code;
222 /* Start deferring overflow warnings. We could use a stack here to
223 permit nested calls, but at present it is not necessary. */
226 fold_defer_overflow_warnings (void)
228 ++fold_deferring_overflow_warnings;
231 /* Stop deferring overflow warnings. If there is a pending warning,
232 and ISSUE is true, then issue the warning if appropriate. STMT is
233 the statement with which the warning should be associated (used for
234 location information); STMT may be NULL. CODE is the level of the
235 warning--a warn_strict_overflow_code value. This function will use
236 the smaller of CODE and the deferred code when deciding whether to
237 issue the warning. CODE may be zero to mean to always use the
241 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
246 gcc_assert (fold_deferring_overflow_warnings > 0);
247 --fold_deferring_overflow_warnings;
248 if (fold_deferring_overflow_warnings > 0)
250 if (fold_deferred_overflow_warning != NULL
252 && code < (int) fold_deferred_overflow_code)
253 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
257 warnmsg = fold_deferred_overflow_warning;
258 fold_deferred_overflow_warning = NULL;
260 if (!issue || warnmsg == NULL)
263 if (gimple_no_warning_p (stmt))
266 /* Use the smallest code level when deciding to issue the
268 if (code == 0 || code > (int) fold_deferred_overflow_code)
269 code = fold_deferred_overflow_code;
271 if (!issue_strict_overflow_warning (code))
275 locus = input_location;
277 locus = gimple_location (stmt);
278 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
281 /* Stop deferring overflow warnings, ignoring any deferred
285 fold_undefer_and_ignore_overflow_warnings (void)
287 fold_undefer_overflow_warnings (false, NULL, 0);
290 /* Whether we are deferring overflow warnings. */
293 fold_deferring_overflow_warnings_p (void)
295 return fold_deferring_overflow_warnings > 0;
298 /* This is called when we fold something based on the fact that signed
299 overflow is undefined. */
302 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
304 if (fold_deferring_overflow_warnings > 0)
306 if (fold_deferred_overflow_warning == NULL
307 || wc < fold_deferred_overflow_code)
309 fold_deferred_overflow_warning = gmsgid;
310 fold_deferred_overflow_code = wc;
313 else if (issue_strict_overflow_warning (wc))
314 warning (OPT_Wstrict_overflow, gmsgid);
317 /* Return true if the built-in mathematical function specified by CODE
318 is odd, i.e. -f(x) == f(-x). */
321 negate_mathfn_p (enum built_in_function code)
325 CASE_FLT_FN (BUILT_IN_ASIN):
326 CASE_FLT_FN (BUILT_IN_ASINH):
327 CASE_FLT_FN (BUILT_IN_ATAN):
328 CASE_FLT_FN (BUILT_IN_ATANH):
329 CASE_FLT_FN (BUILT_IN_CASIN):
330 CASE_FLT_FN (BUILT_IN_CASINH):
331 CASE_FLT_FN (BUILT_IN_CATAN):
332 CASE_FLT_FN (BUILT_IN_CATANH):
333 CASE_FLT_FN (BUILT_IN_CBRT):
334 CASE_FLT_FN (BUILT_IN_CPROJ):
335 CASE_FLT_FN (BUILT_IN_CSIN):
336 CASE_FLT_FN (BUILT_IN_CSINH):
337 CASE_FLT_FN (BUILT_IN_CTAN):
338 CASE_FLT_FN (BUILT_IN_CTANH):
339 CASE_FLT_FN (BUILT_IN_ERF):
340 CASE_FLT_FN (BUILT_IN_LLROUND):
341 CASE_FLT_FN (BUILT_IN_LROUND):
342 CASE_FLT_FN (BUILT_IN_ROUND):
343 CASE_FLT_FN (BUILT_IN_SIN):
344 CASE_FLT_FN (BUILT_IN_SINH):
345 CASE_FLT_FN (BUILT_IN_TAN):
346 CASE_FLT_FN (BUILT_IN_TANH):
347 CASE_FLT_FN (BUILT_IN_TRUNC):
350 CASE_FLT_FN (BUILT_IN_LLRINT):
351 CASE_FLT_FN (BUILT_IN_LRINT):
352 CASE_FLT_FN (BUILT_IN_NEARBYINT):
353 CASE_FLT_FN (BUILT_IN_RINT):
354 return !flag_rounding_math;
362 /* Check whether we may negate an integer constant T without causing
366 may_negate_without_overflow_p (const_tree t)
370 gcc_assert (TREE_CODE (t) == INTEGER_CST);
372 type = TREE_TYPE (t);
373 if (TYPE_UNSIGNED (type))
376 return !wi::only_sign_bit_p (t);
379 /* Determine whether an expression T can be cheaply negated using
380 the function negate_expr without introducing undefined overflow. */
383 negate_expr_p (tree t)
390 type = TREE_TYPE (t);
393 switch (TREE_CODE (t))
396 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
399 /* Check that -CST will not overflow type. */
400 return may_negate_without_overflow_p (t);
402 return (INTEGRAL_TYPE_P (type)
403 && TYPE_OVERFLOW_WRAPS (type));
409 return !TYPE_OVERFLOW_SANITIZED (type);
412 /* We want to canonicalize to positive real constants. Pretend
413 that only negative ones can be easily negated. */
414 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
417 return negate_expr_p (TREE_REALPART (t))
418 && negate_expr_p (TREE_IMAGPART (t));
422 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
425 int count = TYPE_VECTOR_SUBPARTS (type), i;
427 for (i = 0; i < count; i++)
428 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
435 return negate_expr_p (TREE_OPERAND (t, 0))
436 && negate_expr_p (TREE_OPERAND (t, 1));
439 return negate_expr_p (TREE_OPERAND (t, 0));
442 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
443 || HONOR_SIGNED_ZEROS (element_mode (type)))
445 /* -(A + B) -> (-B) - A. */
446 if (negate_expr_p (TREE_OPERAND (t, 1))
447 && reorder_operands_p (TREE_OPERAND (t, 0),
448 TREE_OPERAND (t, 1)))
450 /* -(A + B) -> (-A) - B. */
451 return negate_expr_p (TREE_OPERAND (t, 0));
454 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
455 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
456 && !HONOR_SIGNED_ZEROS (element_mode (type))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1));
461 if (TYPE_UNSIGNED (TREE_TYPE (t)))
467 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
468 return negate_expr_p (TREE_OPERAND (t, 1))
469 || negate_expr_p (TREE_OPERAND (t, 0));
475 /* In general we can't negate A / B, because if A is INT_MIN and
476 B is 1, we may turn this into INT_MIN / -1 which is undefined
477 and actually traps on some architectures. But if overflow is
478 undefined, we can negate, because - (INT_MIN / 1) is an
480 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
482 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
484 /* If overflow is undefined then we have to be careful because
485 we ask whether it's ok to associate the negate with the
486 division which is not ok for example for
487 -((a - b) / c) where (-(a - b)) / c may invoke undefined
488 overflow because of negating INT_MIN. So do not use
489 negate_expr_p here but open-code the two important cases. */
490 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
491 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
492 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
495 else if (negate_expr_p (TREE_OPERAND (t, 0)))
497 return negate_expr_p (TREE_OPERAND (t, 1));
500 /* Negate -((double)float) as (double)(-float). */
501 if (TREE_CODE (type) == REAL_TYPE)
503 tree tem = strip_float_extensions (t);
505 return negate_expr_p (tem);
510 /* Negate -f(x) as f(-x). */
511 if (negate_mathfn_p (builtin_mathfn_code (t)))
512 return negate_expr_p (CALL_EXPR_ARG (t, 0));
516 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
517 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
519 tree op1 = TREE_OPERAND (t, 1);
520 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
531 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
532 simplification is possible.
533 If negate_expr_p would return true for T, NULL_TREE will never be
537 fold_negate_expr (location_t loc, tree t)
539 tree type = TREE_TYPE (t);
542 switch (TREE_CODE (t))
544 /* Convert - (~A) to A + 1. */
546 if (INTEGRAL_TYPE_P (type))
547 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
548 build_one_cst (type));
552 tem = fold_negate_const (t, type);
553 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
554 || (ANY_INTEGRAL_TYPE_P (type)
555 && !TYPE_OVERFLOW_TRAPS (type)
556 && TYPE_OVERFLOW_WRAPS (type))
557 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
562 tem = fold_negate_const (t, type);
566 tem = fold_negate_const (t, type);
571 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
572 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
574 return build_complex (type, rpart, ipart);
580 int count = TYPE_VECTOR_SUBPARTS (type), i;
581 tree *elts = XALLOCAVEC (tree, count);
583 for (i = 0; i < count; i++)
585 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
586 if (elts[i] == NULL_TREE)
590 return build_vector (type, elts);
594 if (negate_expr_p (t))
595 return fold_build2_loc (loc, COMPLEX_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
597 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
601 if (negate_expr_p (t))
602 return fold_build1_loc (loc, CONJ_EXPR, type,
603 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
607 if (!TYPE_OVERFLOW_SANITIZED (type))
608 return TREE_OPERAND (t, 0);
612 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
613 && !HONOR_SIGNED_ZEROS (element_mode (type)))
615 /* -(A + B) -> (-B) - A. */
616 if (negate_expr_p (TREE_OPERAND (t, 1))
617 && reorder_operands_p (TREE_OPERAND (t, 0),
618 TREE_OPERAND (t, 1)))
620 tem = negate_expr (TREE_OPERAND (t, 1));
621 return fold_build2_loc (loc, MINUS_EXPR, type,
622 tem, TREE_OPERAND (t, 0));
625 /* -(A + B) -> (-A) - B. */
626 if (negate_expr_p (TREE_OPERAND (t, 0)))
628 tem = negate_expr (TREE_OPERAND (t, 0));
629 return fold_build2_loc (loc, MINUS_EXPR, type,
630 tem, TREE_OPERAND (t, 1));
636 /* - (A - B) -> B - A */
637 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
638 && !HONOR_SIGNED_ZEROS (element_mode (type))
639 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
640 return fold_build2_loc (loc, MINUS_EXPR, type,
641 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
645 if (TYPE_UNSIGNED (type))
651 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
653 tem = TREE_OPERAND (t, 1);
654 if (negate_expr_p (tem))
655 return fold_build2_loc (loc, TREE_CODE (t), type,
656 TREE_OPERAND (t, 0), negate_expr (tem));
657 tem = TREE_OPERAND (t, 0);
658 if (negate_expr_p (tem))
659 return fold_build2_loc (loc, TREE_CODE (t), type,
660 negate_expr (tem), TREE_OPERAND (t, 1));
667 /* In general we can't negate A / B, because if A is INT_MIN and
668 B is 1, we may turn this into INT_MIN / -1 which is undefined
669 and actually traps on some architectures. But if overflow is
670 undefined, we can negate, because - (INT_MIN / 1) is an
672 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
674 const char * const warnmsg = G_("assuming signed overflow does not "
675 "occur when negating a division");
676 tem = TREE_OPERAND (t, 1);
677 if (negate_expr_p (tem))
679 if (INTEGRAL_TYPE_P (type)
680 && (TREE_CODE (tem) != INTEGER_CST
681 || integer_onep (tem)))
682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0), negate_expr (tem));
686 /* If overflow is undefined then we have to be careful because
687 we ask whether it's ok to associate the negate with the
688 division which is not ok for example for
689 -((a - b) / c) where (-(a - b)) / c may invoke undefined
690 overflow because of negating INT_MIN. So do not use
691 negate_expr_p here but open-code the two important cases. */
692 tem = TREE_OPERAND (t, 0);
693 if ((INTEGRAL_TYPE_P (type)
694 && (TREE_CODE (tem) == NEGATE_EXPR
695 || (TREE_CODE (tem) == INTEGER_CST
696 && may_negate_without_overflow_p (tem))))
697 || !INTEGRAL_TYPE_P (type))
698 return fold_build2_loc (loc, TREE_CODE (t), type,
699 negate_expr (tem), TREE_OPERAND (t, 1));
704 /* Convert -((double)float) into (double)(-float). */
705 if (TREE_CODE (type) == REAL_TYPE)
707 tem = strip_float_extensions (t);
708 if (tem != t && negate_expr_p (tem))
709 return fold_convert_loc (loc, type, negate_expr (tem));
714 /* Negate -f(x) as f(-x). */
715 if (negate_mathfn_p (builtin_mathfn_code (t))
716 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
720 fndecl = get_callee_fndecl (t);
721 arg = negate_expr (CALL_EXPR_ARG (t, 0));
722 return build_call_expr_loc (loc, fndecl, 1, arg);
727 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
728 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
730 tree op1 = TREE_OPERAND (t, 1);
731 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
733 tree ntype = TYPE_UNSIGNED (type)
734 ? signed_type_for (type)
735 : unsigned_type_for (type);
736 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
737 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
738 return fold_convert_loc (loc, type, temp);
750 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
751 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
763 loc = EXPR_LOCATION (t);
764 type = TREE_TYPE (t);
767 tem = fold_negate_expr (loc, t);
769 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
770 return fold_convert_loc (loc, type, tem);
773 /* Split a tree IN into a constant, literal and variable parts that could be
774 combined with CODE to make IN. "constant" means an expression with
775 TREE_CONSTANT but that isn't an actual constant. CODE must be a
776 commutative arithmetic operation. Store the constant part into *CONP,
777 the literal in *LITP and return the variable part. If a part isn't
778 present, set it to null. If the tree does not decompose in this way,
779 return the entire tree as the variable part and the other parts as null.
781 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
782 case, we negate an operand that was subtracted. Except if it is a
783 literal for which we use *MINUS_LITP instead.
785 If NEGATE_P is true, we are negating all of IN, again except a literal
786 for which we use *MINUS_LITP instead.
788 If IN is itself a literal or constant, return it as appropriate.
790 Note that we do not guarantee that any of the three values will be the
791 same type as IN, but they will have the same signedness and mode. */
794 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
795 tree *minus_litp, int negate_p)
803 /* Strip any conversions that don't change the machine mode or signedness. */
804 STRIP_SIGN_NOPS (in);
806 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
807 || TREE_CODE (in) == FIXED_CST)
809 else if (TREE_CODE (in) == code
810 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
811 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
812 /* We can associate addition and subtraction together (even
813 though the C standard doesn't say so) for integers because
814 the value is not affected. For reals, the value might be
815 affected, so we can't. */
816 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
817 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
819 tree op0 = TREE_OPERAND (in, 0);
820 tree op1 = TREE_OPERAND (in, 1);
821 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
822 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
824 /* First see if either of the operands is a literal, then a constant. */
825 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
826 || TREE_CODE (op0) == FIXED_CST)
827 *litp = op0, op0 = 0;
828 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
829 || TREE_CODE (op1) == FIXED_CST)
830 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
832 if (op0 != 0 && TREE_CONSTANT (op0))
833 *conp = op0, op0 = 0;
834 else if (op1 != 0 && TREE_CONSTANT (op1))
835 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
837 /* If we haven't dealt with either operand, this is not a case we can
838 decompose. Otherwise, VAR is either of the ones remaining, if any. */
839 if (op0 != 0 && op1 != 0)
844 var = op1, neg_var_p = neg1_p;
846 /* Now do any needed negations. */
848 *minus_litp = *litp, *litp = 0;
850 *conp = negate_expr (*conp);
852 var = negate_expr (var);
854 else if (TREE_CODE (in) == BIT_NOT_EXPR
855 && code == PLUS_EXPR)
857 /* -X - 1 is folded to ~X, undo that here. */
858 *minus_litp = build_one_cst (TREE_TYPE (in));
859 var = negate_expr (TREE_OPERAND (in, 0));
861 else if (TREE_CONSTANT (in))
869 *minus_litp = *litp, *litp = 0;
870 else if (*minus_litp)
871 *litp = *minus_litp, *minus_litp = 0;
872 *conp = negate_expr (*conp);
873 var = negate_expr (var);
879 /* Re-associate trees split by the above function. T1 and T2 are
880 either expressions to associate or null. Return the new
881 expression, if any. LOC is the location of the new expression. If
882 we build an operation, do it in TYPE and with CODE. */
885 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
892 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
893 try to fold this since we will have infinite recursion. But do
894 deal with any NEGATE_EXPRs. */
895 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
896 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
898 if (code == PLUS_EXPR)
900 if (TREE_CODE (t1) == NEGATE_EXPR)
901 return build2_loc (loc, MINUS_EXPR, type,
902 fold_convert_loc (loc, type, t2),
903 fold_convert_loc (loc, type,
904 TREE_OPERAND (t1, 0)));
905 else if (TREE_CODE (t2) == NEGATE_EXPR)
906 return build2_loc (loc, MINUS_EXPR, type,
907 fold_convert_loc (loc, type, t1),
908 fold_convert_loc (loc, type,
909 TREE_OPERAND (t2, 0)));
910 else if (integer_zerop (t2))
911 return fold_convert_loc (loc, type, t1);
913 else if (code == MINUS_EXPR)
915 if (integer_zerop (t2))
916 return fold_convert_loc (loc, type, t1);
919 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
923 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
924 fold_convert_loc (loc, type, t2));
927 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
928 for use in int_const_binop, size_binop and size_diffop. */
931 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
933 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
935 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
950 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
951 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
952 && TYPE_MODE (type1) == TYPE_MODE (type2);
956 /* Combine two integer constants ARG1 and ARG2 under operation CODE
957 to produce a new constant. Return NULL_TREE if we don't know how
958 to evaluate CODE at compile-time. */
961 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
966 tree type = TREE_TYPE (arg1);
967 signop sign = TYPE_SIGN (type);
968 bool overflow = false;
970 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
971 TYPE_SIGN (TREE_TYPE (parg2)));
976 res = wi::bit_or (arg1, arg2);
980 res = wi::bit_xor (arg1, arg2);
984 res = wi::bit_and (arg1, arg2);
989 if (wi::neg_p (arg2))
992 if (code == RSHIFT_EXPR)
998 if (code == RSHIFT_EXPR)
999 /* It's unclear from the C standard whether shifts can overflow.
1000 The following code ignores overflow; perhaps a C standard
1001 interpretation ruling is needed. */
1002 res = wi::rshift (arg1, arg2, sign);
1004 res = wi::lshift (arg1, arg2);
1009 if (wi::neg_p (arg2))
1012 if (code == RROTATE_EXPR)
1013 code = LROTATE_EXPR;
1015 code = RROTATE_EXPR;
1018 if (code == RROTATE_EXPR)
1019 res = wi::rrotate (arg1, arg2);
1021 res = wi::lrotate (arg1, arg2);
1025 res = wi::add (arg1, arg2, sign, &overflow);
1029 res = wi::sub (arg1, arg2, sign, &overflow);
1033 res = wi::mul (arg1, arg2, sign, &overflow);
1036 case MULT_HIGHPART_EXPR:
1037 res = wi::mul_high (arg1, arg2, sign);
1040 case TRUNC_DIV_EXPR:
1041 case EXACT_DIV_EXPR:
1044 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1047 case FLOOR_DIV_EXPR:
1050 res = wi::div_floor (arg1, arg2, sign, &overflow);
1056 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1059 case ROUND_DIV_EXPR:
1062 res = wi::div_round (arg1, arg2, sign, &overflow);
1065 case TRUNC_MOD_EXPR:
1068 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1071 case FLOOR_MOD_EXPR:
1074 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1080 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1083 case ROUND_MOD_EXPR:
1086 res = wi::mod_round (arg1, arg2, sign, &overflow);
1090 res = wi::min (arg1, arg2, sign);
1094 res = wi::max (arg1, arg2, sign);
1101 t = force_fit_type (type, res, overflowable,
1102 (((sign == SIGNED || overflowable == -1)
1104 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1110 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1112 return int_const_binop_1 (code, arg1, arg2, 1);
1115 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1116 constant. We assume ARG1 and ARG2 have the same data type, or at least
1117 are the same kind of constant and the same machine mode. Return zero if
1118 combining the constants is not allowed in the current operating mode. */
1121 const_binop (enum tree_code code, tree arg1, tree arg2)
1123 /* Sanity check for the recursive cases. */
1130 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1132 if (code == POINTER_PLUS_EXPR)
1133 return int_const_binop (PLUS_EXPR,
1134 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1136 return int_const_binop (code, arg1, arg2);
1139 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1144 REAL_VALUE_TYPE value;
1145 REAL_VALUE_TYPE result;
1149 /* The following codes are handled by real_arithmetic. */
1164 d1 = TREE_REAL_CST (arg1);
1165 d2 = TREE_REAL_CST (arg2);
1167 type = TREE_TYPE (arg1);
1168 mode = TYPE_MODE (type);
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode)
1173 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code == RDIV_EXPR
1179 && real_equal (&d2, &dconst0)
1180 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1))
1187 else if (REAL_VALUE_ISNAN (d2))
1190 inexact = real_arithmetic (&value, code, &d1, &d2);
1191 real_convert (&result, mode, &value);
1193 /* Don't constant fold this floating point operation if
1194 the result has overflowed and flag_trapping_math. */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode)
1197 && REAL_VALUE_ISINF (result)
1198 && !REAL_VALUE_ISINF (d1)
1199 && !REAL_VALUE_ISINF (d2))
1202 /* Don't constant fold this floating point operation if the
1203 result may dependent upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1208 && (inexact || !real_identical (&result, &value)))
1211 t = build_real (type, result);
1213 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1217 if (TREE_CODE (arg1) == FIXED_CST)
1219 FIXED_VALUE_TYPE f1;
1220 FIXED_VALUE_TYPE f2;
1221 FIXED_VALUE_TYPE result;
1226 /* The following codes are handled by fixed_arithmetic. */
1232 case TRUNC_DIV_EXPR:
1233 if (TREE_CODE (arg2) != FIXED_CST)
1235 f2 = TREE_FIXED_CST (arg2);
1241 if (TREE_CODE (arg2) != INTEGER_CST)
1244 f2.data.high = w2.elt (1);
1245 f2.data.low = w2.elt (0);
1254 f1 = TREE_FIXED_CST (arg1);
1255 type = TREE_TYPE (arg1);
1256 sat_p = TYPE_SATURATING (type);
1257 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1258 t = build_fixed (type, result);
1259 /* Propagate overflow flags. */
1260 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1261 TREE_OVERFLOW (t) = 1;
1265 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1267 tree type = TREE_TYPE (arg1);
1268 tree r1 = TREE_REALPART (arg1);
1269 tree i1 = TREE_IMAGPART (arg1);
1270 tree r2 = TREE_REALPART (arg2);
1271 tree i2 = TREE_IMAGPART (arg2);
1278 real = const_binop (code, r1, r2);
1279 imag = const_binop (code, i1, i2);
1283 if (COMPLEX_FLOAT_TYPE_P (type))
1284 return do_mpc_arg2 (arg1, arg2, type,
1285 /* do_nonfinite= */ folding_initializer,
1288 real = const_binop (MINUS_EXPR,
1289 const_binop (MULT_EXPR, r1, r2),
1290 const_binop (MULT_EXPR, i1, i2));
1291 imag = const_binop (PLUS_EXPR,
1292 const_binop (MULT_EXPR, r1, i2),
1293 const_binop (MULT_EXPR, i1, r2));
1297 if (COMPLEX_FLOAT_TYPE_P (type))
1298 return do_mpc_arg2 (arg1, arg2, type,
1299 /* do_nonfinite= */ folding_initializer,
1302 case TRUNC_DIV_EXPR:
1304 case FLOOR_DIV_EXPR:
1305 case ROUND_DIV_EXPR:
1306 if (flag_complex_method == 0)
1308 /* Keep this algorithm in sync with
1309 tree-complex.c:expand_complex_div_straight().
1311 Expand complex division to scalars, straightforward algorithm.
1312 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1316 = const_binop (PLUS_EXPR,
1317 const_binop (MULT_EXPR, r2, r2),
1318 const_binop (MULT_EXPR, i2, i2));
1320 = const_binop (PLUS_EXPR,
1321 const_binop (MULT_EXPR, r1, r2),
1322 const_binop (MULT_EXPR, i1, i2));
1324 = const_binop (MINUS_EXPR,
1325 const_binop (MULT_EXPR, i1, r2),
1326 const_binop (MULT_EXPR, r1, i2));
1328 real = const_binop (code, t1, magsquared);
1329 imag = const_binop (code, t2, magsquared);
1333 /* Keep this algorithm in sync with
1334 tree-complex.c:expand_complex_div_wide().
1336 Expand complex division to scalars, modified algorithm to minimize
1337 overflow with wide input ranges. */
1338 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1339 fold_abs_const (r2, TREE_TYPE (type)),
1340 fold_abs_const (i2, TREE_TYPE (type)));
1342 if (integer_nonzerop (compare))
1344 /* In the TRUE branch, we compute
1346 div = (br * ratio) + bi;
1347 tr = (ar * ratio) + ai;
1348 ti = (ai * ratio) - ar;
1351 tree ratio = const_binop (code, r2, i2);
1352 tree div = const_binop (PLUS_EXPR, i2,
1353 const_binop (MULT_EXPR, r2, ratio));
1354 real = const_binop (MULT_EXPR, r1, ratio);
1355 real = const_binop (PLUS_EXPR, real, i1);
1356 real = const_binop (code, real, div);
1358 imag = const_binop (MULT_EXPR, i1, ratio);
1359 imag = const_binop (MINUS_EXPR, imag, r1);
1360 imag = const_binop (code, imag, div);
1364 /* In the FALSE branch, we compute
1366 divisor = (d * ratio) + c;
1367 tr = (b * ratio) + a;
1368 ti = b - (a * ratio);
1371 tree ratio = const_binop (code, i2, r2);
1372 tree div = const_binop (PLUS_EXPR, r2,
1373 const_binop (MULT_EXPR, i2, ratio));
1375 real = const_binop (MULT_EXPR, i1, ratio);
1376 real = const_binop (PLUS_EXPR, real, r1);
1377 real = const_binop (code, real, div);
1379 imag = const_binop (MULT_EXPR, r1, ratio);
1380 imag = const_binop (MINUS_EXPR, i1, imag);
1381 imag = const_binop (code, imag, div);
1391 return build_complex (type, real, imag);
1394 if (TREE_CODE (arg1) == VECTOR_CST
1395 && TREE_CODE (arg2) == VECTOR_CST)
1397 tree type = TREE_TYPE (arg1);
1398 int count = TYPE_VECTOR_SUBPARTS (type), i;
1399 tree *elts = XALLOCAVEC (tree, count);
1401 for (i = 0; i < count; i++)
1403 tree elem1 = VECTOR_CST_ELT (arg1, i);
1404 tree elem2 = VECTOR_CST_ELT (arg2, i);
1406 elts[i] = const_binop (code, elem1, elem2);
1408 /* It is possible that const_binop cannot handle the given
1409 code and return NULL_TREE */
1410 if (elts[i] == NULL_TREE)
1414 return build_vector (type, elts);
1417 /* Shifts allow a scalar offset for a vector. */
1418 if (TREE_CODE (arg1) == VECTOR_CST
1419 && TREE_CODE (arg2) == INTEGER_CST)
1421 tree type = TREE_TYPE (arg1);
1422 int count = TYPE_VECTOR_SUBPARTS (type), i;
1423 tree *elts = XALLOCAVEC (tree, count);
1425 for (i = 0; i < count; i++)
1427 tree elem1 = VECTOR_CST_ELT (arg1, i);
1429 elts[i] = const_binop (code, elem1, arg2);
1431 /* It is possible that const_binop cannot handle the given
1432 code and return NULL_TREE. */
1433 if (elts[i] == NULL_TREE)
1437 return build_vector (type, elts);
1442 /* Overload that adds a TYPE parameter to be able to dispatch
1443 to fold_relational_const. */
1446 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1448 if (TREE_CODE_CLASS (code) == tcc_comparison)
1449 return fold_relational_const (code, type, arg1, arg2);
1451 /* ??? Until we make the const_binop worker take the type of the
1452 result as argument put those cases that need it here. */
1456 if ((TREE_CODE (arg1) == REAL_CST
1457 && TREE_CODE (arg2) == REAL_CST)
1458 || (TREE_CODE (arg1) == INTEGER_CST
1459 && TREE_CODE (arg2) == INTEGER_CST))
1460 return build_complex (type, arg1, arg2);
1463 case VEC_PACK_TRUNC_EXPR:
1464 case VEC_PACK_FIX_TRUNC_EXPR:
1466 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1469 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1470 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1471 if (TREE_CODE (arg1) != VECTOR_CST
1472 || TREE_CODE (arg2) != VECTOR_CST)
1475 elts = XALLOCAVEC (tree, nelts);
1476 if (!vec_cst_ctor_to_array (arg1, elts)
1477 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1480 for (i = 0; i < nelts; i++)
1482 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1483 ? NOP_EXPR : FIX_TRUNC_EXPR,
1484 TREE_TYPE (type), elts[i]);
1485 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1489 return build_vector (type, elts);
1492 case VEC_WIDEN_MULT_LO_EXPR:
1493 case VEC_WIDEN_MULT_HI_EXPR:
1494 case VEC_WIDEN_MULT_EVEN_EXPR:
1495 case VEC_WIDEN_MULT_ODD_EXPR:
1497 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1498 unsigned int out, ofs, scale;
1501 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1502 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1503 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1506 elts = XALLOCAVEC (tree, nelts * 4);
1507 if (!vec_cst_ctor_to_array (arg1, elts)
1508 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1511 if (code == VEC_WIDEN_MULT_LO_EXPR)
1512 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1513 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1514 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1515 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1517 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1520 for (out = 0; out < nelts; out++)
1522 unsigned int in1 = (out << scale) + ofs;
1523 unsigned int in2 = in1 + nelts * 2;
1526 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1527 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1529 if (t1 == NULL_TREE || t2 == NULL_TREE)
1531 elts[out] = const_binop (MULT_EXPR, t1, t2);
1532 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1536 return build_vector (type, elts);
1542 if (TREE_CODE_CLASS (code) != tcc_binary)
1545 /* Make sure type and arg0 have the same saturating flag. */
1546 gcc_checking_assert (TYPE_SATURATING (type)
1547 == TYPE_SATURATING (TREE_TYPE (arg1)));
1549 return const_binop (code, arg1, arg2);
1552 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1553 Return zero if computing the constants is not possible. */
1556 const_unop (enum tree_code code, tree type, tree arg0)
1562 case FIX_TRUNC_EXPR:
1563 case FIXED_CONVERT_EXPR:
1564 return fold_convert_const (code, type, arg0);
1566 case ADDR_SPACE_CONVERT_EXPR:
1567 if (integer_zerop (arg0))
1568 return fold_convert_const (code, type, arg0);
1571 case VIEW_CONVERT_EXPR:
1572 return fold_view_convert_expr (type, arg0);
1576 /* Can't call fold_negate_const directly here as that doesn't
1577 handle all cases and we might not be able to negate some
1579 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1580 if (tem && CONSTANT_CLASS_P (tem))
1586 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1587 return fold_abs_const (arg0, type);
1591 if (TREE_CODE (arg0) == COMPLEX_CST)
1593 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1595 return build_complex (type, TREE_REALPART (arg0), ipart);
1600 if (TREE_CODE (arg0) == INTEGER_CST)
1601 return fold_not_const (arg0, type);
1602 /* Perform BIT_NOT_EXPR on each element individually. */
1603 else if (TREE_CODE (arg0) == VECTOR_CST)
1607 unsigned count = VECTOR_CST_NELTS (arg0), i;
1609 elements = XALLOCAVEC (tree, count);
1610 for (i = 0; i < count; i++)
1612 elem = VECTOR_CST_ELT (arg0, i);
1613 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1614 if (elem == NULL_TREE)
1619 return build_vector (type, elements);
1623 case TRUTH_NOT_EXPR:
1624 if (TREE_CODE (arg0) == INTEGER_CST)
1625 return constant_boolean_node (integer_zerop (arg0), type);
1629 if (TREE_CODE (arg0) == COMPLEX_CST)
1630 return fold_convert (type, TREE_REALPART (arg0));
1634 if (TREE_CODE (arg0) == COMPLEX_CST)
1635 return fold_convert (type, TREE_IMAGPART (arg0));
1638 case VEC_UNPACK_LO_EXPR:
1639 case VEC_UNPACK_HI_EXPR:
1640 case VEC_UNPACK_FLOAT_LO_EXPR:
1641 case VEC_UNPACK_FLOAT_HI_EXPR:
1643 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1645 enum tree_code subcode;
1647 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1648 if (TREE_CODE (arg0) != VECTOR_CST)
1651 elts = XALLOCAVEC (tree, nelts * 2);
1652 if (!vec_cst_ctor_to_array (arg0, elts))
1655 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1656 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1659 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1662 subcode = FLOAT_EXPR;
1664 for (i = 0; i < nelts; i++)
1666 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1667 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1671 return build_vector (type, elts);
1674 case REDUC_MIN_EXPR:
1675 case REDUC_MAX_EXPR:
1676 case REDUC_PLUS_EXPR:
1678 unsigned int nelts, i;
1680 enum tree_code subcode;
1682 if (TREE_CODE (arg0) != VECTOR_CST)
1684 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1686 elts = XALLOCAVEC (tree, nelts);
1687 if (!vec_cst_ctor_to_array (arg0, elts))
1692 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1693 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1694 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1695 default: gcc_unreachable ();
1698 for (i = 1; i < nelts; i++)
1700 elts[0] = const_binop (subcode, elts[0], elts[i]);
1701 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1715 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1716 indicates which particular sizetype to create. */
1719 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1721 return build_int_cst (sizetype_tab[(int) kind], number);
1724 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1725 is a tree code. The type of the result is taken from the operands.
1726 Both must be equivalent integer types, ala int_binop_types_match_p.
1727 If the operands are constant, so is the result. */
1730 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1732 tree type = TREE_TYPE (arg0);
1734 if (arg0 == error_mark_node || arg1 == error_mark_node)
1735 return error_mark_node;
1737 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1740 /* Handle the special case of two integer constants faster. */
1741 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1743 /* And some specific cases even faster than that. */
1744 if (code == PLUS_EXPR)
1746 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1748 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1751 else if (code == MINUS_EXPR)
1753 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1756 else if (code == MULT_EXPR)
1758 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1762 /* Handle general case of two integer constants. For sizetype
1763 constant calculations we always want to know about overflow,
1764 even in the unsigned case. */
1765 return int_const_binop_1 (code, arg0, arg1, -1);
1768 return fold_build2_loc (loc, code, type, arg0, arg1);
1771 /* Given two values, either both of sizetype or both of bitsizetype,
1772 compute the difference between the two values. Return the value
1773 in signed type corresponding to the type of the operands. */
1776 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1778 tree type = TREE_TYPE (arg0);
1781 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1784 /* If the type is already signed, just do the simple thing. */
1785 if (!TYPE_UNSIGNED (type))
1786 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1788 if (type == sizetype)
1790 else if (type == bitsizetype)
1791 ctype = sbitsizetype;
1793 ctype = signed_type_for (type);
1795 /* If either operand is not a constant, do the conversions to the signed
1796 type and subtract. The hardware will do the right thing with any
1797 overflow in the subtraction. */
1798 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1799 return size_binop_loc (loc, MINUS_EXPR,
1800 fold_convert_loc (loc, ctype, arg0),
1801 fold_convert_loc (loc, ctype, arg1));
1803 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1804 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1805 overflow) and negate (which can't either). Special-case a result
1806 of zero while we're here. */
1807 if (tree_int_cst_equal (arg0, arg1))
1808 return build_int_cst (ctype, 0);
1809 else if (tree_int_cst_lt (arg1, arg0))
1810 return fold_convert_loc (loc, ctype,
1811 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1813 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1814 fold_convert_loc (loc, ctype,
1815 size_binop_loc (loc,
1820 /* A subroutine of fold_convert_const handling conversions of an
1821 INTEGER_CST to another integer type. */
1824 fold_convert_const_int_from_int (tree type, const_tree arg1)
1826 /* Given an integer constant, make new constant with new type,
1827 appropriately sign-extended or truncated. Use widest_int
1828 so that any extension is done according ARG1's type. */
1829 return force_fit_type (type, wi::to_widest (arg1),
1830 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1831 TREE_OVERFLOW (arg1));
1834 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1835 to an integer type. */
1838 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1840 bool overflow = false;
1843 /* The following code implements the floating point to integer
1844 conversion rules required by the Java Language Specification,
1845 that IEEE NaNs are mapped to zero and values that overflow
1846 the target precision saturate, i.e. values greater than
1847 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1848 are mapped to INT_MIN. These semantics are allowed by the
1849 C and C++ standards that simply state that the behavior of
1850 FP-to-integer conversion is unspecified upon overflow. */
1854 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1858 case FIX_TRUNC_EXPR:
1859 real_trunc (&r, VOIDmode, &x);
1866 /* If R is NaN, return zero and show we have an overflow. */
1867 if (REAL_VALUE_ISNAN (r))
1870 val = wi::zero (TYPE_PRECISION (type));
1873 /* See if R is less than the lower bound or greater than the
1878 tree lt = TYPE_MIN_VALUE (type);
1879 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1880 if (real_less (&r, &l))
1889 tree ut = TYPE_MAX_VALUE (type);
1892 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1893 if (real_less (&u, &r))
1902 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1904 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1908 /* A subroutine of fold_convert_const handling conversions of a
1909 FIXED_CST to an integer type. */
1912 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1915 double_int temp, temp_trunc;
1918 /* Right shift FIXED_CST to temp by fbit. */
1919 temp = TREE_FIXED_CST (arg1).data;
1920 mode = TREE_FIXED_CST (arg1).mode;
1921 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1923 temp = temp.rshift (GET_MODE_FBIT (mode),
1924 HOST_BITS_PER_DOUBLE_INT,
1925 SIGNED_FIXED_POINT_MODE_P (mode));
1927 /* Left shift temp to temp_trunc by fbit. */
1928 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1929 HOST_BITS_PER_DOUBLE_INT,
1930 SIGNED_FIXED_POINT_MODE_P (mode));
1934 temp = double_int_zero;
1935 temp_trunc = double_int_zero;
1938 /* If FIXED_CST is negative, we need to round the value toward 0.
1939 By checking if the fractional bits are not zero to add 1 to temp. */
1940 if (SIGNED_FIXED_POINT_MODE_P (mode)
1941 && temp_trunc.is_negative ()
1942 && TREE_FIXED_CST (arg1).data != temp_trunc)
1943 temp += double_int_one;
1945 /* Given a fixed-point constant, make new constant with new type,
1946 appropriately sign-extended or truncated. */
1947 t = force_fit_type (type, temp, -1,
1948 (temp.is_negative ()
1949 && (TYPE_UNSIGNED (type)
1950 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1951 | TREE_OVERFLOW (arg1));
1956 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1957 to another floating point type. */
1960 fold_convert_const_real_from_real (tree type, const_tree arg1)
1962 REAL_VALUE_TYPE value;
1965 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1966 t = build_real (type, value);
1968 /* If converting an infinity or NAN to a representation that doesn't
1969 have one, set the overflow bit so that we can produce some kind of
1970 error message at the appropriate point if necessary. It's not the
1971 most user-friendly message, but it's better than nothing. */
1972 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1973 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1974 TREE_OVERFLOW (t) = 1;
1975 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1976 && !MODE_HAS_NANS (TYPE_MODE (type)))
1977 TREE_OVERFLOW (t) = 1;
1978 /* Regular overflow, conversion produced an infinity in a mode that
1979 can't represent them. */
1980 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1981 && REAL_VALUE_ISINF (value)
1982 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1983 TREE_OVERFLOW (t) = 1;
1985 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1989 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1990 to a floating point type. */
1993 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1995 REAL_VALUE_TYPE value;
1998 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1999 t = build_real (type, value);
2001 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2006 to another fixed-point type. */
2009 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2011 FIXED_VALUE_TYPE value;
2015 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2016 TYPE_SATURATING (type));
2017 t = build_fixed (type, value);
2019 /* Propagate overflow flags. */
2020 if (overflow_p | TREE_OVERFLOW (arg1))
2021 TREE_OVERFLOW (t) = 1;
2025 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2026 to a fixed-point type. */
2029 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2031 FIXED_VALUE_TYPE value;
2036 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2038 di.low = TREE_INT_CST_ELT (arg1, 0);
2039 if (TREE_INT_CST_NUNITS (arg1) == 1)
2040 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2042 di.high = TREE_INT_CST_ELT (arg1, 1);
2044 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2045 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2046 TYPE_SATURATING (type));
2047 t = build_fixed (type, value);
2049 /* Propagate overflow flags. */
2050 if (overflow_p | TREE_OVERFLOW (arg1))
2051 TREE_OVERFLOW (t) = 1;
2055 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2056 to a fixed-point type. */
2059 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2061 FIXED_VALUE_TYPE value;
2065 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2066 &TREE_REAL_CST (arg1),
2067 TYPE_SATURATING (type));
2068 t = build_fixed (type, value);
2070 /* Propagate overflow flags. */
2071 if (overflow_p | TREE_OVERFLOW (arg1))
2072 TREE_OVERFLOW (t) = 1;
2076 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2077 type TYPE. If no simplification can be done return NULL_TREE. */
2080 fold_convert_const (enum tree_code code, tree type, tree arg1)
2082 if (TREE_TYPE (arg1) == type)
2085 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2086 || TREE_CODE (type) == OFFSET_TYPE)
2088 if (TREE_CODE (arg1) == INTEGER_CST)
2089 return fold_convert_const_int_from_int (type, arg1);
2090 else if (TREE_CODE (arg1) == REAL_CST)
2091 return fold_convert_const_int_from_real (code, type, arg1);
2092 else if (TREE_CODE (arg1) == FIXED_CST)
2093 return fold_convert_const_int_from_fixed (type, arg1);
2095 else if (TREE_CODE (type) == REAL_TYPE)
2097 if (TREE_CODE (arg1) == INTEGER_CST)
2098 return build_real_from_int_cst (type, arg1);
2099 else if (TREE_CODE (arg1) == REAL_CST)
2100 return fold_convert_const_real_from_real (type, arg1);
2101 else if (TREE_CODE (arg1) == FIXED_CST)
2102 return fold_convert_const_real_from_fixed (type, arg1);
2104 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2106 if (TREE_CODE (arg1) == FIXED_CST)
2107 return fold_convert_const_fixed_from_fixed (type, arg1);
2108 else if (TREE_CODE (arg1) == INTEGER_CST)
2109 return fold_convert_const_fixed_from_int (type, arg1);
2110 else if (TREE_CODE (arg1) == REAL_CST)
2111 return fold_convert_const_fixed_from_real (type, arg1);
2116 /* Construct a vector of zero elements of vector type TYPE. */
2119 build_zero_vector (tree type)
2123 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2124 return build_vector_from_val (type, t);
2127 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2130 fold_convertible_p (const_tree type, const_tree arg)
2132 tree orig = TREE_TYPE (arg);
2137 if (TREE_CODE (arg) == ERROR_MARK
2138 || TREE_CODE (type) == ERROR_MARK
2139 || TREE_CODE (orig) == ERROR_MARK)
2142 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2145 switch (TREE_CODE (type))
2147 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2148 case POINTER_TYPE: case REFERENCE_TYPE:
2150 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2151 || TREE_CODE (orig) == OFFSET_TYPE)
2153 return (TREE_CODE (orig) == VECTOR_TYPE
2154 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2157 case FIXED_POINT_TYPE:
2161 return TREE_CODE (type) == TREE_CODE (orig);
2168 /* Convert expression ARG to type TYPE. Used by the middle-end for
2169 simple conversions in preference to calling the front-end's convert. */
2172 fold_convert_loc (location_t loc, tree type, tree arg)
2174 tree orig = TREE_TYPE (arg);
2180 if (TREE_CODE (arg) == ERROR_MARK
2181 || TREE_CODE (type) == ERROR_MARK
2182 || TREE_CODE (orig) == ERROR_MARK)
2183 return error_mark_node;
2185 switch (TREE_CODE (type))
2188 case REFERENCE_TYPE:
2189 /* Handle conversions between pointers to different address spaces. */
2190 if (POINTER_TYPE_P (orig)
2191 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2192 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2193 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2196 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2198 if (TREE_CODE (arg) == INTEGER_CST)
2200 tem = fold_convert_const (NOP_EXPR, type, arg);
2201 if (tem != NULL_TREE)
2204 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2205 || TREE_CODE (orig) == OFFSET_TYPE)
2206 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2207 if (TREE_CODE (orig) == COMPLEX_TYPE)
2208 return fold_convert_loc (loc, type,
2209 fold_build1_loc (loc, REALPART_EXPR,
2210 TREE_TYPE (orig), arg));
2211 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2212 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2213 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2216 if (TREE_CODE (arg) == INTEGER_CST)
2218 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2219 if (tem != NULL_TREE)
2222 else if (TREE_CODE (arg) == REAL_CST)
2224 tem = fold_convert_const (NOP_EXPR, type, arg);
2225 if (tem != NULL_TREE)
2228 else if (TREE_CODE (arg) == FIXED_CST)
2230 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2231 if (tem != NULL_TREE)
2235 switch (TREE_CODE (orig))
2238 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2239 case POINTER_TYPE: case REFERENCE_TYPE:
2240 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2243 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2245 case FIXED_POINT_TYPE:
2246 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2249 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2250 return fold_convert_loc (loc, type, tem);
2256 case FIXED_POINT_TYPE:
2257 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2258 || TREE_CODE (arg) == REAL_CST)
2260 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2261 if (tem != NULL_TREE)
2262 goto fold_convert_exit;
2265 switch (TREE_CODE (orig))
2267 case FIXED_POINT_TYPE:
2272 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2275 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert_loc (loc, type, tem);
2283 switch (TREE_CODE (orig))
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
2289 case FIXED_POINT_TYPE:
2290 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2291 fold_convert_loc (loc, TREE_TYPE (type), arg),
2292 fold_convert_loc (loc, TREE_TYPE (type),
2293 integer_zero_node));
2298 if (TREE_CODE (arg) == COMPLEX_EXPR)
2300 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2301 TREE_OPERAND (arg, 0));
2302 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2303 TREE_OPERAND (arg, 1));
2304 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2307 arg = save_expr (arg);
2308 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2309 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2310 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2311 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2312 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2320 if (integer_zerop (arg))
2321 return build_zero_vector (type);
2322 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2323 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2324 || TREE_CODE (orig) == VECTOR_TYPE);
2325 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2328 tem = fold_ignored_result (arg);
2329 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2332 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2333 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2337 protected_set_expr_location_unshare (tem, loc);
2341 /* Return false if expr can be assumed not to be an lvalue, true
2345 maybe_lvalue_p (const_tree x)
2347 /* We only need to wrap lvalue tree codes. */
2348 switch (TREE_CODE (x))
2361 case ARRAY_RANGE_REF:
2367 case PREINCREMENT_EXPR:
2368 case PREDECREMENT_EXPR:
2370 case TRY_CATCH_EXPR:
2371 case WITH_CLEANUP_EXPR:
2380 /* Assume the worst for front-end tree codes. */
2381 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2389 /* Return an expr equal to X but certainly not valid as an lvalue. */
2392 non_lvalue_loc (location_t loc, tree x)
2394 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2399 if (! maybe_lvalue_p (x))
2401 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2404 /* When pedantic, return an expr equal to X but certainly not valid as a
2405 pedantic lvalue. Otherwise, return X. */
2408 pedantic_non_lvalue_loc (location_t loc, tree x)
2410 return protected_set_expr_location_unshare (x, loc);
2413 /* Given a tree comparison code, return the code that is the logical inverse.
2414 It is generally not safe to do this for floating-point comparisons, except
2415 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2416 ERROR_MARK in this case. */
2419 invert_tree_comparison (enum tree_code code, bool honor_nans)
2421 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2422 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2432 return honor_nans ? UNLE_EXPR : LE_EXPR;
2434 return honor_nans ? UNLT_EXPR : LT_EXPR;
2436 return honor_nans ? UNGE_EXPR : GE_EXPR;
2438 return honor_nans ? UNGT_EXPR : GT_EXPR;
2452 return UNORDERED_EXPR;
2453 case UNORDERED_EXPR:
2454 return ORDERED_EXPR;
2460 /* Similar, but return the comparison that results if the operands are
2461 swapped. This is safe for floating-point. */
2464 swap_tree_comparison (enum tree_code code)
2471 case UNORDERED_EXPR:
2497 /* Convert a comparison tree code from an enum tree_code representation
2498 into a compcode bit-based encoding. This function is the inverse of
2499 compcode_to_comparison. */
2501 static enum comparison_code
2502 comparison_to_compcode (enum tree_code code)
2519 return COMPCODE_ORD;
2520 case UNORDERED_EXPR:
2521 return COMPCODE_UNORD;
2523 return COMPCODE_UNLT;
2525 return COMPCODE_UNEQ;
2527 return COMPCODE_UNLE;
2529 return COMPCODE_UNGT;
2531 return COMPCODE_LTGT;
2533 return COMPCODE_UNGE;
2539 /* Convert a compcode bit-based encoding of a comparison operator back
2540 to GCC's enum tree_code representation. This function is the
2541 inverse of comparison_to_compcode. */
2543 static enum tree_code
2544 compcode_to_comparison (enum comparison_code code)
2561 return ORDERED_EXPR;
2562 case COMPCODE_UNORD:
2563 return UNORDERED_EXPR;
2581 /* Return a tree for the comparison which is the combination of
2582 doing the AND or OR (depending on CODE) of the two operations LCODE
2583 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2584 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2585 if this makes the transformation invalid. */
2588 combine_comparisons (location_t loc,
2589 enum tree_code code, enum tree_code lcode,
2590 enum tree_code rcode, tree truth_type,
2591 tree ll_arg, tree lr_arg)
2593 bool honor_nans = HONOR_NANS (ll_arg);
2594 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2595 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2600 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2601 compcode = lcompcode & rcompcode;
2604 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2605 compcode = lcompcode | rcompcode;
2614 /* Eliminate unordered comparisons, as well as LTGT and ORD
2615 which are not used unless the mode has NaNs. */
2616 compcode &= ~COMPCODE_UNORD;
2617 if (compcode == COMPCODE_LTGT)
2618 compcode = COMPCODE_NE;
2619 else if (compcode == COMPCODE_ORD)
2620 compcode = COMPCODE_TRUE;
2622 else if (flag_trapping_math)
2624 /* Check that the original operation and the optimized ones will trap
2625 under the same condition. */
2626 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2627 && (lcompcode != COMPCODE_EQ)
2628 && (lcompcode != COMPCODE_ORD);
2629 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2630 && (rcompcode != COMPCODE_EQ)
2631 && (rcompcode != COMPCODE_ORD);
2632 bool trap = (compcode & COMPCODE_UNORD) == 0
2633 && (compcode != COMPCODE_EQ)
2634 && (compcode != COMPCODE_ORD);
2636 /* In a short-circuited boolean expression the LHS might be
2637 such that the RHS, if evaluated, will never trap. For
2638 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2639 if neither x nor y is NaN. (This is a mixed blessing: for
2640 example, the expression above will never trap, hence
2641 optimizing it to x < y would be invalid). */
2642 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2643 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2646 /* If the comparison was short-circuited, and only the RHS
2647 trapped, we may now generate a spurious trap. */
2649 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2652 /* If we changed the conditions that cause a trap, we lose. */
2653 if ((ltrap || rtrap) != trap)
2657 if (compcode == COMPCODE_TRUE)
2658 return constant_boolean_node (true, truth_type);
2659 else if (compcode == COMPCODE_FALSE)
2660 return constant_boolean_node (false, truth_type);
2663 enum tree_code tcode;
2665 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2666 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2670 /* Return nonzero if two operands (typically of the same tree node)
2671 are necessarily equal. If either argument has side-effects this
2672 function returns zero. FLAGS modifies behavior as follows:
2674 If OEP_ONLY_CONST is set, only return nonzero for constants.
2675 This function tests whether the operands are indistinguishable;
2676 it does not test whether they are equal using C's == operation.
2677 The distinction is important for IEEE floating point, because
2678 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2679 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2681 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2682 even though it may hold multiple values during a function.
2683 This is because a GCC tree node guarantees that nothing else is
2684 executed between the evaluation of its "operands" (which may often
2685 be evaluated in arbitrary order). Hence if the operands themselves
2686 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2687 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2688 unset means assuming isochronic (or instantaneous) tree equivalence.
2689 Unless comparing arbitrary expression trees, such as from different
2690 statements, this flag can usually be left unset.
2692 If OEP_PURE_SAME is set, then pure functions with identical arguments
2693 are considered the same. It is used when the caller has other ways
2694 to ensure that global memory is unchanged in between. */
2697 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2699 /* If either is ERROR_MARK, they aren't equal. */
2700 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2701 || TREE_TYPE (arg0) == error_mark_node
2702 || TREE_TYPE (arg1) == error_mark_node)
2705 /* Similar, if either does not have a type (like a released SSA name),
2706 they aren't equal. */
2707 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2710 /* Check equality of integer constants before bailing out due to
2711 precision differences. */
2712 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2713 return tree_int_cst_equal (arg0, arg1);
2715 /* If both types don't have the same signedness, then we can't consider
2716 them equal. We must check this before the STRIP_NOPS calls
2717 because they may change the signedness of the arguments. As pointers
2718 strictly don't have a signedness, require either two pointers or
2719 two non-pointers as well. */
2720 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2721 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2724 /* We cannot consider pointers to different address space equal. */
2725 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2726 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2727 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2730 /* If both types don't have the same precision, then it is not safe
2732 if (element_precision (TREE_TYPE (arg0))
2733 != element_precision (TREE_TYPE (arg1)))
2739 /* In case both args are comparisons but with different comparison
2740 code, try to swap the comparison operands of one arg to produce
2741 a match and compare that variant. */
2742 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2743 && COMPARISON_CLASS_P (arg0)
2744 && COMPARISON_CLASS_P (arg1))
2746 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2748 if (TREE_CODE (arg0) == swap_code)
2749 return operand_equal_p (TREE_OPERAND (arg0, 0),
2750 TREE_OPERAND (arg1, 1), flags)
2751 && operand_equal_p (TREE_OPERAND (arg0, 1),
2752 TREE_OPERAND (arg1, 0), flags);
2755 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2757 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2758 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2760 else if (flags & OEP_ADDRESS_OF)
2762 /* If we are interested in comparing addresses ignore
2763 MEM_REF wrappings of the base that can appear just for
2765 if (TREE_CODE (arg0) == MEM_REF
2767 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2768 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2769 && integer_zerop (TREE_OPERAND (arg0, 1)))
2771 else if (TREE_CODE (arg1) == MEM_REF
2773 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2774 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2775 && integer_zerop (TREE_OPERAND (arg1, 1)))
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (flags & OEP_CONSTANT_ADDRESS_OF)
2802 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2808 switch (TREE_CODE (arg0))
2811 return tree_int_cst_equal (arg0, arg1);
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2815 TREE_FIXED_CST (arg1));
2818 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2822 if (!HONOR_SIGNED_ZEROS (arg0))
2824 /* If we do not distinguish between signed and unsigned zero,
2825 consider them equal. */
2826 if (real_zerop (arg0) && real_zerop (arg1))
2835 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2838 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2840 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2841 VECTOR_CST_ELT (arg1, i), flags))
2848 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2850 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2854 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2855 && ! memcmp (TREE_STRING_POINTER (arg0),
2856 TREE_STRING_POINTER (arg1),
2857 TREE_STRING_LENGTH (arg0)));
2860 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2861 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2862 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2867 if (flags & OEP_ONLY_CONST)
2870 /* Define macros to test an operand from arg0 and arg1 for equality and a
2871 variant that allows null and views null as being different from any
2872 non-null value. In the latter case, if either is null, the both
2873 must be; otherwise, do the normal comparison. */
2874 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2875 TREE_OPERAND (arg1, N), flags)
2877 #define OP_SAME_WITH_NULL(N) \
2878 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2879 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2881 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2884 /* Two conversions are equal only if signedness and modes match. */
2885 switch (TREE_CODE (arg0))
2888 case FIX_TRUNC_EXPR:
2889 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2890 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2900 case tcc_comparison:
2902 if (OP_SAME (0) && OP_SAME (1))
2905 /* For commutative ops, allow the other order. */
2906 return (commutative_tree_code (TREE_CODE (arg0))
2907 && operand_equal_p (TREE_OPERAND (arg0, 0),
2908 TREE_OPERAND (arg1, 1), flags)
2909 && operand_equal_p (TREE_OPERAND (arg0, 1),
2910 TREE_OPERAND (arg1, 0), flags));
2913 /* If either of the pointer (or reference) expressions we are
2914 dereferencing contain a side effect, these cannot be equal,
2915 but their addresses can be. */
2916 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2917 && (TREE_SIDE_EFFECTS (arg0)
2918 || TREE_SIDE_EFFECTS (arg1)))
2921 switch (TREE_CODE (arg0))
2924 if (!(flags & OEP_ADDRESS_OF)
2925 && (TYPE_ALIGN (TREE_TYPE (arg0))
2926 != TYPE_ALIGN (TREE_TYPE (arg1))))
2928 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2935 case TARGET_MEM_REF:
2937 /* Require equal access sizes, and similar pointer types.
2938 We can have incomplete types for array references of
2939 variable-sized arrays from the Fortran frontend
2940 though. Also verify the types are compatible. */
2941 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2942 || (TYPE_SIZE (TREE_TYPE (arg0))
2943 && TYPE_SIZE (TREE_TYPE (arg1))
2944 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2945 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2946 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2947 && ((flags & OEP_ADDRESS_OF)
2948 || (alias_ptr_types_compatible_p
2949 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2950 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2951 && (MR_DEPENDENCE_CLIQUE (arg0)
2952 == MR_DEPENDENCE_CLIQUE (arg1))
2953 && (MR_DEPENDENCE_BASE (arg0)
2954 == MR_DEPENDENCE_BASE (arg1))
2955 && (TYPE_ALIGN (TREE_TYPE (arg0))
2956 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2958 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2959 return (OP_SAME (0) && OP_SAME (1)
2960 /* TARGET_MEM_REF require equal extra operands. */
2961 && (TREE_CODE (arg0) != TARGET_MEM_REF
2962 || (OP_SAME_WITH_NULL (2)
2963 && OP_SAME_WITH_NULL (3)
2964 && OP_SAME_WITH_NULL (4))));
2967 case ARRAY_RANGE_REF:
2968 /* Operands 2 and 3 may be null.
2969 Compare the array index by value if it is constant first as we
2970 may have different types but same value here. */
2973 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2974 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2975 TREE_OPERAND (arg1, 1))
2977 && OP_SAME_WITH_NULL (2)
2978 && OP_SAME_WITH_NULL (3));
2981 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2982 may be NULL when we're called to compare MEM_EXPRs. */
2983 if (!OP_SAME_WITH_NULL (0)
2986 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2987 return OP_SAME_WITH_NULL (2);
2992 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2993 return OP_SAME (1) && OP_SAME (2);
2999 case tcc_expression:
3000 switch (TREE_CODE (arg0))
3003 return operand_equal_p (TREE_OPERAND (arg0, 0),
3004 TREE_OPERAND (arg1, 0),
3005 flags | OEP_ADDRESS_OF);
3007 case TRUTH_NOT_EXPR:
3010 case TRUTH_ANDIF_EXPR:
3011 case TRUTH_ORIF_EXPR:
3012 return OP_SAME (0) && OP_SAME (1);
3015 case WIDEN_MULT_PLUS_EXPR:
3016 case WIDEN_MULT_MINUS_EXPR:
3019 /* The multiplcation operands are commutative. */
3022 case TRUTH_AND_EXPR:
3024 case TRUTH_XOR_EXPR:
3025 if (OP_SAME (0) && OP_SAME (1))
3028 /* Otherwise take into account this is a commutative operation. */
3029 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3030 TREE_OPERAND (arg1, 1), flags)
3031 && operand_equal_p (TREE_OPERAND (arg0, 1),
3032 TREE_OPERAND (arg1, 0), flags));
3037 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3044 switch (TREE_CODE (arg0))
3047 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3048 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3049 /* If not both CALL_EXPRs are either internal or normal function
3050 functions, then they are not equal. */
3052 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3054 /* If the CALL_EXPRs call different internal functions, then they
3056 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3061 /* If the CALL_EXPRs call different functions, then they are not
3063 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3069 unsigned int cef = call_expr_flags (arg0);
3070 if (flags & OEP_PURE_SAME)
3071 cef &= ECF_CONST | ECF_PURE;
3078 /* Now see if all the arguments are the same. */
3080 const_call_expr_arg_iterator iter0, iter1;
3082 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3083 a1 = first_const_call_expr_arg (arg1, &iter1);
3085 a0 = next_const_call_expr_arg (&iter0),
3086 a1 = next_const_call_expr_arg (&iter1))
3087 if (! operand_equal_p (a0, a1, flags))
3090 /* If we get here and both argument lists are exhausted
3091 then the CALL_EXPRs are equal. */
3092 return ! (a0 || a1);
3098 case tcc_declaration:
3099 /* Consider __builtin_sqrt equal to sqrt. */
3100 return (TREE_CODE (arg0) == FUNCTION_DECL
3101 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3102 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3103 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3110 #undef OP_SAME_WITH_NULL
3113 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3114 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3116 When in doubt, return 0. */
3119 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3121 int unsignedp1, unsignedpo;
3122 tree primarg0, primarg1, primother;
3123 unsigned int correct_width;
3125 if (operand_equal_p (arg0, arg1, 0))
3128 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3129 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3132 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3133 and see if the inner values are the same. This removes any
3134 signedness comparison, which doesn't matter here. */
3135 primarg0 = arg0, primarg1 = arg1;
3136 STRIP_NOPS (primarg0);
3137 STRIP_NOPS (primarg1);
3138 if (operand_equal_p (primarg0, primarg1, 0))
3141 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3142 actual comparison operand, ARG0.
3144 First throw away any conversions to wider types
3145 already present in the operands. */
3147 primarg1 = get_narrower (arg1, &unsignedp1);
3148 primother = get_narrower (other, &unsignedpo);
3150 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3151 if (unsignedp1 == unsignedpo
3152 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3153 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3155 tree type = TREE_TYPE (arg0);
3157 /* Make sure shorter operand is extended the right way
3158 to match the longer operand. */
3159 primarg1 = fold_convert (signed_or_unsigned_type_for
3160 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3162 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3169 /* See if ARG is an expression that is either a comparison or is performing
3170 arithmetic on comparisons. The comparisons must only be comparing
3171 two different values, which will be stored in *CVAL1 and *CVAL2; if
3172 they are nonzero it means that some operands have already been found.
3173 No variables may be used anywhere else in the expression except in the
3174 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3175 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3177 If this is true, return 1. Otherwise, return zero. */
3180 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3182 enum tree_code code = TREE_CODE (arg);
3183 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3185 /* We can handle some of the tcc_expression cases here. */
3186 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3188 else if (tclass == tcc_expression
3189 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3190 || code == COMPOUND_EXPR))
3191 tclass = tcc_binary;
3193 else if (tclass == tcc_expression && code == SAVE_EXPR
3194 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3196 /* If we've already found a CVAL1 or CVAL2, this expression is
3197 two complex to handle. */
3198 if (*cval1 || *cval2)
3208 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3211 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3212 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3213 cval1, cval2, save_p));
3218 case tcc_expression:
3219 if (code == COND_EXPR)
3220 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3221 cval1, cval2, save_p)
3222 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3223 cval1, cval2, save_p)
3224 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3225 cval1, cval2, save_p));
3228 case tcc_comparison:
3229 /* First see if we can handle the first operand, then the second. For
3230 the second operand, we know *CVAL1 can't be zero. It must be that
3231 one side of the comparison is each of the values; test for the
3232 case where this isn't true by failing if the two operands
3235 if (operand_equal_p (TREE_OPERAND (arg, 0),
3236 TREE_OPERAND (arg, 1), 0))
3240 *cval1 = TREE_OPERAND (arg, 0);
3241 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3243 else if (*cval2 == 0)
3244 *cval2 = TREE_OPERAND (arg, 0);
3245 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3250 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3252 else if (*cval2 == 0)
3253 *cval2 = TREE_OPERAND (arg, 1);
3254 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3266 /* ARG is a tree that is known to contain just arithmetic operations and
3267 comparisons. Evaluate the operations in the tree substituting NEW0 for
3268 any occurrence of OLD0 as an operand of a comparison and likewise for
3272 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3273 tree old1, tree new1)
3275 tree type = TREE_TYPE (arg);
3276 enum tree_code code = TREE_CODE (arg);
3277 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3279 /* We can handle some of the tcc_expression cases here. */
3280 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3282 else if (tclass == tcc_expression
3283 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3284 tclass = tcc_binary;
3289 return fold_build1_loc (loc, code, type,
3290 eval_subst (loc, TREE_OPERAND (arg, 0),
3291 old0, new0, old1, new1));
3294 return fold_build2_loc (loc, code, type,
3295 eval_subst (loc, TREE_OPERAND (arg, 0),
3296 old0, new0, old1, new1),
3297 eval_subst (loc, TREE_OPERAND (arg, 1),
3298 old0, new0, old1, new1));
3300 case tcc_expression:
3304 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3308 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3312 return fold_build3_loc (loc, code, type,
3313 eval_subst (loc, TREE_OPERAND (arg, 0),
3314 old0, new0, old1, new1),
3315 eval_subst (loc, TREE_OPERAND (arg, 1),
3316 old0, new0, old1, new1),
3317 eval_subst (loc, TREE_OPERAND (arg, 2),
3318 old0, new0, old1, new1));
3322 /* Fall through - ??? */
3324 case tcc_comparison:
3326 tree arg0 = TREE_OPERAND (arg, 0);
3327 tree arg1 = TREE_OPERAND (arg, 1);
3329 /* We need to check both for exact equality and tree equality. The
3330 former will be true if the operand has a side-effect. In that
3331 case, we know the operand occurred exactly once. */
3333 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3335 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3338 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3340 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3343 return fold_build2_loc (loc, code, type, arg0, arg1);
3351 /* Return a tree for the case when the result of an expression is RESULT
3352 converted to TYPE and OMITTED was previously an operand of the expression
3353 but is now not needed (e.g., we folded OMITTED * 0).
3355 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3356 the conversion of RESULT to TYPE. */
3359 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3361 tree t = fold_convert_loc (loc, type, result);
3363 /* If the resulting operand is an empty statement, just return the omitted
3364 statement casted to void. */
3365 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3366 return build1_loc (loc, NOP_EXPR, void_type_node,
3367 fold_ignored_result (omitted));
3369 if (TREE_SIDE_EFFECTS (omitted))
3370 return build2_loc (loc, COMPOUND_EXPR, type,
3371 fold_ignored_result (omitted), t);
3373 return non_lvalue_loc (loc, t);
3376 /* Return a tree for the case when the result of an expression is RESULT
3377 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3378 of the expression but are now not needed.
3380 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3381 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3382 evaluated before OMITTED2. Otherwise, if neither has side effects,
3383 just do the conversion of RESULT to TYPE. */
3386 omit_two_operands_loc (location_t loc, tree type, tree result,
3387 tree omitted1, tree omitted2)
3389 tree t = fold_convert_loc (loc, type, result);
3391 if (TREE_SIDE_EFFECTS (omitted2))
3392 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3393 if (TREE_SIDE_EFFECTS (omitted1))
3394 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3396 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3400 /* Return a simplified tree node for the truth-negation of ARG. This
3401 never alters ARG itself. We assume that ARG is an operation that
3402 returns a truth value (0 or 1).
3404 FIXME: one would think we would fold the result, but it causes
3405 problems with the dominator optimizer. */
3408 fold_truth_not_expr (location_t loc, tree arg)
3410 tree type = TREE_TYPE (arg);
3411 enum tree_code code = TREE_CODE (arg);
3412 location_t loc1, loc2;
3414 /* If this is a comparison, we can simply invert it, except for
3415 floating-point non-equality comparisons, in which case we just
3416 enclose a TRUTH_NOT_EXPR around what we have. */
3418 if (TREE_CODE_CLASS (code) == tcc_comparison)
3420 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3421 if (FLOAT_TYPE_P (op_type)
3422 && flag_trapping_math
3423 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3424 && code != NE_EXPR && code != EQ_EXPR)
3427 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3428 if (code == ERROR_MARK)
3431 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3432 TREE_OPERAND (arg, 1));
3438 return constant_boolean_node (integer_zerop (arg), type);
3440 case TRUTH_AND_EXPR:
3441 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3442 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3443 return build2_loc (loc, TRUTH_OR_EXPR, type,
3444 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3445 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3448 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3449 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3450 return build2_loc (loc, TRUTH_AND_EXPR, type,
3451 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3452 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3454 case TRUTH_XOR_EXPR:
3455 /* Here we can invert either operand. We invert the first operand
3456 unless the second operand is a TRUTH_NOT_EXPR in which case our
3457 result is the XOR of the first operand with the inside of the
3458 negation of the second operand. */
3460 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3461 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3462 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3464 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3465 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3466 TREE_OPERAND (arg, 1));
3468 case TRUTH_ANDIF_EXPR:
3469 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3470 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3471 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3472 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3473 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3475 case TRUTH_ORIF_EXPR:
3476 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3477 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3478 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3479 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3480 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3482 case TRUTH_NOT_EXPR:
3483 return TREE_OPERAND (arg, 0);
3487 tree arg1 = TREE_OPERAND (arg, 1);
3488 tree arg2 = TREE_OPERAND (arg, 2);
3490 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3491 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3493 /* A COND_EXPR may have a throw as one operand, which
3494 then has void type. Just leave void operands
3496 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3497 VOID_TYPE_P (TREE_TYPE (arg1))
3498 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3499 VOID_TYPE_P (TREE_TYPE (arg2))
3500 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3504 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3505 return build2_loc (loc, COMPOUND_EXPR, type,
3506 TREE_OPERAND (arg, 0),
3507 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3509 case NON_LVALUE_EXPR:
3510 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3511 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3514 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3515 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3517 /* ... fall through ... */
3520 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3521 return build1_loc (loc, TREE_CODE (arg), type,
3522 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3525 if (!integer_onep (TREE_OPERAND (arg, 1)))
3527 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3530 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3532 case CLEANUP_POINT_EXPR:
3533 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3534 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3535 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3542 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3543 assume that ARG is an operation that returns a truth value (0 or 1
3544 for scalars, 0 or -1 for vectors). Return the folded expression if
3545 folding is successful. Otherwise, return NULL_TREE. */
3548 fold_invert_truthvalue (location_t loc, tree arg)
3550 tree type = TREE_TYPE (arg);
3551 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3557 /* Return a simplified tree node for the truth-negation of ARG. This
3558 never alters ARG itself. We assume that ARG is an operation that
3559 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3562 invert_truthvalue_loc (location_t loc, tree arg)
3564 if (TREE_CODE (arg) == ERROR_MARK)
3567 tree type = TREE_TYPE (arg);
3568 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3574 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3575 with code CODE. This optimization is unsafe. */
3577 distribute_real_division (location_t loc, enum tree_code code, tree type,
3578 tree arg0, tree arg1)
3580 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3581 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3583 /* (A / C) +- (B / C) -> (A +- B) / C. */
3585 && operand_equal_p (TREE_OPERAND (arg0, 1),
3586 TREE_OPERAND (arg1, 1), 0))
3587 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3588 fold_build2_loc (loc, code, type,
3589 TREE_OPERAND (arg0, 0),
3590 TREE_OPERAND (arg1, 0)),
3591 TREE_OPERAND (arg0, 1));
3593 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3594 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3595 TREE_OPERAND (arg1, 0), 0)
3596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3597 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3599 REAL_VALUE_TYPE r0, r1;
3600 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3601 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3603 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3605 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3606 real_arithmetic (&r0, code, &r0, &r1);
3607 return fold_build2_loc (loc, MULT_EXPR, type,
3608 TREE_OPERAND (arg0, 0),
3609 build_real (type, r0));
3615 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3616 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3619 make_bit_field_ref (location_t loc, tree inner, tree type,
3620 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3622 tree result, bftype;
3626 tree size = TYPE_SIZE (TREE_TYPE (inner));
3627 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3628 || POINTER_TYPE_P (TREE_TYPE (inner)))
3629 && tree_fits_shwi_p (size)
3630 && tree_to_shwi (size) == bitsize)
3631 return fold_convert_loc (loc, type, inner);
3635 if (TYPE_PRECISION (bftype) != bitsize
3636 || TYPE_UNSIGNED (bftype) == !unsignedp)
3637 bftype = build_nonstandard_integer_type (bitsize, 0);
3639 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3640 size_int (bitsize), bitsize_int (bitpos));
3643 result = fold_convert_loc (loc, type, result);
3648 /* Optimize a bit-field compare.
3650 There are two cases: First is a compare against a constant and the
3651 second is a comparison of two items where the fields are at the same
3652 bit position relative to the start of a chunk (byte, halfword, word)
3653 large enough to contain it. In these cases we can avoid the shift
3654 implicit in bitfield extractions.
3656 For constants, we emit a compare of the shifted constant with the
3657 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3658 compared. For two fields at the same position, we do the ANDs with the
3659 similar mask and compare the result of the ANDs.
3661 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3662 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3663 are the left and right operands of the comparison, respectively.
3665 If the optimization described above can be done, we return the resulting
3666 tree. Otherwise we return zero. */
3669 optimize_bit_field_compare (location_t loc, enum tree_code code,
3670 tree compare_type, tree lhs, tree rhs)
3672 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3673 tree type = TREE_TYPE (lhs);
3675 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3676 machine_mode lmode, rmode, nmode;
3677 int lunsignedp, runsignedp;
3678 int lvolatilep = 0, rvolatilep = 0;
3679 tree linner, rinner = NULL_TREE;
3683 /* Get all the information about the extractions being done. If the bit size
3684 if the same as the size of the underlying object, we aren't doing an
3685 extraction at all and so can do nothing. We also don't want to
3686 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3687 then will no longer be able to replace it. */
3688 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3689 &lunsignedp, &lvolatilep, false);
3690 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3691 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3696 /* If this is not a constant, we can only do something if bit positions,
3697 sizes, and signedness are the same. */
3698 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3699 &runsignedp, &rvolatilep, false);
3701 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3702 || lunsignedp != runsignedp || offset != 0
3703 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3707 /* See if we can find a mode to refer to this field. We should be able to,
3708 but fail if we can't. */
3709 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3710 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3711 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3712 TYPE_ALIGN (TREE_TYPE (rinner))),
3714 if (nmode == VOIDmode)
3717 /* Set signed and unsigned types of the precision of this mode for the
3719 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3721 /* Compute the bit position and size for the new reference and our offset
3722 within it. If the new reference is the same size as the original, we
3723 won't optimize anything, so return zero. */
3724 nbitsize = GET_MODE_BITSIZE (nmode);
3725 nbitpos = lbitpos & ~ (nbitsize - 1);
3727 if (nbitsize == lbitsize)
3730 if (BYTES_BIG_ENDIAN)
3731 lbitpos = nbitsize - lbitsize - lbitpos;
3733 /* Make the mask to be used against the extracted field. */
3734 mask = build_int_cst_type (unsigned_type, -1);
3735 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3736 mask = const_binop (RSHIFT_EXPR, mask,
3737 size_int (nbitsize - lbitsize - lbitpos));
3740 /* If not comparing with constant, just rework the comparison
3742 return fold_build2_loc (loc, code, compare_type,
3743 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3744 make_bit_field_ref (loc, linner,
3749 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3750 make_bit_field_ref (loc, rinner,
3756 /* Otherwise, we are handling the constant case. See if the constant is too
3757 big for the field. Warn and return a tree of for 0 (false) if so. We do
3758 this not only for its own sake, but to avoid having to test for this
3759 error case below. If we didn't, we might generate wrong code.
3761 For unsigned fields, the constant shifted right by the field length should
3762 be all zero. For signed fields, the high-order bits should agree with
3767 if (wi::lrshift (rhs, lbitsize) != 0)
3769 warning (0, "comparison is always %d due to width of bit-field",
3771 return constant_boolean_node (code == NE_EXPR, compare_type);
3776 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3777 if (tem != 0 && tem != -1)
3779 warning (0, "comparison is always %d due to width of bit-field",
3781 return constant_boolean_node (code == NE_EXPR, compare_type);
3785 /* Single-bit compares should always be against zero. */
3786 if (lbitsize == 1 && ! integer_zerop (rhs))
3788 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3789 rhs = build_int_cst (type, 0);
3792 /* Make a new bitfield reference, shift the constant over the
3793 appropriate number of bits and mask it with the computed mask
3794 (in case this was a signed field). If we changed it, make a new one. */
3795 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3797 rhs = const_binop (BIT_AND_EXPR,
3798 const_binop (LSHIFT_EXPR,
3799 fold_convert_loc (loc, unsigned_type, rhs),
3800 size_int (lbitpos)),
3803 lhs = build2_loc (loc, code, compare_type,
3804 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3808 /* Subroutine for fold_truth_andor_1: decode a field reference.
3810 If EXP is a comparison reference, we return the innermost reference.
3812 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3813 set to the starting bit number.
3815 If the innermost field can be completely contained in a mode-sized
3816 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3818 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3819 otherwise it is not changed.
3821 *PUNSIGNEDP is set to the signedness of the field.
3823 *PMASK is set to the mask used. This is either contained in a
3824 BIT_AND_EXPR or derived from the width of the field.
3826 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3828 Return 0 if this is not a component reference or is one that we can't
3829 do anything with. */
3832 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3833 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3834 int *punsignedp, int *pvolatilep,
3835 tree *pmask, tree *pand_mask)
3837 tree outer_type = 0;
3839 tree mask, inner, offset;
3841 unsigned int precision;
3843 /* All the optimizations using this function assume integer fields.
3844 There are problems with FP fields since the type_for_size call
3845 below can fail for, e.g., XFmode. */
3846 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3849 /* We are interested in the bare arrangement of bits, so strip everything
3850 that doesn't affect the machine mode. However, record the type of the
3851 outermost expression if it may matter below. */
3852 if (CONVERT_EXPR_P (exp)
3853 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3854 outer_type = TREE_TYPE (exp);
3857 if (TREE_CODE (exp) == BIT_AND_EXPR)
3859 and_mask = TREE_OPERAND (exp, 1);
3860 exp = TREE_OPERAND (exp, 0);
3861 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3862 if (TREE_CODE (and_mask) != INTEGER_CST)
3866 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3867 punsignedp, pvolatilep, false);
3868 if ((inner == exp && and_mask == 0)
3869 || *pbitsize < 0 || offset != 0
3870 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3873 /* If the number of bits in the reference is the same as the bitsize of
3874 the outer type, then the outer type gives the signedness. Otherwise
3875 (in case of a small bitfield) the signedness is unchanged. */
3876 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3877 *punsignedp = TYPE_UNSIGNED (outer_type);
3879 /* Compute the mask to access the bitfield. */
3880 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3881 precision = TYPE_PRECISION (unsigned_type);
3883 mask = build_int_cst_type (unsigned_type, -1);
3885 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3886 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3888 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3890 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3891 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3894 *pand_mask = and_mask;
3898 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3899 bit positions and MASK is SIGNED. */
3902 all_ones_mask_p (const_tree mask, unsigned int size)
3904 tree type = TREE_TYPE (mask);
3905 unsigned int precision = TYPE_PRECISION (type);
3907 /* If this function returns true when the type of the mask is
3908 UNSIGNED, then there will be errors. In particular see
3909 gcc.c-torture/execute/990326-1.c. There does not appear to be
3910 any documentation paper trail as to why this is so. But the pre
3911 wide-int worked with that restriction and it has been preserved
3913 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3916 return wi::mask (size, false, precision) == mask;
3919 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3920 represents the sign bit of EXP's type. If EXP represents a sign
3921 or zero extension, also test VAL against the unextended type.
3922 The return value is the (sub)expression whose sign bit is VAL,
3923 or NULL_TREE otherwise. */
3926 sign_bit_p (tree exp, const_tree val)
3931 /* Tree EXP must have an integral type. */
3932 t = TREE_TYPE (exp);
3933 if (! INTEGRAL_TYPE_P (t))
3936 /* Tree VAL must be an integer constant. */
3937 if (TREE_CODE (val) != INTEGER_CST
3938 || TREE_OVERFLOW (val))
3941 width = TYPE_PRECISION (t);
3942 if (wi::only_sign_bit_p (val, width))
3945 /* Handle extension from a narrower type. */
3946 if (TREE_CODE (exp) == NOP_EXPR
3947 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3948 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3953 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3954 to be evaluated unconditionally. */
3957 simple_operand_p (const_tree exp)
3959 /* Strip any conversions that don't change the machine mode. */
3962 return (CONSTANT_CLASS_P (exp)
3963 || TREE_CODE (exp) == SSA_NAME
3965 && ! TREE_ADDRESSABLE (exp)
3966 && ! TREE_THIS_VOLATILE (exp)
3967 && ! DECL_NONLOCAL (exp)
3968 /* Don't regard global variables as simple. They may be
3969 allocated in ways unknown to the compiler (shared memory,
3970 #pragma weak, etc). */
3971 && ! TREE_PUBLIC (exp)
3972 && ! DECL_EXTERNAL (exp)
3973 /* Weakrefs are not safe to be read, since they can be NULL.
3974 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3975 have DECL_WEAK flag set. */
3976 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3977 /* Loading a static variable is unduly expensive, but global
3978 registers aren't expensive. */
3979 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3982 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3983 to be evaluated unconditionally.
3984 I addition to simple_operand_p, we assume that comparisons, conversions,
3985 and logic-not operations are simple, if their operands are simple, too. */
3988 simple_operand_p_2 (tree exp)
3990 enum tree_code code;
3992 if (TREE_SIDE_EFFECTS (exp)
3993 || tree_could_trap_p (exp))
3996 while (CONVERT_EXPR_P (exp))
3997 exp = TREE_OPERAND (exp, 0);
3999 code = TREE_CODE (exp);
4001 if (TREE_CODE_CLASS (code) == tcc_comparison)
4002 return (simple_operand_p (TREE_OPERAND (exp, 0))
4003 && simple_operand_p (TREE_OPERAND (exp, 1)));
4005 if (code == TRUTH_NOT_EXPR)
4006 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4008 return simple_operand_p (exp);
4012 /* The following functions are subroutines to fold_range_test and allow it to
4013 try to change a logical combination of comparisons into a range test.
4016 X == 2 || X == 3 || X == 4 || X == 5
4020 (unsigned) (X - 2) <= 3
4022 We describe each set of comparisons as being either inside or outside
4023 a range, using a variable named like IN_P, and then describe the
4024 range with a lower and upper bound. If one of the bounds is omitted,
4025 it represents either the highest or lowest value of the type.
4027 In the comments below, we represent a range by two numbers in brackets
4028 preceded by a "+" to designate being inside that range, or a "-" to
4029 designate being outside that range, so the condition can be inverted by
4030 flipping the prefix. An omitted bound is represented by a "-". For
4031 example, "- [-, 10]" means being outside the range starting at the lowest
4032 possible value and ending at 10, in other words, being greater than 10.
4033 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4036 We set up things so that the missing bounds are handled in a consistent
4037 manner so neither a missing bound nor "true" and "false" need to be
4038 handled using a special case. */
4040 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4041 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4042 and UPPER1_P are nonzero if the respective argument is an upper bound
4043 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4044 must be specified for a comparison. ARG1 will be converted to ARG0's
4045 type if both are specified. */
4048 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4049 tree arg1, int upper1_p)
4055 /* If neither arg represents infinity, do the normal operation.
4056 Else, if not a comparison, return infinity. Else handle the special
4057 comparison rules. Note that most of the cases below won't occur, but
4058 are handled for consistency. */
4060 if (arg0 != 0 && arg1 != 0)
4062 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4063 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4065 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4068 if (TREE_CODE_CLASS (code) != tcc_comparison)
4071 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4072 for neither. In real maths, we cannot assume open ended ranges are
4073 the same. But, this is computer arithmetic, where numbers are finite.
4074 We can therefore make the transformation of any unbounded range with
4075 the value Z, Z being greater than any representable number. This permits
4076 us to treat unbounded ranges as equal. */
4077 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4078 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4082 result = sgn0 == sgn1;
4085 result = sgn0 != sgn1;
4088 result = sgn0 < sgn1;
4091 result = sgn0 <= sgn1;
4094 result = sgn0 > sgn1;
4097 result = sgn0 >= sgn1;
4103 return constant_boolean_node (result, type);
4106 /* Helper routine for make_range. Perform one step for it, return
4107 new expression if the loop should continue or NULL_TREE if it should
4111 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4112 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4113 bool *strict_overflow_p)
4115 tree arg0_type = TREE_TYPE (arg0);
4116 tree n_low, n_high, low = *p_low, high = *p_high;
4117 int in_p = *p_in_p, n_in_p;
4121 case TRUTH_NOT_EXPR:
4122 /* We can only do something if the range is testing for zero. */
4123 if (low == NULL_TREE || high == NULL_TREE
4124 || ! integer_zerop (low) || ! integer_zerop (high))
4129 case EQ_EXPR: case NE_EXPR:
4130 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4131 /* We can only do something if the range is testing for zero
4132 and if the second operand is an integer constant. Note that
4133 saying something is "in" the range we make is done by
4134 complementing IN_P since it will set in the initial case of
4135 being not equal to zero; "out" is leaving it alone. */
4136 if (low == NULL_TREE || high == NULL_TREE
4137 || ! integer_zerop (low) || ! integer_zerop (high)
4138 || TREE_CODE (arg1) != INTEGER_CST)
4143 case NE_EXPR: /* - [c, c] */
4146 case EQ_EXPR: /* + [c, c] */
4147 in_p = ! in_p, low = high = arg1;
4149 case GT_EXPR: /* - [-, c] */
4150 low = 0, high = arg1;
4152 case GE_EXPR: /* + [c, -] */
4153 in_p = ! in_p, low = arg1, high = 0;
4155 case LT_EXPR: /* - [c, -] */
4156 low = arg1, high = 0;
4158 case LE_EXPR: /* + [-, c] */
4159 in_p = ! in_p, low = 0, high = arg1;
4165 /* If this is an unsigned comparison, we also know that EXP is
4166 greater than or equal to zero. We base the range tests we make
4167 on that fact, so we record it here so we can parse existing
4168 range tests. We test arg0_type since often the return type
4169 of, e.g. EQ_EXPR, is boolean. */
4170 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4172 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4174 build_int_cst (arg0_type, 0),
4178 in_p = n_in_p, low = n_low, high = n_high;
4180 /* If the high bound is missing, but we have a nonzero low
4181 bound, reverse the range so it goes from zero to the low bound
4183 if (high == 0 && low && ! integer_zerop (low))
4186 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4187 build_int_cst (TREE_TYPE (low), 1), 0);
4188 low = build_int_cst (arg0_type, 0);
4198 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4199 low and high are non-NULL, then normalize will DTRT. */
4200 if (!TYPE_UNSIGNED (arg0_type)
4201 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4203 if (low == NULL_TREE)
4204 low = TYPE_MIN_VALUE (arg0_type);
4205 if (high == NULL_TREE)
4206 high = TYPE_MAX_VALUE (arg0_type);
4209 /* (-x) IN [a,b] -> x in [-b, -a] */
4210 n_low = range_binop (MINUS_EXPR, exp_type,
4211 build_int_cst (exp_type, 0),
4213 n_high = range_binop (MINUS_EXPR, exp_type,
4214 build_int_cst (exp_type, 0),
4216 if (n_high != 0 && TREE_OVERFLOW (n_high))
4222 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4223 build_int_cst (exp_type, 1));
4227 if (TREE_CODE (arg1) != INTEGER_CST)
4230 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4231 move a constant to the other side. */
4232 if (!TYPE_UNSIGNED (arg0_type)
4233 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4236 /* If EXP is signed, any overflow in the computation is undefined,
4237 so we don't worry about it so long as our computations on
4238 the bounds don't overflow. For unsigned, overflow is defined
4239 and this is exactly the right thing. */
4240 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4241 arg0_type, low, 0, arg1, 0);
4242 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4243 arg0_type, high, 1, arg1, 0);
4244 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4245 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4248 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4249 *strict_overflow_p = true;
4252 /* Check for an unsigned range which has wrapped around the maximum
4253 value thus making n_high < n_low, and normalize it. */
4254 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4256 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4257 build_int_cst (TREE_TYPE (n_high), 1), 0);
4258 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4259 build_int_cst (TREE_TYPE (n_low), 1), 0);
4261 /* If the range is of the form +/- [ x+1, x ], we won't
4262 be able to normalize it. But then, it represents the
4263 whole range or the empty set, so make it
4265 if (tree_int_cst_equal (n_low, low)
4266 && tree_int_cst_equal (n_high, high))
4272 low = n_low, high = n_high;
4280 case NON_LVALUE_EXPR:
4281 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4284 if (! INTEGRAL_TYPE_P (arg0_type)
4285 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4286 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4289 n_low = low, n_high = high;
4292 n_low = fold_convert_loc (loc, arg0_type, n_low);
4295 n_high = fold_convert_loc (loc, arg0_type, n_high);
4297 /* If we're converting arg0 from an unsigned type, to exp,
4298 a signed type, we will be doing the comparison as unsigned.
4299 The tests above have already verified that LOW and HIGH
4302 So we have to ensure that we will handle large unsigned
4303 values the same way that the current signed bounds treat
4306 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4310 /* For fixed-point modes, we need to pass the saturating flag
4311 as the 2nd parameter. */
4312 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4314 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4315 TYPE_SATURATING (arg0_type));
4318 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4320 /* A range without an upper bound is, naturally, unbounded.
4321 Since convert would have cropped a very large value, use
4322 the max value for the destination type. */
4324 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4325 : TYPE_MAX_VALUE (arg0_type);
4327 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4328 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4329 fold_convert_loc (loc, arg0_type,
4331 build_int_cst (arg0_type, 1));
4333 /* If the low bound is specified, "and" the range with the
4334 range for which the original unsigned value will be
4338 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4339 1, fold_convert_loc (loc, arg0_type,
4344 in_p = (n_in_p == in_p);
4348 /* Otherwise, "or" the range with the range of the input
4349 that will be interpreted as negative. */
4350 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4351 1, fold_convert_loc (loc, arg0_type,
4356 in_p = (in_p != n_in_p);
4370 /* Given EXP, a logical expression, set the range it is testing into
4371 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4372 actually being tested. *PLOW and *PHIGH will be made of the same
4373 type as the returned expression. If EXP is not a comparison, we
4374 will most likely not be returning a useful value and range. Set
4375 *STRICT_OVERFLOW_P to true if the return value is only valid
4376 because signed overflow is undefined; otherwise, do not change
4377 *STRICT_OVERFLOW_P. */
4380 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4381 bool *strict_overflow_p)
4383 enum tree_code code;
4384 tree arg0, arg1 = NULL_TREE;
4385 tree exp_type, nexp;
4388 location_t loc = EXPR_LOCATION (exp);
4390 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4391 and see if we can refine the range. Some of the cases below may not
4392 happen, but it doesn't seem worth worrying about this. We "continue"
4393 the outer loop when we've changed something; otherwise we "break"
4394 the switch, which will "break" the while. */
4397 low = high = build_int_cst (TREE_TYPE (exp), 0);
4401 code = TREE_CODE (exp);
4402 exp_type = TREE_TYPE (exp);
4405 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4407 if (TREE_OPERAND_LENGTH (exp) > 0)
4408 arg0 = TREE_OPERAND (exp, 0);
4409 if (TREE_CODE_CLASS (code) == tcc_binary
4410 || TREE_CODE_CLASS (code) == tcc_comparison
4411 || (TREE_CODE_CLASS (code) == tcc_expression
4412 && TREE_OPERAND_LENGTH (exp) > 1))
4413 arg1 = TREE_OPERAND (exp, 1);
4415 if (arg0 == NULL_TREE)
4418 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4419 &high, &in_p, strict_overflow_p);
4420 if (nexp == NULL_TREE)
4425 /* If EXP is a constant, we can evaluate whether this is true or false. */
4426 if (TREE_CODE (exp) == INTEGER_CST)
4428 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4430 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4436 *pin_p = in_p, *plow = low, *phigh = high;
4440 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4441 type, TYPE, return an expression to test if EXP is in (or out of, depending
4442 on IN_P) the range. Return 0 if the test couldn't be created. */
4445 build_range_check (location_t loc, tree type, tree exp, int in_p,
4446 tree low, tree high)
4448 tree etype = TREE_TYPE (exp), value;
4450 /* Disable this optimization for function pointer expressions
4451 on targets that require function pointer canonicalization. */
4452 if (targetm.have_canonicalize_funcptr_for_compare ()
4453 && TREE_CODE (etype) == POINTER_TYPE
4454 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4459 value = build_range_check (loc, type, exp, 1, low, high);
4461 return invert_truthvalue_loc (loc, value);
4466 if (low == 0 && high == 0)
4467 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4470 return fold_build2_loc (loc, LE_EXPR, type, exp,
4471 fold_convert_loc (loc, etype, high));
4474 return fold_build2_loc (loc, GE_EXPR, type, exp,
4475 fold_convert_loc (loc, etype, low));
4477 if (operand_equal_p (low, high, 0))
4478 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4479 fold_convert_loc (loc, etype, low));
4481 if (integer_zerop (low))
4483 if (! TYPE_UNSIGNED (etype))
4485 etype = unsigned_type_for (etype);
4486 high = fold_convert_loc (loc, etype, high);
4487 exp = fold_convert_loc (loc, etype, exp);
4489 return build_range_check (loc, type, exp, 1, 0, high);
4492 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4493 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4495 int prec = TYPE_PRECISION (etype);
4497 if (wi::mask (prec - 1, false, prec) == high)
4499 if (TYPE_UNSIGNED (etype))
4501 tree signed_etype = signed_type_for (etype);
4502 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4504 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4506 etype = signed_etype;
4507 exp = fold_convert_loc (loc, etype, exp);
4509 return fold_build2_loc (loc, GT_EXPR, type, exp,
4510 build_int_cst (etype, 0));
4514 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4515 This requires wrap-around arithmetics for the type of the expression.
4516 First make sure that arithmetics in this type is valid, then make sure
4517 that it wraps around. */
4518 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4519 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4520 TYPE_UNSIGNED (etype));
4522 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4524 tree utype, minv, maxv;
4526 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4527 for the type in question, as we rely on this here. */
4528 utype = unsigned_type_for (etype);
4529 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4530 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4531 build_int_cst (TREE_TYPE (maxv), 1), 1);
4532 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4534 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4541 high = fold_convert_loc (loc, etype, high);
4542 low = fold_convert_loc (loc, etype, low);
4543 exp = fold_convert_loc (loc, etype, exp);
4545 value = const_binop (MINUS_EXPR, high, low);
4548 if (POINTER_TYPE_P (etype))
4550 if (value != 0 && !TREE_OVERFLOW (value))
4552 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4553 return build_range_check (loc, type,
4554 fold_build_pointer_plus_loc (loc, exp, low),
4555 1, build_int_cst (etype, 0), value);
4560 if (value != 0 && !TREE_OVERFLOW (value))
4561 return build_range_check (loc, type,
4562 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4563 1, build_int_cst (etype, 0), value);
4568 /* Return the predecessor of VAL in its type, handling the infinite case. */
4571 range_predecessor (tree val)
4573 tree type = TREE_TYPE (val);
4575 if (INTEGRAL_TYPE_P (type)
4576 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4579 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4580 build_int_cst (TREE_TYPE (val), 1), 0);
4583 /* Return the successor of VAL in its type, handling the infinite case. */
4586 range_successor (tree val)
4588 tree type = TREE_TYPE (val);
4590 if (INTEGRAL_TYPE_P (type)
4591 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4594 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4595 build_int_cst (TREE_TYPE (val), 1), 0);
4598 /* Given two ranges, see if we can merge them into one. Return 1 if we
4599 can, 0 if we can't. Set the output range into the specified parameters. */
4602 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4603 tree high0, int in1_p, tree low1, tree high1)
4611 int lowequal = ((low0 == 0 && low1 == 0)
4612 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4613 low0, 0, low1, 0)));
4614 int highequal = ((high0 == 0 && high1 == 0)
4615 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4616 high0, 1, high1, 1)));
4618 /* Make range 0 be the range that starts first, or ends last if they
4619 start at the same value. Swap them if it isn't. */
4620 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4623 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4624 high1, 1, high0, 1))))
4626 temp = in0_p, in0_p = in1_p, in1_p = temp;
4627 tem = low0, low0 = low1, low1 = tem;
4628 tem = high0, high0 = high1, high1 = tem;
4631 /* Now flag two cases, whether the ranges are disjoint or whether the
4632 second range is totally subsumed in the first. Note that the tests
4633 below are simplified by the ones above. */
4634 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4635 high0, 1, low1, 0));
4636 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4637 high1, 1, high0, 1));
4639 /* We now have four cases, depending on whether we are including or
4640 excluding the two ranges. */
4643 /* If they don't overlap, the result is false. If the second range
4644 is a subset it is the result. Otherwise, the range is from the start
4645 of the second to the end of the first. */
4647 in_p = 0, low = high = 0;
4649 in_p = 1, low = low1, high = high1;
4651 in_p = 1, low = low1, high = high0;
4654 else if (in0_p && ! in1_p)
4656 /* If they don't overlap, the result is the first range. If they are
4657 equal, the result is false. If the second range is a subset of the
4658 first, and the ranges begin at the same place, we go from just after
4659 the end of the second range to the end of the first. If the second
4660 range is not a subset of the first, or if it is a subset and both
4661 ranges end at the same place, the range starts at the start of the
4662 first range and ends just before the second range.
4663 Otherwise, we can't describe this as a single range. */
4665 in_p = 1, low = low0, high = high0;
4666 else if (lowequal && highequal)
4667 in_p = 0, low = high = 0;
4668 else if (subset && lowequal)
4670 low = range_successor (high1);
4675 /* We are in the weird situation where high0 > high1 but
4676 high1 has no successor. Punt. */
4680 else if (! subset || highequal)
4683 high = range_predecessor (low1);
4687 /* low0 < low1 but low1 has no predecessor. Punt. */
4695 else if (! in0_p && in1_p)
4697 /* If they don't overlap, the result is the second range. If the second
4698 is a subset of the first, the result is false. Otherwise,
4699 the range starts just after the first range and ends at the
4700 end of the second. */
4702 in_p = 1, low = low1, high = high1;
4703 else if (subset || highequal)
4704 in_p = 0, low = high = 0;
4707 low = range_successor (high0);
4712 /* high1 > high0 but high0 has no successor. Punt. */
4720 /* The case where we are excluding both ranges. Here the complex case
4721 is if they don't overlap. In that case, the only time we have a
4722 range is if they are adjacent. If the second is a subset of the
4723 first, the result is the first. Otherwise, the range to exclude
4724 starts at the beginning of the first range and ends at the end of the
4728 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4729 range_successor (high0),
4731 in_p = 0, low = low0, high = high1;
4734 /* Canonicalize - [min, x] into - [-, x]. */
4735 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4736 switch (TREE_CODE (TREE_TYPE (low0)))
4739 if (TYPE_PRECISION (TREE_TYPE (low0))
4740 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4744 if (tree_int_cst_equal (low0,
4745 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4749 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4750 && integer_zerop (low0))
4757 /* Canonicalize - [x, max] into - [x, -]. */
4758 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4759 switch (TREE_CODE (TREE_TYPE (high1)))
4762 if (TYPE_PRECISION (TREE_TYPE (high1))
4763 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4767 if (tree_int_cst_equal (high1,
4768 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4772 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4773 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4775 build_int_cst (TREE_TYPE (high1), 1),
4783 /* The ranges might be also adjacent between the maximum and
4784 minimum values of the given type. For
4785 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4786 return + [x + 1, y - 1]. */
4787 if (low0 == 0 && high1 == 0)
4789 low = range_successor (high0);
4790 high = range_predecessor (low1);
4791 if (low == 0 || high == 0)
4801 in_p = 0, low = low0, high = high0;
4803 in_p = 0, low = low0, high = high1;
4806 *pin_p = in_p, *plow = low, *phigh = high;
4811 /* Subroutine of fold, looking inside expressions of the form
4812 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4813 of the COND_EXPR. This function is being used also to optimize
4814 A op B ? C : A, by reversing the comparison first.
4816 Return a folded expression whose code is not a COND_EXPR
4817 anymore, or NULL_TREE if no folding opportunity is found. */
4820 fold_cond_expr_with_comparison (location_t loc, tree type,
4821 tree arg0, tree arg1, tree arg2)
4823 enum tree_code comp_code = TREE_CODE (arg0);
4824 tree arg00 = TREE_OPERAND (arg0, 0);
4825 tree arg01 = TREE_OPERAND (arg0, 1);
4826 tree arg1_type = TREE_TYPE (arg1);
4832 /* If we have A op 0 ? A : -A, consider applying the following
4835 A == 0? A : -A same as -A
4836 A != 0? A : -A same as A
4837 A >= 0? A : -A same as abs (A)
4838 A > 0? A : -A same as abs (A)
4839 A <= 0? A : -A same as -abs (A)
4840 A < 0? A : -A same as -abs (A)
4842 None of these transformations work for modes with signed
4843 zeros. If A is +/-0, the first two transformations will
4844 change the sign of the result (from +0 to -0, or vice
4845 versa). The last four will fix the sign of the result,
4846 even though the original expressions could be positive or
4847 negative, depending on the sign of A.
4849 Note that all these transformations are correct if A is
4850 NaN, since the two alternatives (A and -A) are also NaNs. */
4851 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4852 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4853 ? real_zerop (arg01)
4854 : integer_zerop (arg01))
4855 && ((TREE_CODE (arg2) == NEGATE_EXPR
4856 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4857 /* In the case that A is of the form X-Y, '-A' (arg2) may
4858 have already been folded to Y-X, check for that. */
4859 || (TREE_CODE (arg1) == MINUS_EXPR
4860 && TREE_CODE (arg2) == MINUS_EXPR
4861 && operand_equal_p (TREE_OPERAND (arg1, 0),
4862 TREE_OPERAND (arg2, 1), 0)
4863 && operand_equal_p (TREE_OPERAND (arg1, 1),
4864 TREE_OPERAND (arg2, 0), 0))))
4869 tem = fold_convert_loc (loc, arg1_type, arg1);
4870 return pedantic_non_lvalue_loc (loc,
4871 fold_convert_loc (loc, type,
4872 negate_expr (tem)));
4875 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4878 if (flag_trapping_math)
4883 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4884 arg1 = fold_convert_loc (loc, signed_type_for
4885 (TREE_TYPE (arg1)), arg1);
4886 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4887 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4890 if (flag_trapping_math)
4894 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4895 arg1 = fold_convert_loc (loc, signed_type_for
4896 (TREE_TYPE (arg1)), arg1);
4897 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4898 return negate_expr (fold_convert_loc (loc, type, tem));
4900 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4904 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4905 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4906 both transformations are correct when A is NaN: A != 0
4907 is then true, and A == 0 is false. */
4909 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4910 && integer_zerop (arg01) && integer_zerop (arg2))
4912 if (comp_code == NE_EXPR)
4913 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4914 else if (comp_code == EQ_EXPR)
4915 return build_zero_cst (type);
4918 /* Try some transformations of A op B ? A : B.
4920 A == B? A : B same as B
4921 A != B? A : B same as A
4922 A >= B? A : B same as max (A, B)
4923 A > B? A : B same as max (B, A)
4924 A <= B? A : B same as min (A, B)
4925 A < B? A : B same as min (B, A)
4927 As above, these transformations don't work in the presence
4928 of signed zeros. For example, if A and B are zeros of
4929 opposite sign, the first two transformations will change
4930 the sign of the result. In the last four, the original
4931 expressions give different results for (A=+0, B=-0) and
4932 (A=-0, B=+0), but the transformed expressions do not.
4934 The first two transformations are correct if either A or B
4935 is a NaN. In the first transformation, the condition will
4936 be false, and B will indeed be chosen. In the case of the
4937 second transformation, the condition A != B will be true,
4938 and A will be chosen.
4940 The conversions to max() and min() are not correct if B is
4941 a number and A is not. The conditions in the original
4942 expressions will be false, so all four give B. The min()
4943 and max() versions would give a NaN instead. */
4944 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4945 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4946 /* Avoid these transformations if the COND_EXPR may be used
4947 as an lvalue in the C++ front-end. PR c++/19199. */
4949 || VECTOR_TYPE_P (type)
4950 || (! lang_GNU_CXX ()
4951 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4952 || ! maybe_lvalue_p (arg1)
4953 || ! maybe_lvalue_p (arg2)))
4955 tree comp_op0 = arg00;
4956 tree comp_op1 = arg01;
4957 tree comp_type = TREE_TYPE (comp_op0);
4959 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4960 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4970 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4972 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4977 /* In C++ a ?: expression can be an lvalue, so put the
4978 operand which will be used if they are equal first
4979 so that we can convert this back to the
4980 corresponding COND_EXPR. */
4981 if (!HONOR_NANS (arg1))
4983 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4984 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4985 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4986 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4987 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4988 comp_op1, comp_op0);
4989 return pedantic_non_lvalue_loc (loc,
4990 fold_convert_loc (loc, type, tem));
4997 if (!HONOR_NANS (arg1))
4999 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5000 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5001 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5002 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5003 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5004 comp_op1, comp_op0);
5005 return pedantic_non_lvalue_loc (loc,
5006 fold_convert_loc (loc, type, tem));
5010 if (!HONOR_NANS (arg1))
5011 return pedantic_non_lvalue_loc (loc,
5012 fold_convert_loc (loc, type, arg2));
5015 if (!HONOR_NANS (arg1))
5016 return pedantic_non_lvalue_loc (loc,
5017 fold_convert_loc (loc, type, arg1));
5020 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5025 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5026 we might still be able to simplify this. For example,
5027 if C1 is one less or one more than C2, this might have started
5028 out as a MIN or MAX and been transformed by this function.
5029 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5031 if (INTEGRAL_TYPE_P (type)
5032 && TREE_CODE (arg01) == INTEGER_CST
5033 && TREE_CODE (arg2) == INTEGER_CST)
5037 if (TREE_CODE (arg1) == INTEGER_CST)
5039 /* We can replace A with C1 in this case. */
5040 arg1 = fold_convert_loc (loc, type, arg01);
5041 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5044 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5045 MIN_EXPR, to preserve the signedness of the comparison. */
5046 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5048 && operand_equal_p (arg01,
5049 const_binop (PLUS_EXPR, arg2,
5050 build_int_cst (type, 1)),
5053 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5054 fold_convert_loc (loc, TREE_TYPE (arg00),
5056 return pedantic_non_lvalue_loc (loc,
5057 fold_convert_loc (loc, type, tem));
5062 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5064 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5066 && operand_equal_p (arg01,
5067 const_binop (MINUS_EXPR, arg2,
5068 build_int_cst (type, 1)),
5071 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5072 fold_convert_loc (loc, TREE_TYPE (arg00),
5074 return pedantic_non_lvalue_loc (loc,
5075 fold_convert_loc (loc, type, tem));
5080 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5081 MAX_EXPR, to preserve the signedness of the comparison. */
5082 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5084 && operand_equal_p (arg01,
5085 const_binop (MINUS_EXPR, arg2,
5086 build_int_cst (type, 1)),
5089 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5090 fold_convert_loc (loc, TREE_TYPE (arg00),
5092 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5097 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5098 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5100 && operand_equal_p (arg01,
5101 const_binop (PLUS_EXPR, arg2,
5102 build_int_cst (type, 1)),
5105 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5106 fold_convert_loc (loc, TREE_TYPE (arg00),
5108 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5122 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5123 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5124 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5128 /* EXP is some logical combination of boolean tests. See if we can
5129 merge it into some range test. Return the new tree if so. */
5132 fold_range_test (location_t loc, enum tree_code code, tree type,
5135 int or_op = (code == TRUTH_ORIF_EXPR
5136 || code == TRUTH_OR_EXPR);
5137 int in0_p, in1_p, in_p;
5138 tree low0, low1, low, high0, high1, high;
5139 bool strict_overflow_p = false;
5141 const char * const warnmsg = G_("assuming signed overflow does not occur "
5142 "when simplifying range test");
5144 if (!INTEGRAL_TYPE_P (type))
5147 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5148 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5150 /* If this is an OR operation, invert both sides; we will invert
5151 again at the end. */
5153 in0_p = ! in0_p, in1_p = ! in1_p;
5155 /* If both expressions are the same, if we can merge the ranges, and we
5156 can build the range test, return it or it inverted. If one of the
5157 ranges is always true or always false, consider it to be the same
5158 expression as the other. */
5159 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5160 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5162 && 0 != (tem = (build_range_check (loc, type,
5164 : rhs != 0 ? rhs : integer_zero_node,
5167 if (strict_overflow_p)
5168 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5169 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5172 /* On machines where the branch cost is expensive, if this is a
5173 short-circuited branch and the underlying object on both sides
5174 is the same, make a non-short-circuit operation. */
5175 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5176 && lhs != 0 && rhs != 0
5177 && (code == TRUTH_ANDIF_EXPR
5178 || code == TRUTH_ORIF_EXPR)
5179 && operand_equal_p (lhs, rhs, 0))
5181 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5182 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5183 which cases we can't do this. */
5184 if (simple_operand_p (lhs))
5185 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5186 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5189 else if (!lang_hooks.decls.global_bindings_p ()
5190 && !CONTAINS_PLACEHOLDER_P (lhs))
5192 tree common = save_expr (lhs);
5194 if (0 != (lhs = build_range_check (loc, type, common,
5195 or_op ? ! in0_p : in0_p,
5197 && (0 != (rhs = build_range_check (loc, type, common,
5198 or_op ? ! in1_p : in1_p,
5201 if (strict_overflow_p)
5202 fold_overflow_warning (warnmsg,
5203 WARN_STRICT_OVERFLOW_COMPARISON);
5204 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5205 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5214 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5215 bit value. Arrange things so the extra bits will be set to zero if and
5216 only if C is signed-extended to its full width. If MASK is nonzero,
5217 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5220 unextend (tree c, int p, int unsignedp, tree mask)
5222 tree type = TREE_TYPE (c);
5223 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5226 if (p == modesize || unsignedp)
5229 /* We work by getting just the sign bit into the low-order bit, then
5230 into the high-order bit, then sign-extend. We then XOR that value
5232 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5234 /* We must use a signed type in order to get an arithmetic right shift.
5235 However, we must also avoid introducing accidental overflows, so that
5236 a subsequent call to integer_zerop will work. Hence we must
5237 do the type conversion here. At this point, the constant is either
5238 zero or one, and the conversion to a signed type can never overflow.
5239 We could get an overflow if this conversion is done anywhere else. */
5240 if (TYPE_UNSIGNED (type))
5241 temp = fold_convert (signed_type_for (type), temp);
5243 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5244 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5246 temp = const_binop (BIT_AND_EXPR, temp,
5247 fold_convert (TREE_TYPE (c), mask));
5248 /* If necessary, convert the type back to match the type of C. */
5249 if (TYPE_UNSIGNED (type))
5250 temp = fold_convert (type, temp);
5252 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5255 /* For an expression that has the form
5259 we can drop one of the inner expressions and simplify to
5263 LOC is the location of the resulting expression. OP is the inner
5264 logical operation; the left-hand side in the examples above, while CMPOP
5265 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5266 removing a condition that guards another, as in
5267 (A != NULL && A->...) || A == NULL
5268 which we must not transform. If RHS_ONLY is true, only eliminate the
5269 right-most operand of the inner logical operation. */
5272 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5275 tree type = TREE_TYPE (cmpop);
5276 enum tree_code code = TREE_CODE (cmpop);
5277 enum tree_code truthop_code = TREE_CODE (op);
5278 tree lhs = TREE_OPERAND (op, 0);
5279 tree rhs = TREE_OPERAND (op, 1);
5280 tree orig_lhs = lhs, orig_rhs = rhs;
5281 enum tree_code rhs_code = TREE_CODE (rhs);
5282 enum tree_code lhs_code = TREE_CODE (lhs);
5283 enum tree_code inv_code;
5285 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5288 if (TREE_CODE_CLASS (code) != tcc_comparison)
5291 if (rhs_code == truthop_code)
5293 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5294 if (newrhs != NULL_TREE)
5297 rhs_code = TREE_CODE (rhs);
5300 if (lhs_code == truthop_code && !rhs_only)
5302 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5303 if (newlhs != NULL_TREE)
5306 lhs_code = TREE_CODE (lhs);
5310 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5311 if (inv_code == rhs_code
5312 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5313 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5315 if (!rhs_only && inv_code == lhs_code
5316 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5317 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5319 if (rhs != orig_rhs || lhs != orig_lhs)
5320 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5325 /* Find ways of folding logical expressions of LHS and RHS:
5326 Try to merge two comparisons to the same innermost item.
5327 Look for range tests like "ch >= '0' && ch <= '9'".
5328 Look for combinations of simple terms on machines with expensive branches
5329 and evaluate the RHS unconditionally.
5331 For example, if we have p->a == 2 && p->b == 4 and we can make an
5332 object large enough to span both A and B, we can do this with a comparison
5333 against the object ANDed with the a mask.
5335 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5336 operations to do this with one comparison.
5338 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5339 function and the one above.
5341 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5342 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5344 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5347 We return the simplified tree or 0 if no optimization is possible. */
5350 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5353 /* If this is the "or" of two comparisons, we can do something if
5354 the comparisons are NE_EXPR. If this is the "and", we can do something
5355 if the comparisons are EQ_EXPR. I.e.,
5356 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5358 WANTED_CODE is this operation code. For single bit fields, we can
5359 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5360 comparison for one-bit fields. */
5362 enum tree_code wanted_code;
5363 enum tree_code lcode, rcode;
5364 tree ll_arg, lr_arg, rl_arg, rr_arg;
5365 tree ll_inner, lr_inner, rl_inner, rr_inner;
5366 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5367 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5368 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5369 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5370 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5371 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5372 machine_mode lnmode, rnmode;
5373 tree ll_mask, lr_mask, rl_mask, rr_mask;
5374 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5375 tree l_const, r_const;
5376 tree lntype, rntype, result;
5377 HOST_WIDE_INT first_bit, end_bit;
5380 /* Start by getting the comparison codes. Fail if anything is volatile.
5381 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5382 it were surrounded with a NE_EXPR. */
5384 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5387 lcode = TREE_CODE (lhs);
5388 rcode = TREE_CODE (rhs);
5390 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5392 lhs = build2 (NE_EXPR, truth_type, lhs,
5393 build_int_cst (TREE_TYPE (lhs), 0));
5397 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5399 rhs = build2 (NE_EXPR, truth_type, rhs,
5400 build_int_cst (TREE_TYPE (rhs), 0));
5404 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5405 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5408 ll_arg = TREE_OPERAND (lhs, 0);
5409 lr_arg = TREE_OPERAND (lhs, 1);
5410 rl_arg = TREE_OPERAND (rhs, 0);
5411 rr_arg = TREE_OPERAND (rhs, 1);
5413 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5414 if (simple_operand_p (ll_arg)
5415 && simple_operand_p (lr_arg))
5417 if (operand_equal_p (ll_arg, rl_arg, 0)
5418 && operand_equal_p (lr_arg, rr_arg, 0))
5420 result = combine_comparisons (loc, code, lcode, rcode,
5421 truth_type, ll_arg, lr_arg);
5425 else if (operand_equal_p (ll_arg, rr_arg, 0)
5426 && operand_equal_p (lr_arg, rl_arg, 0))
5428 result = combine_comparisons (loc, code, lcode,
5429 swap_tree_comparison (rcode),
5430 truth_type, ll_arg, lr_arg);
5436 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5437 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5439 /* If the RHS can be evaluated unconditionally and its operands are
5440 simple, it wins to evaluate the RHS unconditionally on machines
5441 with expensive branches. In this case, this isn't a comparison
5442 that can be merged. */
5444 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5446 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5447 && simple_operand_p (rl_arg)
5448 && simple_operand_p (rr_arg))
5450 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5451 if (code == TRUTH_OR_EXPR
5452 && lcode == NE_EXPR && integer_zerop (lr_arg)
5453 && rcode == NE_EXPR && integer_zerop (rr_arg)
5454 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5455 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5456 return build2_loc (loc, NE_EXPR, truth_type,
5457 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5459 build_int_cst (TREE_TYPE (ll_arg), 0));
5461 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5462 if (code == TRUTH_AND_EXPR
5463 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5464 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5465 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5466 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5467 return build2_loc (loc, EQ_EXPR, truth_type,
5468 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5470 build_int_cst (TREE_TYPE (ll_arg), 0));
5473 /* See if the comparisons can be merged. Then get all the parameters for
5476 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5477 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5481 ll_inner = decode_field_reference (loc, ll_arg,
5482 &ll_bitsize, &ll_bitpos, &ll_mode,
5483 &ll_unsignedp, &volatilep, &ll_mask,
5485 lr_inner = decode_field_reference (loc, lr_arg,
5486 &lr_bitsize, &lr_bitpos, &lr_mode,
5487 &lr_unsignedp, &volatilep, &lr_mask,
5489 rl_inner = decode_field_reference (loc, rl_arg,
5490 &rl_bitsize, &rl_bitpos, &rl_mode,
5491 &rl_unsignedp, &volatilep, &rl_mask,
5493 rr_inner = decode_field_reference (loc, rr_arg,
5494 &rr_bitsize, &rr_bitpos, &rr_mode,
5495 &rr_unsignedp, &volatilep, &rr_mask,
5498 /* It must be true that the inner operation on the lhs of each
5499 comparison must be the same if we are to be able to do anything.
5500 Then see if we have constants. If not, the same must be true for
5502 if (volatilep || ll_inner == 0 || rl_inner == 0
5503 || ! operand_equal_p (ll_inner, rl_inner, 0))
5506 if (TREE_CODE (lr_arg) == INTEGER_CST
5507 && TREE_CODE (rr_arg) == INTEGER_CST)
5508 l_const = lr_arg, r_const = rr_arg;
5509 else if (lr_inner == 0 || rr_inner == 0
5510 || ! operand_equal_p (lr_inner, rr_inner, 0))
5513 l_const = r_const = 0;
5515 /* If either comparison code is not correct for our logical operation,
5516 fail. However, we can convert a one-bit comparison against zero into
5517 the opposite comparison against that bit being set in the field. */
5519 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5520 if (lcode != wanted_code)
5522 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5524 /* Make the left operand unsigned, since we are only interested
5525 in the value of one bit. Otherwise we are doing the wrong
5534 /* This is analogous to the code for l_const above. */
5535 if (rcode != wanted_code)
5537 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5546 /* See if we can find a mode that contains both fields being compared on
5547 the left. If we can't, fail. Otherwise, update all constants and masks
5548 to be relative to a field of that size. */
5549 first_bit = MIN (ll_bitpos, rl_bitpos);
5550 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5551 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5552 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5554 if (lnmode == VOIDmode)
5557 lnbitsize = GET_MODE_BITSIZE (lnmode);
5558 lnbitpos = first_bit & ~ (lnbitsize - 1);
5559 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5560 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5562 if (BYTES_BIG_ENDIAN)
5564 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5565 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5568 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5569 size_int (xll_bitpos));
5570 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5571 size_int (xrl_bitpos));
5575 l_const = fold_convert_loc (loc, lntype, l_const);
5576 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5577 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5578 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5579 fold_build1_loc (loc, BIT_NOT_EXPR,
5582 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5584 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5589 r_const = fold_convert_loc (loc, lntype, r_const);
5590 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5591 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5592 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5593 fold_build1_loc (loc, BIT_NOT_EXPR,
5596 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5598 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5602 /* If the right sides are not constant, do the same for it. Also,
5603 disallow this optimization if a size or signedness mismatch occurs
5604 between the left and right sides. */
5607 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5608 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5609 /* Make sure the two fields on the right
5610 correspond to the left without being swapped. */
5611 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5614 first_bit = MIN (lr_bitpos, rr_bitpos);
5615 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5616 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5617 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5619 if (rnmode == VOIDmode)
5622 rnbitsize = GET_MODE_BITSIZE (rnmode);
5623 rnbitpos = first_bit & ~ (rnbitsize - 1);
5624 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5625 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5627 if (BYTES_BIG_ENDIAN)
5629 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5630 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5633 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5635 size_int (xlr_bitpos));
5636 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5638 size_int (xrr_bitpos));
5640 /* Make a mask that corresponds to both fields being compared.
5641 Do this for both items being compared. If the operands are the
5642 same size and the bits being compared are in the same position
5643 then we can do this by masking both and comparing the masked
5645 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5646 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5647 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5649 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5650 ll_unsignedp || rl_unsignedp);
5651 if (! all_ones_mask_p (ll_mask, lnbitsize))
5652 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5654 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5655 lr_unsignedp || rr_unsignedp);
5656 if (! all_ones_mask_p (lr_mask, rnbitsize))
5657 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5659 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5662 /* There is still another way we can do something: If both pairs of
5663 fields being compared are adjacent, we may be able to make a wider
5664 field containing them both.
5666 Note that we still must mask the lhs/rhs expressions. Furthermore,
5667 the mask must be shifted to account for the shift done by
5668 make_bit_field_ref. */
5669 if ((ll_bitsize + ll_bitpos == rl_bitpos
5670 && lr_bitsize + lr_bitpos == rr_bitpos)
5671 || (ll_bitpos == rl_bitpos + rl_bitsize
5672 && lr_bitpos == rr_bitpos + rr_bitsize))
5676 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5677 ll_bitsize + rl_bitsize,
5678 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5679 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5680 lr_bitsize + rr_bitsize,
5681 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5683 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5684 size_int (MIN (xll_bitpos, xrl_bitpos)));
5685 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5686 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5688 /* Convert to the smaller type before masking out unwanted bits. */
5690 if (lntype != rntype)
5692 if (lnbitsize > rnbitsize)
5694 lhs = fold_convert_loc (loc, rntype, lhs);
5695 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5698 else if (lnbitsize < rnbitsize)
5700 rhs = fold_convert_loc (loc, lntype, rhs);
5701 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5706 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5707 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5709 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5710 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5712 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5718 /* Handle the case of comparisons with constants. If there is something in
5719 common between the masks, those bits of the constants must be the same.
5720 If not, the condition is always false. Test for this to avoid generating
5721 incorrect code below. */
5722 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5723 if (! integer_zerop (result)
5724 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5725 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5727 if (wanted_code == NE_EXPR)
5729 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5730 return constant_boolean_node (true, truth_type);
5734 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5735 return constant_boolean_node (false, truth_type);
5739 /* Construct the expression we will return. First get the component
5740 reference we will make. Unless the mask is all ones the width of
5741 that field, perform the mask operation. Then compare with the
5743 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5744 ll_unsignedp || rl_unsignedp);
5746 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5747 if (! all_ones_mask_p (ll_mask, lnbitsize))
5748 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5750 return build2_loc (loc, wanted_code, truth_type, result,
5751 const_binop (BIT_IOR_EXPR, l_const, r_const));
5754 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5758 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5762 enum tree_code op_code;
5765 int consts_equal, consts_lt;
5768 STRIP_SIGN_NOPS (arg0);
5770 op_code = TREE_CODE (arg0);
5771 minmax_const = TREE_OPERAND (arg0, 1);
5772 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5773 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5774 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5775 inner = TREE_OPERAND (arg0, 0);
5777 /* If something does not permit us to optimize, return the original tree. */
5778 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5779 || TREE_CODE (comp_const) != INTEGER_CST
5780 || TREE_OVERFLOW (comp_const)
5781 || TREE_CODE (minmax_const) != INTEGER_CST
5782 || TREE_OVERFLOW (minmax_const))
5785 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5786 and GT_EXPR, doing the rest with recursive calls using logical
5790 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5793 = optimize_minmax_comparison (loc,
5794 invert_tree_comparison (code, false),
5797 return invert_truthvalue_loc (loc, tem);
5803 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5804 optimize_minmax_comparison
5805 (loc, EQ_EXPR, type, arg0, comp_const),
5806 optimize_minmax_comparison
5807 (loc, GT_EXPR, type, arg0, comp_const));
5810 if (op_code == MAX_EXPR && consts_equal)
5811 /* MAX (X, 0) == 0 -> X <= 0 */
5812 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5814 else if (op_code == MAX_EXPR && consts_lt)
5815 /* MAX (X, 0) == 5 -> X == 5 */
5816 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5818 else if (op_code == MAX_EXPR)
5819 /* MAX (X, 0) == -1 -> false */
5820 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5822 else if (consts_equal)
5823 /* MIN (X, 0) == 0 -> X >= 0 */
5824 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5827 /* MIN (X, 0) == 5 -> false */
5828 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5831 /* MIN (X, 0) == -1 -> X == -1 */
5832 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5835 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5836 /* MAX (X, 0) > 0 -> X > 0
5837 MAX (X, 0) > 5 -> X > 5 */
5838 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5840 else if (op_code == MAX_EXPR)
5841 /* MAX (X, 0) > -1 -> true */
5842 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5844 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5845 /* MIN (X, 0) > 0 -> false
5846 MIN (X, 0) > 5 -> false */
5847 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5850 /* MIN (X, 0) > -1 -> X > -1 */
5851 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5858 /* T is an integer expression that is being multiplied, divided, or taken a
5859 modulus (CODE says which and what kind of divide or modulus) by a
5860 constant C. See if we can eliminate that operation by folding it with
5861 other operations already in T. WIDE_TYPE, if non-null, is a type that
5862 should be used for the computation if wider than our type.
5864 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5865 (X * 2) + (Y * 4). We must, however, be assured that either the original
5866 expression would not overflow or that overflow is undefined for the type
5867 in the language in question.
5869 If we return a non-null expression, it is an equivalent form of the
5870 original computation, but need not be in the original type.
5872 We set *STRICT_OVERFLOW_P to true if the return values depends on
5873 signed overflow being undefined. Otherwise we do not change
5874 *STRICT_OVERFLOW_P. */
5877 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5878 bool *strict_overflow_p)
5880 /* To avoid exponential search depth, refuse to allow recursion past
5881 three levels. Beyond that (1) it's highly unlikely that we'll find
5882 something interesting and (2) we've probably processed it before
5883 when we built the inner expression. */
5892 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5899 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5900 bool *strict_overflow_p)
5902 tree type = TREE_TYPE (t);
5903 enum tree_code tcode = TREE_CODE (t);
5904 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5905 > GET_MODE_SIZE (TYPE_MODE (type)))
5906 ? wide_type : type);
5908 int same_p = tcode == code;
5909 tree op0 = NULL_TREE, op1 = NULL_TREE;
5910 bool sub_strict_overflow_p;
5912 /* Don't deal with constants of zero here; they confuse the code below. */
5913 if (integer_zerop (c))
5916 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5917 op0 = TREE_OPERAND (t, 0);
5919 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5920 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5922 /* Note that we need not handle conditional operations here since fold
5923 already handles those cases. So just do arithmetic here. */
5927 /* For a constant, we can always simplify if we are a multiply
5928 or (for divide and modulus) if it is a multiple of our constant. */
5929 if (code == MULT_EXPR
5930 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5931 return const_binop (code, fold_convert (ctype, t),
5932 fold_convert (ctype, c));
5935 CASE_CONVERT: case NON_LVALUE_EXPR:
5936 /* If op0 is an expression ... */
5937 if ((COMPARISON_CLASS_P (op0)
5938 || UNARY_CLASS_P (op0)
5939 || BINARY_CLASS_P (op0)
5940 || VL_EXP_CLASS_P (op0)
5941 || EXPRESSION_CLASS_P (op0))
5942 /* ... and has wrapping overflow, and its type is smaller
5943 than ctype, then we cannot pass through as widening. */
5944 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5945 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5946 && (TYPE_PRECISION (ctype)
5947 > TYPE_PRECISION (TREE_TYPE (op0))))
5948 /* ... or this is a truncation (t is narrower than op0),
5949 then we cannot pass through this narrowing. */
5950 || (TYPE_PRECISION (type)
5951 < TYPE_PRECISION (TREE_TYPE (op0)))
5952 /* ... or signedness changes for division or modulus,
5953 then we cannot pass through this conversion. */
5954 || (code != MULT_EXPR
5955 && (TYPE_UNSIGNED (ctype)
5956 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5957 /* ... or has undefined overflow while the converted to
5958 type has not, we cannot do the operation in the inner type
5959 as that would introduce undefined overflow. */
5960 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5961 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5962 && !TYPE_OVERFLOW_UNDEFINED (type))))
5965 /* Pass the constant down and see if we can make a simplification. If
5966 we can, replace this expression with the inner simplification for
5967 possible later conversion to our or some other type. */
5968 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5969 && TREE_CODE (t2) == INTEGER_CST
5970 && !TREE_OVERFLOW (t2)
5971 && (0 != (t1 = extract_muldiv (op0, t2, code,
5973 ? ctype : NULL_TREE,
5974 strict_overflow_p))))
5979 /* If widening the type changes it from signed to unsigned, then we
5980 must avoid building ABS_EXPR itself as unsigned. */
5981 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5983 tree cstype = (*signed_type_for) (ctype);
5984 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5987 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5988 return fold_convert (ctype, t1);
5992 /* If the constant is negative, we cannot simplify this. */
5993 if (tree_int_cst_sgn (c) == -1)
5997 /* For division and modulus, type can't be unsigned, as e.g.
5998 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5999 For signed types, even with wrapping overflow, this is fine. */
6000 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6002 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6004 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6007 case MIN_EXPR: case MAX_EXPR:
6008 /* If widening the type changes the signedness, then we can't perform
6009 this optimization as that changes the result. */
6010 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6013 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6014 sub_strict_overflow_p = false;
6015 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6016 &sub_strict_overflow_p)) != 0
6017 && (t2 = extract_muldiv (op1, c, code, wide_type,
6018 &sub_strict_overflow_p)) != 0)
6020 if (tree_int_cst_sgn (c) < 0)
6021 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6022 if (sub_strict_overflow_p)
6023 *strict_overflow_p = true;
6024 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6025 fold_convert (ctype, t2));
6029 case LSHIFT_EXPR: case RSHIFT_EXPR:
6030 /* If the second operand is constant, this is a multiplication
6031 or floor division, by a power of two, so we can treat it that
6032 way unless the multiplier or divisor overflows. Signed
6033 left-shift overflow is implementation-defined rather than
6034 undefined in C90, so do not convert signed left shift into
6036 if (TREE_CODE (op1) == INTEGER_CST
6037 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6038 /* const_binop may not detect overflow correctly,
6039 so check for it explicitly here. */
6040 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6041 && 0 != (t1 = fold_convert (ctype,
6042 const_binop (LSHIFT_EXPR,
6045 && !TREE_OVERFLOW (t1))
6046 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6047 ? MULT_EXPR : FLOOR_DIV_EXPR,
6049 fold_convert (ctype, op0),
6051 c, code, wide_type, strict_overflow_p);
6054 case PLUS_EXPR: case MINUS_EXPR:
6055 /* See if we can eliminate the operation on both sides. If we can, we
6056 can return a new PLUS or MINUS. If we can't, the only remaining
6057 cases where we can do anything are if the second operand is a
6059 sub_strict_overflow_p = false;
6060 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6061 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6062 if (t1 != 0 && t2 != 0
6063 && (code == MULT_EXPR
6064 /* If not multiplication, we can only do this if both operands
6065 are divisible by c. */
6066 || (multiple_of_p (ctype, op0, c)
6067 && multiple_of_p (ctype, op1, c))))
6069 if (sub_strict_overflow_p)
6070 *strict_overflow_p = true;
6071 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6072 fold_convert (ctype, t2));
6075 /* If this was a subtraction, negate OP1 and set it to be an addition.
6076 This simplifies the logic below. */
6077 if (tcode == MINUS_EXPR)
6079 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6080 /* If OP1 was not easily negatable, the constant may be OP0. */
6081 if (TREE_CODE (op0) == INTEGER_CST)
6083 std::swap (op0, op1);
6088 if (TREE_CODE (op1) != INTEGER_CST)
6091 /* If either OP1 or C are negative, this optimization is not safe for
6092 some of the division and remainder types while for others we need
6093 to change the code. */
6094 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6096 if (code == CEIL_DIV_EXPR)
6097 code = FLOOR_DIV_EXPR;
6098 else if (code == FLOOR_DIV_EXPR)
6099 code = CEIL_DIV_EXPR;
6100 else if (code != MULT_EXPR
6101 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6105 /* If it's a multiply or a division/modulus operation of a multiple
6106 of our constant, do the operation and verify it doesn't overflow. */
6107 if (code == MULT_EXPR
6108 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6110 op1 = const_binop (code, fold_convert (ctype, op1),
6111 fold_convert (ctype, c));
6112 /* We allow the constant to overflow with wrapping semantics. */
6114 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6120 /* If we have an unsigned type, we cannot widen the operation since it
6121 will change the result if the original computation overflowed. */
6122 if (TYPE_UNSIGNED (ctype) && ctype != type)
6125 /* If we were able to eliminate our operation from the first side,
6126 apply our operation to the second side and reform the PLUS. */
6127 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6128 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6130 /* The last case is if we are a multiply. In that case, we can
6131 apply the distributive law to commute the multiply and addition
6132 if the multiplication of the constants doesn't overflow
6133 and overflow is defined. With undefined overflow
6134 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6135 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6136 return fold_build2 (tcode, ctype,
6137 fold_build2 (code, ctype,
6138 fold_convert (ctype, op0),
6139 fold_convert (ctype, c)),
6145 /* We have a special case here if we are doing something like
6146 (C * 8) % 4 since we know that's zero. */
6147 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6148 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6149 /* If the multiplication can overflow we cannot optimize this. */
6150 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6151 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6152 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6154 *strict_overflow_p = true;
6155 return omit_one_operand (type, integer_zero_node, op0);
6158 /* ... fall through ... */
6160 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6161 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6162 /* If we can extract our operation from the LHS, do so and return a
6163 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6164 do something only if the second operand is a constant. */
6166 && (t1 = extract_muldiv (op0, c, code, wide_type,
6167 strict_overflow_p)) != 0)
6168 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6169 fold_convert (ctype, op1));
6170 else if (tcode == MULT_EXPR && code == MULT_EXPR
6171 && (t1 = extract_muldiv (op1, c, code, wide_type,
6172 strict_overflow_p)) != 0)
6173 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6174 fold_convert (ctype, t1));
6175 else if (TREE_CODE (op1) != INTEGER_CST)
6178 /* If these are the same operation types, we can associate them
6179 assuming no overflow. */
6182 bool overflow_p = false;
6183 bool overflow_mul_p;
6184 signop sign = TYPE_SIGN (ctype);
6185 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6186 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6188 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6192 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6193 TYPE_SIGN (TREE_TYPE (op1)));
6194 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6195 wide_int_to_tree (ctype, mul));
6199 /* If these operations "cancel" each other, we have the main
6200 optimizations of this pass, which occur when either constant is a
6201 multiple of the other, in which case we replace this with either an
6202 operation or CODE or TCODE.
6204 If we have an unsigned type, we cannot do this since it will change
6205 the result if the original computation overflowed. */
6206 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6207 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6208 || (tcode == MULT_EXPR
6209 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6210 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6211 && code != MULT_EXPR)))
6213 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6215 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6216 *strict_overflow_p = true;
6217 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6218 fold_convert (ctype,
6219 const_binop (TRUNC_DIV_EXPR,
6222 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6224 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6225 *strict_overflow_p = true;
6226 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6227 fold_convert (ctype,
6228 const_binop (TRUNC_DIV_EXPR,
6241 /* Return a node which has the indicated constant VALUE (either 0 or
6242 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6243 and is of the indicated TYPE. */
6246 constant_boolean_node (bool value, tree type)
6248 if (type == integer_type_node)
6249 return value ? integer_one_node : integer_zero_node;
6250 else if (type == boolean_type_node)
6251 return value ? boolean_true_node : boolean_false_node;
6252 else if (TREE_CODE (type) == VECTOR_TYPE)
6253 return build_vector_from_val (type,
6254 build_int_cst (TREE_TYPE (type),
6257 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6261 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6262 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6263 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6264 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6265 COND is the first argument to CODE; otherwise (as in the example
6266 given here), it is the second argument. TYPE is the type of the
6267 original expression. Return NULL_TREE if no simplification is
6271 fold_binary_op_with_conditional_arg (location_t loc,
6272 enum tree_code code,
6273 tree type, tree op0, tree op1,
6274 tree cond, tree arg, int cond_first_p)
6276 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6277 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6278 tree test, true_value, false_value;
6279 tree lhs = NULL_TREE;
6280 tree rhs = NULL_TREE;
6281 enum tree_code cond_code = COND_EXPR;
6283 if (TREE_CODE (cond) == COND_EXPR
6284 || TREE_CODE (cond) == VEC_COND_EXPR)
6286 test = TREE_OPERAND (cond, 0);
6287 true_value = TREE_OPERAND (cond, 1);
6288 false_value = TREE_OPERAND (cond, 2);
6289 /* If this operand throws an expression, then it does not make
6290 sense to try to perform a logical or arithmetic operation
6292 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6294 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6299 tree testtype = TREE_TYPE (cond);
6301 true_value = constant_boolean_node (true, testtype);
6302 false_value = constant_boolean_node (false, testtype);
6305 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6306 cond_code = VEC_COND_EXPR;
6308 /* This transformation is only worthwhile if we don't have to wrap ARG
6309 in a SAVE_EXPR and the operation can be simplified without recursing
6310 on at least one of the branches once its pushed inside the COND_EXPR. */
6311 if (!TREE_CONSTANT (arg)
6312 && (TREE_SIDE_EFFECTS (arg)
6313 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6314 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6317 arg = fold_convert_loc (loc, arg_type, arg);
6320 true_value = fold_convert_loc (loc, cond_type, true_value);
6322 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6324 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6328 false_value = fold_convert_loc (loc, cond_type, false_value);
6330 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6332 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6335 /* Check that we have simplified at least one of the branches. */
6336 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6339 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6343 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6345 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6346 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6347 ADDEND is the same as X.
6349 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6350 and finite. The problematic cases are when X is zero, and its mode
6351 has signed zeros. In the case of rounding towards -infinity,
6352 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6353 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6356 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6358 if (!real_zerop (addend))
6361 /* Don't allow the fold with -fsignaling-nans. */
6362 if (HONOR_SNANS (element_mode (type)))
6365 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6366 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6369 /* In a vector or complex, we would need to check the sign of all zeros. */
6370 if (TREE_CODE (addend) != REAL_CST)
6373 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6374 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6377 /* The mode has signed zeros, and we have to honor their sign.
6378 In this situation, there is only one case we can return true for.
6379 X - 0 is the same as X unless rounding towards -infinity is
6381 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6384 /* Subroutine of fold() that optimizes comparisons of a division by
6385 a nonzero integer constant against an integer constant, i.e.
6388 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6389 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6390 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6392 The function returns the constant folded tree if a simplification
6393 can be made, and NULL_TREE otherwise. */
6396 fold_div_compare (location_t loc,
6397 enum tree_code code, tree type, tree arg0, tree arg1)
6399 tree prod, tmp, hi, lo;
6400 tree arg00 = TREE_OPERAND (arg0, 0);
6401 tree arg01 = TREE_OPERAND (arg0, 1);
6402 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6403 bool neg_overflow = false;
6406 /* We have to do this the hard way to detect unsigned overflow.
6407 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6408 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6409 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6410 neg_overflow = false;
6412 if (sign == UNSIGNED)
6414 tmp = int_const_binop (MINUS_EXPR, arg01,
6415 build_int_cst (TREE_TYPE (arg01), 1));
6418 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6419 val = wi::add (prod, tmp, sign, &overflow);
6420 hi = force_fit_type (TREE_TYPE (arg00), val,
6421 -1, overflow | TREE_OVERFLOW (prod));
6423 else if (tree_int_cst_sgn (arg01) >= 0)
6425 tmp = int_const_binop (MINUS_EXPR, arg01,
6426 build_int_cst (TREE_TYPE (arg01), 1));
6427 switch (tree_int_cst_sgn (arg1))
6430 neg_overflow = true;
6431 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6436 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6441 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6451 /* A negative divisor reverses the relational operators. */
6452 code = swap_tree_comparison (code);
6454 tmp = int_const_binop (PLUS_EXPR, arg01,
6455 build_int_cst (TREE_TYPE (arg01), 1));
6456 switch (tree_int_cst_sgn (arg1))
6459 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6464 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6469 neg_overflow = true;
6470 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6482 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6483 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6484 if (TREE_OVERFLOW (hi))
6485 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6486 if (TREE_OVERFLOW (lo))
6487 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6488 return build_range_check (loc, type, arg00, 1, lo, hi);
6491 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6492 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6493 if (TREE_OVERFLOW (hi))
6494 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6495 if (TREE_OVERFLOW (lo))
6496 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6497 return build_range_check (loc, type, arg00, 0, lo, hi);
6500 if (TREE_OVERFLOW (lo))
6502 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6503 return omit_one_operand_loc (loc, type, tmp, arg00);
6505 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6508 if (TREE_OVERFLOW (hi))
6510 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6511 return omit_one_operand_loc (loc, type, tmp, arg00);
6513 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6516 if (TREE_OVERFLOW (hi))
6518 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6519 return omit_one_operand_loc (loc, type, tmp, arg00);
6521 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6524 if (TREE_OVERFLOW (lo))
6526 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6527 return omit_one_operand_loc (loc, type, tmp, arg00);
6529 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6539 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6540 equality/inequality test, then return a simplified form of the test
6541 using a sign testing. Otherwise return NULL. TYPE is the desired
6545 fold_single_bit_test_into_sign_test (location_t loc,
6546 enum tree_code code, tree arg0, tree arg1,
6549 /* If this is testing a single bit, we can optimize the test. */
6550 if ((code == NE_EXPR || code == EQ_EXPR)
6551 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6552 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6554 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6555 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6556 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6558 if (arg00 != NULL_TREE
6559 /* This is only a win if casting to a signed type is cheap,
6560 i.e. when arg00's type is not a partial mode. */
6561 && TYPE_PRECISION (TREE_TYPE (arg00))
6562 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6564 tree stype = signed_type_for (TREE_TYPE (arg00));
6565 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6567 fold_convert_loc (loc, stype, arg00),
6568 build_int_cst (stype, 0));
6575 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6576 equality/inequality test, then return a simplified form of
6577 the test using shifts and logical operations. Otherwise return
6578 NULL. TYPE is the desired result type. */
6581 fold_single_bit_test (location_t loc, enum tree_code code,
6582 tree arg0, tree arg1, tree result_type)
6584 /* If this is testing a single bit, we can optimize the test. */
6585 if ((code == NE_EXPR || code == EQ_EXPR)
6586 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6587 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6589 tree inner = TREE_OPERAND (arg0, 0);
6590 tree type = TREE_TYPE (arg0);
6591 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6592 machine_mode operand_mode = TYPE_MODE (type);
6594 tree signed_type, unsigned_type, intermediate_type;
6597 /* First, see if we can fold the single bit test into a sign-bit
6599 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6604 /* Otherwise we have (A & C) != 0 where C is a single bit,
6605 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6606 Similarly for (A & C) == 0. */
6608 /* If INNER is a right shift of a constant and it plus BITNUM does
6609 not overflow, adjust BITNUM and INNER. */
6610 if (TREE_CODE (inner) == RSHIFT_EXPR
6611 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6612 && bitnum < TYPE_PRECISION (type)
6613 && wi::ltu_p (TREE_OPERAND (inner, 1),
6614 TYPE_PRECISION (type) - bitnum))
6616 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6617 inner = TREE_OPERAND (inner, 0);
6620 /* If we are going to be able to omit the AND below, we must do our
6621 operations as unsigned. If we must use the AND, we have a choice.
6622 Normally unsigned is faster, but for some machines signed is. */
6623 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6624 && !flag_syntax_only) ? 0 : 1;
6626 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6627 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6628 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6629 inner = fold_convert_loc (loc, intermediate_type, inner);
6632 inner = build2 (RSHIFT_EXPR, intermediate_type,
6633 inner, size_int (bitnum));
6635 one = build_int_cst (intermediate_type, 1);
6637 if (code == EQ_EXPR)
6638 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6640 /* Put the AND last so it can combine with more things. */
6641 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6643 /* Make sure to return the proper type. */
6644 inner = fold_convert_loc (loc, result_type, inner);
6651 /* Check whether we are allowed to reorder operands arg0 and arg1,
6652 such that the evaluation of arg1 occurs before arg0. */
6655 reorder_operands_p (const_tree arg0, const_tree arg1)
6657 if (! flag_evaluation_order)
6659 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6661 return ! TREE_SIDE_EFFECTS (arg0)
6662 && ! TREE_SIDE_EFFECTS (arg1);
6665 /* Test whether it is preferable two swap two operands, ARG0 and
6666 ARG1, for example because ARG0 is an integer constant and ARG1
6667 isn't. If REORDER is true, only recommend swapping if we can
6668 evaluate the operands in reverse order. */
6671 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6673 if (CONSTANT_CLASS_P (arg1))
6675 if (CONSTANT_CLASS_P (arg0))
6681 if (TREE_CONSTANT (arg1))
6683 if (TREE_CONSTANT (arg0))
6686 if (reorder && flag_evaluation_order
6687 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6690 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6691 for commutative and comparison operators. Ensuring a canonical
6692 form allows the optimizers to find additional redundancies without
6693 having to explicitly check for both orderings. */
6694 if (TREE_CODE (arg0) == SSA_NAME
6695 && TREE_CODE (arg1) == SSA_NAME
6696 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6699 /* Put SSA_NAMEs last. */
6700 if (TREE_CODE (arg1) == SSA_NAME)
6702 if (TREE_CODE (arg0) == SSA_NAME)
6705 /* Put variables last. */
6715 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6716 means A >= Y && A != MAX, but in this case we know that
6717 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6720 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6722 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6724 if (TREE_CODE (bound) == LT_EXPR)
6725 a = TREE_OPERAND (bound, 0);
6726 else if (TREE_CODE (bound) == GT_EXPR)
6727 a = TREE_OPERAND (bound, 1);
6731 typea = TREE_TYPE (a);
6732 if (!INTEGRAL_TYPE_P (typea)
6733 && !POINTER_TYPE_P (typea))
6736 if (TREE_CODE (ineq) == LT_EXPR)
6738 a1 = TREE_OPERAND (ineq, 1);
6739 y = TREE_OPERAND (ineq, 0);
6741 else if (TREE_CODE (ineq) == GT_EXPR)
6743 a1 = TREE_OPERAND (ineq, 0);
6744 y = TREE_OPERAND (ineq, 1);
6749 if (TREE_TYPE (a1) != typea)
6752 if (POINTER_TYPE_P (typea))
6754 /* Convert the pointer types into integer before taking the difference. */
6755 tree ta = fold_convert_loc (loc, ssizetype, a);
6756 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6757 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6760 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6762 if (!diff || !integer_onep (diff))
6765 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6768 /* Fold a sum or difference of at least one multiplication.
6769 Returns the folded tree or NULL if no simplification could be made. */
6772 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6773 tree arg0, tree arg1)
6775 tree arg00, arg01, arg10, arg11;
6776 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6778 /* (A * C) +- (B * C) -> (A+-B) * C.
6779 (A * C) +- A -> A * (C+-1).
6780 We are most concerned about the case where C is a constant,
6781 but other combinations show up during loop reduction. Since
6782 it is not difficult, try all four possibilities. */
6784 if (TREE_CODE (arg0) == MULT_EXPR)
6786 arg00 = TREE_OPERAND (arg0, 0);
6787 arg01 = TREE_OPERAND (arg0, 1);
6789 else if (TREE_CODE (arg0) == INTEGER_CST)
6791 arg00 = build_one_cst (type);
6796 /* We cannot generate constant 1 for fract. */
6797 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6800 arg01 = build_one_cst (type);
6802 if (TREE_CODE (arg1) == MULT_EXPR)
6804 arg10 = TREE_OPERAND (arg1, 0);
6805 arg11 = TREE_OPERAND (arg1, 1);
6807 else if (TREE_CODE (arg1) == INTEGER_CST)
6809 arg10 = build_one_cst (type);
6810 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6811 the purpose of this canonicalization. */
6812 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6813 && negate_expr_p (arg1)
6814 && code == PLUS_EXPR)
6816 arg11 = negate_expr (arg1);
6824 /* We cannot generate constant 1 for fract. */
6825 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6828 arg11 = build_one_cst (type);
6832 if (operand_equal_p (arg01, arg11, 0))
6833 same = arg01, alt0 = arg00, alt1 = arg10;
6834 else if (operand_equal_p (arg00, arg10, 0))
6835 same = arg00, alt0 = arg01, alt1 = arg11;
6836 else if (operand_equal_p (arg00, arg11, 0))
6837 same = arg00, alt0 = arg01, alt1 = arg10;
6838 else if (operand_equal_p (arg01, arg10, 0))
6839 same = arg01, alt0 = arg00, alt1 = arg11;
6841 /* No identical multiplicands; see if we can find a common
6842 power-of-two factor in non-power-of-two multiplies. This
6843 can help in multi-dimensional array access. */
6844 else if (tree_fits_shwi_p (arg01)
6845 && tree_fits_shwi_p (arg11))
6847 HOST_WIDE_INT int01, int11, tmp;
6850 int01 = tree_to_shwi (arg01);
6851 int11 = tree_to_shwi (arg11);
6853 /* Move min of absolute values to int11. */
6854 if (absu_hwi (int01) < absu_hwi (int11))
6856 tmp = int01, int01 = int11, int11 = tmp;
6857 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6864 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6865 /* The remainder should not be a constant, otherwise we
6866 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6867 increased the number of multiplications necessary. */
6868 && TREE_CODE (arg10) != INTEGER_CST)
6870 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6871 build_int_cst (TREE_TYPE (arg00),
6876 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6881 return fold_build2_loc (loc, MULT_EXPR, type,
6882 fold_build2_loc (loc, code, type,
6883 fold_convert_loc (loc, type, alt0),
6884 fold_convert_loc (loc, type, alt1)),
6885 fold_convert_loc (loc, type, same));
6890 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6891 specified by EXPR into the buffer PTR of length LEN bytes.
6892 Return the number of bytes placed in the buffer, or zero
6896 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6898 tree type = TREE_TYPE (expr);
6899 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6900 int byte, offset, word, words;
6901 unsigned char value;
6903 if ((off == -1 && total_bytes > len)
6904 || off >= total_bytes)
6908 words = total_bytes / UNITS_PER_WORD;
6910 for (byte = 0; byte < total_bytes; byte++)
6912 int bitpos = byte * BITS_PER_UNIT;
6913 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6915 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6917 if (total_bytes > UNITS_PER_WORD)
6919 word = byte / UNITS_PER_WORD;
6920 if (WORDS_BIG_ENDIAN)
6921 word = (words - 1) - word;
6922 offset = word * UNITS_PER_WORD;
6923 if (BYTES_BIG_ENDIAN)
6924 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6926 offset += byte % UNITS_PER_WORD;
6929 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6931 && offset - off < len)
6932 ptr[offset - off] = value;
6934 return MIN (len, total_bytes - off);
6938 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6939 specified by EXPR into the buffer PTR of length LEN bytes.
6940 Return the number of bytes placed in the buffer, or zero
6944 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
6946 tree type = TREE_TYPE (expr);
6947 machine_mode mode = TYPE_MODE (type);
6948 int total_bytes = GET_MODE_SIZE (mode);
6949 FIXED_VALUE_TYPE value;
6950 tree i_value, i_type;
6952 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
6955 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
6957 if (NULL_TREE == i_type
6958 || TYPE_PRECISION (i_type) != total_bytes)
6961 value = TREE_FIXED_CST (expr);
6962 i_value = double_int_to_tree (i_type, value.data);
6964 return native_encode_int (i_value, ptr, len, off);
6968 /* Subroutine of native_encode_expr. Encode the REAL_CST
6969 specified by EXPR into the buffer PTR of length LEN bytes.
6970 Return the number of bytes placed in the buffer, or zero
6974 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
6976 tree type = TREE_TYPE (expr);
6977 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6978 int byte, offset, word, words, bitpos;
6979 unsigned char value;
6981 /* There are always 32 bits in each long, no matter the size of
6982 the hosts long. We handle floating point representations with
6986 if ((off == -1 && total_bytes > len)
6987 || off >= total_bytes)
6991 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
6993 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6995 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
6996 bitpos += BITS_PER_UNIT)
6998 byte = (bitpos / BITS_PER_UNIT) & 3;
6999 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7001 if (UNITS_PER_WORD < 4)
7003 word = byte / UNITS_PER_WORD;
7004 if (WORDS_BIG_ENDIAN)
7005 word = (words - 1) - word;
7006 offset = word * UNITS_PER_WORD;
7007 if (BYTES_BIG_ENDIAN)
7008 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7010 offset += byte % UNITS_PER_WORD;
7013 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7014 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7016 && offset - off < len)
7017 ptr[offset - off] = value;
7019 return MIN (len, total_bytes - off);
7022 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7023 specified by EXPR into the buffer PTR of length LEN bytes.
7024 Return the number of bytes placed in the buffer, or zero
7028 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7033 part = TREE_REALPART (expr);
7034 rsize = native_encode_expr (part, ptr, len, off);
7038 part = TREE_IMAGPART (expr);
7040 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7041 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7045 return rsize + isize;
7049 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7050 specified by EXPR into the buffer PTR of length LEN bytes.
7051 Return the number of bytes placed in the buffer, or zero
7055 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7062 count = VECTOR_CST_NELTS (expr);
7063 itype = TREE_TYPE (TREE_TYPE (expr));
7064 size = GET_MODE_SIZE (TYPE_MODE (itype));
7065 for (i = 0; i < count; i++)
7072 elem = VECTOR_CST_ELT (expr, i);
7073 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7074 if ((off == -1 && res != size)
7087 /* Subroutine of native_encode_expr. Encode the STRING_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero
7093 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7095 tree type = TREE_TYPE (expr);
7096 HOST_WIDE_INT total_bytes;
7098 if (TREE_CODE (type) != ARRAY_TYPE
7099 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7100 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7101 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7103 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7104 if ((off == -1 && total_bytes > len)
7105 || off >= total_bytes)
7109 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7112 if (off < TREE_STRING_LENGTH (expr))
7114 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7115 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7117 memset (ptr + written, 0,
7118 MIN (total_bytes - written, len - written));
7121 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7122 return MIN (total_bytes - off, len);
7126 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7127 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7128 buffer PTR of length LEN bytes. If OFF is not -1 then start
7129 the encoding at byte offset OFF and encode at most LEN bytes.
7130 Return the number of bytes placed in the buffer, or zero upon failure. */
7133 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7135 /* We don't support starting at negative offset and -1 is special. */
7139 switch (TREE_CODE (expr))
7142 return native_encode_int (expr, ptr, len, off);
7145 return native_encode_real (expr, ptr, len, off);
7148 return native_encode_fixed (expr, ptr, len, off);
7151 return native_encode_complex (expr, ptr, len, off);
7154 return native_encode_vector (expr, ptr, len, off);
7157 return native_encode_string (expr, ptr, len, off);
7165 /* Subroutine of native_interpret_expr. Interpret the contents of
7166 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7167 If the buffer cannot be interpreted, return NULL_TREE. */
7170 native_interpret_int (tree type, const unsigned char *ptr, int len)
7172 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7174 if (total_bytes > len
7175 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7178 wide_int result = wi::from_buffer (ptr, total_bytes);
7180 return wide_int_to_tree (type, result);
7184 /* Subroutine of native_interpret_expr. Interpret the contents of
7185 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7186 If the buffer cannot be interpreted, return NULL_TREE. */
7189 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7191 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7193 FIXED_VALUE_TYPE fixed_value;
7195 if (total_bytes > len
7196 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7199 result = double_int::from_buffer (ptr, total_bytes);
7200 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7202 return build_fixed (type, fixed_value);
7206 /* Subroutine of native_interpret_expr. Interpret the contents of
7207 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7208 If the buffer cannot be interpreted, return NULL_TREE. */
7211 native_interpret_real (tree type, const unsigned char *ptr, int len)
7213 machine_mode mode = TYPE_MODE (type);
7214 int total_bytes = GET_MODE_SIZE (mode);
7215 unsigned char value;
7216 /* There are always 32 bits in each long, no matter the size of
7217 the hosts long. We handle floating point representations with
7222 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7223 if (total_bytes > len || total_bytes > 24)
7225 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7227 memset (tmp, 0, sizeof (tmp));
7228 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7229 bitpos += BITS_PER_UNIT)
7231 /* Both OFFSET and BYTE index within a long;
7232 bitpos indexes the whole float. */
7233 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7234 if (UNITS_PER_WORD < 4)
7236 int word = byte / UNITS_PER_WORD;
7237 if (WORDS_BIG_ENDIAN)
7238 word = (words - 1) - word;
7239 offset = word * UNITS_PER_WORD;
7240 if (BYTES_BIG_ENDIAN)
7241 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7243 offset += byte % UNITS_PER_WORD;
7248 if (BYTES_BIG_ENDIAN)
7250 /* Reverse bytes within each long, or within the entire float
7251 if it's smaller than a long (for HFmode). */
7252 offset = MIN (3, total_bytes - 1) - offset;
7253 gcc_assert (offset >= 0);
7256 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7258 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7261 real_from_target (&r, tmp, mode);
7262 return build_real (type, r);
7266 /* Subroutine of native_interpret_expr. Interpret the contents of
7267 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7268 If the buffer cannot be interpreted, return NULL_TREE. */
7271 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7273 tree etype, rpart, ipart;
7276 etype = TREE_TYPE (type);
7277 size = GET_MODE_SIZE (TYPE_MODE (etype));
7280 rpart = native_interpret_expr (etype, ptr, size);
7283 ipart = native_interpret_expr (etype, ptr+size, size);
7286 return build_complex (type, rpart, ipart);
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7295 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7301 etype = TREE_TYPE (type);
7302 size = GET_MODE_SIZE (TYPE_MODE (etype));
7303 count = TYPE_VECTOR_SUBPARTS (type);
7304 if (size * count > len)
7307 elements = XALLOCAVEC (tree, count);
7308 for (i = count - 1; i >= 0; i--)
7310 elem = native_interpret_expr (etype, ptr+(i*size), size);
7315 return build_vector (type, elements);
7319 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7320 the buffer PTR of length LEN as a constant of type TYPE. For
7321 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7322 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7323 return NULL_TREE. */
7326 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7328 switch (TREE_CODE (type))
7334 case REFERENCE_TYPE:
7335 return native_interpret_int (type, ptr, len);
7338 return native_interpret_real (type, ptr, len);
7340 case FIXED_POINT_TYPE:
7341 return native_interpret_fixed (type, ptr, len);
7344 return native_interpret_complex (type, ptr, len);
7347 return native_interpret_vector (type, ptr, len);
7354 /* Returns true if we can interpret the contents of a native encoding
7358 can_native_interpret_type_p (tree type)
7360 switch (TREE_CODE (type))
7366 case REFERENCE_TYPE:
7367 case FIXED_POINT_TYPE:
7377 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7378 TYPE at compile-time. If we're unable to perform the conversion
7379 return NULL_TREE. */
7382 fold_view_convert_expr (tree type, tree expr)
7384 /* We support up to 512-bit values (for V8DFmode). */
7385 unsigned char buffer[64];
7388 /* Check that the host and target are sane. */
7389 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7392 len = native_encode_expr (expr, buffer, sizeof (buffer));
7396 return native_interpret_expr (type, buffer, len);
7399 /* Build an expression for the address of T. Folds away INDIRECT_REF
7400 to avoid confusing the gimplify process. */
7403 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7405 /* The size of the object is not relevant when talking about its address. */
7406 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7407 t = TREE_OPERAND (t, 0);
7409 if (TREE_CODE (t) == INDIRECT_REF)
7411 t = TREE_OPERAND (t, 0);
7413 if (TREE_TYPE (t) != ptrtype)
7414 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7416 else if (TREE_CODE (t) == MEM_REF
7417 && integer_zerop (TREE_OPERAND (t, 1)))
7418 return TREE_OPERAND (t, 0);
7419 else if (TREE_CODE (t) == MEM_REF
7420 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7421 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7422 TREE_OPERAND (t, 0),
7423 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7424 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7426 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7428 if (TREE_TYPE (t) != ptrtype)
7429 t = fold_convert_loc (loc, ptrtype, t);
7432 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7437 /* Build an expression for the address of T. */
7440 build_fold_addr_expr_loc (location_t loc, tree t)
7442 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7444 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7447 /* Fold a unary expression of code CODE and type TYPE with operand
7448 OP0. Return the folded expression if folding is successful.
7449 Otherwise, return NULL_TREE. */
7452 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7456 enum tree_code_class kind = TREE_CODE_CLASS (code);
7458 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7459 && TREE_CODE_LENGTH (code) == 1);
7464 if (CONVERT_EXPR_CODE_P (code)
7465 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7467 /* Don't use STRIP_NOPS, because signedness of argument type
7469 STRIP_SIGN_NOPS (arg0);
7473 /* Strip any conversions that don't change the mode. This
7474 is safe for every expression, except for a comparison
7475 expression because its signedness is derived from its
7478 Note that this is done as an internal manipulation within
7479 the constant folder, in order to find the simplest
7480 representation of the arguments so that their form can be
7481 studied. In any cases, the appropriate type conversions
7482 should be put back in the tree that will get out of the
7487 if (CONSTANT_CLASS_P (arg0))
7489 tree tem = const_unop (code, type, arg0);
7492 if (TREE_TYPE (tem) != type)
7493 tem = fold_convert_loc (loc, type, tem);
7499 tem = generic_simplify (loc, code, type, op0);
7503 if (TREE_CODE_CLASS (code) == tcc_unary)
7505 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7506 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7507 fold_build1_loc (loc, code, type,
7508 fold_convert_loc (loc, TREE_TYPE (op0),
7509 TREE_OPERAND (arg0, 1))));
7510 else if (TREE_CODE (arg0) == COND_EXPR)
7512 tree arg01 = TREE_OPERAND (arg0, 1);
7513 tree arg02 = TREE_OPERAND (arg0, 2);
7514 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7515 arg01 = fold_build1_loc (loc, code, type,
7516 fold_convert_loc (loc,
7517 TREE_TYPE (op0), arg01));
7518 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7519 arg02 = fold_build1_loc (loc, code, type,
7520 fold_convert_loc (loc,
7521 TREE_TYPE (op0), arg02));
7522 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7525 /* If this was a conversion, and all we did was to move into
7526 inside the COND_EXPR, bring it back out. But leave it if
7527 it is a conversion from integer to integer and the
7528 result precision is no wider than a word since such a
7529 conversion is cheap and may be optimized away by combine,
7530 while it couldn't if it were outside the COND_EXPR. Then return
7531 so we don't get into an infinite recursion loop taking the
7532 conversion out and then back in. */
7534 if ((CONVERT_EXPR_CODE_P (code)
7535 || code == NON_LVALUE_EXPR)
7536 && TREE_CODE (tem) == COND_EXPR
7537 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7538 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7539 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7540 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7541 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7542 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7543 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7545 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7546 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7547 || flag_syntax_only))
7548 tem = build1_loc (loc, code, type,
7550 TREE_TYPE (TREE_OPERAND
7551 (TREE_OPERAND (tem, 1), 0)),
7552 TREE_OPERAND (tem, 0),
7553 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7554 TREE_OPERAND (TREE_OPERAND (tem, 2),
7562 case NON_LVALUE_EXPR:
7563 if (!maybe_lvalue_p (op0))
7564 return fold_convert_loc (loc, type, op0);
7569 case FIX_TRUNC_EXPR:
7570 if (COMPARISON_CLASS_P (op0))
7572 /* If we have (type) (a CMP b) and type is an integral type, return
7573 new expression involving the new type. Canonicalize
7574 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7576 Do not fold the result as that would not simplify further, also
7577 folding again results in recursions. */
7578 if (TREE_CODE (type) == BOOLEAN_TYPE)
7579 return build2_loc (loc, TREE_CODE (op0), type,
7580 TREE_OPERAND (op0, 0),
7581 TREE_OPERAND (op0, 1));
7582 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7583 && TREE_CODE (type) != VECTOR_TYPE)
7584 return build3_loc (loc, COND_EXPR, type, op0,
7585 constant_boolean_node (true, type),
7586 constant_boolean_node (false, type));
7589 /* Handle (T *)&A.B.C for A being of type T and B and C
7590 living at offset zero. This occurs frequently in
7591 C++ upcasting and then accessing the base. */
7592 if (TREE_CODE (op0) == ADDR_EXPR
7593 && POINTER_TYPE_P (type)
7594 && handled_component_p (TREE_OPERAND (op0, 0)))
7596 HOST_WIDE_INT bitsize, bitpos;
7599 int unsignedp, volatilep;
7600 tree base = TREE_OPERAND (op0, 0);
7601 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7602 &mode, &unsignedp, &volatilep, false);
7603 /* If the reference was to a (constant) zero offset, we can use
7604 the address of the base if it has the same base type
7605 as the result type and the pointer type is unqualified. */
7606 if (! offset && bitpos == 0
7607 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7608 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7609 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7610 return fold_convert_loc (loc, type,
7611 build_fold_addr_expr_loc (loc, base));
7614 if (TREE_CODE (op0) == MODIFY_EXPR
7615 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7616 /* Detect assigning a bitfield. */
7617 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7619 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7621 /* Don't leave an assignment inside a conversion
7622 unless assigning a bitfield. */
7623 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7624 /* First do the assignment, then return converted constant. */
7625 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7626 TREE_NO_WARNING (tem) = 1;
7627 TREE_USED (tem) = 1;
7631 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7632 constants (if x has signed type, the sign bit cannot be set
7633 in c). This folds extension into the BIT_AND_EXPR.
7634 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7635 very likely don't have maximal range for their precision and this
7636 transformation effectively doesn't preserve non-maximal ranges. */
7637 if (TREE_CODE (type) == INTEGER_TYPE
7638 && TREE_CODE (op0) == BIT_AND_EXPR
7639 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7641 tree and_expr = op0;
7642 tree and0 = TREE_OPERAND (and_expr, 0);
7643 tree and1 = TREE_OPERAND (and_expr, 1);
7646 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7647 || (TYPE_PRECISION (type)
7648 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7650 else if (TYPE_PRECISION (TREE_TYPE (and1))
7651 <= HOST_BITS_PER_WIDE_INT
7652 && tree_fits_uhwi_p (and1))
7654 unsigned HOST_WIDE_INT cst;
7656 cst = tree_to_uhwi (and1);
7657 cst &= HOST_WIDE_INT_M1U
7658 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7659 change = (cst == 0);
7661 && !flag_syntax_only
7662 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7665 tree uns = unsigned_type_for (TREE_TYPE (and0));
7666 and0 = fold_convert_loc (loc, uns, and0);
7667 and1 = fold_convert_loc (loc, uns, and1);
7672 tem = force_fit_type (type, wi::to_widest (and1), 0,
7673 TREE_OVERFLOW (and1));
7674 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7675 fold_convert_loc (loc, type, and0), tem);
7679 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7680 when one of the new casts will fold away. Conservatively we assume
7681 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7682 if (POINTER_TYPE_P (type)
7683 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7684 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7685 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7686 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7687 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7689 tree arg00 = TREE_OPERAND (arg0, 0);
7690 tree arg01 = TREE_OPERAND (arg0, 1);
7692 return fold_build_pointer_plus_loc
7693 (loc, fold_convert_loc (loc, type, arg00), arg01);
7696 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7697 of the same precision, and X is an integer type not narrower than
7698 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7699 if (INTEGRAL_TYPE_P (type)
7700 && TREE_CODE (op0) == BIT_NOT_EXPR
7701 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7702 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7703 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7705 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7706 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7707 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7708 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7709 fold_convert_loc (loc, type, tem));
7712 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7713 type of X and Y (integer types only). */
7714 if (INTEGRAL_TYPE_P (type)
7715 && TREE_CODE (op0) == MULT_EXPR
7716 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7717 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7719 /* Be careful not to introduce new overflows. */
7721 if (TYPE_OVERFLOW_WRAPS (type))
7724 mult_type = unsigned_type_for (type);
7726 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7728 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7729 fold_convert_loc (loc, mult_type,
7730 TREE_OPERAND (op0, 0)),
7731 fold_convert_loc (loc, mult_type,
7732 TREE_OPERAND (op0, 1)));
7733 return fold_convert_loc (loc, type, tem);
7739 case VIEW_CONVERT_EXPR:
7740 if (TREE_CODE (op0) == MEM_REF)
7741 return fold_build2_loc (loc, MEM_REF, type,
7742 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7747 tem = fold_negate_expr (loc, arg0);
7749 return fold_convert_loc (loc, type, tem);
7753 /* Convert fabs((double)float) into (double)fabsf(float). */
7754 if (TREE_CODE (arg0) == NOP_EXPR
7755 && TREE_CODE (type) == REAL_TYPE)
7757 tree targ0 = strip_float_extensions (arg0);
7759 return fold_convert_loc (loc, type,
7760 fold_build1_loc (loc, ABS_EXPR,
7765 /* Strip sign ops from argument. */
7766 if (TREE_CODE (type) == REAL_TYPE)
7768 tem = fold_strip_sign_ops (arg0);
7770 return fold_build1_loc (loc, ABS_EXPR, type,
7771 fold_convert_loc (loc, type, tem));
7776 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7777 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7778 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7779 fold_convert_loc (loc, type,
7780 TREE_OPERAND (arg0, 0)))))
7781 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7782 fold_convert_loc (loc, type,
7783 TREE_OPERAND (arg0, 1)));
7784 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7785 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7786 fold_convert_loc (loc, type,
7787 TREE_OPERAND (arg0, 1)))))
7788 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7789 fold_convert_loc (loc, type,
7790 TREE_OPERAND (arg0, 0)), tem);
7794 case TRUTH_NOT_EXPR:
7795 /* Note that the operand of this must be an int
7796 and its values must be 0 or 1.
7797 ("true" is a fixed value perhaps depending on the language,
7798 but we don't handle values other than 1 correctly yet.) */
7799 tem = fold_truth_not_expr (loc, arg0);
7802 return fold_convert_loc (loc, type, tem);
7805 /* Fold *&X to X if X is an lvalue. */
7806 if (TREE_CODE (op0) == ADDR_EXPR)
7808 tree op00 = TREE_OPERAND (op0, 0);
7809 if ((TREE_CODE (op00) == VAR_DECL
7810 || TREE_CODE (op00) == PARM_DECL
7811 || TREE_CODE (op00) == RESULT_DECL)
7812 && !TREE_READONLY (op00))
7819 } /* switch (code) */
7823 /* If the operation was a conversion do _not_ mark a resulting constant
7824 with TREE_OVERFLOW if the original constant was not. These conversions
7825 have implementation defined behavior and retaining the TREE_OVERFLOW
7826 flag here would confuse later passes such as VRP. */
7828 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7829 tree type, tree op0)
7831 tree res = fold_unary_loc (loc, code, type, op0);
7833 && TREE_CODE (res) == INTEGER_CST
7834 && TREE_CODE (op0) == INTEGER_CST
7835 && CONVERT_EXPR_CODE_P (code))
7836 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7841 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7842 operands OP0 and OP1. LOC is the location of the resulting expression.
7843 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7844 Return the folded expression if folding is successful. Otherwise,
7845 return NULL_TREE. */
7847 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7848 tree arg0, tree arg1, tree op0, tree op1)
7852 /* We only do these simplifications if we are optimizing. */
7856 /* Check for things like (A || B) && (A || C). We can convert this
7857 to A || (B && C). Note that either operator can be any of the four
7858 truth and/or operations and the transformation will still be
7859 valid. Also note that we only care about order for the
7860 ANDIF and ORIF operators. If B contains side effects, this
7861 might change the truth-value of A. */
7862 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7863 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7864 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7865 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7866 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7867 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7869 tree a00 = TREE_OPERAND (arg0, 0);
7870 tree a01 = TREE_OPERAND (arg0, 1);
7871 tree a10 = TREE_OPERAND (arg1, 0);
7872 tree a11 = TREE_OPERAND (arg1, 1);
7873 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7874 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7875 && (code == TRUTH_AND_EXPR
7876 || code == TRUTH_OR_EXPR));
7878 if (operand_equal_p (a00, a10, 0))
7879 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7880 fold_build2_loc (loc, code, type, a01, a11));
7881 else if (commutative && operand_equal_p (a00, a11, 0))
7882 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7883 fold_build2_loc (loc, code, type, a01, a10));
7884 else if (commutative && operand_equal_p (a01, a10, 0))
7885 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7886 fold_build2_loc (loc, code, type, a00, a11));
7888 /* This case if tricky because we must either have commutative
7889 operators or else A10 must not have side-effects. */
7891 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7892 && operand_equal_p (a01, a11, 0))
7893 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7894 fold_build2_loc (loc, code, type, a00, a10),
7898 /* See if we can build a range comparison. */
7899 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7902 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7903 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7905 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7907 return fold_build2_loc (loc, code, type, tem, arg1);
7910 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7911 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7913 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7915 return fold_build2_loc (loc, code, type, arg0, tem);
7918 /* Check for the possibility of merging component references. If our
7919 lhs is another similar operation, try to merge its rhs with our
7920 rhs. Then try to merge our lhs and rhs. */
7921 if (TREE_CODE (arg0) == code
7922 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7923 TREE_OPERAND (arg0, 1), arg1)))
7924 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7926 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
7929 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7930 && (code == TRUTH_AND_EXPR
7931 || code == TRUTH_ANDIF_EXPR
7932 || code == TRUTH_OR_EXPR
7933 || code == TRUTH_ORIF_EXPR))
7935 enum tree_code ncode, icode;
7937 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
7938 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
7939 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
7941 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
7942 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
7943 We don't want to pack more than two leafs to a non-IF AND/OR
7945 If tree-code of left-hand operand isn't an AND/OR-IF code and not
7946 equal to IF-CODE, then we don't want to add right-hand operand.
7947 If the inner right-hand side of left-hand operand has
7948 side-effects, or isn't simple, then we can't add to it,
7949 as otherwise we might destroy if-sequence. */
7950 if (TREE_CODE (arg0) == icode
7951 && simple_operand_p_2 (arg1)
7952 /* Needed for sequence points to handle trappings, and
7954 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
7956 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
7958 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
7961 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
7962 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
7963 else if (TREE_CODE (arg1) == icode
7964 && simple_operand_p_2 (arg0)
7965 /* Needed for sequence points to handle trappings, and
7967 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
7969 tem = fold_build2_loc (loc, ncode, type,
7970 arg0, TREE_OPERAND (arg1, 0));
7971 return fold_build2_loc (loc, icode, type, tem,
7972 TREE_OPERAND (arg1, 1));
7974 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
7976 For sequence point consistancy, we need to check for trapping,
7977 and side-effects. */
7978 else if (code == icode && simple_operand_p_2 (arg0)
7979 && simple_operand_p_2 (arg1))
7980 return fold_build2_loc (loc, ncode, type, arg0, arg1);
7986 /* Fold a binary expression of code CODE and type TYPE with operands
7987 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7988 Return the folded expression if folding is successful. Otherwise,
7989 return NULL_TREE. */
7992 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
7994 enum tree_code compl_code;
7996 if (code == MIN_EXPR)
7997 compl_code = MAX_EXPR;
7998 else if (code == MAX_EXPR)
7999 compl_code = MIN_EXPR;
8003 /* MIN (MAX (a, b), b) == b. */
8004 if (TREE_CODE (op0) == compl_code
8005 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8006 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8008 /* MIN (MAX (b, a), b) == b. */
8009 if (TREE_CODE (op0) == compl_code
8010 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8011 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8012 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8014 /* MIN (a, MAX (a, b)) == a. */
8015 if (TREE_CODE (op1) == compl_code
8016 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8017 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8018 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8020 /* MIN (a, MAX (b, a)) == a. */
8021 if (TREE_CODE (op1) == compl_code
8022 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8023 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8024 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8029 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8030 by changing CODE to reduce the magnitude of constants involved in
8031 ARG0 of the comparison.
8032 Returns a canonicalized comparison tree if a simplification was
8033 possible, otherwise returns NULL_TREE.
8034 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8035 valid if signed overflow is undefined. */
8038 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8039 tree arg0, tree arg1,
8040 bool *strict_overflow_p)
8042 enum tree_code code0 = TREE_CODE (arg0);
8043 tree t, cst0 = NULL_TREE;
8046 /* Match A +- CST code arg1. We can change this only if overflow
8048 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8049 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8050 /* In principle pointers also have undefined overflow behavior,
8051 but that causes problems elsewhere. */
8052 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8053 && (code0 == MINUS_EXPR
8054 || code0 == PLUS_EXPR)
8055 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8058 /* Identify the constant in arg0 and its sign. */
8059 cst0 = TREE_OPERAND (arg0, 1);
8060 sgn0 = tree_int_cst_sgn (cst0);
8062 /* Overflowed constants and zero will cause problems. */
8063 if (integer_zerop (cst0)
8064 || TREE_OVERFLOW (cst0))
8067 /* See if we can reduce the magnitude of the constant in
8068 arg0 by changing the comparison code. */
8069 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8071 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8073 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8074 else if (code == GT_EXPR
8075 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8077 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8078 else if (code == LE_EXPR
8079 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8081 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8082 else if (code == GE_EXPR
8083 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8087 *strict_overflow_p = true;
8089 /* Now build the constant reduced in magnitude. But not if that
8090 would produce one outside of its types range. */
8091 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8093 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8094 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8096 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8097 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8100 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8101 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8102 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8103 t = fold_convert (TREE_TYPE (arg1), t);
8105 return fold_build2_loc (loc, code, type, t, arg1);
8108 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8109 overflow further. Try to decrease the magnitude of constants involved
8110 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8111 and put sole constants at the second argument position.
8112 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8115 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8116 tree arg0, tree arg1)
8119 bool strict_overflow_p;
8120 const char * const warnmsg = G_("assuming signed overflow does not occur "
8121 "when reducing constant in comparison");
8123 /* Try canonicalization by simplifying arg0. */
8124 strict_overflow_p = false;
8125 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8126 &strict_overflow_p);
8129 if (strict_overflow_p)
8130 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8134 /* Try canonicalization by simplifying arg1 using the swapped
8136 code = swap_tree_comparison (code);
8137 strict_overflow_p = false;
8138 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8139 &strict_overflow_p);
8140 if (t && strict_overflow_p)
8141 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8145 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8146 space. This is used to avoid issuing overflow warnings for
8147 expressions like &p->x which can not wrap. */
8150 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8152 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8159 int precision = TYPE_PRECISION (TREE_TYPE (base));
8160 if (offset == NULL_TREE)
8161 wi_offset = wi::zero (precision);
8162 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8168 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8169 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8173 if (!wi::fits_uhwi_p (total))
8176 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8180 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8182 if (TREE_CODE (base) == ADDR_EXPR)
8184 HOST_WIDE_INT base_size;
8186 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8187 if (base_size > 0 && size < base_size)
8191 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8194 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8195 kind INTEGER_CST. This makes sure to properly sign-extend the
8198 static HOST_WIDE_INT
8199 size_low_cst (const_tree t)
8201 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8202 int prec = TYPE_PRECISION (TREE_TYPE (t));
8203 if (prec < HOST_BITS_PER_WIDE_INT)
8204 return sext_hwi (w, prec);
8208 /* Subroutine of fold_binary. This routine performs all of the
8209 transformations that are common to the equality/inequality
8210 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8211 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8212 fold_binary should call fold_binary. Fold a comparison with
8213 tree code CODE and type TYPE with operands OP0 and OP1. Return
8214 the folded comparison or NULL_TREE. */
8217 fold_comparison (location_t loc, enum tree_code code, tree type,
8220 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8221 tree arg0, arg1, tem;
8226 STRIP_SIGN_NOPS (arg0);
8227 STRIP_SIGN_NOPS (arg1);
8229 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8230 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8232 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8233 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8235 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8236 && TREE_CODE (arg1) == INTEGER_CST
8237 && !TREE_OVERFLOW (arg1))
8239 const enum tree_code
8240 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8241 tree const1 = TREE_OPERAND (arg0, 1);
8242 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8243 tree variable = TREE_OPERAND (arg0, 0);
8244 tree new_const = int_const_binop (reverse_op, const2, const1);
8246 /* If the constant operation overflowed this can be
8247 simplified as a comparison against INT_MAX/INT_MIN. */
8248 if (TREE_OVERFLOW (new_const)
8249 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8251 int const1_sgn = tree_int_cst_sgn (const1);
8252 enum tree_code code2 = code;
8254 /* Get the sign of the constant on the lhs if the
8255 operation were VARIABLE + CONST1. */
8256 if (TREE_CODE (arg0) == MINUS_EXPR)
8257 const1_sgn = -const1_sgn;
8259 /* The sign of the constant determines if we overflowed
8260 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8261 Canonicalize to the INT_MIN overflow by swapping the comparison
8263 if (const1_sgn == -1)
8264 code2 = swap_tree_comparison (code);
8266 /* We now can look at the canonicalized case
8267 VARIABLE + 1 CODE2 INT_MIN
8268 and decide on the result. */
8275 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8281 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8290 fold_overflow_warning ("assuming signed overflow does not occur "
8291 "when changing X +- C1 cmp C2 to "
8293 WARN_STRICT_OVERFLOW_COMPARISON);
8294 return fold_build2_loc (loc, code, type, variable, new_const);
8298 /* For comparisons of pointers we can decompose it to a compile time
8299 comparison of the base objects and the offsets into the object.
8300 This requires at least one operand being an ADDR_EXPR or a
8301 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8302 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8303 && (TREE_CODE (arg0) == ADDR_EXPR
8304 || TREE_CODE (arg1) == ADDR_EXPR
8305 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8306 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8308 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8309 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8311 int volatilep, unsignedp;
8312 bool indirect_base0 = false, indirect_base1 = false;
8314 /* Get base and offset for the access. Strip ADDR_EXPR for
8315 get_inner_reference, but put it back by stripping INDIRECT_REF
8316 off the base object if possible. indirect_baseN will be true
8317 if baseN is not an address but refers to the object itself. */
8319 if (TREE_CODE (arg0) == ADDR_EXPR)
8321 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8322 &bitsize, &bitpos0, &offset0, &mode,
8323 &unsignedp, &volatilep, false);
8324 if (TREE_CODE (base0) == INDIRECT_REF)
8325 base0 = TREE_OPERAND (base0, 0);
8327 indirect_base0 = true;
8329 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8331 base0 = TREE_OPERAND (arg0, 0);
8332 STRIP_SIGN_NOPS (base0);
8333 if (TREE_CODE (base0) == ADDR_EXPR)
8335 base0 = TREE_OPERAND (base0, 0);
8336 indirect_base0 = true;
8338 offset0 = TREE_OPERAND (arg0, 1);
8339 if (tree_fits_shwi_p (offset0))
8341 HOST_WIDE_INT off = size_low_cst (offset0);
8342 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8344 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8346 bitpos0 = off * BITS_PER_UNIT;
8347 offset0 = NULL_TREE;
8353 if (TREE_CODE (arg1) == ADDR_EXPR)
8355 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8356 &bitsize, &bitpos1, &offset1, &mode,
8357 &unsignedp, &volatilep, false);
8358 if (TREE_CODE (base1) == INDIRECT_REF)
8359 base1 = TREE_OPERAND (base1, 0);
8361 indirect_base1 = true;
8363 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8365 base1 = TREE_OPERAND (arg1, 0);
8366 STRIP_SIGN_NOPS (base1);
8367 if (TREE_CODE (base1) == ADDR_EXPR)
8369 base1 = TREE_OPERAND (base1, 0);
8370 indirect_base1 = true;
8372 offset1 = TREE_OPERAND (arg1, 1);
8373 if (tree_fits_shwi_p (offset1))
8375 HOST_WIDE_INT off = size_low_cst (offset1);
8376 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8378 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8380 bitpos1 = off * BITS_PER_UNIT;
8381 offset1 = NULL_TREE;
8386 /* If we have equivalent bases we might be able to simplify. */
8387 if (indirect_base0 == indirect_base1
8388 && operand_equal_p (base0, base1, 0))
8390 /* We can fold this expression to a constant if the non-constant
8391 offset parts are equal. */
8392 if ((offset0 == offset1
8393 || (offset0 && offset1
8394 && operand_equal_p (offset0, offset1, 0)))
8397 || (indirect_base0 && DECL_P (base0))
8398 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8402 && bitpos0 != bitpos1
8403 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8404 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8405 fold_overflow_warning (("assuming pointer wraparound does not "
8406 "occur when comparing P +- C1 with "
8408 WARN_STRICT_OVERFLOW_CONDITIONAL);
8413 return constant_boolean_node (bitpos0 == bitpos1, type);
8415 return constant_boolean_node (bitpos0 != bitpos1, type);
8417 return constant_boolean_node (bitpos0 < bitpos1, type);
8419 return constant_boolean_node (bitpos0 <= bitpos1, type);
8421 return constant_boolean_node (bitpos0 >= bitpos1, type);
8423 return constant_boolean_node (bitpos0 > bitpos1, type);
8427 /* We can simplify the comparison to a comparison of the variable
8428 offset parts if the constant offset parts are equal.
8429 Be careful to use signed sizetype here because otherwise we
8430 mess with array offsets in the wrong way. This is possible
8431 because pointer arithmetic is restricted to retain within an
8432 object and overflow on pointer differences is undefined as of
8433 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8434 else if (bitpos0 == bitpos1
8436 || (indirect_base0 && DECL_P (base0))
8437 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8439 /* By converting to signed sizetype we cover middle-end pointer
8440 arithmetic which operates on unsigned pointer types of size
8441 type size and ARRAY_REF offsets which are properly sign or
8442 zero extended from their type in case it is narrower than
8444 if (offset0 == NULL_TREE)
8445 offset0 = build_int_cst (ssizetype, 0);
8447 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8448 if (offset1 == NULL_TREE)
8449 offset1 = build_int_cst (ssizetype, 0);
8451 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8454 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8455 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8456 fold_overflow_warning (("assuming pointer wraparound does not "
8457 "occur when comparing P +- C1 with "
8459 WARN_STRICT_OVERFLOW_COMPARISON);
8461 return fold_build2_loc (loc, code, type, offset0, offset1);
8464 /* For equal offsets we can simplify to a comparison of the
8466 else if (bitpos0 == bitpos1
8468 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8470 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8471 && ((offset0 == offset1)
8472 || (offset0 && offset1
8473 && operand_equal_p (offset0, offset1, 0))))
8476 base0 = build_fold_addr_expr_loc (loc, base0);
8478 base1 = build_fold_addr_expr_loc (loc, base1);
8479 return fold_build2_loc (loc, code, type, base0, base1);
8483 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8484 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8485 the resulting offset is smaller in absolute value than the
8486 original one and has the same sign. */
8487 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8488 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8489 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8490 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8491 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8492 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8493 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8494 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8496 tree const1 = TREE_OPERAND (arg0, 1);
8497 tree const2 = TREE_OPERAND (arg1, 1);
8498 tree variable1 = TREE_OPERAND (arg0, 0);
8499 tree variable2 = TREE_OPERAND (arg1, 0);
8501 const char * const warnmsg = G_("assuming signed overflow does not "
8502 "occur when combining constants around "
8505 /* Put the constant on the side where it doesn't overflow and is
8506 of lower absolute value and of same sign than before. */
8507 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8508 ? MINUS_EXPR : PLUS_EXPR,
8510 if (!TREE_OVERFLOW (cst)
8511 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8512 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8514 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8515 return fold_build2_loc (loc, code, type,
8517 fold_build2_loc (loc, TREE_CODE (arg1),
8522 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8523 ? MINUS_EXPR : PLUS_EXPR,
8525 if (!TREE_OVERFLOW (cst)
8526 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8527 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8529 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8530 return fold_build2_loc (loc, code, type,
8531 fold_build2_loc (loc, TREE_CODE (arg0),
8538 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8542 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8543 constant, we can simplify it. */
8544 if (TREE_CODE (arg1) == INTEGER_CST
8545 && (TREE_CODE (arg0) == MIN_EXPR
8546 || TREE_CODE (arg0) == MAX_EXPR)
8547 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8549 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8554 /* If we are comparing an expression that just has comparisons
8555 of two integer values, arithmetic expressions of those comparisons,
8556 and constants, we can simplify it. There are only three cases
8557 to check: the two values can either be equal, the first can be
8558 greater, or the second can be greater. Fold the expression for
8559 those three values. Since each value must be 0 or 1, we have
8560 eight possibilities, each of which corresponds to the constant 0
8561 or 1 or one of the six possible comparisons.
8563 This handles common cases like (a > b) == 0 but also handles
8564 expressions like ((x > y) - (y > x)) > 0, which supposedly
8565 occur in macroized code. */
8567 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8569 tree cval1 = 0, cval2 = 0;
8572 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8573 /* Don't handle degenerate cases here; they should already
8574 have been handled anyway. */
8575 && cval1 != 0 && cval2 != 0
8576 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8577 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8578 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8579 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8580 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8581 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8582 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8584 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8585 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8587 /* We can't just pass T to eval_subst in case cval1 or cval2
8588 was the same as ARG1. */
8591 = fold_build2_loc (loc, code, type,
8592 eval_subst (loc, arg0, cval1, maxval,
8596 = fold_build2_loc (loc, code, type,
8597 eval_subst (loc, arg0, cval1, maxval,
8601 = fold_build2_loc (loc, code, type,
8602 eval_subst (loc, arg0, cval1, minval,
8606 /* All three of these results should be 0 or 1. Confirm they are.
8607 Then use those values to select the proper code to use. */
8609 if (TREE_CODE (high_result) == INTEGER_CST
8610 && TREE_CODE (equal_result) == INTEGER_CST
8611 && TREE_CODE (low_result) == INTEGER_CST)
8613 /* Make a 3-bit mask with the high-order bit being the
8614 value for `>', the next for '=', and the low for '<'. */
8615 switch ((integer_onep (high_result) * 4)
8616 + (integer_onep (equal_result) * 2)
8617 + integer_onep (low_result))
8621 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8642 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8647 tem = save_expr (build2 (code, type, cval1, cval2));
8648 SET_EXPR_LOCATION (tem, loc);
8651 return fold_build2_loc (loc, code, type, cval1, cval2);
8656 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8657 into a single range test. */
8658 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8659 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8660 && TREE_CODE (arg1) == INTEGER_CST
8661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8662 && !integer_zerop (TREE_OPERAND (arg0, 1))
8663 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8664 && !TREE_OVERFLOW (arg1))
8666 tem = fold_div_compare (loc, code, type, arg0, arg1);
8667 if (tem != NULL_TREE)
8675 /* Subroutine of fold_binary. Optimize complex multiplications of the
8676 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8677 argument EXPR represents the expression "z" of type TYPE. */
8680 fold_mult_zconjz (location_t loc, tree type, tree expr)
8682 tree itype = TREE_TYPE (type);
8683 tree rpart, ipart, tem;
8685 if (TREE_CODE (expr) == COMPLEX_EXPR)
8687 rpart = TREE_OPERAND (expr, 0);
8688 ipart = TREE_OPERAND (expr, 1);
8690 else if (TREE_CODE (expr) == COMPLEX_CST)
8692 rpart = TREE_REALPART (expr);
8693 ipart = TREE_IMAGPART (expr);
8697 expr = save_expr (expr);
8698 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8699 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8702 rpart = save_expr (rpart);
8703 ipart = save_expr (ipart);
8704 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8705 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8706 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8707 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8708 build_zero_cst (itype));
8712 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8713 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8716 vec_cst_ctor_to_array (tree arg, tree *elts)
8718 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8720 if (TREE_CODE (arg) == VECTOR_CST)
8722 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8723 elts[i] = VECTOR_CST_ELT (arg, i);
8725 else if (TREE_CODE (arg) == CONSTRUCTOR)
8727 constructor_elt *elt;
8729 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8730 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8733 elts[i] = elt->value;
8737 for (; i < nelts; i++)
8739 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8743 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8744 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8745 NULL_TREE otherwise. */
8748 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8750 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8752 bool need_ctor = false;
8754 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8755 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8756 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8757 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8760 elts = XALLOCAVEC (tree, nelts * 3);
8761 if (!vec_cst_ctor_to_array (arg0, elts)
8762 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8765 for (i = 0; i < nelts; i++)
8767 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8769 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8774 vec<constructor_elt, va_gc> *v;
8775 vec_alloc (v, nelts);
8776 for (i = 0; i < nelts; i++)
8777 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8778 return build_constructor (type, v);
8781 return build_vector (type, &elts[2 * nelts]);
8784 /* Try to fold a pointer difference of type TYPE two address expressions of
8785 array references AREF0 and AREF1 using location LOC. Return a
8786 simplified expression for the difference or NULL_TREE. */
8789 fold_addr_of_array_ref_difference (location_t loc, tree type,
8790 tree aref0, tree aref1)
8792 tree base0 = TREE_OPERAND (aref0, 0);
8793 tree base1 = TREE_OPERAND (aref1, 0);
8794 tree base_offset = build_int_cst (type, 0);
8796 /* If the bases are array references as well, recurse. If the bases
8797 are pointer indirections compute the difference of the pointers.
8798 If the bases are equal, we are set. */
8799 if ((TREE_CODE (base0) == ARRAY_REF
8800 && TREE_CODE (base1) == ARRAY_REF
8802 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8803 || (INDIRECT_REF_P (base0)
8804 && INDIRECT_REF_P (base1)
8805 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
8806 TREE_OPERAND (base0, 0),
8807 TREE_OPERAND (base1, 0))))
8808 || operand_equal_p (base0, base1, 0))
8810 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8811 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8812 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8813 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8814 return fold_build2_loc (loc, PLUS_EXPR, type,
8816 fold_build2_loc (loc, MULT_EXPR, type,
8822 /* If the real or vector real constant CST of type TYPE has an exact
8823 inverse, return it, else return NULL. */
8826 exact_inverse (tree type, tree cst)
8829 tree unit_type, *elts;
8831 unsigned vec_nelts, i;
8833 switch (TREE_CODE (cst))
8836 r = TREE_REAL_CST (cst);
8838 if (exact_real_inverse (TYPE_MODE (type), &r))
8839 return build_real (type, r);
8844 vec_nelts = VECTOR_CST_NELTS (cst);
8845 elts = XALLOCAVEC (tree, vec_nelts);
8846 unit_type = TREE_TYPE (type);
8847 mode = TYPE_MODE (unit_type);
8849 for (i = 0; i < vec_nelts; i++)
8851 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8852 if (!exact_real_inverse (mode, &r))
8854 elts[i] = build_real (unit_type, r);
8857 return build_vector (type, elts);
8864 /* Mask out the tz least significant bits of X of type TYPE where
8865 tz is the number of trailing zeroes in Y. */
8867 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8869 int tz = wi::ctz (y);
8871 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8875 /* Return true when T is an address and is known to be nonzero.
8876 For floating point we further ensure that T is not denormal.
8877 Similar logic is present in nonzero_address in rtlanal.h.
8879 If the return value is based on the assumption that signed overflow
8880 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8881 change *STRICT_OVERFLOW_P. */
8884 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8886 tree type = TREE_TYPE (t);
8887 enum tree_code code;
8889 /* Doing something useful for floating point would need more work. */
8890 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8893 code = TREE_CODE (t);
8894 switch (TREE_CODE_CLASS (code))
8897 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8900 case tcc_comparison:
8901 return tree_binary_nonzero_warnv_p (code, type,
8902 TREE_OPERAND (t, 0),
8903 TREE_OPERAND (t, 1),
8906 case tcc_declaration:
8908 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8916 case TRUTH_NOT_EXPR:
8917 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8920 case TRUTH_AND_EXPR:
8922 case TRUTH_XOR_EXPR:
8923 return tree_binary_nonzero_warnv_p (code, type,
8924 TREE_OPERAND (t, 0),
8925 TREE_OPERAND (t, 1),
8933 case WITH_SIZE_EXPR:
8935 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8940 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8944 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8949 tree fndecl = get_callee_fndecl (t);
8950 if (!fndecl) return false;
8951 if (flag_delete_null_pointer_checks && !flag_check_new
8952 && DECL_IS_OPERATOR_NEW (fndecl)
8953 && !TREE_NOTHROW (fndecl))
8955 if (flag_delete_null_pointer_checks
8956 && lookup_attribute ("returns_nonnull",
8957 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8959 return alloca_call_p (t);
8968 /* Return true when T is an address and is known to be nonzero.
8969 Handle warnings about undefined signed overflow. */
8972 tree_expr_nonzero_p (tree t)
8974 bool ret, strict_overflow_p;
8976 strict_overflow_p = false;
8977 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
8978 if (strict_overflow_p)
8979 fold_overflow_warning (("assuming signed overflow does not occur when "
8980 "determining that expression is always "
8982 WARN_STRICT_OVERFLOW_MISC);
8986 /* Fold a binary expression of code CODE and type TYPE with operands
8987 OP0 and OP1. LOC is the location of the resulting expression.
8988 Return the folded expression if folding is successful. Otherwise,
8989 return NULL_TREE. */
8992 fold_binary_loc (location_t loc,
8993 enum tree_code code, tree type, tree op0, tree op1)
8995 enum tree_code_class kind = TREE_CODE_CLASS (code);
8996 tree arg0, arg1, tem;
8997 tree t1 = NULL_TREE;
8998 bool strict_overflow_p;
9001 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9002 && TREE_CODE_LENGTH (code) == 2
9004 && op1 != NULL_TREE);
9009 /* Strip any conversions that don't change the mode. This is
9010 safe for every expression, except for a comparison expression
9011 because its signedness is derived from its operands. So, in
9012 the latter case, only strip conversions that don't change the
9013 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9016 Note that this is done as an internal manipulation within the
9017 constant folder, in order to find the simplest representation
9018 of the arguments so that their form can be studied. In any
9019 cases, the appropriate type conversions should be put back in
9020 the tree that will get out of the constant folder. */
9022 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9024 STRIP_SIGN_NOPS (arg0);
9025 STRIP_SIGN_NOPS (arg1);
9033 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9034 constant but we can't do arithmetic on them. */
9035 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9037 tem = const_binop (code, type, arg0, arg1);
9038 if (tem != NULL_TREE)
9040 if (TREE_TYPE (tem) != type)
9041 tem = fold_convert_loc (loc, type, tem);
9046 /* If this is a commutative operation, and ARG0 is a constant, move it
9047 to ARG1 to reduce the number of tests below. */
9048 if (commutative_tree_code (code)
9049 && tree_swap_operands_p (arg0, arg1, true))
9050 return fold_build2_loc (loc, code, type, op1, op0);
9052 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9053 to ARG1 to reduce the number of tests below. */
9054 if (kind == tcc_comparison
9055 && tree_swap_operands_p (arg0, arg1, true))
9056 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9058 tem = generic_simplify (loc, code, type, op0, op1);
9062 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9064 First check for cases where an arithmetic operation is applied to a
9065 compound, conditional, or comparison operation. Push the arithmetic
9066 operation inside the compound or conditional to see if any folding
9067 can then be done. Convert comparison to conditional for this purpose.
9068 The also optimizes non-constant cases that used to be done in
9071 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9072 one of the operands is a comparison and the other is a comparison, a
9073 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9074 code below would make the expression more complex. Change it to a
9075 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9076 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9078 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9079 || code == EQ_EXPR || code == NE_EXPR)
9080 && TREE_CODE (type) != VECTOR_TYPE
9081 && ((truth_value_p (TREE_CODE (arg0))
9082 && (truth_value_p (TREE_CODE (arg1))
9083 || (TREE_CODE (arg1) == BIT_AND_EXPR
9084 && integer_onep (TREE_OPERAND (arg1, 1)))))
9085 || (truth_value_p (TREE_CODE (arg1))
9086 && (truth_value_p (TREE_CODE (arg0))
9087 || (TREE_CODE (arg0) == BIT_AND_EXPR
9088 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9090 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9091 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9094 fold_convert_loc (loc, boolean_type_node, arg0),
9095 fold_convert_loc (loc, boolean_type_node, arg1));
9097 if (code == EQ_EXPR)
9098 tem = invert_truthvalue_loc (loc, tem);
9100 return fold_convert_loc (loc, type, tem);
9103 if (TREE_CODE_CLASS (code) == tcc_binary
9104 || TREE_CODE_CLASS (code) == tcc_comparison)
9106 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9108 tem = fold_build2_loc (loc, code, type,
9109 fold_convert_loc (loc, TREE_TYPE (op0),
9110 TREE_OPERAND (arg0, 1)), op1);
9111 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9114 if (TREE_CODE (arg1) == COMPOUND_EXPR
9115 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9117 tem = fold_build2_loc (loc, code, type, op0,
9118 fold_convert_loc (loc, TREE_TYPE (op1),
9119 TREE_OPERAND (arg1, 1)));
9120 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9124 if (TREE_CODE (arg0) == COND_EXPR
9125 || TREE_CODE (arg0) == VEC_COND_EXPR
9126 || COMPARISON_CLASS_P (arg0))
9128 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9130 /*cond_first_p=*/1);
9131 if (tem != NULL_TREE)
9135 if (TREE_CODE (arg1) == COND_EXPR
9136 || TREE_CODE (arg1) == VEC_COND_EXPR
9137 || COMPARISON_CLASS_P (arg1))
9139 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9141 /*cond_first_p=*/0);
9142 if (tem != NULL_TREE)
9150 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9151 if (TREE_CODE (arg0) == ADDR_EXPR
9152 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9154 tree iref = TREE_OPERAND (arg0, 0);
9155 return fold_build2 (MEM_REF, type,
9156 TREE_OPERAND (iref, 0),
9157 int_const_binop (PLUS_EXPR, arg1,
9158 TREE_OPERAND (iref, 1)));
9161 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9162 if (TREE_CODE (arg0) == ADDR_EXPR
9163 && handled_component_p (TREE_OPERAND (arg0, 0)))
9166 HOST_WIDE_INT coffset;
9167 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9171 return fold_build2 (MEM_REF, type,
9172 build_fold_addr_expr (base),
9173 int_const_binop (PLUS_EXPR, arg1,
9174 size_int (coffset)));
9179 case POINTER_PLUS_EXPR:
9180 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9181 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9182 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9183 return fold_convert_loc (loc, type,
9184 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9185 fold_convert_loc (loc, sizetype,
9187 fold_convert_loc (loc, sizetype,
9193 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9195 /* X + (X / CST) * -CST is X % CST. */
9196 if (TREE_CODE (arg1) == MULT_EXPR
9197 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9198 && operand_equal_p (arg0,
9199 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9201 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9202 tree cst1 = TREE_OPERAND (arg1, 1);
9203 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9205 if (sum && integer_zerop (sum))
9206 return fold_convert_loc (loc, type,
9207 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9208 TREE_TYPE (arg0), arg0,
9213 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9214 one. Make sure the type is not saturating and has the signedness of
9215 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9216 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9217 if ((TREE_CODE (arg0) == MULT_EXPR
9218 || TREE_CODE (arg1) == MULT_EXPR)
9219 && !TYPE_SATURATING (type)
9220 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9221 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9222 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9224 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9229 if (! FLOAT_TYPE_P (type))
9231 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9232 (plus (plus (mult) (mult)) (foo)) so that we can
9233 take advantage of the factoring cases below. */
9234 if (ANY_INTEGRAL_TYPE_P (type)
9235 && TYPE_OVERFLOW_WRAPS (type)
9236 && (((TREE_CODE (arg0) == PLUS_EXPR
9237 || TREE_CODE (arg0) == MINUS_EXPR)
9238 && TREE_CODE (arg1) == MULT_EXPR)
9239 || ((TREE_CODE (arg1) == PLUS_EXPR
9240 || TREE_CODE (arg1) == MINUS_EXPR)
9241 && TREE_CODE (arg0) == MULT_EXPR)))
9243 tree parg0, parg1, parg, marg;
9244 enum tree_code pcode;
9246 if (TREE_CODE (arg1) == MULT_EXPR)
9247 parg = arg0, marg = arg1;
9249 parg = arg1, marg = arg0;
9250 pcode = TREE_CODE (parg);
9251 parg0 = TREE_OPERAND (parg, 0);
9252 parg1 = TREE_OPERAND (parg, 1);
9256 if (TREE_CODE (parg0) == MULT_EXPR
9257 && TREE_CODE (parg1) != MULT_EXPR)
9258 return fold_build2_loc (loc, pcode, type,
9259 fold_build2_loc (loc, PLUS_EXPR, type,
9260 fold_convert_loc (loc, type,
9262 fold_convert_loc (loc, type,
9264 fold_convert_loc (loc, type, parg1));
9265 if (TREE_CODE (parg0) != MULT_EXPR
9266 && TREE_CODE (parg1) == MULT_EXPR)
9268 fold_build2_loc (loc, PLUS_EXPR, type,
9269 fold_convert_loc (loc, type, parg0),
9270 fold_build2_loc (loc, pcode, type,
9271 fold_convert_loc (loc, type, marg),
9272 fold_convert_loc (loc, type,
9278 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9279 to __complex__ ( x, y ). This is not the same for SNaNs or
9280 if signed zeros are involved. */
9281 if (!HONOR_SNANS (element_mode (arg0))
9282 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9283 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9285 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9286 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9287 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9288 bool arg0rz = false, arg0iz = false;
9289 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9290 || (arg0i && (arg0iz = real_zerop (arg0i))))
9292 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9293 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9294 if (arg0rz && arg1i && real_zerop (arg1i))
9296 tree rp = arg1r ? arg1r
9297 : build1 (REALPART_EXPR, rtype, arg1);
9298 tree ip = arg0i ? arg0i
9299 : build1 (IMAGPART_EXPR, rtype, arg0);
9300 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9302 else if (arg0iz && arg1r && real_zerop (arg1r))
9304 tree rp = arg0r ? arg0r
9305 : build1 (REALPART_EXPR, rtype, arg0);
9306 tree ip = arg1i ? arg1i
9307 : build1 (IMAGPART_EXPR, rtype, arg1);
9308 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9313 if (flag_unsafe_math_optimizations
9314 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9315 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9316 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9319 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9320 We associate floats only if the user has specified
9321 -fassociative-math. */
9322 if (flag_associative_math
9323 && TREE_CODE (arg1) == PLUS_EXPR
9324 && TREE_CODE (arg0) != MULT_EXPR)
9326 tree tree10 = TREE_OPERAND (arg1, 0);
9327 tree tree11 = TREE_OPERAND (arg1, 1);
9328 if (TREE_CODE (tree11) == MULT_EXPR
9329 && TREE_CODE (tree10) == MULT_EXPR)
9332 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9333 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9336 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9337 We associate floats only if the user has specified
9338 -fassociative-math. */
9339 if (flag_associative_math
9340 && TREE_CODE (arg0) == PLUS_EXPR
9341 && TREE_CODE (arg1) != MULT_EXPR)
9343 tree tree00 = TREE_OPERAND (arg0, 0);
9344 tree tree01 = TREE_OPERAND (arg0, 1);
9345 if (TREE_CODE (tree01) == MULT_EXPR
9346 && TREE_CODE (tree00) == MULT_EXPR)
9349 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9350 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9356 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9357 is a rotate of A by C1 bits. */
9358 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9359 is a rotate of A by B bits. */
9361 enum tree_code code0, code1;
9363 code0 = TREE_CODE (arg0);
9364 code1 = TREE_CODE (arg1);
9365 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9366 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9367 && operand_equal_p (TREE_OPERAND (arg0, 0),
9368 TREE_OPERAND (arg1, 0), 0)
9369 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9370 TYPE_UNSIGNED (rtype))
9371 /* Only create rotates in complete modes. Other cases are not
9372 expanded properly. */
9373 && (element_precision (rtype)
9374 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9376 tree tree01, tree11;
9377 enum tree_code code01, code11;
9379 tree01 = TREE_OPERAND (arg0, 1);
9380 tree11 = TREE_OPERAND (arg1, 1);
9381 STRIP_NOPS (tree01);
9382 STRIP_NOPS (tree11);
9383 code01 = TREE_CODE (tree01);
9384 code11 = TREE_CODE (tree11);
9385 if (code01 == INTEGER_CST
9386 && code11 == INTEGER_CST
9387 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9388 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9390 tem = build2_loc (loc, LROTATE_EXPR,
9391 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9392 TREE_OPERAND (arg0, 0),
9393 code0 == LSHIFT_EXPR
9394 ? TREE_OPERAND (arg0, 1)
9395 : TREE_OPERAND (arg1, 1));
9396 return fold_convert_loc (loc, type, tem);
9398 else if (code11 == MINUS_EXPR)
9400 tree tree110, tree111;
9401 tree110 = TREE_OPERAND (tree11, 0);
9402 tree111 = TREE_OPERAND (tree11, 1);
9403 STRIP_NOPS (tree110);
9404 STRIP_NOPS (tree111);
9405 if (TREE_CODE (tree110) == INTEGER_CST
9406 && 0 == compare_tree_int (tree110,
9408 (TREE_TYPE (TREE_OPERAND
9410 && operand_equal_p (tree01, tree111, 0))
9412 fold_convert_loc (loc, type,
9413 build2 ((code0 == LSHIFT_EXPR
9416 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9417 TREE_OPERAND (arg0, 0),
9418 TREE_OPERAND (arg0, 1)));
9420 else if (code01 == MINUS_EXPR)
9422 tree tree010, tree011;
9423 tree010 = TREE_OPERAND (tree01, 0);
9424 tree011 = TREE_OPERAND (tree01, 1);
9425 STRIP_NOPS (tree010);
9426 STRIP_NOPS (tree011);
9427 if (TREE_CODE (tree010) == INTEGER_CST
9428 && 0 == compare_tree_int (tree010,
9430 (TREE_TYPE (TREE_OPERAND
9432 && operand_equal_p (tree11, tree011, 0))
9433 return fold_convert_loc
9435 build2 ((code0 != LSHIFT_EXPR
9438 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9439 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9445 /* In most languages, can't associate operations on floats through
9446 parentheses. Rather than remember where the parentheses were, we
9447 don't associate floats at all, unless the user has specified
9449 And, we need to make sure type is not saturating. */
9451 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9452 && !TYPE_SATURATING (type))
9454 tree var0, con0, lit0, minus_lit0;
9455 tree var1, con1, lit1, minus_lit1;
9459 /* Split both trees into variables, constants, and literals. Then
9460 associate each group together, the constants with literals,
9461 then the result with variables. This increases the chances of
9462 literals being recombined later and of generating relocatable
9463 expressions for the sum of a constant and literal. */
9464 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9465 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9466 code == MINUS_EXPR);
9468 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9469 if (code == MINUS_EXPR)
9472 /* With undefined overflow prefer doing association in a type
9473 which wraps on overflow, if that is one of the operand types. */
9474 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9475 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9477 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9478 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9479 atype = TREE_TYPE (arg0);
9480 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9481 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9482 atype = TREE_TYPE (arg1);
9483 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9486 /* With undefined overflow we can only associate constants with one
9487 variable, and constants whose association doesn't overflow. */
9488 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9489 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9495 bool one_neg = false;
9497 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9499 tmp0 = TREE_OPERAND (tmp0, 0);
9502 if (CONVERT_EXPR_P (tmp0)
9503 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9504 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9505 <= TYPE_PRECISION (atype)))
9506 tmp0 = TREE_OPERAND (tmp0, 0);
9507 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9509 tmp1 = TREE_OPERAND (tmp1, 0);
9512 if (CONVERT_EXPR_P (tmp1)
9513 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9514 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9515 <= TYPE_PRECISION (atype)))
9516 tmp1 = TREE_OPERAND (tmp1, 0);
9517 /* The only case we can still associate with two variables
9518 is if they cancel out. */
9520 || !operand_equal_p (tmp0, tmp1, 0))
9525 /* Only do something if we found more than two objects. Otherwise,
9526 nothing has changed and we risk infinite recursion. */
9528 && (2 < ((var0 != 0) + (var1 != 0)
9529 + (con0 != 0) + (con1 != 0)
9530 + (lit0 != 0) + (lit1 != 0)
9531 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9533 bool any_overflows = false;
9534 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9535 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9536 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9537 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9538 var0 = associate_trees (loc, var0, var1, code, atype);
9539 con0 = associate_trees (loc, con0, con1, code, atype);
9540 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9541 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9544 /* Preserve the MINUS_EXPR if the negative part of the literal is
9545 greater than the positive part. Otherwise, the multiplicative
9546 folding code (i.e extract_muldiv) may be fooled in case
9547 unsigned constants are subtracted, like in the following
9548 example: ((X*2 + 4) - 8U)/2. */
9549 if (minus_lit0 && lit0)
9551 if (TREE_CODE (lit0) == INTEGER_CST
9552 && TREE_CODE (minus_lit0) == INTEGER_CST
9553 && tree_int_cst_lt (lit0, minus_lit0))
9555 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9561 lit0 = associate_trees (loc, lit0, minus_lit0,
9567 /* Don't introduce overflows through reassociation. */
9569 && ((lit0 && TREE_OVERFLOW_P (lit0))
9570 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9577 fold_convert_loc (loc, type,
9578 associate_trees (loc, var0, minus_lit0,
9579 MINUS_EXPR, atype));
9582 con0 = associate_trees (loc, con0, minus_lit0,
9585 fold_convert_loc (loc, type,
9586 associate_trees (loc, var0, con0,
9591 con0 = associate_trees (loc, con0, lit0, code, atype);
9593 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9601 /* Pointer simplifications for subtraction, simple reassociations. */
9602 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9604 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9605 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9606 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9608 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9609 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9610 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9611 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9612 return fold_build2_loc (loc, PLUS_EXPR, type,
9613 fold_build2_loc (loc, MINUS_EXPR, type,
9615 fold_build2_loc (loc, MINUS_EXPR, type,
9618 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9619 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9621 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9622 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9623 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9624 fold_convert_loc (loc, type, arg1));
9626 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9628 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9630 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9632 tree arg10 = fold_convert_loc (loc, type,
9633 TREE_OPERAND (arg1, 0));
9634 tree arg11 = fold_convert_loc (loc, type,
9635 TREE_OPERAND (arg1, 1));
9636 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
9637 fold_convert_loc (loc, type, arg0),
9640 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
9643 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9644 if (TREE_CODE (arg0) == NEGATE_EXPR
9645 && negate_expr_p (arg1)
9646 && reorder_operands_p (arg0, arg1))
9647 return fold_build2_loc (loc, MINUS_EXPR, type,
9648 fold_convert_loc (loc, type,
9649 negate_expr (arg1)),
9650 fold_convert_loc (loc, type,
9651 TREE_OPERAND (arg0, 0)));
9653 if (! FLOAT_TYPE_P (type))
9655 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9656 any power of 2 minus 1. */
9657 if (TREE_CODE (arg0) == BIT_AND_EXPR
9658 && TREE_CODE (arg1) == BIT_AND_EXPR
9659 && operand_equal_p (TREE_OPERAND (arg0, 0),
9660 TREE_OPERAND (arg1, 0), 0))
9662 tree mask0 = TREE_OPERAND (arg0, 1);
9663 tree mask1 = TREE_OPERAND (arg1, 1);
9664 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
9666 if (operand_equal_p (tem, mask1, 0))
9668 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
9669 TREE_OPERAND (arg0, 0), mask1);
9670 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
9675 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9676 __complex__ ( x, -y ). This is not the same for SNaNs or if
9677 signed zeros are involved. */
9678 if (!HONOR_SNANS (element_mode (arg0))
9679 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9680 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9682 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9683 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9684 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9685 bool arg0rz = false, arg0iz = false;
9686 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9687 || (arg0i && (arg0iz = real_zerop (arg0i))))
9689 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9690 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9691 if (arg0rz && arg1i && real_zerop (arg1i))
9693 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9695 : build1 (REALPART_EXPR, rtype, arg1));
9696 tree ip = arg0i ? arg0i
9697 : build1 (IMAGPART_EXPR, rtype, arg0);
9698 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9700 else if (arg0iz && arg1r && real_zerop (arg1r))
9702 tree rp = arg0r ? arg0r
9703 : build1 (REALPART_EXPR, rtype, arg0);
9704 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9706 : build1 (IMAGPART_EXPR, rtype, arg1));
9707 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9712 /* A - B -> A + (-B) if B is easily negatable. */
9713 if (negate_expr_p (arg1)
9714 && !TYPE_OVERFLOW_SANITIZED (type)
9715 && ((FLOAT_TYPE_P (type)
9716 /* Avoid this transformation if B is a positive REAL_CST. */
9717 && (TREE_CODE (arg1) != REAL_CST
9718 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9719 || INTEGRAL_TYPE_P (type)))
9720 return fold_build2_loc (loc, PLUS_EXPR, type,
9721 fold_convert_loc (loc, type, arg0),
9722 fold_convert_loc (loc, type,
9723 negate_expr (arg1)));
9725 /* Fold &a[i] - &a[j] to i-j. */
9726 if (TREE_CODE (arg0) == ADDR_EXPR
9727 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9728 && TREE_CODE (arg1) == ADDR_EXPR
9729 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9731 tree tem = fold_addr_of_array_ref_difference (loc, type,
9732 TREE_OPERAND (arg0, 0),
9733 TREE_OPERAND (arg1, 0));
9738 if (FLOAT_TYPE_P (type)
9739 && flag_unsafe_math_optimizations
9740 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9741 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9742 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9745 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9746 one. Make sure the type is not saturating and has the signedness of
9747 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9748 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9749 if ((TREE_CODE (arg0) == MULT_EXPR
9750 || TREE_CODE (arg1) == MULT_EXPR)
9751 && !TYPE_SATURATING (type)
9752 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9753 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9754 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9756 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9764 /* (-A) * (-B) -> A * B */
9765 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9766 return fold_build2_loc (loc, MULT_EXPR, type,
9767 fold_convert_loc (loc, type,
9768 TREE_OPERAND (arg0, 0)),
9769 fold_convert_loc (loc, type,
9770 negate_expr (arg1)));
9771 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9772 return fold_build2_loc (loc, MULT_EXPR, type,
9773 fold_convert_loc (loc, type,
9774 negate_expr (arg0)),
9775 fold_convert_loc (loc, type,
9776 TREE_OPERAND (arg1, 0)));
9778 if (! FLOAT_TYPE_P (type))
9780 /* Transform x * -C into -x * C if x is easily negatable. */
9781 if (TREE_CODE (arg1) == INTEGER_CST
9782 && tree_int_cst_sgn (arg1) == -1
9783 && negate_expr_p (arg0)
9784 && (tem = negate_expr (arg1)) != arg1
9785 && !TREE_OVERFLOW (tem))
9786 return fold_build2_loc (loc, MULT_EXPR, type,
9787 fold_convert_loc (loc, type,
9788 negate_expr (arg0)),
9791 /* (a * (1 << b)) is (a << b) */
9792 if (TREE_CODE (arg1) == LSHIFT_EXPR
9793 && integer_onep (TREE_OPERAND (arg1, 0)))
9794 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
9795 TREE_OPERAND (arg1, 1));
9796 if (TREE_CODE (arg0) == LSHIFT_EXPR
9797 && integer_onep (TREE_OPERAND (arg0, 0)))
9798 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
9799 TREE_OPERAND (arg0, 1));
9801 /* (A + A) * C -> A * 2 * C */
9802 if (TREE_CODE (arg0) == PLUS_EXPR
9803 && TREE_CODE (arg1) == INTEGER_CST
9804 && operand_equal_p (TREE_OPERAND (arg0, 0),
9805 TREE_OPERAND (arg0, 1), 0))
9806 return fold_build2_loc (loc, MULT_EXPR, type,
9807 omit_one_operand_loc (loc, type,
9808 TREE_OPERAND (arg0, 0),
9809 TREE_OPERAND (arg0, 1)),
9810 fold_build2_loc (loc, MULT_EXPR, type,
9811 build_int_cst (type, 2) , arg1));
9813 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9814 sign-changing only. */
9815 if (TREE_CODE (arg1) == INTEGER_CST
9816 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9817 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9818 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9820 strict_overflow_p = false;
9821 if (TREE_CODE (arg1) == INTEGER_CST
9822 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9823 &strict_overflow_p)))
9825 if (strict_overflow_p)
9826 fold_overflow_warning (("assuming signed overflow does not "
9827 "occur when simplifying "
9829 WARN_STRICT_OVERFLOW_MISC);
9830 return fold_convert_loc (loc, type, tem);
9833 /* Optimize z * conj(z) for integer complex numbers. */
9834 if (TREE_CODE (arg0) == CONJ_EXPR
9835 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9836 return fold_mult_zconjz (loc, type, arg1);
9837 if (TREE_CODE (arg1) == CONJ_EXPR
9838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9839 return fold_mult_zconjz (loc, type, arg0);
9843 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
9844 the result for floating point types due to rounding so it is applied
9845 only if -fassociative-math was specify. */
9846 if (flag_associative_math
9847 && TREE_CODE (arg0) == RDIV_EXPR
9848 && TREE_CODE (arg1) == REAL_CST
9849 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9851 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9854 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
9855 TREE_OPERAND (arg0, 1));
9858 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9859 if (operand_equal_p (arg0, arg1, 0))
9861 tree tem = fold_strip_sign_ops (arg0);
9862 if (tem != NULL_TREE)
9864 tem = fold_convert_loc (loc, type, tem);
9865 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
9869 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9870 This is not the same for NaNs or if signed zeros are
9872 if (!HONOR_NANS (arg0)
9873 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9874 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9875 && TREE_CODE (arg1) == COMPLEX_CST
9876 && real_zerop (TREE_REALPART (arg1)))
9878 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9879 if (real_onep (TREE_IMAGPART (arg1)))
9881 fold_build2_loc (loc, COMPLEX_EXPR, type,
9882 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9884 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9885 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9887 fold_build2_loc (loc, COMPLEX_EXPR, type,
9888 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9889 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9893 /* Optimize z * conj(z) for floating point complex numbers.
9894 Guarded by flag_unsafe_math_optimizations as non-finite
9895 imaginary components don't produce scalar results. */
9896 if (flag_unsafe_math_optimizations
9897 && TREE_CODE (arg0) == CONJ_EXPR
9898 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9899 return fold_mult_zconjz (loc, type, arg1);
9900 if (flag_unsafe_math_optimizations
9901 && TREE_CODE (arg1) == CONJ_EXPR
9902 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9903 return fold_mult_zconjz (loc, type, arg0);
9905 if (flag_unsafe_math_optimizations)
9908 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9911 && operand_equal_p (arg0, arg1, 0))
9913 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9917 tree arg = build_real (type, dconst2);
9918 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9926 /* Canonicalize (X & C1) | C2. */
9927 if (TREE_CODE (arg0) == BIT_AND_EXPR
9928 && TREE_CODE (arg1) == INTEGER_CST
9929 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9931 int width = TYPE_PRECISION (type), w;
9932 wide_int c1 = TREE_OPERAND (arg0, 1);
9935 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9936 if ((c1 & c2) == c1)
9937 return omit_one_operand_loc (loc, type, arg1,
9938 TREE_OPERAND (arg0, 0));
9940 wide_int msk = wi::mask (width, false,
9941 TYPE_PRECISION (TREE_TYPE (arg1)));
9943 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9944 if (msk.and_not (c1 | c2) == 0)
9945 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9946 TREE_OPERAND (arg0, 0), arg1);
9948 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9949 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9950 mode which allows further optimizations. */
9953 wide_int c3 = c1.and_not (c2);
9954 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9956 wide_int mask = wi::mask (w, false,
9957 TYPE_PRECISION (type));
9958 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9966 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9967 fold_build2_loc (loc, BIT_AND_EXPR, type,
9968 TREE_OPERAND (arg0, 0),
9969 wide_int_to_tree (type,
9974 /* (X & ~Y) | (~X & Y) is X ^ Y */
9975 if (TREE_CODE (arg0) == BIT_AND_EXPR
9976 && TREE_CODE (arg1) == BIT_AND_EXPR)
9978 tree a0, a1, l0, l1, n0, n1;
9980 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9981 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9983 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9984 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9986 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
9987 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
9989 if ((operand_equal_p (n0, a0, 0)
9990 && operand_equal_p (n1, a1, 0))
9991 || (operand_equal_p (n0, a1, 0)
9992 && operand_equal_p (n1, a0, 0)))
9993 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
9996 /* See if this can be simplified into a rotate first. If that
9997 is unsuccessful continue in the association code. */
10001 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10002 if (TREE_CODE (arg0) == BIT_AND_EXPR
10003 && INTEGRAL_TYPE_P (type)
10004 && integer_onep (TREE_OPERAND (arg0, 1))
10005 && integer_onep (arg1))
10006 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10007 build_zero_cst (TREE_TYPE (arg0)));
10009 /* See if this can be simplified into a rotate first. If that
10010 is unsuccessful continue in the association code. */
10014 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10015 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10016 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10017 || (TREE_CODE (arg0) == EQ_EXPR
10018 && integer_zerop (TREE_OPERAND (arg0, 1))))
10019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10020 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10022 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10023 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10024 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10025 || (TREE_CODE (arg1) == EQ_EXPR
10026 && integer_zerop (TREE_OPERAND (arg1, 1))))
10027 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10028 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10030 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10031 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10032 && INTEGRAL_TYPE_P (type)
10033 && integer_onep (TREE_OPERAND (arg0, 1))
10034 && integer_onep (arg1))
10037 tem = TREE_OPERAND (arg0, 0);
10038 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10039 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10041 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10042 build_zero_cst (TREE_TYPE (tem)));
10044 /* Fold ~X & 1 as (X & 1) == 0. */
10045 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10046 && INTEGRAL_TYPE_P (type)
10047 && integer_onep (arg1))
10050 tem = TREE_OPERAND (arg0, 0);
10051 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10052 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10054 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10055 build_zero_cst (TREE_TYPE (tem)));
10057 /* Fold !X & 1 as X == 0. */
10058 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10059 && integer_onep (arg1))
10061 tem = TREE_OPERAND (arg0, 0);
10062 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10063 build_zero_cst (TREE_TYPE (tem)));
10066 /* Fold (X ^ Y) & Y as ~X & Y. */
10067 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10068 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10070 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10071 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10072 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10073 fold_convert_loc (loc, type, arg1));
10075 /* Fold (X ^ Y) & X as ~Y & X. */
10076 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10077 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10078 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10080 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10081 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10082 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10083 fold_convert_loc (loc, type, arg1));
10085 /* Fold X & (X ^ Y) as X & ~Y. */
10086 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10087 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10089 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10090 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10091 fold_convert_loc (loc, type, arg0),
10092 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10094 /* Fold X & (Y ^ X) as ~Y & X. */
10095 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10096 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10097 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10099 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10100 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10101 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10102 fold_convert_loc (loc, type, arg0));
10105 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10106 multiple of 1 << CST. */
10107 if (TREE_CODE (arg1) == INTEGER_CST)
10109 wide_int cst1 = arg1;
10110 wide_int ncst1 = -cst1;
10111 if ((cst1 & ncst1) == ncst1
10112 && multiple_of_p (type, arg0,
10113 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10114 return fold_convert_loc (loc, type, arg0);
10117 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10119 if (TREE_CODE (arg1) == INTEGER_CST
10120 && TREE_CODE (arg0) == MULT_EXPR
10121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10123 wide_int warg1 = arg1;
10124 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10127 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10129 else if (masked != warg1)
10131 /* Avoid the transform if arg1 is a mask of some
10132 mode which allows further optimizations. */
10133 int pop = wi::popcount (warg1);
10134 if (!(pop >= BITS_PER_UNIT
10135 && exact_log2 (pop) != -1
10136 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10137 return fold_build2_loc (loc, code, type, op0,
10138 wide_int_to_tree (type, masked));
10142 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10143 ((A & N) + B) & M -> (A + B) & M
10144 Similarly if (N & M) == 0,
10145 ((A | N) + B) & M -> (A + B) & M
10146 and for - instead of + (or unary - instead of +)
10147 and/or ^ instead of |.
10148 If B is constant and (B & M) == 0, fold into A & M. */
10149 if (TREE_CODE (arg1) == INTEGER_CST)
10151 wide_int cst1 = arg1;
10152 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10153 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10154 && (TREE_CODE (arg0) == PLUS_EXPR
10155 || TREE_CODE (arg0) == MINUS_EXPR
10156 || TREE_CODE (arg0) == NEGATE_EXPR)
10157 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10158 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10164 /* Now we know that arg0 is (C + D) or (C - D) or
10165 -C and arg1 (M) is == (1LL << cst) - 1.
10166 Store C into PMOP[0] and D into PMOP[1]. */
10167 pmop[0] = TREE_OPERAND (arg0, 0);
10169 if (TREE_CODE (arg0) != NEGATE_EXPR)
10171 pmop[1] = TREE_OPERAND (arg0, 1);
10175 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10178 for (; which >= 0; which--)
10179 switch (TREE_CODE (pmop[which]))
10184 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10187 cst0 = TREE_OPERAND (pmop[which], 1);
10189 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10194 else if (cst0 != 0)
10196 /* If C or D is of the form (A & N) where
10197 (N & M) == M, or of the form (A | N) or
10198 (A ^ N) where (N & M) == 0, replace it with A. */
10199 pmop[which] = TREE_OPERAND (pmop[which], 0);
10202 /* If C or D is a N where (N & M) == 0, it can be
10203 omitted (assumed 0). */
10204 if ((TREE_CODE (arg0) == PLUS_EXPR
10205 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10206 && (cst1 & pmop[which]) == 0)
10207 pmop[which] = NULL;
10213 /* Only build anything new if we optimized one or both arguments
10215 if (pmop[0] != TREE_OPERAND (arg0, 0)
10216 || (TREE_CODE (arg0) != NEGATE_EXPR
10217 && pmop[1] != TREE_OPERAND (arg0, 1)))
10219 tree utype = TREE_TYPE (arg0);
10220 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10222 /* Perform the operations in a type that has defined
10223 overflow behavior. */
10224 utype = unsigned_type_for (TREE_TYPE (arg0));
10225 if (pmop[0] != NULL)
10226 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10227 if (pmop[1] != NULL)
10228 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10231 if (TREE_CODE (arg0) == NEGATE_EXPR)
10232 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10233 else if (TREE_CODE (arg0) == PLUS_EXPR)
10235 if (pmop[0] != NULL && pmop[1] != NULL)
10236 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10238 else if (pmop[0] != NULL)
10240 else if (pmop[1] != NULL)
10243 return build_int_cst (type, 0);
10245 else if (pmop[0] == NULL)
10246 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10248 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10250 /* TEM is now the new binary +, - or unary - replacement. */
10251 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10252 fold_convert_loc (loc, utype, arg1));
10253 return fold_convert_loc (loc, type, tem);
10258 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10259 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10260 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10262 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10264 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10267 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10273 /* Don't touch a floating-point divide by zero unless the mode
10274 of the constant can represent infinity. */
10275 if (TREE_CODE (arg1) == REAL_CST
10276 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10277 && real_zerop (arg1))
10280 /* (-A) / (-B) -> A / B */
10281 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10282 return fold_build2_loc (loc, RDIV_EXPR, type,
10283 TREE_OPERAND (arg0, 0),
10284 negate_expr (arg1));
10285 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10286 return fold_build2_loc (loc, RDIV_EXPR, type,
10287 negate_expr (arg0),
10288 TREE_OPERAND (arg1, 0));
10290 /* Convert A/B/C to A/(B*C). */
10291 if (flag_reciprocal_math
10292 && TREE_CODE (arg0) == RDIV_EXPR)
10293 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10294 fold_build2_loc (loc, MULT_EXPR, type,
10295 TREE_OPERAND (arg0, 1), arg1));
10297 /* Convert A/(B/C) to (A/B)*C. */
10298 if (flag_reciprocal_math
10299 && TREE_CODE (arg1) == RDIV_EXPR)
10300 return fold_build2_loc (loc, MULT_EXPR, type,
10301 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10302 TREE_OPERAND (arg1, 0)),
10303 TREE_OPERAND (arg1, 1));
10305 /* Convert C1/(X*C2) into (C1/C2)/X. */
10306 if (flag_reciprocal_math
10307 && TREE_CODE (arg1) == MULT_EXPR
10308 && TREE_CODE (arg0) == REAL_CST
10309 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10311 tree tem = const_binop (RDIV_EXPR, arg0,
10312 TREE_OPERAND (arg1, 1));
10314 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10315 TREE_OPERAND (arg1, 0));
10320 case TRUNC_DIV_EXPR:
10321 /* Optimize (X & (-A)) / A where A is a power of 2,
10323 if (TREE_CODE (arg0) == BIT_AND_EXPR
10324 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10325 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10327 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10328 arg1, TREE_OPERAND (arg0, 1));
10329 if (sum && integer_zerop (sum)) {
10330 tree pow2 = build_int_cst (integer_type_node,
10331 wi::exact_log2 (arg1));
10332 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10333 TREE_OPERAND (arg0, 0), pow2);
10339 case FLOOR_DIV_EXPR:
10340 /* Simplify A / (B << N) where A and B are positive and B is
10341 a power of 2, to A >> (N + log2(B)). */
10342 strict_overflow_p = false;
10343 if (TREE_CODE (arg1) == LSHIFT_EXPR
10344 && (TYPE_UNSIGNED (type)
10345 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10347 tree sval = TREE_OPERAND (arg1, 0);
10348 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10350 tree sh_cnt = TREE_OPERAND (arg1, 1);
10351 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10352 wi::exact_log2 (sval));
10354 if (strict_overflow_p)
10355 fold_overflow_warning (("assuming signed overflow does not "
10356 "occur when simplifying A / (B << N)"),
10357 WARN_STRICT_OVERFLOW_MISC);
10359 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10361 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10362 fold_convert_loc (loc, type, arg0), sh_cnt);
10368 case ROUND_DIV_EXPR:
10369 case CEIL_DIV_EXPR:
10370 case EXACT_DIV_EXPR:
10371 if (integer_zerop (arg1))
10374 /* Convert -A / -B to A / B when the type is signed and overflow is
10376 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10377 && TREE_CODE (arg0) == NEGATE_EXPR
10378 && negate_expr_p (arg1))
10380 if (INTEGRAL_TYPE_P (type))
10381 fold_overflow_warning (("assuming signed overflow does not occur "
10382 "when distributing negation across "
10384 WARN_STRICT_OVERFLOW_MISC);
10385 return fold_build2_loc (loc, code, type,
10386 fold_convert_loc (loc, type,
10387 TREE_OPERAND (arg0, 0)),
10388 fold_convert_loc (loc, type,
10389 negate_expr (arg1)));
10391 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10392 && TREE_CODE (arg1) == NEGATE_EXPR
10393 && negate_expr_p (arg0))
10395 if (INTEGRAL_TYPE_P (type))
10396 fold_overflow_warning (("assuming signed overflow does not occur "
10397 "when distributing negation across "
10399 WARN_STRICT_OVERFLOW_MISC);
10400 return fold_build2_loc (loc, code, type,
10401 fold_convert_loc (loc, type,
10402 negate_expr (arg0)),
10403 fold_convert_loc (loc, type,
10404 TREE_OPERAND (arg1, 0)));
10407 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10408 operation, EXACT_DIV_EXPR.
10410 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10411 At one time others generated faster code, it's not clear if they do
10412 after the last round to changes to the DIV code in expmed.c. */
10413 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10414 && multiple_of_p (type, arg0, arg1))
10415 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10416 fold_convert (type, arg0),
10417 fold_convert (type, arg1));
10419 strict_overflow_p = false;
10420 if (TREE_CODE (arg1) == INTEGER_CST
10421 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10422 &strict_overflow_p)))
10424 if (strict_overflow_p)
10425 fold_overflow_warning (("assuming signed overflow does not occur "
10426 "when simplifying division"),
10427 WARN_STRICT_OVERFLOW_MISC);
10428 return fold_convert_loc (loc, type, tem);
10433 case CEIL_MOD_EXPR:
10434 case FLOOR_MOD_EXPR:
10435 case ROUND_MOD_EXPR:
10436 case TRUNC_MOD_EXPR:
10437 strict_overflow_p = false;
10438 if (TREE_CODE (arg1) == INTEGER_CST
10439 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10440 &strict_overflow_p)))
10442 if (strict_overflow_p)
10443 fold_overflow_warning (("assuming signed overflow does not occur "
10444 "when simplifying modulus"),
10445 WARN_STRICT_OVERFLOW_MISC);
10446 return fold_convert_loc (loc, type, tem);
10455 /* Since negative shift count is not well-defined,
10456 don't try to compute it in the compiler. */
10457 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10460 prec = element_precision (type);
10462 /* If we have a rotate of a bit operation with the rotate count and
10463 the second operand of the bit operation both constant,
10464 permute the two operations. */
10465 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10466 && (TREE_CODE (arg0) == BIT_AND_EXPR
10467 || TREE_CODE (arg0) == BIT_IOR_EXPR
10468 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10469 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10470 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10471 fold_build2_loc (loc, code, type,
10472 TREE_OPERAND (arg0, 0), arg1),
10473 fold_build2_loc (loc, code, type,
10474 TREE_OPERAND (arg0, 1), arg1));
10476 /* Two consecutive rotates adding up to the some integer
10477 multiple of the precision of the type can be ignored. */
10478 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10479 && TREE_CODE (arg0) == RROTATE_EXPR
10480 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10481 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10483 return TREE_OPERAND (arg0, 0);
10488 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
10494 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
10499 case TRUTH_ANDIF_EXPR:
10500 /* Note that the operands of this must be ints
10501 and their values must be 0 or 1.
10502 ("true" is a fixed value perhaps depending on the language.) */
10503 /* If first arg is constant zero, return it. */
10504 if (integer_zerop (arg0))
10505 return fold_convert_loc (loc, type, arg0);
10506 case TRUTH_AND_EXPR:
10507 /* If either arg is constant true, drop it. */
10508 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10509 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10510 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10511 /* Preserve sequence points. */
10512 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10513 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10514 /* If second arg is constant zero, result is zero, but first arg
10515 must be evaluated. */
10516 if (integer_zerop (arg1))
10517 return omit_one_operand_loc (loc, type, arg1, arg0);
10518 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10519 case will be handled here. */
10520 if (integer_zerop (arg0))
10521 return omit_one_operand_loc (loc, type, arg0, arg1);
10523 /* !X && X is always false. */
10524 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10525 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10526 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10527 /* X && !X is always false. */
10528 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10529 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10530 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10532 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10533 means A >= Y && A != MAX, but in this case we know that
10536 if (!TREE_SIDE_EFFECTS (arg0)
10537 && !TREE_SIDE_EFFECTS (arg1))
10539 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10540 if (tem && !operand_equal_p (tem, arg0, 0))
10541 return fold_build2_loc (loc, code, type, tem, arg1);
10543 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10544 if (tem && !operand_equal_p (tem, arg1, 0))
10545 return fold_build2_loc (loc, code, type, arg0, tem);
10548 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10554 case TRUTH_ORIF_EXPR:
10555 /* Note that the operands of this must be ints
10556 and their values must be 0 or true.
10557 ("true" is a fixed value perhaps depending on the language.) */
10558 /* If first arg is constant true, return it. */
10559 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10560 return fold_convert_loc (loc, type, arg0);
10561 case TRUTH_OR_EXPR:
10562 /* If either arg is constant zero, drop it. */
10563 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10564 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10565 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10566 /* Preserve sequence points. */
10567 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10568 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10569 /* If second arg is constant true, result is true, but we must
10570 evaluate first arg. */
10571 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10572 return omit_one_operand_loc (loc, type, arg1, arg0);
10573 /* Likewise for first arg, but note this only occurs here for
10575 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10576 return omit_one_operand_loc (loc, type, arg0, arg1);
10578 /* !X || X is always true. */
10579 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10581 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10582 /* X || !X is always true. */
10583 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10584 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10585 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10587 /* (X && !Y) || (!X && Y) is X ^ Y */
10588 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10589 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10591 tree a0, a1, l0, l1, n0, n1;
10593 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10594 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10596 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10597 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10599 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10600 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10602 if ((operand_equal_p (n0, a0, 0)
10603 && operand_equal_p (n1, a1, 0))
10604 || (operand_equal_p (n0, a1, 0)
10605 && operand_equal_p (n1, a0, 0)))
10606 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10609 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10615 case TRUTH_XOR_EXPR:
10616 /* If the second arg is constant zero, drop it. */
10617 if (integer_zerop (arg1))
10618 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10619 /* If the second arg is constant true, this is a logical inversion. */
10620 if (integer_onep (arg1))
10622 tem = invert_truthvalue_loc (loc, arg0);
10623 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10625 /* Identical arguments cancel to zero. */
10626 if (operand_equal_p (arg0, arg1, 0))
10627 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10629 /* !X ^ X is always true. */
10630 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10632 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10634 /* X ^ !X is always true. */
10635 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10636 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10637 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10646 tem = fold_comparison (loc, code, type, op0, op1);
10647 if (tem != NULL_TREE)
10650 /* bool_var != 1 becomes !bool_var. */
10651 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10652 && code == NE_EXPR)
10653 return fold_convert_loc (loc, type,
10654 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10655 TREE_TYPE (arg0), arg0));
10657 /* bool_var == 0 becomes !bool_var. */
10658 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10659 && code == EQ_EXPR)
10660 return fold_convert_loc (loc, type,
10661 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10662 TREE_TYPE (arg0), arg0));
10664 /* !exp != 0 becomes !exp */
10665 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10666 && code == NE_EXPR)
10667 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10669 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10670 if ((TREE_CODE (arg0) == PLUS_EXPR
10671 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10672 || TREE_CODE (arg0) == MINUS_EXPR)
10673 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10676 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10677 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10679 tree val = TREE_OPERAND (arg0, 1);
10680 return omit_two_operands_loc (loc, type,
10681 fold_build2_loc (loc, code, type,
10683 build_int_cst (TREE_TYPE (val),
10685 TREE_OPERAND (arg0, 0), arg1);
10688 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10689 if (TREE_CODE (arg0) == MINUS_EXPR
10690 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10691 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10694 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10696 return omit_two_operands_loc (loc, type,
10698 ? boolean_true_node : boolean_false_node,
10699 TREE_OPERAND (arg0, 1), arg1);
10702 /* If this is an EQ or NE comparison with zero and ARG0 is
10703 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10704 two operations, but the latter can be done in one less insn
10705 on machines that have only two-operand insns or on which a
10706 constant cannot be the first operand. */
10707 if (TREE_CODE (arg0) == BIT_AND_EXPR
10708 && integer_zerop (arg1))
10710 tree arg00 = TREE_OPERAND (arg0, 0);
10711 tree arg01 = TREE_OPERAND (arg0, 1);
10712 if (TREE_CODE (arg00) == LSHIFT_EXPR
10713 && integer_onep (TREE_OPERAND (arg00, 0)))
10715 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10716 arg01, TREE_OPERAND (arg00, 1));
10717 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10718 build_int_cst (TREE_TYPE (arg0), 1));
10719 return fold_build2_loc (loc, code, type,
10720 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10723 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10724 && integer_onep (TREE_OPERAND (arg01, 0)))
10726 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10727 arg00, TREE_OPERAND (arg01, 1));
10728 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10729 build_int_cst (TREE_TYPE (arg0), 1));
10730 return fold_build2_loc (loc, code, type,
10731 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10736 /* If this is an NE or EQ comparison of zero against the result of a
10737 signed MOD operation whose second operand is a power of 2, make
10738 the MOD operation unsigned since it is simpler and equivalent. */
10739 if (integer_zerop (arg1)
10740 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10741 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10742 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10743 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10744 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10745 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10747 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10748 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10749 fold_convert_loc (loc, newtype,
10750 TREE_OPERAND (arg0, 0)),
10751 fold_convert_loc (loc, newtype,
10752 TREE_OPERAND (arg0, 1)));
10754 return fold_build2_loc (loc, code, type, newmod,
10755 fold_convert_loc (loc, newtype, arg1));
10758 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10759 C1 is a valid shift constant, and C2 is a power of two, i.e.
10761 if (TREE_CODE (arg0) == BIT_AND_EXPR
10762 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10763 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10765 && integer_pow2p (TREE_OPERAND (arg0, 1))
10766 && integer_zerop (arg1))
10768 tree itype = TREE_TYPE (arg0);
10769 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10770 prec = TYPE_PRECISION (itype);
10772 /* Check for a valid shift count. */
10773 if (wi::ltu_p (arg001, prec))
10775 tree arg01 = TREE_OPERAND (arg0, 1);
10776 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10777 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10778 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10779 can be rewritten as (X & (C2 << C1)) != 0. */
10780 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10782 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10783 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10784 return fold_build2_loc (loc, code, type, tem,
10785 fold_convert_loc (loc, itype, arg1));
10787 /* Otherwise, for signed (arithmetic) shifts,
10788 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10789 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10790 else if (!TYPE_UNSIGNED (itype))
10791 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10792 arg000, build_int_cst (itype, 0));
10793 /* Otherwise, of unsigned (logical) shifts,
10794 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10795 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10797 return omit_one_operand_loc (loc, type,
10798 code == EQ_EXPR ? integer_one_node
10799 : integer_zero_node,
10804 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10805 Similarly for NE_EXPR. */
10806 if (TREE_CODE (arg0) == BIT_AND_EXPR
10807 && TREE_CODE (arg1) == INTEGER_CST
10808 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10810 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10811 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10812 TREE_OPERAND (arg0, 1));
10814 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10815 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10817 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10818 if (integer_nonzerop (dandnotc))
10819 return omit_one_operand_loc (loc, type, rslt, arg0);
10822 /* If this is a comparison of a field, we may be able to simplify it. */
10823 if ((TREE_CODE (arg0) == COMPONENT_REF
10824 || TREE_CODE (arg0) == BIT_FIELD_REF)
10825 /* Handle the constant case even without -O
10826 to make sure the warnings are given. */
10827 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10829 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10834 /* Optimize comparisons of strlen vs zero to a compare of the
10835 first character of the string vs zero. To wit,
10836 strlen(ptr) == 0 => *ptr == 0
10837 strlen(ptr) != 0 => *ptr != 0
10838 Other cases should reduce to one of these two (or a constant)
10839 due to the return value of strlen being unsigned. */
10840 if (TREE_CODE (arg0) == CALL_EXPR
10841 && integer_zerop (arg1))
10843 tree fndecl = get_callee_fndecl (arg0);
10846 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10847 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10848 && call_expr_nargs (arg0) == 1
10849 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10851 tree iref = build_fold_indirect_ref_loc (loc,
10852 CALL_EXPR_ARG (arg0, 0));
10853 return fold_build2_loc (loc, code, type, iref,
10854 build_int_cst (TREE_TYPE (iref), 0));
10858 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10859 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10860 if (TREE_CODE (arg0) == RSHIFT_EXPR
10861 && integer_zerop (arg1)
10862 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10864 tree arg00 = TREE_OPERAND (arg0, 0);
10865 tree arg01 = TREE_OPERAND (arg0, 1);
10866 tree itype = TREE_TYPE (arg00);
10867 if (wi::eq_p (arg01, element_precision (itype) - 1))
10869 if (TYPE_UNSIGNED (itype))
10871 itype = signed_type_for (itype);
10872 arg00 = fold_convert_loc (loc, itype, arg00);
10874 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10875 type, arg00, build_zero_cst (itype));
10879 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10880 (X & C) == 0 when C is a single bit. */
10881 if (TREE_CODE (arg0) == BIT_AND_EXPR
10882 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10883 && integer_zerop (arg1)
10884 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10886 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10887 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10888 TREE_OPERAND (arg0, 1));
10889 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10891 fold_convert_loc (loc, TREE_TYPE (arg0),
10895 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10896 constant C is a power of two, i.e. a single bit. */
10897 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10898 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10899 && integer_zerop (arg1)
10900 && integer_pow2p (TREE_OPERAND (arg0, 1))
10901 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10902 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10904 tree arg00 = TREE_OPERAND (arg0, 0);
10905 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10906 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10909 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10910 when is C is a power of two, i.e. a single bit. */
10911 if (TREE_CODE (arg0) == BIT_AND_EXPR
10912 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10913 && integer_zerop (arg1)
10914 && integer_pow2p (TREE_OPERAND (arg0, 1))
10915 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10916 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10918 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10919 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10920 arg000, TREE_OPERAND (arg0, 1));
10921 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10922 tem, build_int_cst (TREE_TYPE (tem), 0));
10925 if (integer_zerop (arg1)
10926 && tree_expr_nonzero_p (arg0))
10928 tree res = constant_boolean_node (code==NE_EXPR, type);
10929 return omit_one_operand_loc (loc, type, res, arg0);
10932 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10933 if (TREE_CODE (arg0) == BIT_AND_EXPR
10934 && TREE_CODE (arg1) == BIT_AND_EXPR)
10936 tree arg00 = TREE_OPERAND (arg0, 0);
10937 tree arg01 = TREE_OPERAND (arg0, 1);
10938 tree arg10 = TREE_OPERAND (arg1, 0);
10939 tree arg11 = TREE_OPERAND (arg1, 1);
10940 tree itype = TREE_TYPE (arg0);
10942 if (operand_equal_p (arg01, arg11, 0))
10943 return fold_build2_loc (loc, code, type,
10944 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10945 fold_build2_loc (loc,
10946 BIT_XOR_EXPR, itype,
10949 build_zero_cst (itype));
10951 if (operand_equal_p (arg01, arg10, 0))
10952 return fold_build2_loc (loc, code, type,
10953 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10954 fold_build2_loc (loc,
10955 BIT_XOR_EXPR, itype,
10958 build_zero_cst (itype));
10960 if (operand_equal_p (arg00, arg11, 0))
10961 return fold_build2_loc (loc, code, type,
10962 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10963 fold_build2_loc (loc,
10964 BIT_XOR_EXPR, itype,
10967 build_zero_cst (itype));
10969 if (operand_equal_p (arg00, arg10, 0))
10970 return fold_build2_loc (loc, code, type,
10971 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10972 fold_build2_loc (loc,
10973 BIT_XOR_EXPR, itype,
10976 build_zero_cst (itype));
10979 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10980 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10982 tree arg00 = TREE_OPERAND (arg0, 0);
10983 tree arg01 = TREE_OPERAND (arg0, 1);
10984 tree arg10 = TREE_OPERAND (arg1, 0);
10985 tree arg11 = TREE_OPERAND (arg1, 1);
10986 tree itype = TREE_TYPE (arg0);
10988 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10989 operand_equal_p guarantees no side-effects so we don't need
10990 to use omit_one_operand on Z. */
10991 if (operand_equal_p (arg01, arg11, 0))
10992 return fold_build2_loc (loc, code, type, arg00,
10993 fold_convert_loc (loc, TREE_TYPE (arg00),
10995 if (operand_equal_p (arg01, arg10, 0))
10996 return fold_build2_loc (loc, code, type, arg00,
10997 fold_convert_loc (loc, TREE_TYPE (arg00),
10999 if (operand_equal_p (arg00, arg11, 0))
11000 return fold_build2_loc (loc, code, type, arg01,
11001 fold_convert_loc (loc, TREE_TYPE (arg01),
11003 if (operand_equal_p (arg00, arg10, 0))
11004 return fold_build2_loc (loc, code, type, arg01,
11005 fold_convert_loc (loc, TREE_TYPE (arg01),
11008 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11009 if (TREE_CODE (arg01) == INTEGER_CST
11010 && TREE_CODE (arg11) == INTEGER_CST)
11012 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11013 fold_convert_loc (loc, itype, arg11));
11014 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11015 return fold_build2_loc (loc, code, type, tem,
11016 fold_convert_loc (loc, itype, arg10));
11020 /* Attempt to simplify equality/inequality comparisons of complex
11021 values. Only lower the comparison if the result is known or
11022 can be simplified to a single scalar comparison. */
11023 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11024 || TREE_CODE (arg0) == COMPLEX_CST)
11025 && (TREE_CODE (arg1) == COMPLEX_EXPR
11026 || TREE_CODE (arg1) == COMPLEX_CST))
11028 tree real0, imag0, real1, imag1;
11031 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11033 real0 = TREE_OPERAND (arg0, 0);
11034 imag0 = TREE_OPERAND (arg0, 1);
11038 real0 = TREE_REALPART (arg0);
11039 imag0 = TREE_IMAGPART (arg0);
11042 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11044 real1 = TREE_OPERAND (arg1, 0);
11045 imag1 = TREE_OPERAND (arg1, 1);
11049 real1 = TREE_REALPART (arg1);
11050 imag1 = TREE_IMAGPART (arg1);
11053 rcond = fold_binary_loc (loc, code, type, real0, real1);
11054 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11056 if (integer_zerop (rcond))
11058 if (code == EQ_EXPR)
11059 return omit_two_operands_loc (loc, type, boolean_false_node,
11061 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11065 if (code == NE_EXPR)
11066 return omit_two_operands_loc (loc, type, boolean_true_node,
11068 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11072 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11073 if (icond && TREE_CODE (icond) == INTEGER_CST)
11075 if (integer_zerop (icond))
11077 if (code == EQ_EXPR)
11078 return omit_two_operands_loc (loc, type, boolean_false_node,
11080 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11084 if (code == NE_EXPR)
11085 return omit_two_operands_loc (loc, type, boolean_true_node,
11087 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11098 tem = fold_comparison (loc, code, type, op0, op1);
11099 if (tem != NULL_TREE)
11102 /* Transform comparisons of the form X +- C CMP X. */
11103 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11105 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11106 && !HONOR_SNANS (arg0))
11107 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11108 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11110 tree arg01 = TREE_OPERAND (arg0, 1);
11111 enum tree_code code0 = TREE_CODE (arg0);
11114 if (TREE_CODE (arg01) == REAL_CST)
11115 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11117 is_positive = tree_int_cst_sgn (arg01);
11119 /* (X - c) > X becomes false. */
11120 if (code == GT_EXPR
11121 && ((code0 == MINUS_EXPR && is_positive >= 0)
11122 || (code0 == PLUS_EXPR && is_positive <= 0)))
11124 if (TREE_CODE (arg01) == INTEGER_CST
11125 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11126 fold_overflow_warning (("assuming signed overflow does not "
11127 "occur when assuming that (X - c) > X "
11128 "is always false"),
11129 WARN_STRICT_OVERFLOW_ALL);
11130 return constant_boolean_node (0, type);
11133 /* Likewise (X + c) < X becomes false. */
11134 if (code == LT_EXPR
11135 && ((code0 == PLUS_EXPR && is_positive >= 0)
11136 || (code0 == MINUS_EXPR && is_positive <= 0)))
11138 if (TREE_CODE (arg01) == INTEGER_CST
11139 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11140 fold_overflow_warning (("assuming signed overflow does not "
11141 "occur when assuming that "
11142 "(X + c) < X is always false"),
11143 WARN_STRICT_OVERFLOW_ALL);
11144 return constant_boolean_node (0, type);
11147 /* Convert (X - c) <= X to true. */
11148 if (!HONOR_NANS (arg1)
11150 && ((code0 == MINUS_EXPR && is_positive >= 0)
11151 || (code0 == PLUS_EXPR && is_positive <= 0)))
11153 if (TREE_CODE (arg01) == INTEGER_CST
11154 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11155 fold_overflow_warning (("assuming signed overflow does not "
11156 "occur when assuming that "
11157 "(X - c) <= X is always true"),
11158 WARN_STRICT_OVERFLOW_ALL);
11159 return constant_boolean_node (1, type);
11162 /* Convert (X + c) >= X to true. */
11163 if (!HONOR_NANS (arg1)
11165 && ((code0 == PLUS_EXPR && is_positive >= 0)
11166 || (code0 == MINUS_EXPR && is_positive <= 0)))
11168 if (TREE_CODE (arg01) == INTEGER_CST
11169 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11170 fold_overflow_warning (("assuming signed overflow does not "
11171 "occur when assuming that "
11172 "(X + c) >= X is always true"),
11173 WARN_STRICT_OVERFLOW_ALL);
11174 return constant_boolean_node (1, type);
11177 if (TREE_CODE (arg01) == INTEGER_CST)
11179 /* Convert X + c > X and X - c < X to true for integers. */
11180 if (code == GT_EXPR
11181 && ((code0 == PLUS_EXPR && is_positive > 0)
11182 || (code0 == MINUS_EXPR && is_positive < 0)))
11184 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11185 fold_overflow_warning (("assuming signed overflow does "
11186 "not occur when assuming that "
11187 "(X + c) > X is always true"),
11188 WARN_STRICT_OVERFLOW_ALL);
11189 return constant_boolean_node (1, type);
11192 if (code == LT_EXPR
11193 && ((code0 == MINUS_EXPR && is_positive > 0)
11194 || (code0 == PLUS_EXPR && is_positive < 0)))
11196 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11197 fold_overflow_warning (("assuming signed overflow does "
11198 "not occur when assuming that "
11199 "(X - c) < X is always true"),
11200 WARN_STRICT_OVERFLOW_ALL);
11201 return constant_boolean_node (1, type);
11204 /* Convert X + c <= X and X - c >= X to false for integers. */
11205 if (code == LE_EXPR
11206 && ((code0 == PLUS_EXPR && is_positive > 0)
11207 || (code0 == MINUS_EXPR && is_positive < 0)))
11209 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11210 fold_overflow_warning (("assuming signed overflow does "
11211 "not occur when assuming that "
11212 "(X + c) <= X is always false"),
11213 WARN_STRICT_OVERFLOW_ALL);
11214 return constant_boolean_node (0, type);
11217 if (code == GE_EXPR
11218 && ((code0 == MINUS_EXPR && is_positive > 0)
11219 || (code0 == PLUS_EXPR && is_positive < 0)))
11221 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11222 fold_overflow_warning (("assuming signed overflow does "
11223 "not occur when assuming that "
11224 "(X - c) >= X is always false"),
11225 WARN_STRICT_OVERFLOW_ALL);
11226 return constant_boolean_node (0, type);
11231 /* If we are comparing an ABS_EXPR with a constant, we can
11232 convert all the cases into explicit comparisons, but they may
11233 well not be faster than doing the ABS and one comparison.
11234 But ABS (X) <= C is a range comparison, which becomes a subtraction
11235 and a comparison, and is probably faster. */
11236 if (code == LE_EXPR
11237 && TREE_CODE (arg1) == INTEGER_CST
11238 && TREE_CODE (arg0) == ABS_EXPR
11239 && ! TREE_SIDE_EFFECTS (arg0)
11240 && (0 != (tem = negate_expr (arg1)))
11241 && TREE_CODE (tem) == INTEGER_CST
11242 && !TREE_OVERFLOW (tem))
11243 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11244 build2 (GE_EXPR, type,
11245 TREE_OPERAND (arg0, 0), tem),
11246 build2 (LE_EXPR, type,
11247 TREE_OPERAND (arg0, 0), arg1));
11249 /* Convert ABS_EXPR<x> >= 0 to true. */
11250 strict_overflow_p = false;
11251 if (code == GE_EXPR
11252 && (integer_zerop (arg1)
11253 || (! HONOR_NANS (arg0)
11254 && real_zerop (arg1)))
11255 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11257 if (strict_overflow_p)
11258 fold_overflow_warning (("assuming signed overflow does not occur "
11259 "when simplifying comparison of "
11260 "absolute value and zero"),
11261 WARN_STRICT_OVERFLOW_CONDITIONAL);
11262 return omit_one_operand_loc (loc, type,
11263 constant_boolean_node (true, type),
11267 /* Convert ABS_EXPR<x> < 0 to false. */
11268 strict_overflow_p = false;
11269 if (code == LT_EXPR
11270 && (integer_zerop (arg1) || real_zerop (arg1))
11271 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11273 if (strict_overflow_p)
11274 fold_overflow_warning (("assuming signed overflow does not occur "
11275 "when simplifying comparison of "
11276 "absolute value and zero"),
11277 WARN_STRICT_OVERFLOW_CONDITIONAL);
11278 return omit_one_operand_loc (loc, type,
11279 constant_boolean_node (false, type),
11283 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11284 and similarly for >= into !=. */
11285 if ((code == LT_EXPR || code == GE_EXPR)
11286 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11287 && TREE_CODE (arg1) == LSHIFT_EXPR
11288 && integer_onep (TREE_OPERAND (arg1, 0)))
11289 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11290 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11291 TREE_OPERAND (arg1, 1)),
11292 build_zero_cst (TREE_TYPE (arg0)));
11294 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11295 otherwise Y might be >= # of bits in X's type and thus e.g.
11296 (unsigned char) (1 << Y) for Y 15 might be 0.
11297 If the cast is widening, then 1 << Y should have unsigned type,
11298 otherwise if Y is number of bits in the signed shift type minus 1,
11299 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11300 31 might be 0xffffffff80000000. */
11301 if ((code == LT_EXPR || code == GE_EXPR)
11302 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11303 && CONVERT_EXPR_P (arg1)
11304 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11305 && (element_precision (TREE_TYPE (arg1))
11306 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11307 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11308 || (element_precision (TREE_TYPE (arg1))
11309 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11310 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11312 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11313 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11314 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11315 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11316 build_zero_cst (TREE_TYPE (arg0)));
11321 case UNORDERED_EXPR:
11329 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11331 tree targ0 = strip_float_extensions (arg0);
11332 tree targ1 = strip_float_extensions (arg1);
11333 tree newtype = TREE_TYPE (targ0);
11335 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11336 newtype = TREE_TYPE (targ1);
11338 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11339 return fold_build2_loc (loc, code, type,
11340 fold_convert_loc (loc, newtype, targ0),
11341 fold_convert_loc (loc, newtype, targ1));
11346 case COMPOUND_EXPR:
11347 /* When pedantic, a compound expression can be neither an lvalue
11348 nor an integer constant expression. */
11349 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11351 /* Don't let (0, 0) be null pointer constant. */
11352 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11353 : fold_convert_loc (loc, type, arg1);
11354 return pedantic_non_lvalue_loc (loc, tem);
11357 /* An ASSERT_EXPR should never be passed to fold_binary. */
11358 gcc_unreachable ();
11362 } /* switch (code) */
11365 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11366 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11370 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11372 switch (TREE_CODE (*tp))
11378 *walk_subtrees = 0;
11380 /* ... fall through ... */
11387 /* Return whether the sub-tree ST contains a label which is accessible from
11388 outside the sub-tree. */
11391 contains_label_p (tree st)
11394 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11397 /* Fold a ternary expression of code CODE and type TYPE with operands
11398 OP0, OP1, and OP2. Return the folded expression if folding is
11399 successful. Otherwise, return NULL_TREE. */
11402 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11403 tree op0, tree op1, tree op2)
11406 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11407 enum tree_code_class kind = TREE_CODE_CLASS (code);
11409 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11410 && TREE_CODE_LENGTH (code) == 3);
11412 /* If this is a commutative operation, and OP0 is a constant, move it
11413 to OP1 to reduce the number of tests below. */
11414 if (commutative_ternary_tree_code (code)
11415 && tree_swap_operands_p (op0, op1, true))
11416 return fold_build3_loc (loc, code, type, op1, op0, op2);
11418 tem = generic_simplify (loc, code, type, op0, op1, op2);
11422 /* Strip any conversions that don't change the mode. This is safe
11423 for every expression, except for a comparison expression because
11424 its signedness is derived from its operands. So, in the latter
11425 case, only strip conversions that don't change the signedness.
11427 Note that this is done as an internal manipulation within the
11428 constant folder, in order to find the simplest representation of
11429 the arguments so that their form can be studied. In any cases,
11430 the appropriate type conversions should be put back in the tree
11431 that will get out of the constant folder. */
11452 case COMPONENT_REF:
11453 if (TREE_CODE (arg0) == CONSTRUCTOR
11454 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11456 unsigned HOST_WIDE_INT idx;
11458 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11465 case VEC_COND_EXPR:
11466 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11467 so all simple results must be passed through pedantic_non_lvalue. */
11468 if (TREE_CODE (arg0) == INTEGER_CST)
11470 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11471 tem = integer_zerop (arg0) ? op2 : op1;
11472 /* Only optimize constant conditions when the selected branch
11473 has the same type as the COND_EXPR. This avoids optimizing
11474 away "c ? x : throw", where the throw has a void type.
11475 Avoid throwing away that operand which contains label. */
11476 if ((!TREE_SIDE_EFFECTS (unused_op)
11477 || !contains_label_p (unused_op))
11478 && (! VOID_TYPE_P (TREE_TYPE (tem))
11479 || VOID_TYPE_P (type)))
11480 return pedantic_non_lvalue_loc (loc, tem);
11483 else if (TREE_CODE (arg0) == VECTOR_CST)
11485 if ((TREE_CODE (arg1) == VECTOR_CST
11486 || TREE_CODE (arg1) == CONSTRUCTOR)
11487 && (TREE_CODE (arg2) == VECTOR_CST
11488 || TREE_CODE (arg2) == CONSTRUCTOR))
11490 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11491 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11492 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11493 for (i = 0; i < nelts; i++)
11495 tree val = VECTOR_CST_ELT (arg0, i);
11496 if (integer_all_onesp (val))
11498 else if (integer_zerop (val))
11499 sel[i] = nelts + i;
11500 else /* Currently unreachable. */
11503 tree t = fold_vec_perm (type, arg1, arg2, sel);
11504 if (t != NULL_TREE)
11509 /* If we have A op B ? A : C, we may be able to convert this to a
11510 simpler expression, depending on the operation and the values
11511 of B and C. Signed zeros prevent all of these transformations,
11512 for reasons given above each one.
11514 Also try swapping the arguments and inverting the conditional. */
11515 if (COMPARISON_CLASS_P (arg0)
11516 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11517 arg1, TREE_OPERAND (arg0, 1))
11518 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11520 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11525 if (COMPARISON_CLASS_P (arg0)
11526 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11528 TREE_OPERAND (arg0, 1))
11529 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11531 location_t loc0 = expr_location_or (arg0, loc);
11532 tem = fold_invert_truthvalue (loc0, arg0);
11533 if (tem && COMPARISON_CLASS_P (tem))
11535 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11541 /* If the second operand is simpler than the third, swap them
11542 since that produces better jump optimization results. */
11543 if (truth_value_p (TREE_CODE (arg0))
11544 && tree_swap_operands_p (op1, op2, false))
11546 location_t loc0 = expr_location_or (arg0, loc);
11547 /* See if this can be inverted. If it can't, possibly because
11548 it was a floating-point inequality comparison, don't do
11550 tem = fold_invert_truthvalue (loc0, arg0);
11552 return fold_build3_loc (loc, code, type, tem, op2, op1);
11555 /* Convert A ? 1 : 0 to simply A. */
11556 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11557 : (integer_onep (op1)
11558 && !VECTOR_TYPE_P (type)))
11559 && integer_zerop (op2)
11560 /* If we try to convert OP0 to our type, the
11561 call to fold will try to move the conversion inside
11562 a COND, which will recurse. In that case, the COND_EXPR
11563 is probably the best choice, so leave it alone. */
11564 && type == TREE_TYPE (arg0))
11565 return pedantic_non_lvalue_loc (loc, arg0);
11567 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11568 over COND_EXPR in cases such as floating point comparisons. */
11569 if (integer_zerop (op1)
11570 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11571 : (integer_onep (op2)
11572 && !VECTOR_TYPE_P (type)))
11573 && truth_value_p (TREE_CODE (arg0)))
11574 return pedantic_non_lvalue_loc (loc,
11575 fold_convert_loc (loc, type,
11576 invert_truthvalue_loc (loc,
11579 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11580 if (TREE_CODE (arg0) == LT_EXPR
11581 && integer_zerop (TREE_OPERAND (arg0, 1))
11582 && integer_zerop (op2)
11583 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11585 /* sign_bit_p looks through both zero and sign extensions,
11586 but for this optimization only sign extensions are
11588 tree tem2 = TREE_OPERAND (arg0, 0);
11589 while (tem != tem2)
11591 if (TREE_CODE (tem2) != NOP_EXPR
11592 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11597 tem2 = TREE_OPERAND (tem2, 0);
11599 /* sign_bit_p only checks ARG1 bits within A's precision.
11600 If <sign bit of A> has wider type than A, bits outside
11601 of A's precision in <sign bit of A> need to be checked.
11602 If they are all 0, this optimization needs to be done
11603 in unsigned A's type, if they are all 1 in signed A's type,
11604 otherwise this can't be done. */
11606 && TYPE_PRECISION (TREE_TYPE (tem))
11607 < TYPE_PRECISION (TREE_TYPE (arg1))
11608 && TYPE_PRECISION (TREE_TYPE (tem))
11609 < TYPE_PRECISION (type))
11611 int inner_width, outer_width;
11614 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11615 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11616 if (outer_width > TYPE_PRECISION (type))
11617 outer_width = TYPE_PRECISION (type);
11619 wide_int mask = wi::shifted_mask
11620 (inner_width, outer_width - inner_width, false,
11621 TYPE_PRECISION (TREE_TYPE (arg1)));
11623 wide_int common = mask & arg1;
11624 if (common == mask)
11626 tem_type = signed_type_for (TREE_TYPE (tem));
11627 tem = fold_convert_loc (loc, tem_type, tem);
11629 else if (common == 0)
11631 tem_type = unsigned_type_for (TREE_TYPE (tem));
11632 tem = fold_convert_loc (loc, tem_type, tem);
11640 fold_convert_loc (loc, type,
11641 fold_build2_loc (loc, BIT_AND_EXPR,
11642 TREE_TYPE (tem), tem,
11643 fold_convert_loc (loc,
11648 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11649 already handled above. */
11650 if (TREE_CODE (arg0) == BIT_AND_EXPR
11651 && integer_onep (TREE_OPERAND (arg0, 1))
11652 && integer_zerop (op2)
11653 && integer_pow2p (arg1))
11655 tree tem = TREE_OPERAND (arg0, 0);
11657 if (TREE_CODE (tem) == RSHIFT_EXPR
11658 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11659 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11660 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11661 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11662 TREE_OPERAND (tem, 0), arg1);
11665 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11666 is probably obsolete because the first operand should be a
11667 truth value (that's why we have the two cases above), but let's
11668 leave it in until we can confirm this for all front-ends. */
11669 if (integer_zerop (op2)
11670 && TREE_CODE (arg0) == NE_EXPR
11671 && integer_zerop (TREE_OPERAND (arg0, 1))
11672 && integer_pow2p (arg1)
11673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11674 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11675 arg1, OEP_ONLY_CONST))
11676 return pedantic_non_lvalue_loc (loc,
11677 fold_convert_loc (loc, type,
11678 TREE_OPERAND (arg0, 0)));
11680 /* Disable the transformations below for vectors, since
11681 fold_binary_op_with_conditional_arg may undo them immediately,
11682 yielding an infinite loop. */
11683 if (code == VEC_COND_EXPR)
11686 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11687 if (integer_zerop (op2)
11688 && truth_value_p (TREE_CODE (arg0))
11689 && truth_value_p (TREE_CODE (arg1))
11690 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11691 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11692 : TRUTH_ANDIF_EXPR,
11693 type, fold_convert_loc (loc, type, arg0), arg1);
11695 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11696 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11697 && truth_value_p (TREE_CODE (arg0))
11698 && truth_value_p (TREE_CODE (arg1))
11699 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11701 location_t loc0 = expr_location_or (arg0, loc);
11702 /* Only perform transformation if ARG0 is easily inverted. */
11703 tem = fold_invert_truthvalue (loc0, arg0);
11705 return fold_build2_loc (loc, code == VEC_COND_EXPR
11708 type, fold_convert_loc (loc, type, tem),
11712 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11713 if (integer_zerop (arg1)
11714 && truth_value_p (TREE_CODE (arg0))
11715 && truth_value_p (TREE_CODE (op2))
11716 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11718 location_t loc0 = expr_location_or (arg0, loc);
11719 /* Only perform transformation if ARG0 is easily inverted. */
11720 tem = fold_invert_truthvalue (loc0, arg0);
11722 return fold_build2_loc (loc, code == VEC_COND_EXPR
11723 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11724 type, fold_convert_loc (loc, type, tem),
11728 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11729 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11730 && truth_value_p (TREE_CODE (arg0))
11731 && truth_value_p (TREE_CODE (op2))
11732 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11733 return fold_build2_loc (loc, code == VEC_COND_EXPR
11734 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11735 type, fold_convert_loc (loc, type, arg0), op2);
11740 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11741 of fold_ternary on them. */
11742 gcc_unreachable ();
11744 case BIT_FIELD_REF:
11745 if ((TREE_CODE (arg0) == VECTOR_CST
11746 || (TREE_CODE (arg0) == CONSTRUCTOR
11747 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11748 && (type == TREE_TYPE (TREE_TYPE (arg0))
11749 || (TREE_CODE (type) == VECTOR_TYPE
11750 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11752 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11753 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11754 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11755 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11758 && (idx % width) == 0
11759 && (n % width) == 0
11760 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11765 if (TREE_CODE (arg0) == VECTOR_CST)
11768 return VECTOR_CST_ELT (arg0, idx);
11770 tree *vals = XALLOCAVEC (tree, n);
11771 for (unsigned i = 0; i < n; ++i)
11772 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11773 return build_vector (type, vals);
11776 /* Constructor elements can be subvectors. */
11777 unsigned HOST_WIDE_INT k = 1;
11778 if (CONSTRUCTOR_NELTS (arg0) != 0)
11780 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11781 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11782 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11785 /* We keep an exact subset of the constructor elements. */
11786 if ((idx % k) == 0 && (n % k) == 0)
11788 if (CONSTRUCTOR_NELTS (arg0) == 0)
11789 return build_constructor (type, NULL);
11794 if (idx < CONSTRUCTOR_NELTS (arg0))
11795 return CONSTRUCTOR_ELT (arg0, idx)->value;
11796 return build_zero_cst (type);
11799 vec<constructor_elt, va_gc> *vals;
11800 vec_alloc (vals, n);
11801 for (unsigned i = 0;
11802 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11804 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11806 (arg0, idx + i)->value);
11807 return build_constructor (type, vals);
11809 /* The bitfield references a single constructor element. */
11810 else if (idx + n <= (idx / k + 1) * k)
11812 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11813 return build_zero_cst (type);
11815 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11817 return fold_build3_loc (loc, code, type,
11818 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11819 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11824 /* A bit-field-ref that referenced the full argument can be stripped. */
11825 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11826 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11827 && integer_zerop (op2))
11828 return fold_convert_loc (loc, type, arg0);
11830 /* On constants we can use native encode/interpret to constant
11831 fold (nearly) all BIT_FIELD_REFs. */
11832 if (CONSTANT_CLASS_P (arg0)
11833 && can_native_interpret_type_p (type)
11834 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11835 /* This limitation should not be necessary, we just need to
11836 round this up to mode size. */
11837 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11838 /* Need bit-shifting of the buffer to relax the following. */
11839 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11841 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11842 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11843 unsigned HOST_WIDE_INT clen;
11844 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11845 /* ??? We cannot tell native_encode_expr to start at
11846 some random byte only. So limit us to a reasonable amount
11850 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11851 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11853 && len * BITS_PER_UNIT >= bitpos + bitsize)
11855 tree v = native_interpret_expr (type,
11856 b + bitpos / BITS_PER_UNIT,
11857 bitsize / BITS_PER_UNIT);
11867 /* For integers we can decompose the FMA if possible. */
11868 if (TREE_CODE (arg0) == INTEGER_CST
11869 && TREE_CODE (arg1) == INTEGER_CST)
11870 return fold_build2_loc (loc, PLUS_EXPR, type,
11871 const_binop (MULT_EXPR, arg0, arg1), arg2);
11872 if (integer_zerop (arg2))
11873 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11875 return fold_fma (loc, type, arg0, arg1, arg2);
11877 case VEC_PERM_EXPR:
11878 if (TREE_CODE (arg2) == VECTOR_CST)
11880 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11881 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11882 unsigned char *sel2 = sel + nelts;
11883 bool need_mask_canon = false;
11884 bool need_mask_canon2 = false;
11885 bool all_in_vec0 = true;
11886 bool all_in_vec1 = true;
11887 bool maybe_identity = true;
11888 bool single_arg = (op0 == op1);
11889 bool changed = false;
11891 mask2 = 2 * nelts - 1;
11892 mask = single_arg ? (nelts - 1) : mask2;
11893 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11894 for (i = 0; i < nelts; i++)
11896 tree val = VECTOR_CST_ELT (arg2, i);
11897 if (TREE_CODE (val) != INTEGER_CST)
11900 /* Make sure that the perm value is in an acceptable
11903 need_mask_canon |= wi::gtu_p (t, mask);
11904 need_mask_canon2 |= wi::gtu_p (t, mask2);
11905 sel[i] = t.to_uhwi () & mask;
11906 sel2[i] = t.to_uhwi () & mask2;
11908 if (sel[i] < nelts)
11909 all_in_vec1 = false;
11911 all_in_vec0 = false;
11913 if ((sel[i] & (nelts-1)) != i)
11914 maybe_identity = false;
11917 if (maybe_identity)
11927 else if (all_in_vec1)
11930 for (i = 0; i < nelts; i++)
11932 need_mask_canon = true;
11935 if ((TREE_CODE (op0) == VECTOR_CST
11936 || TREE_CODE (op0) == CONSTRUCTOR)
11937 && (TREE_CODE (op1) == VECTOR_CST
11938 || TREE_CODE (op1) == CONSTRUCTOR))
11940 tree t = fold_vec_perm (type, op0, op1, sel);
11941 if (t != NULL_TREE)
11945 if (op0 == op1 && !single_arg)
11948 /* Some targets are deficient and fail to expand a single
11949 argument permutation while still allowing an equivalent
11950 2-argument version. */
11951 if (need_mask_canon && arg2 == op2
11952 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11953 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11955 need_mask_canon = need_mask_canon2;
11959 if (need_mask_canon && arg2 == op2)
11961 tree *tsel = XALLOCAVEC (tree, nelts);
11962 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11963 for (i = 0; i < nelts; i++)
11964 tsel[i] = build_int_cst (eltype, sel[i]);
11965 op2 = build_vector (TREE_TYPE (arg2), tsel);
11970 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11976 } /* switch (code) */
11979 /* Perform constant folding and related simplification of EXPR.
11980 The related simplifications include x*1 => x, x*0 => 0, etc.,
11981 and application of the associative law.
11982 NOP_EXPR conversions may be removed freely (as long as we
11983 are careful not to change the type of the overall expression).
11984 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11985 but we can constant-fold them if they have constant operands. */
11987 #ifdef ENABLE_FOLD_CHECKING
11988 # define fold(x) fold_1 (x)
11989 static tree fold_1 (tree);
11995 const tree t = expr;
11996 enum tree_code code = TREE_CODE (t);
11997 enum tree_code_class kind = TREE_CODE_CLASS (code);
11999 location_t loc = EXPR_LOCATION (expr);
12001 /* Return right away if a constant. */
12002 if (kind == tcc_constant)
12005 /* CALL_EXPR-like objects with variable numbers of operands are
12006 treated specially. */
12007 if (kind == tcc_vl_exp)
12009 if (code == CALL_EXPR)
12011 tem = fold_call_expr (loc, expr, false);
12012 return tem ? tem : expr;
12017 if (IS_EXPR_CODE_CLASS (kind))
12019 tree type = TREE_TYPE (t);
12020 tree op0, op1, op2;
12022 switch (TREE_CODE_LENGTH (code))
12025 op0 = TREE_OPERAND (t, 0);
12026 tem = fold_unary_loc (loc, code, type, op0);
12027 return tem ? tem : expr;
12029 op0 = TREE_OPERAND (t, 0);
12030 op1 = TREE_OPERAND (t, 1);
12031 tem = fold_binary_loc (loc, code, type, op0, op1);
12032 return tem ? tem : expr;
12034 op0 = TREE_OPERAND (t, 0);
12035 op1 = TREE_OPERAND (t, 1);
12036 op2 = TREE_OPERAND (t, 2);
12037 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12038 return tem ? tem : expr;
12048 tree op0 = TREE_OPERAND (t, 0);
12049 tree op1 = TREE_OPERAND (t, 1);
12051 if (TREE_CODE (op1) == INTEGER_CST
12052 && TREE_CODE (op0) == CONSTRUCTOR
12053 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12055 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12056 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12057 unsigned HOST_WIDE_INT begin = 0;
12059 /* Find a matching index by means of a binary search. */
12060 while (begin != end)
12062 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12063 tree index = (*elts)[middle].index;
12065 if (TREE_CODE (index) == INTEGER_CST
12066 && tree_int_cst_lt (index, op1))
12067 begin = middle + 1;
12068 else if (TREE_CODE (index) == INTEGER_CST
12069 && tree_int_cst_lt (op1, index))
12071 else if (TREE_CODE (index) == RANGE_EXPR
12072 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12073 begin = middle + 1;
12074 else if (TREE_CODE (index) == RANGE_EXPR
12075 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
12078 return (*elts)[middle].value;
12085 /* Return a VECTOR_CST if possible. */
12088 tree type = TREE_TYPE (t);
12089 if (TREE_CODE (type) != VECTOR_TYPE)
12092 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
12093 unsigned HOST_WIDE_INT idx, pos = 0;
12096 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12098 if (!CONSTANT_CLASS_P (value))
12100 if (TREE_CODE (value) == VECTOR_CST)
12102 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12103 vec[pos++] = VECTOR_CST_ELT (value, i);
12106 vec[pos++] = value;
12108 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12109 vec[pos] = build_zero_cst (TREE_TYPE (type));
12111 return build_vector (type, vec);
12115 return fold (DECL_INITIAL (t));
12119 } /* switch (code) */
12122 #ifdef ENABLE_FOLD_CHECKING
12125 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12126 hash_table<nofree_ptr_hash<const tree_node> > *);
12127 static void fold_check_failed (const_tree, const_tree);
12128 void print_fold_checksum (const_tree);
12130 /* When --enable-checking=fold, compute a digest of expr before
12131 and after actual fold call to see if fold did not accidentally
12132 change original expr. */
12138 struct md5_ctx ctx;
12139 unsigned char checksum_before[16], checksum_after[16];
12140 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12142 md5_init_ctx (&ctx);
12143 fold_checksum_tree (expr, &ctx, &ht);
12144 md5_finish_ctx (&ctx, checksum_before);
12147 ret = fold_1 (expr);
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (expr, &ctx, &ht);
12151 md5_finish_ctx (&ctx, checksum_after);
12153 if (memcmp (checksum_before, checksum_after, 16))
12154 fold_check_failed (expr, ret);
12160 print_fold_checksum (const_tree expr)
12162 struct md5_ctx ctx;
12163 unsigned char checksum[16], cnt;
12164 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12166 md5_init_ctx (&ctx);
12167 fold_checksum_tree (expr, &ctx, &ht);
12168 md5_finish_ctx (&ctx, checksum);
12169 for (cnt = 0; cnt < 16; ++cnt)
12170 fprintf (stderr, "%02x", checksum[cnt]);
12171 putc ('\n', stderr);
12175 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12177 internal_error ("fold check: original tree changed by fold");
12181 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12182 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12184 const tree_node **slot;
12185 enum tree_code code;
12186 union tree_node buf;
12192 slot = ht->find_slot (expr, INSERT);
12196 code = TREE_CODE (expr);
12197 if (TREE_CODE_CLASS (code) == tcc_declaration
12198 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12200 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12201 memcpy ((char *) &buf, expr, tree_size (expr));
12202 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12203 buf.decl_with_vis.symtab_node = NULL;
12204 expr = (tree) &buf;
12206 else if (TREE_CODE_CLASS (code) == tcc_type
12207 && (TYPE_POINTER_TO (expr)
12208 || TYPE_REFERENCE_TO (expr)
12209 || TYPE_CACHED_VALUES_P (expr)
12210 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12211 || TYPE_NEXT_VARIANT (expr)))
12213 /* Allow these fields to be modified. */
12215 memcpy ((char *) &buf, expr, tree_size (expr));
12216 expr = tmp = (tree) &buf;
12217 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12218 TYPE_POINTER_TO (tmp) = NULL;
12219 TYPE_REFERENCE_TO (tmp) = NULL;
12220 TYPE_NEXT_VARIANT (tmp) = NULL;
12221 if (TYPE_CACHED_VALUES_P (tmp))
12223 TYPE_CACHED_VALUES_P (tmp) = 0;
12224 TYPE_CACHED_VALUES (tmp) = NULL;
12227 md5_process_bytes (expr, tree_size (expr), ctx);
12228 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12229 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12230 if (TREE_CODE_CLASS (code) != tcc_type
12231 && TREE_CODE_CLASS (code) != tcc_declaration
12232 && code != TREE_LIST
12233 && code != SSA_NAME
12234 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12235 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12236 switch (TREE_CODE_CLASS (code))
12242 md5_process_bytes (TREE_STRING_POINTER (expr),
12243 TREE_STRING_LENGTH (expr), ctx);
12246 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12247 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12250 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12251 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12257 case tcc_exceptional:
12261 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12262 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12263 expr = TREE_CHAIN (expr);
12264 goto recursive_label;
12267 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12268 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12274 case tcc_expression:
12275 case tcc_reference:
12276 case tcc_comparison:
12279 case tcc_statement:
12281 len = TREE_OPERAND_LENGTH (expr);
12282 for (i = 0; i < len; ++i)
12283 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12285 case tcc_declaration:
12286 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12287 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12288 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12290 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12291 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12292 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12293 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12294 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12297 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12299 if (TREE_CODE (expr) == FUNCTION_DECL)
12301 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12302 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12304 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12308 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12309 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12310 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12311 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12312 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12313 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12314 if (INTEGRAL_TYPE_P (expr)
12315 || SCALAR_FLOAT_TYPE_P (expr))
12317 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12318 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12320 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12321 if (TREE_CODE (expr) == RECORD_TYPE
12322 || TREE_CODE (expr) == UNION_TYPE
12323 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12324 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12325 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12332 /* Helper function for outputting the checksum of a tree T. When
12333 debugging with gdb, you can "define mynext" to be "next" followed
12334 by "call debug_fold_checksum (op0)", then just trace down till the
12337 DEBUG_FUNCTION void
12338 debug_fold_checksum (const_tree t)
12341 unsigned char checksum[16];
12342 struct md5_ctx ctx;
12343 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12345 md5_init_ctx (&ctx);
12346 fold_checksum_tree (t, &ctx, &ht);
12347 md5_finish_ctx (&ctx, checksum);
12350 for (i = 0; i < 16; i++)
12351 fprintf (stderr, "%d ", checksum[i]);
12353 fprintf (stderr, "\n");
12358 /* Fold a unary tree expression with code CODE of type TYPE with an
12359 operand OP0. LOC is the location of the resulting expression.
12360 Return a folded expression if successful. Otherwise, return a tree
12361 expression with code CODE of type TYPE with an operand OP0. */
12364 fold_build1_stat_loc (location_t loc,
12365 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12368 #ifdef ENABLE_FOLD_CHECKING
12369 unsigned char checksum_before[16], checksum_after[16];
12370 struct md5_ctx ctx;
12371 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12373 md5_init_ctx (&ctx);
12374 fold_checksum_tree (op0, &ctx, &ht);
12375 md5_finish_ctx (&ctx, checksum_before);
12379 tem = fold_unary_loc (loc, code, type, op0);
12381 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12383 #ifdef ENABLE_FOLD_CHECKING
12384 md5_init_ctx (&ctx);
12385 fold_checksum_tree (op0, &ctx, &ht);
12386 md5_finish_ctx (&ctx, checksum_after);
12388 if (memcmp (checksum_before, checksum_after, 16))
12389 fold_check_failed (op0, tem);
12394 /* Fold a binary tree expression with code CODE of type TYPE with
12395 operands OP0 and OP1. LOC is the location of the resulting
12396 expression. Return a folded expression if successful. Otherwise,
12397 return a tree expression with code CODE of type TYPE with operands
12401 fold_build2_stat_loc (location_t loc,
12402 enum tree_code code, tree type, tree op0, tree op1
12406 #ifdef ENABLE_FOLD_CHECKING
12407 unsigned char checksum_before_op0[16],
12408 checksum_before_op1[16],
12409 checksum_after_op0[16],
12410 checksum_after_op1[16];
12411 struct md5_ctx ctx;
12412 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12414 md5_init_ctx (&ctx);
12415 fold_checksum_tree (op0, &ctx, &ht);
12416 md5_finish_ctx (&ctx, checksum_before_op0);
12419 md5_init_ctx (&ctx);
12420 fold_checksum_tree (op1, &ctx, &ht);
12421 md5_finish_ctx (&ctx, checksum_before_op1);
12425 tem = fold_binary_loc (loc, code, type, op0, op1);
12427 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12429 #ifdef ENABLE_FOLD_CHECKING
12430 md5_init_ctx (&ctx);
12431 fold_checksum_tree (op0, &ctx, &ht);
12432 md5_finish_ctx (&ctx, checksum_after_op0);
12435 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12436 fold_check_failed (op0, tem);
12438 md5_init_ctx (&ctx);
12439 fold_checksum_tree (op1, &ctx, &ht);
12440 md5_finish_ctx (&ctx, checksum_after_op1);
12442 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12443 fold_check_failed (op1, tem);
12448 /* Fold a ternary tree expression with code CODE of type TYPE with
12449 operands OP0, OP1, and OP2. Return a folded expression if
12450 successful. Otherwise, return a tree expression with code CODE of
12451 type TYPE with operands OP0, OP1, and OP2. */
12454 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12455 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12458 #ifdef ENABLE_FOLD_CHECKING
12459 unsigned char checksum_before_op0[16],
12460 checksum_before_op1[16],
12461 checksum_before_op2[16],
12462 checksum_after_op0[16],
12463 checksum_after_op1[16],
12464 checksum_after_op2[16];
12465 struct md5_ctx ctx;
12466 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12468 md5_init_ctx (&ctx);
12469 fold_checksum_tree (op0, &ctx, &ht);
12470 md5_finish_ctx (&ctx, checksum_before_op0);
12473 md5_init_ctx (&ctx);
12474 fold_checksum_tree (op1, &ctx, &ht);
12475 md5_finish_ctx (&ctx, checksum_before_op1);
12478 md5_init_ctx (&ctx);
12479 fold_checksum_tree (op2, &ctx, &ht);
12480 md5_finish_ctx (&ctx, checksum_before_op2);
12484 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12485 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12487 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12489 #ifdef ENABLE_FOLD_CHECKING
12490 md5_init_ctx (&ctx);
12491 fold_checksum_tree (op0, &ctx, &ht);
12492 md5_finish_ctx (&ctx, checksum_after_op0);
12495 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12496 fold_check_failed (op0, tem);
12498 md5_init_ctx (&ctx);
12499 fold_checksum_tree (op1, &ctx, &ht);
12500 md5_finish_ctx (&ctx, checksum_after_op1);
12503 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12504 fold_check_failed (op1, tem);
12506 md5_init_ctx (&ctx);
12507 fold_checksum_tree (op2, &ctx, &ht);
12508 md5_finish_ctx (&ctx, checksum_after_op2);
12510 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12511 fold_check_failed (op2, tem);
12516 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12517 arguments in ARGARRAY, and a null static chain.
12518 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12519 of type TYPE from the given operands as constructed by build_call_array. */
12522 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12523 int nargs, tree *argarray)
12526 #ifdef ENABLE_FOLD_CHECKING
12527 unsigned char checksum_before_fn[16],
12528 checksum_before_arglist[16],
12529 checksum_after_fn[16],
12530 checksum_after_arglist[16];
12531 struct md5_ctx ctx;
12532 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12535 md5_init_ctx (&ctx);
12536 fold_checksum_tree (fn, &ctx, &ht);
12537 md5_finish_ctx (&ctx, checksum_before_fn);
12540 md5_init_ctx (&ctx);
12541 for (i = 0; i < nargs; i++)
12542 fold_checksum_tree (argarray[i], &ctx, &ht);
12543 md5_finish_ctx (&ctx, checksum_before_arglist);
12547 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12549 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12551 #ifdef ENABLE_FOLD_CHECKING
12552 md5_init_ctx (&ctx);
12553 fold_checksum_tree (fn, &ctx, &ht);
12554 md5_finish_ctx (&ctx, checksum_after_fn);
12557 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12558 fold_check_failed (fn, tem);
12560 md5_init_ctx (&ctx);
12561 for (i = 0; i < nargs; i++)
12562 fold_checksum_tree (argarray[i], &ctx, &ht);
12563 md5_finish_ctx (&ctx, checksum_after_arglist);
12565 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12566 fold_check_failed (NULL_TREE, tem);
12571 /* Perform constant folding and related simplification of initializer
12572 expression EXPR. These behave identically to "fold_buildN" but ignore
12573 potential run-time traps and exceptions that fold must preserve. */
12575 #define START_FOLD_INIT \
12576 int saved_signaling_nans = flag_signaling_nans;\
12577 int saved_trapping_math = flag_trapping_math;\
12578 int saved_rounding_math = flag_rounding_math;\
12579 int saved_trapv = flag_trapv;\
12580 int saved_folding_initializer = folding_initializer;\
12581 flag_signaling_nans = 0;\
12582 flag_trapping_math = 0;\
12583 flag_rounding_math = 0;\
12585 folding_initializer = 1;
12587 #define END_FOLD_INIT \
12588 flag_signaling_nans = saved_signaling_nans;\
12589 flag_trapping_math = saved_trapping_math;\
12590 flag_rounding_math = saved_rounding_math;\
12591 flag_trapv = saved_trapv;\
12592 folding_initializer = saved_folding_initializer;
12595 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12596 tree type, tree op)
12601 result = fold_build1_loc (loc, code, type, op);
12608 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12609 tree type, tree op0, tree op1)
12614 result = fold_build2_loc (loc, code, type, op0, op1);
12621 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12622 int nargs, tree *argarray)
12627 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12633 #undef START_FOLD_INIT
12634 #undef END_FOLD_INIT
12636 /* Determine if first argument is a multiple of second argument. Return 0 if
12637 it is not, or we cannot easily determined it to be.
12639 An example of the sort of thing we care about (at this point; this routine
12640 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12641 fold cases do now) is discovering that
12643 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12649 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12651 This code also handles discovering that
12653 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12655 is a multiple of 8 so we don't have to worry about dealing with a
12656 possible remainder.
12658 Note that we *look* inside a SAVE_EXPR only to determine how it was
12659 calculated; it is not safe for fold to do much of anything else with the
12660 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12661 at run time. For example, the latter example above *cannot* be implemented
12662 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12663 evaluation time of the original SAVE_EXPR is not necessarily the same at
12664 the time the new expression is evaluated. The only optimization of this
12665 sort that would be valid is changing
12667 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12671 SAVE_EXPR (I) * SAVE_EXPR (J)
12673 (where the same SAVE_EXPR (J) is used in the original and the
12674 transformed version). */
12677 multiple_of_p (tree type, const_tree top, const_tree bottom)
12679 if (operand_equal_p (top, bottom, 0))
12682 if (TREE_CODE (type) != INTEGER_TYPE)
12685 switch (TREE_CODE (top))
12688 /* Bitwise and provides a power of two multiple. If the mask is
12689 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12690 if (!integer_pow2p (bottom))
12695 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12696 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12700 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12701 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12704 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12708 op1 = TREE_OPERAND (top, 1);
12709 /* const_binop may not detect overflow correctly,
12710 so check for it explicitly here. */
12711 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12712 && 0 != (t1 = fold_convert (type,
12713 const_binop (LSHIFT_EXPR,
12716 && !TREE_OVERFLOW (t1))
12717 return multiple_of_p (type, t1, bottom);
12722 /* Can't handle conversions from non-integral or wider integral type. */
12723 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12724 || (TYPE_PRECISION (type)
12725 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12728 /* .. fall through ... */
12731 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12734 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12735 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12738 if (TREE_CODE (bottom) != INTEGER_CST
12739 || integer_zerop (bottom)
12740 || (TYPE_UNSIGNED (type)
12741 && (tree_int_cst_sgn (top) < 0
12742 || tree_int_cst_sgn (bottom) < 0)))
12744 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12752 /* Return true if CODE or TYPE is known to be non-negative. */
12755 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12757 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12758 && truth_value_p (code))
12759 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12760 have a signed:1 type (where the value is -1 and 0). */
12765 /* Return true if (CODE OP0) is known to be non-negative. If the return
12766 value is based on the assumption that signed overflow is undefined,
12767 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12768 *STRICT_OVERFLOW_P. */
12771 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12772 bool *strict_overflow_p)
12774 if (TYPE_UNSIGNED (type))
12780 /* We can't return 1 if flag_wrapv is set because
12781 ABS_EXPR<INT_MIN> = INT_MIN. */
12782 if (!ANY_INTEGRAL_TYPE_P (type))
12784 if (TYPE_OVERFLOW_UNDEFINED (type))
12786 *strict_overflow_p = true;
12791 case NON_LVALUE_EXPR:
12793 case FIX_TRUNC_EXPR:
12794 return tree_expr_nonnegative_warnv_p (op0,
12795 strict_overflow_p);
12799 tree inner_type = TREE_TYPE (op0);
12800 tree outer_type = type;
12802 if (TREE_CODE (outer_type) == REAL_TYPE)
12804 if (TREE_CODE (inner_type) == REAL_TYPE)
12805 return tree_expr_nonnegative_warnv_p (op0,
12806 strict_overflow_p);
12807 if (INTEGRAL_TYPE_P (inner_type))
12809 if (TYPE_UNSIGNED (inner_type))
12811 return tree_expr_nonnegative_warnv_p (op0,
12812 strict_overflow_p);
12815 else if (INTEGRAL_TYPE_P (outer_type))
12817 if (TREE_CODE (inner_type) == REAL_TYPE)
12818 return tree_expr_nonnegative_warnv_p (op0,
12819 strict_overflow_p);
12820 if (INTEGRAL_TYPE_P (inner_type))
12821 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12822 && TYPE_UNSIGNED (inner_type);
12828 return tree_simple_nonnegative_warnv_p (code, type);
12831 /* We don't know sign of `t', so be conservative and return false. */
12835 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12836 value is based on the assumption that signed overflow is undefined,
12837 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12838 *STRICT_OVERFLOW_P. */
12841 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12842 tree op1, bool *strict_overflow_p)
12844 if (TYPE_UNSIGNED (type))
12849 case POINTER_PLUS_EXPR:
12851 if (FLOAT_TYPE_P (type))
12852 return (tree_expr_nonnegative_warnv_p (op0,
12854 && tree_expr_nonnegative_warnv_p (op1,
12855 strict_overflow_p));
12857 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12858 both unsigned and at least 2 bits shorter than the result. */
12859 if (TREE_CODE (type) == INTEGER_TYPE
12860 && TREE_CODE (op0) == NOP_EXPR
12861 && TREE_CODE (op1) == NOP_EXPR)
12863 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12864 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12865 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12866 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12868 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12869 TYPE_PRECISION (inner2)) + 1;
12870 return prec < TYPE_PRECISION (type);
12876 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12878 /* x * x is always non-negative for floating point x
12879 or without overflow. */
12880 if (operand_equal_p (op0, op1, 0)
12881 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
12882 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
12884 if (ANY_INTEGRAL_TYPE_P (type)
12885 && TYPE_OVERFLOW_UNDEFINED (type))
12886 *strict_overflow_p = true;
12891 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12892 both unsigned and their total bits is shorter than the result. */
12893 if (TREE_CODE (type) == INTEGER_TYPE
12894 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12895 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12897 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12898 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12900 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12901 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12904 bool unsigned0 = TYPE_UNSIGNED (inner0);
12905 bool unsigned1 = TYPE_UNSIGNED (inner1);
12907 if (TREE_CODE (op0) == INTEGER_CST)
12908 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12910 if (TREE_CODE (op1) == INTEGER_CST)
12911 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12913 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12914 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12916 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12917 ? tree_int_cst_min_precision (op0, UNSIGNED)
12918 : TYPE_PRECISION (inner0);
12920 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12921 ? tree_int_cst_min_precision (op1, UNSIGNED)
12922 : TYPE_PRECISION (inner1);
12924 return precision0 + precision1 < TYPE_PRECISION (type);
12931 return (tree_expr_nonnegative_warnv_p (op0,
12933 || tree_expr_nonnegative_warnv_p (op1,
12934 strict_overflow_p));
12940 case TRUNC_DIV_EXPR:
12941 case CEIL_DIV_EXPR:
12942 case FLOOR_DIV_EXPR:
12943 case ROUND_DIV_EXPR:
12944 return (tree_expr_nonnegative_warnv_p (op0,
12946 && tree_expr_nonnegative_warnv_p (op1,
12947 strict_overflow_p));
12949 case TRUNC_MOD_EXPR:
12950 case CEIL_MOD_EXPR:
12951 case FLOOR_MOD_EXPR:
12952 case ROUND_MOD_EXPR:
12953 return tree_expr_nonnegative_warnv_p (op0,
12954 strict_overflow_p);
12956 return tree_simple_nonnegative_warnv_p (code, type);
12959 /* We don't know sign of `t', so be conservative and return false. */
12963 /* Return true if T is known to be non-negative. If the return
12964 value is based on the assumption that signed overflow is undefined,
12965 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12966 *STRICT_OVERFLOW_P. */
12969 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12971 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12974 switch (TREE_CODE (t))
12977 return tree_int_cst_sgn (t) >= 0;
12980 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12983 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12986 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12988 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12989 strict_overflow_p));
12991 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
12994 /* We don't know sign of `t', so be conservative and return false. */
12998 /* Return true if T is known to be non-negative. If the return
12999 value is based on the assumption that signed overflow is undefined,
13000 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13001 *STRICT_OVERFLOW_P. */
13004 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
13005 tree arg0, tree arg1, bool *strict_overflow_p)
13007 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13008 switch (DECL_FUNCTION_CODE (fndecl))
13010 CASE_FLT_FN (BUILT_IN_ACOS):
13011 CASE_FLT_FN (BUILT_IN_ACOSH):
13012 CASE_FLT_FN (BUILT_IN_CABS):
13013 CASE_FLT_FN (BUILT_IN_COSH):
13014 CASE_FLT_FN (BUILT_IN_ERFC):
13015 CASE_FLT_FN (BUILT_IN_EXP):
13016 CASE_FLT_FN (BUILT_IN_EXP10):
13017 CASE_FLT_FN (BUILT_IN_EXP2):
13018 CASE_FLT_FN (BUILT_IN_FABS):
13019 CASE_FLT_FN (BUILT_IN_FDIM):
13020 CASE_FLT_FN (BUILT_IN_HYPOT):
13021 CASE_FLT_FN (BUILT_IN_POW10):
13022 CASE_INT_FN (BUILT_IN_FFS):
13023 CASE_INT_FN (BUILT_IN_PARITY):
13024 CASE_INT_FN (BUILT_IN_POPCOUNT):
13025 CASE_INT_FN (BUILT_IN_CLZ):
13026 CASE_INT_FN (BUILT_IN_CLRSB):
13027 case BUILT_IN_BSWAP32:
13028 case BUILT_IN_BSWAP64:
13032 CASE_FLT_FN (BUILT_IN_SQRT):
13033 /* sqrt(-0.0) is -0.0. */
13034 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13036 return tree_expr_nonnegative_warnv_p (arg0,
13037 strict_overflow_p);
13039 CASE_FLT_FN (BUILT_IN_ASINH):
13040 CASE_FLT_FN (BUILT_IN_ATAN):
13041 CASE_FLT_FN (BUILT_IN_ATANH):
13042 CASE_FLT_FN (BUILT_IN_CBRT):
13043 CASE_FLT_FN (BUILT_IN_CEIL):
13044 CASE_FLT_FN (BUILT_IN_ERF):
13045 CASE_FLT_FN (BUILT_IN_EXPM1):
13046 CASE_FLT_FN (BUILT_IN_FLOOR):
13047 CASE_FLT_FN (BUILT_IN_FMOD):
13048 CASE_FLT_FN (BUILT_IN_FREXP):
13049 CASE_FLT_FN (BUILT_IN_ICEIL):
13050 CASE_FLT_FN (BUILT_IN_IFLOOR):
13051 CASE_FLT_FN (BUILT_IN_IRINT):
13052 CASE_FLT_FN (BUILT_IN_IROUND):
13053 CASE_FLT_FN (BUILT_IN_LCEIL):
13054 CASE_FLT_FN (BUILT_IN_LDEXP):
13055 CASE_FLT_FN (BUILT_IN_LFLOOR):
13056 CASE_FLT_FN (BUILT_IN_LLCEIL):
13057 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13058 CASE_FLT_FN (BUILT_IN_LLRINT):
13059 CASE_FLT_FN (BUILT_IN_LLROUND):
13060 CASE_FLT_FN (BUILT_IN_LRINT):
13061 CASE_FLT_FN (BUILT_IN_LROUND):
13062 CASE_FLT_FN (BUILT_IN_MODF):
13063 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13064 CASE_FLT_FN (BUILT_IN_RINT):
13065 CASE_FLT_FN (BUILT_IN_ROUND):
13066 CASE_FLT_FN (BUILT_IN_SCALB):
13067 CASE_FLT_FN (BUILT_IN_SCALBLN):
13068 CASE_FLT_FN (BUILT_IN_SCALBN):
13069 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13070 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13071 CASE_FLT_FN (BUILT_IN_SINH):
13072 CASE_FLT_FN (BUILT_IN_TANH):
13073 CASE_FLT_FN (BUILT_IN_TRUNC):
13074 /* True if the 1st argument is nonnegative. */
13075 return tree_expr_nonnegative_warnv_p (arg0,
13076 strict_overflow_p);
13078 CASE_FLT_FN (BUILT_IN_FMAX):
13079 /* True if the 1st OR 2nd arguments are nonnegative. */
13080 return (tree_expr_nonnegative_warnv_p (arg0,
13082 || (tree_expr_nonnegative_warnv_p (arg1,
13083 strict_overflow_p)));
13085 CASE_FLT_FN (BUILT_IN_FMIN):
13086 /* True if the 1st AND 2nd arguments are nonnegative. */
13087 return (tree_expr_nonnegative_warnv_p (arg0,
13089 && (tree_expr_nonnegative_warnv_p (arg1,
13090 strict_overflow_p)));
13092 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13093 /* True if the 2nd argument is nonnegative. */
13094 return tree_expr_nonnegative_warnv_p (arg1,
13095 strict_overflow_p);
13097 CASE_FLT_FN (BUILT_IN_POWI):
13098 /* True if the 1st argument is nonnegative or the second
13099 argument is an even integer. */
13100 if (TREE_CODE (arg1) == INTEGER_CST
13101 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13103 return tree_expr_nonnegative_warnv_p (arg0,
13104 strict_overflow_p);
13106 CASE_FLT_FN (BUILT_IN_POW):
13107 /* True if the 1st argument is nonnegative or the second
13108 argument is an even integer valued real. */
13109 if (TREE_CODE (arg1) == REAL_CST)
13114 c = TREE_REAL_CST (arg1);
13115 n = real_to_integer (&c);
13118 REAL_VALUE_TYPE cint;
13119 real_from_integer (&cint, VOIDmode, n, SIGNED);
13120 if (real_identical (&c, &cint))
13124 return tree_expr_nonnegative_warnv_p (arg0,
13125 strict_overflow_p);
13130 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
13134 /* Return true if T is known to be non-negative. If the return
13135 value is based on the assumption that signed overflow is undefined,
13136 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13137 *STRICT_OVERFLOW_P. */
13140 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13142 enum tree_code code = TREE_CODE (t);
13143 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13150 tree temp = TARGET_EXPR_SLOT (t);
13151 t = TARGET_EXPR_INITIAL (t);
13153 /* If the initializer is non-void, then it's a normal expression
13154 that will be assigned to the slot. */
13155 if (!VOID_TYPE_P (t))
13156 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13158 /* Otherwise, the initializer sets the slot in some way. One common
13159 way is an assignment statement at the end of the initializer. */
13162 if (TREE_CODE (t) == BIND_EXPR)
13163 t = expr_last (BIND_EXPR_BODY (t));
13164 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13165 || TREE_CODE (t) == TRY_CATCH_EXPR)
13166 t = expr_last (TREE_OPERAND (t, 0));
13167 else if (TREE_CODE (t) == STATEMENT_LIST)
13172 if (TREE_CODE (t) == MODIFY_EXPR
13173 && TREE_OPERAND (t, 0) == temp)
13174 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13175 strict_overflow_p);
13182 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13183 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13185 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13186 get_callee_fndecl (t),
13189 strict_overflow_p);
13191 case COMPOUND_EXPR:
13193 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13194 strict_overflow_p);
13196 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13197 strict_overflow_p);
13199 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13200 strict_overflow_p);
13203 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13207 /* We don't know sign of `t', so be conservative and return false. */
13211 /* Return true if T is known to be non-negative. If the return
13212 value is based on the assumption that signed overflow is undefined,
13213 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13214 *STRICT_OVERFLOW_P. */
13217 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13219 enum tree_code code;
13220 if (t == error_mark_node)
13223 code = TREE_CODE (t);
13224 switch (TREE_CODE_CLASS (code))
13227 case tcc_comparison:
13228 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13230 TREE_OPERAND (t, 0),
13231 TREE_OPERAND (t, 1),
13232 strict_overflow_p);
13235 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13237 TREE_OPERAND (t, 0),
13238 strict_overflow_p);
13241 case tcc_declaration:
13242 case tcc_reference:
13243 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
13251 case TRUTH_AND_EXPR:
13252 case TRUTH_OR_EXPR:
13253 case TRUTH_XOR_EXPR:
13254 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13256 TREE_OPERAND (t, 0),
13257 TREE_OPERAND (t, 1),
13258 strict_overflow_p);
13259 case TRUTH_NOT_EXPR:
13260 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13262 TREE_OPERAND (t, 0),
13263 strict_overflow_p);
13270 case WITH_SIZE_EXPR:
13272 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
13275 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
13279 /* Return true if `t' is known to be non-negative. Handle warnings
13280 about undefined signed overflow. */
13283 tree_expr_nonnegative_p (tree t)
13285 bool ret, strict_overflow_p;
13287 strict_overflow_p = false;
13288 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13289 if (strict_overflow_p)
13290 fold_overflow_warning (("assuming signed overflow does not occur when "
13291 "determining that expression is always "
13293 WARN_STRICT_OVERFLOW_MISC);
13298 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13299 For floating point we further ensure that T is not denormal.
13300 Similar logic is present in nonzero_address in rtlanal.h.
13302 If the return value is based on the assumption that signed overflow
13303 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13304 change *STRICT_OVERFLOW_P. */
13307 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13308 bool *strict_overflow_p)
13313 return tree_expr_nonzero_warnv_p (op0,
13314 strict_overflow_p);
13318 tree inner_type = TREE_TYPE (op0);
13319 tree outer_type = type;
13321 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13322 && tree_expr_nonzero_warnv_p (op0,
13323 strict_overflow_p));
13327 case NON_LVALUE_EXPR:
13328 return tree_expr_nonzero_warnv_p (op0,
13329 strict_overflow_p);
13338 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13339 For floating point we further ensure that T is not denormal.
13340 Similar logic is present in nonzero_address in rtlanal.h.
13342 If the return value is based on the assumption that signed overflow
13343 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13344 change *STRICT_OVERFLOW_P. */
13347 tree_binary_nonzero_warnv_p (enum tree_code code,
13350 tree op1, bool *strict_overflow_p)
13352 bool sub_strict_overflow_p;
13355 case POINTER_PLUS_EXPR:
13357 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13359 /* With the presence of negative values it is hard
13360 to say something. */
13361 sub_strict_overflow_p = false;
13362 if (!tree_expr_nonnegative_warnv_p (op0,
13363 &sub_strict_overflow_p)
13364 || !tree_expr_nonnegative_warnv_p (op1,
13365 &sub_strict_overflow_p))
13367 /* One of operands must be positive and the other non-negative. */
13368 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13369 overflows, on a twos-complement machine the sum of two
13370 nonnegative numbers can never be zero. */
13371 return (tree_expr_nonzero_warnv_p (op0,
13373 || tree_expr_nonzero_warnv_p (op1,
13374 strict_overflow_p));
13379 if (TYPE_OVERFLOW_UNDEFINED (type))
13381 if (tree_expr_nonzero_warnv_p (op0,
13383 && tree_expr_nonzero_warnv_p (op1,
13384 strict_overflow_p))
13386 *strict_overflow_p = true;
13393 sub_strict_overflow_p = false;
13394 if (tree_expr_nonzero_warnv_p (op0,
13395 &sub_strict_overflow_p)
13396 && tree_expr_nonzero_warnv_p (op1,
13397 &sub_strict_overflow_p))
13399 if (sub_strict_overflow_p)
13400 *strict_overflow_p = true;
13405 sub_strict_overflow_p = false;
13406 if (tree_expr_nonzero_warnv_p (op0,
13407 &sub_strict_overflow_p))
13409 if (sub_strict_overflow_p)
13410 *strict_overflow_p = true;
13412 /* When both operands are nonzero, then MAX must be too. */
13413 if (tree_expr_nonzero_warnv_p (op1,
13414 strict_overflow_p))
13417 /* MAX where operand 0 is positive is positive. */
13418 return tree_expr_nonnegative_warnv_p (op0,
13419 strict_overflow_p);
13421 /* MAX where operand 1 is positive is positive. */
13422 else if (tree_expr_nonzero_warnv_p (op1,
13423 &sub_strict_overflow_p)
13424 && tree_expr_nonnegative_warnv_p (op1,
13425 &sub_strict_overflow_p))
13427 if (sub_strict_overflow_p)
13428 *strict_overflow_p = true;
13434 return (tree_expr_nonzero_warnv_p (op1,
13436 || tree_expr_nonzero_warnv_p (op0,
13437 strict_overflow_p));
13446 /* Return true when T is an address and is known to be nonzero.
13447 For floating point we further ensure that T is not denormal.
13448 Similar logic is present in nonzero_address in rtlanal.h.
13450 If the return value is based on the assumption that signed overflow
13451 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13452 change *STRICT_OVERFLOW_P. */
13455 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13457 bool sub_strict_overflow_p;
13458 switch (TREE_CODE (t))
13461 return !integer_zerop (t);
13465 tree base = TREE_OPERAND (t, 0);
13467 if (!DECL_P (base))
13468 base = get_base_address (base);
13473 /* For objects in symbol table check if we know they are non-zero.
13474 Don't do anything for variables and functions before symtab is built;
13475 it is quite possible that they will be declared weak later. */
13476 if (DECL_P (base) && decl_in_symtab_p (base))
13478 struct symtab_node *symbol;
13480 symbol = symtab_node::get_create (base);
13482 return symbol->nonzero_address ();
13487 /* Function local objects are never NULL. */
13489 && (DECL_CONTEXT (base)
13490 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13491 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13494 /* Constants are never weak. */
13495 if (CONSTANT_CLASS_P (base))
13502 sub_strict_overflow_p = false;
13503 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13504 &sub_strict_overflow_p)
13505 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13506 &sub_strict_overflow_p))
13508 if (sub_strict_overflow_p)
13509 *strict_overflow_p = true;
13520 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13521 attempt to fold the expression to a constant without modifying TYPE,
13524 If the expression could be simplified to a constant, then return
13525 the constant. If the expression would not be simplified to a
13526 constant, then return NULL_TREE. */
13529 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13531 tree tem = fold_binary (code, type, op0, op1);
13532 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13535 /* Given the components of a unary expression CODE, TYPE and OP0,
13536 attempt to fold the expression to a constant without modifying
13539 If the expression could be simplified to a constant, then return
13540 the constant. If the expression would not be simplified to a
13541 constant, then return NULL_TREE. */
13544 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13546 tree tem = fold_unary (code, type, op0);
13547 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13550 /* If EXP represents referencing an element in a constant string
13551 (either via pointer arithmetic or array indexing), return the
13552 tree representing the value accessed, otherwise return NULL. */
13555 fold_read_from_constant_string (tree exp)
13557 if ((TREE_CODE (exp) == INDIRECT_REF
13558 || TREE_CODE (exp) == ARRAY_REF)
13559 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13561 tree exp1 = TREE_OPERAND (exp, 0);
13564 location_t loc = EXPR_LOCATION (exp);
13566 if (TREE_CODE (exp) == INDIRECT_REF)
13567 string = string_constant (exp1, &index);
13570 tree low_bound = array_ref_low_bound (exp);
13571 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13573 /* Optimize the special-case of a zero lower bound.
13575 We convert the low_bound to sizetype to avoid some problems
13576 with constant folding. (E.g. suppose the lower bound is 1,
13577 and its mode is QI. Without the conversion,l (ARRAY
13578 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13579 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13580 if (! integer_zerop (low_bound))
13581 index = size_diffop_loc (loc, index,
13582 fold_convert_loc (loc, sizetype, low_bound));
13588 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13589 && TREE_CODE (string) == STRING_CST
13590 && TREE_CODE (index) == INTEGER_CST
13591 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13592 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13594 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13595 return build_int_cst_type (TREE_TYPE (exp),
13596 (TREE_STRING_POINTER (string)
13597 [TREE_INT_CST_LOW (index)]));
13602 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13603 an integer constant, real, or fixed-point constant.
13605 TYPE is the type of the result. */
13608 fold_negate_const (tree arg0, tree type)
13610 tree t = NULL_TREE;
13612 switch (TREE_CODE (arg0))
13617 wide_int val = wi::neg (arg0, &overflow);
13618 t = force_fit_type (type, val, 1,
13619 (overflow | TREE_OVERFLOW (arg0))
13620 && !TYPE_UNSIGNED (type));
13625 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13630 FIXED_VALUE_TYPE f;
13631 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13632 &(TREE_FIXED_CST (arg0)), NULL,
13633 TYPE_SATURATING (type));
13634 t = build_fixed (type, f);
13635 /* Propagate overflow flags. */
13636 if (overflow_p | TREE_OVERFLOW (arg0))
13637 TREE_OVERFLOW (t) = 1;
13642 gcc_unreachable ();
13648 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13649 an integer constant or real constant.
13651 TYPE is the type of the result. */
13654 fold_abs_const (tree arg0, tree type)
13656 tree t = NULL_TREE;
13658 switch (TREE_CODE (arg0))
13662 /* If the value is unsigned or non-negative, then the absolute value
13663 is the same as the ordinary value. */
13664 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13667 /* If the value is negative, then the absolute value is
13672 wide_int val = wi::neg (arg0, &overflow);
13673 t = force_fit_type (type, val, -1,
13674 overflow | TREE_OVERFLOW (arg0));
13680 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13681 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13687 gcc_unreachable ();
13693 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13694 constant. TYPE is the type of the result. */
13697 fold_not_const (const_tree arg0, tree type)
13699 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13701 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13704 /* Given CODE, a relational operator, the target type, TYPE and two
13705 constant operands OP0 and OP1, return the result of the
13706 relational operation. If the result is not a compile time
13707 constant, then return NULL_TREE. */
13710 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13712 int result, invert;
13714 /* From here on, the only cases we handle are when the result is
13715 known to be a constant. */
13717 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13719 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13720 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13722 /* Handle the cases where either operand is a NaN. */
13723 if (real_isnan (c0) || real_isnan (c1))
13733 case UNORDERED_EXPR:
13747 if (flag_trapping_math)
13753 gcc_unreachable ();
13756 return constant_boolean_node (result, type);
13759 return constant_boolean_node (real_compare (code, c0, c1), type);
13762 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13764 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13765 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13766 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13769 /* Handle equality/inequality of complex constants. */
13770 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13772 tree rcond = fold_relational_const (code, type,
13773 TREE_REALPART (op0),
13774 TREE_REALPART (op1));
13775 tree icond = fold_relational_const (code, type,
13776 TREE_IMAGPART (op0),
13777 TREE_IMAGPART (op1));
13778 if (code == EQ_EXPR)
13779 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13780 else if (code == NE_EXPR)
13781 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13786 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13788 unsigned count = VECTOR_CST_NELTS (op0);
13789 tree *elts = XALLOCAVEC (tree, count);
13790 gcc_assert (VECTOR_CST_NELTS (op1) == count
13791 && TYPE_VECTOR_SUBPARTS (type) == count);
13793 for (unsigned i = 0; i < count; i++)
13795 tree elem_type = TREE_TYPE (type);
13796 tree elem0 = VECTOR_CST_ELT (op0, i);
13797 tree elem1 = VECTOR_CST_ELT (op1, i);
13799 tree tem = fold_relational_const (code, elem_type,
13802 if (tem == NULL_TREE)
13805 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13808 return build_vector (type, elts);
13811 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13813 To compute GT, swap the arguments and do LT.
13814 To compute GE, do LT and invert the result.
13815 To compute LE, swap the arguments, do LT and invert the result.
13816 To compute NE, do EQ and invert the result.
13818 Therefore, the code below must handle only EQ and LT. */
13820 if (code == LE_EXPR || code == GT_EXPR)
13822 std::swap (op0, op1);
13823 code = swap_tree_comparison (code);
13826 /* Note that it is safe to invert for real values here because we
13827 have already handled the one case that it matters. */
13830 if (code == NE_EXPR || code == GE_EXPR)
13833 code = invert_tree_comparison (code, false);
13836 /* Compute a result for LT or EQ if args permit;
13837 Otherwise return T. */
13838 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13840 if (code == EQ_EXPR)
13841 result = tree_int_cst_equal (op0, op1);
13843 result = tree_int_cst_lt (op0, op1);
13850 return constant_boolean_node (result, type);
13853 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13854 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13858 fold_build_cleanup_point_expr (tree type, tree expr)
13860 /* If the expression does not have side effects then we don't have to wrap
13861 it with a cleanup point expression. */
13862 if (!TREE_SIDE_EFFECTS (expr))
13865 /* If the expression is a return, check to see if the expression inside the
13866 return has no side effects or the right hand side of the modify expression
13867 inside the return. If either don't have side effects set we don't need to
13868 wrap the expression in a cleanup point expression. Note we don't check the
13869 left hand side of the modify because it should always be a return decl. */
13870 if (TREE_CODE (expr) == RETURN_EXPR)
13872 tree op = TREE_OPERAND (expr, 0);
13873 if (!op || !TREE_SIDE_EFFECTS (op))
13875 op = TREE_OPERAND (op, 1);
13876 if (!TREE_SIDE_EFFECTS (op))
13880 return build1 (CLEANUP_POINT_EXPR, type, expr);
13883 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13884 of an indirection through OP0, or NULL_TREE if no simplification is
13888 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13894 subtype = TREE_TYPE (sub);
13895 if (!POINTER_TYPE_P (subtype))
13898 if (TREE_CODE (sub) == ADDR_EXPR)
13900 tree op = TREE_OPERAND (sub, 0);
13901 tree optype = TREE_TYPE (op);
13902 /* *&CONST_DECL -> to the value of the const decl. */
13903 if (TREE_CODE (op) == CONST_DECL)
13904 return DECL_INITIAL (op);
13905 /* *&p => p; make sure to handle *&"str"[cst] here. */
13906 if (type == optype)
13908 tree fop = fold_read_from_constant_string (op);
13914 /* *(foo *)&fooarray => fooarray[0] */
13915 else if (TREE_CODE (optype) == ARRAY_TYPE
13916 && type == TREE_TYPE (optype)
13917 && (!in_gimple_form
13918 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13920 tree type_domain = TYPE_DOMAIN (optype);
13921 tree min_val = size_zero_node;
13922 if (type_domain && TYPE_MIN_VALUE (type_domain))
13923 min_val = TYPE_MIN_VALUE (type_domain);
13925 && TREE_CODE (min_val) != INTEGER_CST)
13927 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13928 NULL_TREE, NULL_TREE);
13930 /* *(foo *)&complexfoo => __real__ complexfoo */
13931 else if (TREE_CODE (optype) == COMPLEX_TYPE
13932 && type == TREE_TYPE (optype))
13933 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13934 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13935 else if (TREE_CODE (optype) == VECTOR_TYPE
13936 && type == TREE_TYPE (optype))
13938 tree part_width = TYPE_SIZE (type);
13939 tree index = bitsize_int (0);
13940 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13944 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13945 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13947 tree op00 = TREE_OPERAND (sub, 0);
13948 tree op01 = TREE_OPERAND (sub, 1);
13951 if (TREE_CODE (op00) == ADDR_EXPR)
13954 op00 = TREE_OPERAND (op00, 0);
13955 op00type = TREE_TYPE (op00);
13957 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13958 if (TREE_CODE (op00type) == VECTOR_TYPE
13959 && type == TREE_TYPE (op00type))
13961 HOST_WIDE_INT offset = tree_to_shwi (op01);
13962 tree part_width = TYPE_SIZE (type);
13963 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
13964 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13965 tree index = bitsize_int (indexi);
13967 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
13968 return fold_build3_loc (loc,
13969 BIT_FIELD_REF, type, op00,
13970 part_width, index);
13973 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13974 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13975 && type == TREE_TYPE (op00type))
13977 tree size = TYPE_SIZE_UNIT (type);
13978 if (tree_int_cst_equal (size, op01))
13979 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13981 /* ((foo *)&fooarray)[1] => fooarray[1] */
13982 else if (TREE_CODE (op00type) == ARRAY_TYPE
13983 && type == TREE_TYPE (op00type))
13985 tree type_domain = TYPE_DOMAIN (op00type);
13986 tree min_val = size_zero_node;
13987 if (type_domain && TYPE_MIN_VALUE (type_domain))
13988 min_val = TYPE_MIN_VALUE (type_domain);
13989 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
13990 TYPE_SIZE_UNIT (type));
13991 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
13992 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13993 NULL_TREE, NULL_TREE);
13998 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13999 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14000 && type == TREE_TYPE (TREE_TYPE (subtype))
14001 && (!in_gimple_form
14002 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14005 tree min_val = size_zero_node;
14006 sub = build_fold_indirect_ref_loc (loc, sub);
14007 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14008 if (type_domain && TYPE_MIN_VALUE (type_domain))
14009 min_val = TYPE_MIN_VALUE (type_domain);
14011 && TREE_CODE (min_val) != INTEGER_CST)
14013 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14020 /* Builds an expression for an indirection through T, simplifying some
14024 build_fold_indirect_ref_loc (location_t loc, tree t)
14026 tree type = TREE_TYPE (TREE_TYPE (t));
14027 tree sub = fold_indirect_ref_1 (loc, type, t);
14032 return build1_loc (loc, INDIRECT_REF, type, t);
14035 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14038 fold_indirect_ref_loc (location_t loc, tree t)
14040 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14048 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14049 whose result is ignored. The type of the returned tree need not be
14050 the same as the original expression. */
14053 fold_ignored_result (tree t)
14055 if (!TREE_SIDE_EFFECTS (t))
14056 return integer_zero_node;
14059 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14062 t = TREE_OPERAND (t, 0);
14066 case tcc_comparison:
14067 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14068 t = TREE_OPERAND (t, 0);
14069 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14070 t = TREE_OPERAND (t, 1);
14075 case tcc_expression:
14076 switch (TREE_CODE (t))
14078 case COMPOUND_EXPR:
14079 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14081 t = TREE_OPERAND (t, 0);
14085 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14086 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14088 t = TREE_OPERAND (t, 0);
14101 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14104 round_up_loc (location_t loc, tree value, unsigned int divisor)
14106 tree div = NULL_TREE;
14111 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14112 have to do anything. Only do this when we are not given a const,
14113 because in that case, this check is more expensive than just
14115 if (TREE_CODE (value) != INTEGER_CST)
14117 div = build_int_cst (TREE_TYPE (value), divisor);
14119 if (multiple_of_p (TREE_TYPE (value), value, div))
14123 /* If divisor is a power of two, simplify this to bit manipulation. */
14124 if (divisor == (divisor & -divisor))
14126 if (TREE_CODE (value) == INTEGER_CST)
14128 wide_int val = value;
14131 if ((val & (divisor - 1)) == 0)
14134 overflow_p = TREE_OVERFLOW (value);
14135 val += divisor - 1;
14136 val &= - (int) divisor;
14140 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14146 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14147 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14148 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14149 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14155 div = build_int_cst (TREE_TYPE (value), divisor);
14156 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14157 value = size_binop_loc (loc, MULT_EXPR, value, div);
14163 /* Likewise, but round down. */
14166 round_down_loc (location_t loc, tree value, int divisor)
14168 tree div = NULL_TREE;
14170 gcc_assert (divisor > 0);
14174 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14175 have to do anything. Only do this when we are not given a const,
14176 because in that case, this check is more expensive than just
14178 if (TREE_CODE (value) != INTEGER_CST)
14180 div = build_int_cst (TREE_TYPE (value), divisor);
14182 if (multiple_of_p (TREE_TYPE (value), value, div))
14186 /* If divisor is a power of two, simplify this to bit manipulation. */
14187 if (divisor == (divisor & -divisor))
14191 t = build_int_cst (TREE_TYPE (value), -divisor);
14192 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14197 div = build_int_cst (TREE_TYPE (value), divisor);
14198 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14199 value = size_binop_loc (loc, MULT_EXPR, value, div);
14205 /* Returns the pointer to the base of the object addressed by EXP and
14206 extracts the information about the offset of the access, storing it
14207 to PBITPOS and POFFSET. */
14210 split_address_to_core_and_offset (tree exp,
14211 HOST_WIDE_INT *pbitpos, tree *poffset)
14215 int unsignedp, volatilep;
14216 HOST_WIDE_INT bitsize;
14217 location_t loc = EXPR_LOCATION (exp);
14219 if (TREE_CODE (exp) == ADDR_EXPR)
14221 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14222 poffset, &mode, &unsignedp, &volatilep,
14224 core = build_fold_addr_expr_loc (loc, core);
14230 *poffset = NULL_TREE;
14236 /* Returns true if addresses of E1 and E2 differ by a constant, false
14237 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14240 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14243 HOST_WIDE_INT bitpos1, bitpos2;
14244 tree toffset1, toffset2, tdiff, type;
14246 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14247 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14249 if (bitpos1 % BITS_PER_UNIT != 0
14250 || bitpos2 % BITS_PER_UNIT != 0
14251 || !operand_equal_p (core1, core2, 0))
14254 if (toffset1 && toffset2)
14256 type = TREE_TYPE (toffset1);
14257 if (type != TREE_TYPE (toffset2))
14258 toffset2 = fold_convert (type, toffset2);
14260 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14261 if (!cst_and_fits_in_hwi (tdiff))
14264 *diff = int_cst_value (tdiff);
14266 else if (toffset1 || toffset2)
14268 /* If only one of the offsets is non-constant, the difference cannot
14275 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14279 /* Simplify the floating point expression EXP when the sign of the
14280 result is not significant. Return NULL_TREE if no simplification
14284 fold_strip_sign_ops (tree exp)
14287 location_t loc = EXPR_LOCATION (exp);
14289 switch (TREE_CODE (exp))
14293 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14294 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14298 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
14300 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14301 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14302 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14303 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
14304 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14305 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14308 case COMPOUND_EXPR:
14309 arg0 = TREE_OPERAND (exp, 0);
14310 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14312 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14316 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14317 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14319 return fold_build3_loc (loc,
14320 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14321 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14322 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14327 const enum built_in_function fcode = builtin_mathfn_code (exp);
14330 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14331 /* Strip copysign function call, return the 1st argument. */
14332 arg0 = CALL_EXPR_ARG (exp, 0);
14333 arg1 = CALL_EXPR_ARG (exp, 1);
14334 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
14337 /* Strip sign ops from the argument of "odd" math functions. */
14338 if (negate_mathfn_p (fcode))
14340 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14342 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
14355 /* Return OFF converted to a pointer offset type suitable as offset for
14356 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14358 convert_to_ptrofftype_loc (location_t loc, tree off)
14360 return fold_convert_loc (loc, sizetype, off);
14363 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14365 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14367 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14368 ptr, convert_to_ptrofftype_loc (loc, off));
14371 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14373 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14375 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14376 ptr, size_int (off));