1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree int_const_binop (enum tree_code, tree, tree, int);
69 static tree const_binop (enum tree_code, tree, tree, int);
70 static hashval_t size_htab_hash (const void *);
71 static int size_htab_eq (const void *, const void *);
72 static tree fold_convert_const (enum tree_code, tree, tree);
73 static tree fold_convert (tree, tree);
74 static enum tree_code invert_tree_comparison (enum tree_code);
75 static enum tree_code swap_tree_comparison (enum tree_code);
76 static int comparison_to_compcode (enum tree_code);
77 static enum tree_code compcode_to_comparison (int);
78 static int truth_value_p (enum tree_code);
79 static int operand_equal_for_comparison_p (tree, tree, tree);
80 static int twoval_comparison_p (tree, tree *, tree *, int *);
81 static tree eval_subst (tree, tree, tree, tree, tree);
82 static tree pedantic_omit_one_operand (tree, tree, tree);
83 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84 static tree make_bit_field_ref (tree, tree, int, int, int);
85 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87 enum machine_mode *, int *, int *,
89 static int all_ones_mask_p (tree, int);
90 static tree sign_bit_p (tree, tree);
91 static int simple_operand_p (tree);
92 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93 static tree make_range (tree, int *, tree *, tree *);
94 static tree build_range_check (tree, tree, int, tree, tree);
95 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
97 static tree fold_range_test (tree);
98 static tree unextend (tree, int, int, tree);
99 static tree fold_truthop (enum tree_code, tree, tree, tree);
100 static tree optimize_minmax_comparison (tree);
101 static tree extract_muldiv (tree, tree, enum tree_code, tree);
102 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103 static tree strip_compound_expr (tree, tree);
104 static int multiple_of_p (tree, tree, tree);
105 static tree constant_boolean_node (int, tree);
106 static int count_cond (tree, int);
107 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
109 static bool fold_real_zero_addition_p (tree, tree, int);
110 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
112 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113 static bool reorder_operands_p (tree, tree);
114 static bool tree_swap_operands_p (tree, tree, bool);
116 static tree fold_negate_const (tree, tree);
117 static tree fold_abs_const (tree, tree);
119 /* The following constants represent a bit-based encoding of GCC's
120 comparison operators. This encoding simplifies transformations
121 on relational comparisons, such as combining two comparisons with AND or OR. */
122 #define COMPCODE_FALSE 0
123 #define COMPCODE_LT 1
124 #define COMPCODE_EQ 2
125 #define COMPCODE_LE 3
126 #define COMPCODE_GT 4
127 #define COMPCODE_NE 5
128 #define COMPCODE_GE 6
129 #define COMPCODE_TRUE 7
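/* A small, self-contained illustration of the COMPCODE_* encoding above:
   because LT, EQ and GT each occupy one bit, ANDing or ORing two comparison
   codes yields the code of the combined comparison.  Compiled out; the
   DEMO_* names exist only for this sketch.  */
#if 0
#include <assert.h>

#define DEMO_FALSE 0
#define DEMO_LT    1
#define DEMO_EQ    2
#define DEMO_LE    3	/* LT | EQ */
#define DEMO_GT    4
#define DEMO_NE    5	/* LT | GT */
#define DEMO_GE    6	/* GT | EQ */
#define DEMO_TRUE  7

int
main (void)
{
  /* (a < b) || (a == b)  is  a <= b.  */
  assert ((DEMO_LT | DEMO_EQ) == DEMO_LE);
  /* (a <= b) && (a >= b)  is  a == b.  */
  assert ((DEMO_LE & DEMO_GE) == DEMO_EQ);
  /* (a < b) && (a > b)  is always false.  */
  assert ((DEMO_LT & DEMO_GT) == DEMO_FALSE);
  return 0;
}
#endif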
131 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
132 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
133 and SUM1. Then this yields nonzero if overflow occurred during the addition.
136 Overflow occurs if A and B have the same sign, but A and SUM differ in
137 sign. Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
139 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
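/* A self-contained check of the sign-bit trick used by OVERFLOW_SUM_SIGN:
   overflow is flagged exactly when the operands agree in sign but the sum
   does not.  Compiled out; ordinary ints stand in for HOST_WIDE_INT.  */
#if 0
#include <assert.h>
#include <limits.h>

static int
demo_overflow_sum_sign (int a, int b, int sum)
{
  return (~(a ^ b) & (a ^ sum)) < 0;
}

int
main (void)
{
  /* INT_MAX + 1 wraps to INT_MIN in 2's complement: both operands are
     nonnegative but the sum is negative, so overflow is reported.  */
  assert (demo_overflow_sum_sign (INT_MAX, 1, INT_MIN));
  /* 3 + (-5) = -2 cannot overflow: the operands differ in sign.  */
  assert (! demo_overflow_sum_sign (3, -5, -2));
  return 0;
}
#endif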
141 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
142 We do that by representing the two-word integer in 4 words, with only
143 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
144 number. The value of the word is LOWPART + HIGHPART * BASE. */
146 #define LOWPART(x) \
147 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
148 #define HIGHPART(x) \
149 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
150 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
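/* A self-contained illustration of the half-word representation above,
   using a 32-bit stand-in for HOST_WIDE_INT (so the demo BASE is 2^16).
   Every word x satisfies x == LOWPART (x) + HIGHPART (x) * BASE.
   Compiled out; the DEMO_* names exist only for this sketch.  */
#if 0
#include <assert.h>
#include <stdint.h>

#define DEMO_LOWPART(x)  ((uint32_t) (x) & 0xffffu)
#define DEMO_HIGHPART(x) ((uint32_t) (x) >> 16)
#define DEMO_BASE        ((uint32_t) 1 << 16)

int
main (void)
{
  uint32_t x = 0x12345678u;

  assert (DEMO_LOWPART (x) == 0x5678u);
  assert (DEMO_HIGHPART (x) == 0x1234u);
  assert (DEMO_LOWPART (x) + DEMO_HIGHPART (x) * DEMO_BASE == x);
  return 0;
}
#endif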
152 /* Unpack a two-word integer into 4 words.
153 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
154 WORDS points to the array of HOST_WIDE_INTs. */
157 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
159 words[0] = LOWPART (low);
160 words[1] = HIGHPART (low);
161 words[2] = LOWPART (hi);
162 words[3] = HIGHPART (hi);
165 /* Pack an array of 4 words into a two-word integer.
166 WORDS points to the array of words.
167 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
170 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
173 *low = words[0] + words[1] * BASE;
174 *hi = words[2] + words[3] * BASE;
177 /* Make the integer constant T valid for its type by setting to 0 or 1 all
178 the bits in the constant that don't belong in the type.
180 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
181 nonzero, a signed overflow has already occurred in calculating T, so propagate it. */
185 force_fit_type (tree t, int overflow)
187 unsigned HOST_WIDE_INT low;
191 if (TREE_CODE (t) == REAL_CST)
193 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
194 Consider doing it via real_convert now. */
198 else if (TREE_CODE (t) != INTEGER_CST)
201 low = TREE_INT_CST_LOW (t);
202 high = TREE_INT_CST_HIGH (t);
204 if (POINTER_TYPE_P (TREE_TYPE (t))
205 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
208 prec = TYPE_PRECISION (TREE_TYPE (t));
210 /* First clear all bits that are beyond the type's precision. */
212 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
214 else if (prec > HOST_BITS_PER_WIDE_INT)
215 TREE_INT_CST_HIGH (t)
216 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
219 TREE_INT_CST_HIGH (t) = 0;
220 if (prec < HOST_BITS_PER_WIDE_INT)
221 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
224 /* Unsigned types do not suffer sign extension or overflow unless they are a sizetype. */
226 if (TREE_UNSIGNED (TREE_TYPE (t))
227 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
231 /* If the value's sign bit is set, extend the sign. */
232 if (prec != 2 * HOST_BITS_PER_WIDE_INT
233 && (prec > HOST_BITS_PER_WIDE_INT
234 ? 0 != (TREE_INT_CST_HIGH (t)
235 & ((unsigned HOST_WIDE_INT) 1
236 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
237 : 0 != (TREE_INT_CST_LOW (t)
238 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
240 /* Value is negative:
241 set to 1 all the bits that are outside this type's precision. */
242 if (prec > HOST_BITS_PER_WIDE_INT)
243 TREE_INT_CST_HIGH (t)
244 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
247 TREE_INT_CST_HIGH (t) = -1;
248 if (prec < HOST_BITS_PER_WIDE_INT)
249 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
253 /* Return nonzero if signed overflow occurred. */
255 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
259 /* Add two doubleword integers with doubleword result.
260 Each argument is given as two `HOST_WIDE_INT' pieces.
261 One argument is L1 and H1; the other, L2 and H2.
262 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
265 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
266 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
267 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
269 unsigned HOST_WIDE_INT l;
273 h = h1 + h2 + (l < l1);
277 return OVERFLOW_SUM_SIGN (h1, h2, h);
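/* A self-contained sketch of the carry detection used by add_double: after
   the unsigned addition l = l1 + l2, a wrap-around happened iff the result
   is smaller than an operand, so the carry into the high word is (l < l1).
   Compiled out; uint64_t/int64_t pairs stand in for the HOST_WIDE_INT
   pieces.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
demo_add_double (uint64_t l1, int64_t h1, uint64_t l2, int64_t h2,
		 uint64_t *lv, int64_t *hv)
{
  uint64_t l = l1 + l2;		/* may wrap modulo 2^64 */
  *lv = l;
  *hv = h1 + h2 + (l < l1);	/* propagate the carry */
}

int
main (void)
{
  uint64_t lv;
  int64_t hv;

  /* (2^64 - 1) + 1: the low word wraps to 0 and the carry bumps the high
     word to 1.  */
  demo_add_double (~(uint64_t) 0, 0, 1, 0, &lv, &hv);
  assert (lv == 0 && hv == 1);
  return 0;
}
#endif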
280 /* Negate a doubleword integer with doubleword result.
281 Return nonzero if the operation overflows, assuming it's signed.
282 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
283 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
286 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
287 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
293 return (*hv & h1) < 0;
303 /* Multiply two doubleword integers with doubleword result.
304 Return nonzero if the operation overflows, assuming it's signed.
305 Each argument is given as two `HOST_WIDE_INT' pieces.
306 One argument is L1 and H1; the other, L2 and H2.
307 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
310 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
311 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
314 HOST_WIDE_INT arg1[4];
315 HOST_WIDE_INT arg2[4];
316 HOST_WIDE_INT prod[4 * 2];
317 unsigned HOST_WIDE_INT carry;
319 unsigned HOST_WIDE_INT toplow, neglow;
320 HOST_WIDE_INT tophigh, neghigh;
322 encode (arg1, l1, h1);
323 encode (arg2, l2, h2);
325 memset (prod, 0, sizeof prod);
327 for (i = 0; i < 4; i++)
330 for (j = 0; j < 4; j++)
333 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
334 carry += arg1[i] * arg2[j];
335 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
337 prod[k] = LOWPART (carry);
338 carry = HIGHPART (carry);
343 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
345 /* Check for overflow by calculating the top half of the answer in full;
346 it should agree with the low half's sign bit. */
347 decode (prod + 4, &toplow, &tophigh);
350 neg_double (l2, h2, &neglow, &neghigh);
351 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
355 neg_double (l1, h1, &neglow, &neghigh);
356 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
358 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
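/* A self-contained sketch of the schoolbook digit multiplication performed
   by mul_double, scaled down to 32 x 32 -> 64 bits with 16-bit "digits".
   Each partial product plus carries stays within 32 bits, matching the
   bounds quoted in the prod[] loop above.  Compiled out; demo_mul exists
   only for this sketch.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint64_t
demo_mul (uint32_t a, uint32_t b)
{
  uint32_t ad[2] = { a & 0xffffu, a >> 16 };	/* encode */
  uint32_t bd[2] = { b & 0xffffu, b >> 16 };
  uint32_t prod[4] = { 0, 0, 0, 0 };
  int i, j;

  for (i = 0; i < 2; i++)
    {
      uint32_t carry = 0;
      for (j = 0; j < 2; j++)
	{
	  carry += ad[i] * bd[j];	/* <= 0xFFFE0001 */
	  carry += prod[i + j];		/* still fits in 32 bits */
	  prod[i + j] = carry & 0xffffu;
	  carry >>= 16;
	}
      prod[i + 2] = carry;
    }

  return (uint64_t) prod[0]		/* decode */
	 | ((uint64_t) prod[1] << 16)
	 | ((uint64_t) prod[2] << 32)
	 | ((uint64_t) prod[3] << 48);
}

int
main (void)
{
  assert (demo_mul (0xffffffffu, 0xffffffffu)
	  == (uint64_t) 0xffffffffu * 0xffffffffu);
  assert (demo_mul (123456789u, 987654321u)
	  == (uint64_t) 123456789u * 987654321u);
  return 0;
}
#endif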
361 /* Shift the doubleword integer in L1, H1 left by COUNT places
362 keeping only PREC bits of result.
363 Shift right if COUNT is negative.
364 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
365 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
368 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
369 HOST_WIDE_INT count, unsigned int prec,
370 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
372 unsigned HOST_WIDE_INT signmask;
376 rshift_double (l1, h1, -count, prec, lv, hv, arith);
380 if (SHIFT_COUNT_TRUNCATED)
383 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
385 /* Shifting by the host word size is undefined according to the
386 ANSI standard, so we must handle this as a special case. */
390 else if (count >= HOST_BITS_PER_WIDE_INT)
392 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
397 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
398 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
402 /* Sign extend all bits that are beyond the precision. */
404 signmask = -((prec > HOST_BITS_PER_WIDE_INT
405 ? ((unsigned HOST_WIDE_INT) *hv
406 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
407 : (*lv >> (prec - 1))) & 1);
409 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
411 else if (prec >= HOST_BITS_PER_WIDE_INT)
413 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
414 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
419 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
420 *lv |= signmask << prec;
424 /* Shift the doubleword integer in L1, H1 right by COUNT places
425 keeping only PREC bits of result. COUNT must be positive.
426 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
427 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
430 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
431 HOST_WIDE_INT count, unsigned int prec,
432 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
435 unsigned HOST_WIDE_INT signmask;
438 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
441 if (SHIFT_COUNT_TRUNCATED)
444 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
446 /* Shifting by the host word size is undefined according to the
447 ANSI standard, so we must handle this as a special case. */
451 else if (count >= HOST_BITS_PER_WIDE_INT)
454 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
458 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
460 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
463 /* Zero / sign extend all bits that are beyond the precision. */
465 if (count >= (HOST_WIDE_INT)prec)
470 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
472 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
474 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
475 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
480 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
481 *lv |= signmask << (prec - count);
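/* A self-contained sketch of the sign-mask technique used above: a logical
   (zero-filling) right shift becomes an arithmetic one by OR-ing copies of
   the sign bit into the vacated high positions, which is how rshift_double
   sign-extends its result when ARITH is nonzero.  Compiled out; assumes
   0 < count < 32 and the usual 2's complement conversion back to int32_t.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int32_t
demo_arith_rshift (int32_t x, unsigned int count)
{
  uint32_t u = (uint32_t) x;
  uint32_t signmask = -(u >> 31);		/* all ones iff x < 0 */
  uint32_t shifted = u >> count;		/* logical shift */

  shifted |= signmask << (32 - count);		/* refill with the sign bit */
  return (int32_t) shifted;
}

int
main (void)
{
  assert (demo_arith_rshift (16, 2) == 4);
  assert (demo_arith_rshift (-16, 2) == -4);
  assert (demo_arith_rshift (-1, 31) == -1);
  return 0;
}
#endif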
485 /* Rotate the doubleword integer in L1, H1 left by COUNT places
486 keeping only PREC bits of result.
487 Rotate right if COUNT is negative.
488 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
491 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
492 HOST_WIDE_INT count, unsigned int prec,
493 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
495 unsigned HOST_WIDE_INT s1l, s2l;
496 HOST_WIDE_INT s1h, s2h;
502 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
503 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
508 /* Rotate the doubleword integer in L1, H1 right by COUNT places
509 keeping only PREC bits of result. COUNT must be positive.
510 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
513 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
514 HOST_WIDE_INT count, unsigned int prec,
515 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
517 unsigned HOST_WIDE_INT s1l, s2l;
518 HOST_WIDE_INT s1h, s2h;
524 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
525 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
530 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
531 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
532 CODE is a tree code for a kind of division, one of
533 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
535 It controls how the quotient is rounded to an integer.
536 Return nonzero if the operation overflows.
537 UNS nonzero says do unsigned division. */
540 div_and_round_double (enum tree_code code, int uns,
541 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
542 HOST_WIDE_INT hnum_orig,
543 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
544 HOST_WIDE_INT hden_orig,
545 unsigned HOST_WIDE_INT *lquo,
546 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
550 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
551 HOST_WIDE_INT den[4], quo[4];
553 unsigned HOST_WIDE_INT work;
554 unsigned HOST_WIDE_INT carry = 0;
555 unsigned HOST_WIDE_INT lnum = lnum_orig;
556 HOST_WIDE_INT hnum = hnum_orig;
557 unsigned HOST_WIDE_INT lden = lden_orig;
558 HOST_WIDE_INT hden = hden_orig;
561 if (hden == 0 && lden == 0)
562 overflow = 1, lden = 1;
564 /* Calculate quotient sign and convert operands to unsigned. */
570 /* (minimum integer) / (-1) is the only overflow case. */
571 if (neg_double (lnum, hnum, &lnum, &hnum)
572 && ((HOST_WIDE_INT) lden & hden) == -1)
578 neg_double (lden, hden, &lden, &hden);
582 if (hnum == 0 && hden == 0)
583 { /* single precision */
585 /* This unsigned division rounds toward zero. */
591 { /* trivial case: dividend < divisor */
592 /* hden != 0 already checked. */
599 memset (quo, 0, sizeof quo);
601 memset (num, 0, sizeof num); /* to zero 9th element */
602 memset (den, 0, sizeof den);
604 encode (num, lnum, hnum);
605 encode (den, lden, hden);
607 /* Special code for when the divisor < BASE. */
608 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
610 /* hnum != 0 already checked. */
611 for (i = 4 - 1; i >= 0; i--)
613 work = num[i] + carry * BASE;
614 quo[i] = work / lden;
620 /* Full double precision division,
621 with thanks to Don Knuth's "Seminumerical Algorithms". */
622 int num_hi_sig, den_hi_sig;
623 unsigned HOST_WIDE_INT quo_est, scale;
625 /* Find the highest nonzero divisor digit. */
626 for (i = 4 - 1;; i--)
633 /* Ensure that the first digit of the divisor is at least BASE/2.
634 This is required by the quotient digit estimation algorithm. */
636 scale = BASE / (den[den_hi_sig] + 1);
638 { /* scale divisor and dividend */
640 for (i = 0; i <= 4 - 1; i++)
642 work = (num[i] * scale) + carry;
643 num[i] = LOWPART (work);
644 carry = HIGHPART (work);
649 for (i = 0; i <= 4 - 1; i++)
651 work = (den[i] * scale) + carry;
652 den[i] = LOWPART (work);
653 carry = HIGHPART (work);
654 if (den[i] != 0) den_hi_sig = i;
661 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
663 /* Guess the next quotient digit, quo_est, by dividing the first
664 two remaining dividend digits by the high order quotient digit.
665 quo_est is never low and is at most 2 high. */
666 unsigned HOST_WIDE_INT tmp;
668 num_hi_sig = i + den_hi_sig + 1;
669 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
670 if (num[num_hi_sig] != den[den_hi_sig])
671 quo_est = work / den[den_hi_sig];
675 /* Refine quo_est so it's usually correct, and at most one high. */
676 tmp = work - quo_est * den[den_hi_sig];
678 && (den[den_hi_sig - 1] * quo_est
679 > (tmp * BASE + num[num_hi_sig - 2])))
682 /* Try QUO_EST as the quotient digit, by multiplying the
683 divisor by QUO_EST and subtracting from the remaining dividend.
684 Keep in mind that QUO_EST is the I - 1st digit. */
687 for (j = 0; j <= den_hi_sig; j++)
689 work = quo_est * den[j] + carry;
690 carry = HIGHPART (work);
691 work = num[i + j] - LOWPART (work);
692 num[i + j] = LOWPART (work);
693 carry += HIGHPART (work) != 0;
696 /* If quo_est was high by one, then num[i] went negative and
697 we need to correct things. */
698 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
701 carry = 0; /* add divisor back in */
702 for (j = 0; j <= den_hi_sig; j++)
704 work = num[i + j] + den[j] + carry;
705 carry = HIGHPART (work);
706 num[i + j] = LOWPART (work);
709 num [num_hi_sig] += carry;
712 /* Store the quotient digit. */
717 decode (quo, lquo, hquo);
720 /* If result is negative, make it so. */
722 neg_double (*lquo, *hquo, lquo, hquo);
724 /* Compute trial remainder: rem = num - (quo * den) */
725 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
726 neg_double (*lrem, *hrem, lrem, hrem);
727 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
732 case TRUNC_MOD_EXPR: /* round toward zero */
733 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
737 case FLOOR_MOD_EXPR: /* round toward negative infinity */
738 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
741 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
749 case CEIL_MOD_EXPR: /* round toward positive infinity */
750 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
752 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
760 case ROUND_MOD_EXPR: /* round to closest integer */
762 unsigned HOST_WIDE_INT labs_rem = *lrem;
763 HOST_WIDE_INT habs_rem = *hrem;
764 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
765 HOST_WIDE_INT habs_den = hden, htwice;
767 /* Get absolute values. */
769 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
771 neg_double (lden, hden, &labs_den, &habs_den);
773 /* If (2 * abs (lrem) >= abs (lden)) */
774 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
775 labs_rem, habs_rem, <wice, &htwice);
777 if (((unsigned HOST_WIDE_INT) habs_den
778 < (unsigned HOST_WIDE_INT) htwice)
779 || (((unsigned HOST_WIDE_INT) habs_den
780 == (unsigned HOST_WIDE_INT) htwice)
781 && (labs_den < ltwice)))
785 add_double (*lquo, *hquo,
786 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
789 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
801 /* Compute true remainder: rem = num - (quo * den) */
802 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
803 neg_double (*lrem, *hrem, lrem, hrem);
804 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
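/* A self-contained sketch of the rounding adjustments made above: C's `/'
   already truncates toward zero, and the FLOOR/CEIL/ROUND variants then
   nudge that quotient by one when the remainder is nonzero, recomputing the
   remainder as num - quo * den.  ROUND breaks ties away from zero, matching
   the 2 * abs (rem) >= abs (den) test.  Compiled out; ordinary ints stand in
   for the doubleword values.  */
#if 0
#include <assert.h>
#include <stdlib.h>

enum demo_round_mode { DEMO_TRUNC, DEMO_FLOOR, DEMO_CEIL, DEMO_ROUND };

static void
demo_div (int num, int den, enum demo_round_mode mode, int *quo, int *rem)
{
  int q = num / den;			/* truncating division */
  int r = num - q * den;
  int quo_neg = (num < 0) != (den < 0);

  if (r != 0)
    switch (mode)
      {
      case DEMO_FLOOR:			/* round toward negative infinity */
	if (quo_neg)
	  q--;
	break;
      case DEMO_CEIL:			/* round toward positive infinity */
	if (! quo_neg)
	  q++;
	break;
      case DEMO_ROUND:			/* round to closest integer */
	if (2 * abs (r) >= abs (den))
	  q += quo_neg ? -1 : 1;
	break;
      default:
	break;
      }

  *quo = q;
  *rem = num - q * den;
}

int
main (void)
{
  int q, r;

  demo_div (-7, 2, DEMO_TRUNC, &q, &r); assert (q == -3 && r == -1);
  demo_div (-7, 2, DEMO_FLOOR, &q, &r); assert (q == -4 && r == 1);
  demo_div (-7, 2, DEMO_CEIL,  &q, &r); assert (q == -3 && r == -1);
  demo_div (-7, 2, DEMO_ROUND, &q, &r); assert (q == -4 && r == 1);
  return 0;
}
#endif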
808 /* Return true if built-in mathematical function specified by CODE
809 preserves the sign of its argument, i.e. -f(x) == f(-x). */
812 negate_mathfn_p (enum built_in_function code)
836 /* Determine whether an expression T can be cheaply negated using
837 the function negate_expr. */
840 negate_expr_p (tree t)
842 unsigned HOST_WIDE_INT val;
849 type = TREE_TYPE (t);
852 switch (TREE_CODE (t))
855 if (TREE_UNSIGNED (type) || ! flag_trapv)
858 /* Check that -CST will not overflow type. */
859 prec = TYPE_PRECISION (type);
860 if (prec > HOST_BITS_PER_WIDE_INT)
862 if (TREE_INT_CST_LOW (t) != 0)
864 prec -= HOST_BITS_PER_WIDE_INT;
865 val = TREE_INT_CST_HIGH (t);
868 val = TREE_INT_CST_LOW (t);
869 if (prec < HOST_BITS_PER_WIDE_INT)
870 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
871 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
878 return negate_expr_p (TREE_REALPART (t))
879 && negate_expr_p (TREE_IMAGPART (t));
882 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
884 /* -(A + B) -> (-B) - A. */
885 if (negate_expr_p (TREE_OPERAND (t, 1))
886 && reorder_operands_p (TREE_OPERAND (t, 0),
887 TREE_OPERAND (t, 1)))
889 /* -(A + B) -> (-A) - B. */
890 return negate_expr_p (TREE_OPERAND (t, 0));
893 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
894 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
895 && reorder_operands_p (TREE_OPERAND (t, 0),
896 TREE_OPERAND (t, 1));
899 if (TREE_UNSIGNED (TREE_TYPE (t)))
905 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
906 return negate_expr_p (TREE_OPERAND (t, 1))
907 || negate_expr_p (TREE_OPERAND (t, 0));
911 /* Negate -((double)float) as (double)(-float). */
912 if (TREE_CODE (type) == REAL_TYPE)
914 tree tem = strip_float_extensions (t);
916 return negate_expr_p (tem);
921 /* Negate -f(x) as f(-x). */
922 if (negate_mathfn_p (builtin_mathfn_code (t)))
923 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
927 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
928 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
930 tree op1 = TREE_OPERAND (t, 1);
931 if (TREE_INT_CST_HIGH (op1) == 0
932 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
933 == TREE_INT_CST_LOW (op1))
944 /* Given T, an expression, return the negation of T. Allow for T to be
945 null, in which case return null. */
956 type = TREE_TYPE (t);
959 switch (TREE_CODE (t))
962 tem = fold_negate_const (t, type);
963 if (! TREE_OVERFLOW (tem)
964 || TREE_UNSIGNED (type)
970 tem = fold_negate_const (t, type);
971 /* Two's complement FP formats, such as c4x, may overflow. */
972 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
973 return fold_convert (type, tem);
978 tree rpart = negate_expr (TREE_REALPART (t));
979 tree ipart = negate_expr (TREE_IMAGPART (t));
981 if ((TREE_CODE (rpart) == REAL_CST
982 && TREE_CODE (ipart) == REAL_CST)
983 || (TREE_CODE (rpart) == INTEGER_CST
984 && TREE_CODE (ipart) == INTEGER_CST))
985 return build_complex (type, rpart, ipart);
990 return fold_convert (type, TREE_OPERAND (t, 0));
993 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
995 /* -(A + B) -> (-B) - A. */
996 if (negate_expr_p (TREE_OPERAND (t, 1))
997 && reorder_operands_p (TREE_OPERAND (t, 0),
998 TREE_OPERAND (t, 1)))
999 return fold_convert (type,
1000 fold (build (MINUS_EXPR, TREE_TYPE (t),
1001 negate_expr (TREE_OPERAND (t, 1)),
1002 TREE_OPERAND (t, 0))));
1003 /* -(A + B) -> (-A) - B. */
1004 if (negate_expr_p (TREE_OPERAND (t, 0)))
1005 return fold_convert (type,
1006 fold (build (MINUS_EXPR, TREE_TYPE (t),
1007 negate_expr (TREE_OPERAND (t, 0)),
1008 TREE_OPERAND (t, 1))));
1013 /* - (A - B) -> B - A */
1014 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1015 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1016 return fold_convert (type,
1017 fold (build (MINUS_EXPR, TREE_TYPE (t),
1018 TREE_OPERAND (t, 1),
1019 TREE_OPERAND (t, 0))));
1023 if (TREE_UNSIGNED (TREE_TYPE (t)))
1029 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1031 tem = TREE_OPERAND (t, 1);
1032 if (negate_expr_p (tem))
1033 return fold_convert (type,
1034 fold (build (TREE_CODE (t), TREE_TYPE (t),
1035 TREE_OPERAND (t, 0),
1036 negate_expr (tem))));
1037 tem = TREE_OPERAND (t, 0);
1038 if (negate_expr_p (tem))
1039 return fold_convert (type,
1040 fold (build (TREE_CODE (t), TREE_TYPE (t),
1042 TREE_OPERAND (t, 1))));
1047 /* Convert -((double)float) into (double)(-float). */
1048 if (TREE_CODE (type) == REAL_TYPE)
1050 tem = strip_float_extensions (t);
1051 if (tem != t && negate_expr_p (tem))
1052 return fold_convert (type, negate_expr (tem));
1057 /* Negate -f(x) as f(-x). */
1058 if (negate_mathfn_p (builtin_mathfn_code (t))
1059 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1061 tree fndecl, arg, arglist;
1063 fndecl = get_callee_fndecl (t);
1064 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1065 arglist = build_tree_list (NULL_TREE, arg);
1066 return build_function_call_expr (fndecl, arglist);
1071 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1072 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1074 tree op1 = TREE_OPERAND (t, 1);
1075 if (TREE_INT_CST_HIGH (op1) == 0
1076 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1077 == TREE_INT_CST_LOW (op1))
1079 tree ntype = TREE_UNSIGNED (type)
1080 ? lang_hooks.types.signed_type (type)
1081 : lang_hooks.types.unsigned_type (type);
1082 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1083 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1084 return fold_convert (type, temp);
1093 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1094 return fold_convert (type, tem);
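/* A self-contained check of the identity behind the
   -((int) x >> 31)  ==>  (unsigned) x >> 31  transformation above (a shift
   by precision - 1): the signed shift yields 0 or -1 according to the sign
   bit, so its negation is 0 or 1, which is what the unsigned shift produces
   directly.  Compiled out; assumes the usual arithmetic behaviour of `>>'
   on negative signed values.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t vals[] = { 0, 1, -1, 1234, -1234, INT32_MIN, INT32_MAX };
  unsigned int i;

  for (i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      int32_t x = vals[i];
      assert (-(x >> 31) == (int32_t) ((uint32_t) x >> 31));
    }
  return 0;
}
#endif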
1097 /* Split a tree IN into constant, literal and variable parts that could be
1098 combined with CODE to make IN. "constant" means an expression with
1099 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1100 commutative arithmetic operation. Store the constant part into *CONP,
1101 the literal in *LITP and return the variable part. If a part isn't
1102 present, set it to null. If the tree does not decompose in this way,
1103 return the entire tree as the variable part and the other parts as null.
1105 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1106 case, we negate an operand that was subtracted. Except if it is a
1107 literal for which we use *MINUS_LITP instead.
1109 If NEGATE_P is true, we are negating all of IN, again except a literal
1110 for which we use *MINUS_LITP instead.
1112 If IN is itself a literal or constant, return it as appropriate.
1114 Note that we do not guarantee that any of the three values will be the
1115 same type as IN, but they will have the same signedness and mode. */
1118 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1119 tree *minus_litp, int negate_p)
1127 /* Strip any conversions that don't change the machine mode or signedness. */
1128 STRIP_SIGN_NOPS (in);
1130 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1132 else if (TREE_CODE (in) == code
1133 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1134 /* We can associate addition and subtraction together (even
1135 though the C standard doesn't say so) for integers because
1136 the value is not affected. For reals, the value might be
1137 affected, so we can't. */
1138 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1139 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1141 tree op0 = TREE_OPERAND (in, 0);
1142 tree op1 = TREE_OPERAND (in, 1);
1143 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1144 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1146 /* First see if either of the operands is a literal, then a constant. */
1147 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1148 *litp = op0, op0 = 0;
1149 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1150 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1152 if (op0 != 0 && TREE_CONSTANT (op0))
1153 *conp = op0, op0 = 0;
1154 else if (op1 != 0 && TREE_CONSTANT (op1))
1155 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1157 /* If we haven't dealt with either operand, this is not a case we can
1158 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1159 if (op0 != 0 && op1 != 0)
1164 var = op1, neg_var_p = neg1_p;
1166 /* Now do any needed negations. */
1168 *minus_litp = *litp, *litp = 0;
1170 *conp = negate_expr (*conp);
1172 var = negate_expr (var);
1174 else if (TREE_CONSTANT (in))
1182 *minus_litp = *litp, *litp = 0;
1183 else if (*minus_litp)
1184 *litp = *minus_litp, *minus_litp = 0;
1185 *conp = negate_expr (*conp);
1186 var = negate_expr (var);
1192 /* Re-associate trees split by the above function. T1 and T2 are either
1193 expressions to associate or null. Return the new expression, if any. If
1194 we build an operation, do it in TYPE and with CODE. */
1197 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1204 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1205 try to fold this since we will have infinite recursion. But do
1206 deal with any NEGATE_EXPRs. */
1207 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1208 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1210 if (code == PLUS_EXPR)
1212 if (TREE_CODE (t1) == NEGATE_EXPR)
1213 return build (MINUS_EXPR, type, fold_convert (type, t2),
1214 fold_convert (type, TREE_OPERAND (t1, 0)));
1215 else if (TREE_CODE (t2) == NEGATE_EXPR)
1216 return build (MINUS_EXPR, type, fold_convert (type, t1),
1217 fold_convert (type, TREE_OPERAND (t2, 0)));
1219 return build (code, type, fold_convert (type, t1),
1220 fold_convert (type, t2));
1223 return fold (build (code, type, fold_convert (type, t1),
1224 fold_convert (type, t2)));
1227 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1228 to produce a new constant.
1230 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1233 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1235 unsigned HOST_WIDE_INT int1l, int2l;
1236 HOST_WIDE_INT int1h, int2h;
1237 unsigned HOST_WIDE_INT low;
1239 unsigned HOST_WIDE_INT garbagel;
1240 HOST_WIDE_INT garbageh;
1242 tree type = TREE_TYPE (arg1);
1243 int uns = TREE_UNSIGNED (type);
1245 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1247 int no_overflow = 0;
1249 int1l = TREE_INT_CST_LOW (arg1);
1250 int1h = TREE_INT_CST_HIGH (arg1);
1251 int2l = TREE_INT_CST_LOW (arg2);
1252 int2h = TREE_INT_CST_HIGH (arg2);
1257 low = int1l | int2l, hi = int1h | int2h;
1261 low = int1l ^ int2l, hi = int1h ^ int2h;
1265 low = int1l & int2l, hi = int1h & int2h;
1271 /* It's unclear from the C standard whether shifts can overflow.
1272 The following code ignores overflow; perhaps a C standard
1273 interpretation ruling is needed. */
1274 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1282 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1287 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1291 neg_double (int2l, int2h, &low, &hi);
1292 add_double (int1l, int1h, low, hi, &low, &hi);
1293 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1297 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1300 case TRUNC_DIV_EXPR:
1301 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1302 case EXACT_DIV_EXPR:
1303 /* This is a shortcut for a common special case. */
1304 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1305 && ! TREE_CONSTANT_OVERFLOW (arg1)
1306 && ! TREE_CONSTANT_OVERFLOW (arg2)
1307 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1309 if (code == CEIL_DIV_EXPR)
1312 low = int1l / int2l, hi = 0;
1316 /* ... fall through ... */
1318 case ROUND_DIV_EXPR:
1319 if (int2h == 0 && int2l == 1)
1321 low = int1l, hi = int1h;
1324 if (int1l == int2l && int1h == int2h
1325 && ! (int1l == 0 && int1h == 0))
1330 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1331 &low, &hi, &garbagel, &garbageh);
1334 case TRUNC_MOD_EXPR:
1335 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1336 /* This is a shortcut for a common special case. */
1337 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1338 && ! TREE_CONSTANT_OVERFLOW (arg1)
1339 && ! TREE_CONSTANT_OVERFLOW (arg2)
1340 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1342 if (code == CEIL_MOD_EXPR)
1344 low = int1l % int2l, hi = 0;
1348 /* ... fall through ... */
1350 case ROUND_MOD_EXPR:
1351 overflow = div_and_round_double (code, uns,
1352 int1l, int1h, int2l, int2h,
1353 &garbagel, &garbageh, &low, &hi);
1359 low = (((unsigned HOST_WIDE_INT) int1h
1360 < (unsigned HOST_WIDE_INT) int2h)
1361 || (((unsigned HOST_WIDE_INT) int1h
1362 == (unsigned HOST_WIDE_INT) int2h)
1365 low = (int1h < int2h
1366 || (int1h == int2h && int1l < int2l));
1368 if (low == (code == MIN_EXPR))
1369 low = int1l, hi = int1h;
1371 low = int2l, hi = int2h;
1378 /* If this is for a sizetype, can be represented as one (signed)
1379 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1380 consts. */
1381 if (is_sizetype
1382 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1383 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1384 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1385 return size_int_type_wide (low, type);
1388 t = build_int_2 (low, hi);
1389 TREE_TYPE (t) = TREE_TYPE (arg1);
1394 ? (!uns || is_sizetype) && overflow
1395 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1397 | TREE_OVERFLOW (arg1)
1398 | TREE_OVERFLOW (arg2));
1400 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1401 So check if force_fit_type truncated the value. */
1403 && ! TREE_OVERFLOW (t)
1404 && (TREE_INT_CST_HIGH (t) != hi
1405 || TREE_INT_CST_LOW (t) != low))
1406 TREE_OVERFLOW (t) = 1;
1408 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1409 | TREE_CONSTANT_OVERFLOW (arg1)
1410 | TREE_CONSTANT_OVERFLOW (arg2));
1414 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1415 constant. We assume ARG1 and ARG2 have the same data type, or at least
1416 are the same kind of constant and the same machine mode.
1418 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1421 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1426 if (TREE_CODE (arg1) == INTEGER_CST)
1427 return int_const_binop (code, arg1, arg2, notrunc);
1429 if (TREE_CODE (arg1) == REAL_CST)
1431 enum machine_mode mode;
1434 REAL_VALUE_TYPE value;
1437 d1 = TREE_REAL_CST (arg1);
1438 d2 = TREE_REAL_CST (arg2);
1440 type = TREE_TYPE (arg1);
1441 mode = TYPE_MODE (type);
1443 /* Don't perform operation if we honor signaling NaNs and
1444 either operand is a NaN. */
1445 if (HONOR_SNANS (mode)
1446 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1449 /* Don't perform operation if it would raise a division
1450 by zero exception. */
1451 if (code == RDIV_EXPR
1452 && REAL_VALUES_EQUAL (d2, dconst0)
1453 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1456 /* If either operand is a NaN, just return it. Otherwise, set up
1457 for floating-point trap; we return an overflow. */
1458 if (REAL_VALUE_ISNAN (d1))
1460 else if (REAL_VALUE_ISNAN (d2))
1463 REAL_ARITHMETIC (value, code, d1, d2);
1465 t = build_real (type, real_value_truncate (mode, value));
1468 = (force_fit_type (t, 0)
1469 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1470 TREE_CONSTANT_OVERFLOW (t)
1472 | TREE_CONSTANT_OVERFLOW (arg1)
1473 | TREE_CONSTANT_OVERFLOW (arg2);
1476 if (TREE_CODE (arg1) == COMPLEX_CST)
1478 tree type = TREE_TYPE (arg1);
1479 tree r1 = TREE_REALPART (arg1);
1480 tree i1 = TREE_IMAGPART (arg1);
1481 tree r2 = TREE_REALPART (arg2);
1482 tree i2 = TREE_IMAGPART (arg2);
1488 t = build_complex (type,
1489 const_binop (PLUS_EXPR, r1, r2, notrunc),
1490 const_binop (PLUS_EXPR, i1, i2, notrunc));
1494 t = build_complex (type,
1495 const_binop (MINUS_EXPR, r1, r2, notrunc),
1496 const_binop (MINUS_EXPR, i1, i2, notrunc));
1500 t = build_complex (type,
1501 const_binop (MINUS_EXPR,
1502 const_binop (MULT_EXPR,
1504 const_binop (MULT_EXPR,
1507 const_binop (PLUS_EXPR,
1508 const_binop (MULT_EXPR,
1510 const_binop (MULT_EXPR,
1518 = const_binop (PLUS_EXPR,
1519 const_binop (MULT_EXPR, r2, r2, notrunc),
1520 const_binop (MULT_EXPR, i2, i2, notrunc),
1523 t = build_complex (type,
1525 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1526 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1527 const_binop (PLUS_EXPR,
1528 const_binop (MULT_EXPR, r1, r2,
1530 const_binop (MULT_EXPR, i1, i2,
1533 magsquared, notrunc),
1535 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1536 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1537 const_binop (MINUS_EXPR,
1538 const_binop (MULT_EXPR, i1, r2,
1540 const_binop (MULT_EXPR, r1, i2,
1543 magsquared, notrunc));
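/* A self-contained numeric check of the complex division formula implemented
   above for COMPLEX_CST operands:
     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2).
   Compiled out; ordinary doubles stand in for the constant trees.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double r1 = 3.0, i1 = 4.0;		/* dividend 3 + 4i */
  double r2 = 1.0, i2 = -2.0;		/* divisor  1 - 2i */
  double magsq = r2 * r2 + i2 * i2;
  double qr = (r1 * r2 + i1 * i2) / magsq;
  double qi = (i1 * r2 - r1 * i2) / magsq;

  /* (3 + 4i) / (1 - 2i) = -1 + 2i; multiply back to verify.  */
  assert (fabs (qr * r2 - qi * i2 - r1) < 1e-12);
  assert (fabs (qr * i2 + qi * r2 - i1) < 1e-12);
  return 0;
}
#endif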
1555 /* These are the hash table functions for the hash table of INTEGER_CST
1556 nodes of a sizetype. */
1558 /* Return the hash code for X, an INTEGER_CST. */
1561 size_htab_hash (const void *x)
1565 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1566 ^ htab_hash_pointer (TREE_TYPE (t))
1567 ^ (TREE_OVERFLOW (t) << 20));
1570 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1571 is the same as the value represented by *Y, also an INTEGER_CST tree node. */
1574 size_htab_eq (const void *x, const void *y)
1579 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1580 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1581 && TREE_TYPE (xt) == TREE_TYPE (yt)
1582 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1585 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1586 bits are given by NUMBER and of the sizetype represented by KIND. */
1589 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1591 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1594 /* Likewise, but the desired type is specified explicitly. */
1596 static GTY (()) tree new_const;
1597 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1601 size_int_type_wide (HOST_WIDE_INT number, tree type)
1607 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1608 new_const = make_node (INTEGER_CST);
1611 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1612 hash table, we return the value from the hash table. Otherwise, we
1613 place that in the hash table and make a new node for the next time. */
1614 TREE_INT_CST_LOW (new_const) = number;
1615 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1616 TREE_TYPE (new_const) = type;
1617 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1618 = force_fit_type (new_const, 0);
1620 slot = htab_find_slot (size_htab, new_const, INSERT);
1626 new_const = make_node (INTEGER_CST);
1630 return (tree) *slot;
1633 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1634 is a tree code. The type of the result is taken from the operands.
1635 Both must be the same integer type, and it must be a sizetype.
1636 If the operands are constant, so is the result. */
1639 size_binop (enum tree_code code, tree arg0, tree arg1)
1641 tree type = TREE_TYPE (arg0);
1643 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1644 || type != TREE_TYPE (arg1))
1647 /* Handle the special case of two integer constants faster. */
1648 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1650 /* And some specific cases even faster than that. */
1651 if (code == PLUS_EXPR && integer_zerop (arg0))
1653 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1654 && integer_zerop (arg1))
1656 else if (code == MULT_EXPR && integer_onep (arg0))
1659 /* Handle general case of two integer constants. */
1660 return int_const_binop (code, arg0, arg1, 0);
1663 if (arg0 == error_mark_node || arg1 == error_mark_node)
1664 return error_mark_node;
1666 return fold (build (code, type, arg0, arg1));
1669 /* Given two values, either both of sizetype or both of bitsizetype,
1670 compute the difference between the two values. Return the value
1671 in signed type corresponding to the type of the operands. */
1674 size_diffop (tree arg0, tree arg1)
1676 tree type = TREE_TYPE (arg0);
1679 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1680 || type != TREE_TYPE (arg1))
1683 /* If the type is already signed, just do the simple thing. */
1684 if (! TREE_UNSIGNED (type))
1685 return size_binop (MINUS_EXPR, arg0, arg1);
1687 ctype = (type == bitsizetype || type == ubitsizetype
1688 ? sbitsizetype : ssizetype);
1690 /* If either operand is not a constant, do the conversions to the signed
1691 type and subtract. The hardware will do the right thing with any
1692 overflow in the subtraction. */
1693 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1694 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1695 fold_convert (ctype, arg1));
1697 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1698 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1699 overflow) and negate (which can't either). Special-case a result
1700 of zero while we're here. */
1701 if (tree_int_cst_equal (arg0, arg1))
1702 return fold_convert (ctype, integer_zero_node);
1703 else if (tree_int_cst_lt (arg1, arg0))
1704 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1706 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1707 fold_convert (ctype, size_binop (MINUS_EXPR,
1712 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1713 type TYPE. If no simplification can be done return NULL_TREE. */
1716 fold_convert_const (enum tree_code code, tree type, tree arg1)
1721 if (TREE_TYPE (arg1) == type)
1724 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1726 if (TREE_CODE (arg1) == INTEGER_CST)
1728 /* If we would build a constant wider than GCC supports,
1729 leave the conversion unfolded. */
1730 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1733 /* If we are trying to make a sizetype for a small integer, use
1734 size_int to pick up cached types to reduce duplicate nodes. */
1735 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1736 && !TREE_CONSTANT_OVERFLOW (arg1)
1737 && compare_tree_int (arg1, 10000) < 0)
1738 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1740 /* Given an integer constant, make new constant with new type,
1741 appropriately sign-extended or truncated. */
1742 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1743 TREE_INT_CST_HIGH (arg1));
1744 TREE_TYPE (t) = type;
1745 /* Indicate an overflow if (1) ARG1 already overflowed,
1746 or (2) force_fit_type indicates an overflow.
1747 Tell force_fit_type that an overflow has already occurred
1748 if ARG1 is a too-large unsigned value and T is signed.
1749 But don't indicate an overflow if converting a pointer. */
1751 = ((force_fit_type (t,
1752 (TREE_INT_CST_HIGH (arg1) < 0
1753 && (TREE_UNSIGNED (type)
1754 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1755 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1756 || TREE_OVERFLOW (arg1));
1757 TREE_CONSTANT_OVERFLOW (t)
1758 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1761 else if (TREE_CODE (arg1) == REAL_CST)
1763 /* The following code implements the floating point to integer
1764 conversion rules required by the Java Language Specification,
1765 that IEEE NaNs are mapped to zero and values that overflow
1766 the target precision saturate, i.e. values greater than
1767 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1768 are mapped to INT_MIN. These semantics are allowed by the
1769 C and C++ standards that simply state that the behavior of
1770 FP-to-integer conversion is unspecified upon overflow. */
1772 HOST_WIDE_INT high, low;
1775 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1779 case FIX_TRUNC_EXPR:
1780 real_trunc (&r, VOIDmode, &x);
1784 real_ceil (&r, VOIDmode, &x);
1787 case FIX_FLOOR_EXPR:
1788 real_floor (&r, VOIDmode, &x);
1795 /* If R is NaN, return zero and show we have an overflow. */
1796 if (REAL_VALUE_ISNAN (r))
1803 /* See if R is less than the lower bound or greater than the
1808 tree lt = TYPE_MIN_VALUE (type);
1809 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1810 if (REAL_VALUES_LESS (r, l))
1813 high = TREE_INT_CST_HIGH (lt);
1814 low = TREE_INT_CST_LOW (lt);
1820 tree ut = TYPE_MAX_VALUE (type);
1823 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1824 if (REAL_VALUES_LESS (u, r))
1827 high = TREE_INT_CST_HIGH (ut);
1828 low = TREE_INT_CST_LOW (ut);
1834 REAL_VALUE_TO_INT (&low, &high, r);
1836 t = build_int_2 (low, high);
1837 TREE_TYPE (t) = type;
1839 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1840 TREE_CONSTANT_OVERFLOW (t)
1841 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1845 else if (TREE_CODE (type) == REAL_TYPE)
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return build_real_from_int_cst (type, arg1);
1849 if (TREE_CODE (arg1) == REAL_CST)
1851 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1853 /* We make a copy of ARG1 so that we don't modify an
1854 existing constant tree. */
1855 t = copy_node (arg1);
1856 TREE_TYPE (t) = type;
1860 t = build_real (type,
1861 real_value_truncate (TYPE_MODE (type),
1862 TREE_REAL_CST (arg1)));
1865 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1866 TREE_CONSTANT_OVERFLOW (t)
1867 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
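/* A self-contained sketch of the saturating float-to-integer semantics
   described in fold_convert_const above: NaN maps to zero, out-of-range
   values clamp to the type's minimum or maximum, and everything else
   truncates.  Compiled out; double -> int32_t stands in for the general
   case.  */
#if 0
#include <assert.h>
#include <math.h>
#include <stdint.h>

static int32_t
demo_saturating_fix (double x)
{
  if (isnan (x))
    return 0;				/* NaN -> 0 */
  if (x <= (double) INT32_MIN)
    return INT32_MIN;			/* clamp below the minimum */
  if (x >= (double) INT32_MAX)
    return INT32_MAX;			/* clamp above the maximum */
  return (int32_t) trunc (x);		/* ordinary truncation */
}

int
main (void)
{
  assert (demo_saturating_fix (nan ("")) == 0);
  assert (demo_saturating_fix (1e30) == INT32_MAX);
  assert (demo_saturating_fix (-1e30) == INT32_MIN);
  assert (demo_saturating_fix (-3.7) == -3);
  return 0;
}
#endif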
1874 /* Convert expression ARG to type TYPE. Used by the middle-end for
1875 simple conversions in preference to calling the front-end's convert. */
1878 fold_convert (tree type, tree arg)
1880 tree orig = TREE_TYPE (arg);
1886 if (TREE_CODE (arg) == ERROR_MARK
1887 || TREE_CODE (type) == ERROR_MARK
1888 || TREE_CODE (orig) == ERROR_MARK)
1889 return error_mark_node;
1891 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1892 return fold (build1 (NOP_EXPR, type, arg));
1894 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1896 if (TREE_CODE (arg) == INTEGER_CST)
1898 tem = fold_convert_const (NOP_EXPR, type, arg);
1899 if (tem != NULL_TREE)
1902 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1903 return fold (build1 (NOP_EXPR, type, arg));
1904 if (TREE_CODE (orig) == COMPLEX_TYPE)
1906 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1907 return fold_convert (type, tem);
1909 if (TREE_CODE (orig) == VECTOR_TYPE
1910 && GET_MODE_SIZE (TYPE_MODE (type))
1911 == GET_MODE_SIZE (TYPE_MODE (orig)))
1912 return fold (build1 (NOP_EXPR, type, arg));
1914 else if (TREE_CODE (type) == REAL_TYPE)
1916 if (TREE_CODE (arg) == INTEGER_CST)
1918 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1922 else if (TREE_CODE (arg) == REAL_CST)
1924 tem = fold_convert_const (NOP_EXPR, type, arg);
1925 if (tem != NULL_TREE)
1929 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1930 return fold (build1 (FLOAT_EXPR, type, arg));
1931 if (TREE_CODE (orig) == REAL_TYPE)
1932 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1934 if (TREE_CODE (orig) == COMPLEX_TYPE)
1936 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1937 return fold_convert (type, tem);
1940 else if (TREE_CODE (type) == COMPLEX_TYPE)
1942 if (INTEGRAL_TYPE_P (orig)
1943 || POINTER_TYPE_P (orig)
1944 || TREE_CODE (orig) == REAL_TYPE)
1945 return build (COMPLEX_EXPR, type,
1946 fold_convert (TREE_TYPE (type), arg),
1947 fold_convert (TREE_TYPE (type), integer_zero_node));
1948 if (TREE_CODE (orig) == COMPLEX_TYPE)
1952 if (TREE_CODE (arg) == COMPLEX_EXPR)
1954 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1955 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1956 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1959 arg = save_expr (arg);
1960 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1961 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1962 rpart = fold_convert (TREE_TYPE (type), rpart);
1963 ipart = fold_convert (TREE_TYPE (type), ipart);
1964 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1967 else if (TREE_CODE (type) == VECTOR_TYPE)
1969 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1970 && GET_MODE_SIZE (TYPE_MODE (type))
1971 == GET_MODE_SIZE (TYPE_MODE (orig)))
1972 return fold (build1 (NOP_EXPR, type, arg));
1973 if (TREE_CODE (orig) == VECTOR_TYPE
1974 && GET_MODE_SIZE (TYPE_MODE (type))
1975 == GET_MODE_SIZE (TYPE_MODE (orig)))
1976 return fold (build1 (NOP_EXPR, type, arg));
1978 else if (VOID_TYPE_P (type))
1979 return fold (build1 (CONVERT_EXPR, type, arg));
1983 /* Return an expr equal to X but certainly not valid as an lvalue. */
1990 /* These things are certainly not lvalues. */
1991 if (TREE_CODE (x) == NON_LVALUE_EXPR
1992 || TREE_CODE (x) == INTEGER_CST
1993 || TREE_CODE (x) == REAL_CST
1994 || TREE_CODE (x) == STRING_CST
1995 || TREE_CODE (x) == ADDR_EXPR)
1998 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1999 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2003 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2004 Zero means allow extended lvalues. */
2006 int pedantic_lvalues;
2008 /* When pedantic, return an expr equal to X but certainly not valid as a
2009 pedantic lvalue. Otherwise, return X. */
2012 pedantic_non_lvalue (tree x)
2014 if (pedantic_lvalues)
2015 return non_lvalue (x);
2020 /* Given a tree comparison code, return the code that is the logical inverse
2021 of the given code. It is not safe to do this for floating-point
2022 comparisons, except for NE_EXPR and EQ_EXPR. */
2024 static enum tree_code
2025 invert_tree_comparison (enum tree_code code)
2046 /* Similar, but return the comparison that results if the operands are
2047 swapped. This is safe for floating-point. */
2049 static enum tree_code
2050 swap_tree_comparison (enum tree_code code)
2071 /* Convert a comparison tree code from an enum tree_code representation
2072 into a compcode bit-based encoding. This function is the inverse of
2073 compcode_to_comparison. */
2076 comparison_to_compcode (enum tree_code code)
2097 /* Convert a compcode bit-based encoding of a comparison operator back
2098 to GCC's enum tree_code representation. This function is the
2099 inverse of comparison_to_compcode. */
2101 static enum tree_code
2102 compcode_to_comparison (int code)
2123 /* Return nonzero if CODE is a tree code that represents a truth value. */
2126 truth_value_p (enum tree_code code)
2128 return (TREE_CODE_CLASS (code) == '<'
2129 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2130 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2131 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2134 /* Return nonzero if two operands (typically of the same tree node)
2135 are necessarily equal. If either argument has side-effects this
2136 function returns zero.
2138 If ONLY_CONST is nonzero, only return nonzero for constants.
2139 This function tests whether the operands are indistinguishable;
2140 it does not test whether they are equal using C's == operation.
2141 The distinction is important for IEEE floating point, because
2142 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2143 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2145 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2146 even though it may hold multiple values during a function.
2147 This is because a GCC tree node guarantees that nothing else is
2148 executed between the evaluation of its "operands" (which may often
2149 be evaluated in arbitrary order). Hence if the operands themselves
2150 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2151 same value in each operand/subexpression. Hence a zero value for
2152 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2153 If comparing arbitrary expression trees, such as from different
2154 statements, ONLY_CONST must usually be nonzero. */
2157 operand_equal_p (tree arg0, tree arg1, int only_const)
2161 /* If both types don't have the same signedness, then we can't consider
2162 them equal. We must check this before the STRIP_NOPS calls
2163 because they may change the signedness of the arguments. */
2164 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2170 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2171 /* This is needed for conversions and for COMPONENT_REF.
2172 Might as well play it safe and always test this. */
2173 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2174 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2175 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2178 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2179 We don't care about side effects in that case because the SAVE_EXPR
2180 takes care of that for us. In all other cases, two expressions are
2181 equal if they have no side effects. If we have two identical
2182 expressions with side effects that should be treated the same due
2183 to the only side effects being identical SAVE_EXPR's, that will
2184 be detected in the recursive calls below. */
2185 if (arg0 == arg1 && ! only_const
2186 && (TREE_CODE (arg0) == SAVE_EXPR
2187 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2190 /* Next handle constant cases, those for which we can return 1 even
2191 if ONLY_CONST is set. */
2192 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2193 switch (TREE_CODE (arg0))
2196 return (! TREE_CONSTANT_OVERFLOW (arg0)
2197 && ! TREE_CONSTANT_OVERFLOW (arg1)
2198 && tree_int_cst_equal (arg0, arg1));
2201 return (! TREE_CONSTANT_OVERFLOW (arg0)
2202 && ! TREE_CONSTANT_OVERFLOW (arg1)
2203 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2204 TREE_REAL_CST (arg1)));
2210 if (TREE_CONSTANT_OVERFLOW (arg0)
2211 || TREE_CONSTANT_OVERFLOW (arg1))
2214 v1 = TREE_VECTOR_CST_ELTS (arg0);
2215 v2 = TREE_VECTOR_CST_ELTS (arg1);
2218 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2221 v1 = TREE_CHAIN (v1);
2222 v2 = TREE_CHAIN (v2);
2229 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2231 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2235 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2236 && ! memcmp (TREE_STRING_POINTER (arg0),
2237 TREE_STRING_POINTER (arg1),
2238 TREE_STRING_LENGTH (arg0)));
2241 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2250 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2253 /* Two conversions are equal only if signedness and modes match. */
2254 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2255 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2256 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2259 return operand_equal_p (TREE_OPERAND (arg0, 0),
2260 TREE_OPERAND (arg1, 0), 0);
2264 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2265 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2269 /* For commutative ops, allow the other order. */
2270 return (commutative_tree_code (TREE_CODE (arg0))
2271 && operand_equal_p (TREE_OPERAND (arg0, 0),
2272 TREE_OPERAND (arg1, 1), 0)
2273 && operand_equal_p (TREE_OPERAND (arg0, 1),
2274 TREE_OPERAND (arg1, 0), 0));
2277 /* If either of the pointer (or reference) expressions we are
2278 dereferencing contain a side effect, these cannot be equal. */
2279 if (TREE_SIDE_EFFECTS (arg0)
2280 || TREE_SIDE_EFFECTS (arg1))
2283 switch (TREE_CODE (arg0))
2286 return operand_equal_p (TREE_OPERAND (arg0, 0),
2287 TREE_OPERAND (arg1, 0), 0);
2291 case ARRAY_RANGE_REF:
2292 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2293 TREE_OPERAND (arg1, 0), 0)
2294 && operand_equal_p (TREE_OPERAND (arg0, 1),
2295 TREE_OPERAND (arg1, 1), 0));
2298 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2299 TREE_OPERAND (arg1, 0), 0)
2300 && operand_equal_p (TREE_OPERAND (arg0, 1),
2301 TREE_OPERAND (arg1, 1), 0)
2302 && operand_equal_p (TREE_OPERAND (arg0, 2),
2303 TREE_OPERAND (arg1, 2), 0));
2309 switch (TREE_CODE (arg0))
2312 case TRUTH_NOT_EXPR:
2313 return operand_equal_p (TREE_OPERAND (arg0, 0),
2314 TREE_OPERAND (arg1, 0), 0);
2317 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2320 /* If the CALL_EXPRs call different functions, then they
2321 clearly can not be equal. */
2322 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2323 TREE_OPERAND (arg1, 0), 0))
2326 /* Only consider const functions equivalent. */
2327 fndecl = get_callee_fndecl (arg0);
2328 if (fndecl == NULL_TREE
2329 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2332 /* Now see if all the arguments are the same. operand_equal_p
2333 does not handle TREE_LIST, so we walk the operands here
2334 feeding them to operand_equal_p. */
2335 arg0 = TREE_OPERAND (arg0, 1);
2336 arg1 = TREE_OPERAND (arg1, 1);
2337 while (arg0 && arg1)
2339 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2342 arg0 = TREE_CHAIN (arg0);
2343 arg1 = TREE_CHAIN (arg1);
2346 /* If we get here and both argument lists are exhausted
2347 then the CALL_EXPRs are equal. */
2348 return ! (arg0 || arg1);
2355 /* Consider __builtin_sqrt equal to sqrt. */
2356 return TREE_CODE (arg0) == FUNCTION_DECL
2357 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2358 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2359 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
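/* Illustrative sketch, not part of this file: the same parallel-recursion
   idea behind operand_equal_p, applied to a toy expression node in plain C.
   The type and function names (toy_node, toy_equal_p) are hypothetical and
   unrelated to GCC's tree representation.  */

struct toy_node
{
  int code;			/* operation code; 0 means a leaf constant */
  long value;			/* meaningful only for leaves */
  struct toy_node *op0, *op1;	/* operands, may be null */
  int commutative;		/* nonzero if op0/op1 may be swapped */
};

static int
toy_equal_p (const struct toy_node *a, const struct toy_node *b)
{
  if (a == b)
    return 1;
  if (a == 0 || b == 0 || a->code != b->code)
    return 0;
  if (a->code == 0)
    return a->value == b->value;

  /* Compare operands in order; for commutative codes also try the
     swapped order, as the commutative_tree_code case above does.  */
  if (toy_equal_p (a->op0, b->op0) && toy_equal_p (a->op1, b->op1))
    return 1;
  return a->commutative
	 && toy_equal_p (a->op0, b->op1)
	 && toy_equal_p (a->op1, b->op0);
}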
2366 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2367 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2369 When in doubt, return 0. */
2372 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2374 int unsignedp1, unsignedpo;
2375 tree primarg0, primarg1, primother;
2376 unsigned int correct_width;
2378 if (operand_equal_p (arg0, arg1, 0))
2381 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2382 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2385 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2386 and see if the inner values are the same. This removes any
2387 signedness comparison, which doesn't matter here. */
2388 primarg0 = arg0, primarg1 = arg1;
2389 STRIP_NOPS (primarg0);
2390 STRIP_NOPS (primarg1);
2391 if (operand_equal_p (primarg0, primarg1, 0))
2394 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2395 actual comparison operand, ARG0.
2397 First throw away any conversions to wider types
2398 already present in the operands. */
2400 primarg1 = get_narrower (arg1, &unsignedp1);
2401 primother = get_narrower (other, &unsignedpo);
2403 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2404 if (unsignedp1 == unsignedpo
2405 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2406 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2408 tree type = TREE_TYPE (arg0);
2410 /* Make sure shorter operand is extended the right way
2411 to match the longer operand. */
2412 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2413 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2415 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2422 /* See if ARG is an expression that is either a comparison or is performing
2423 arithmetic on comparisons. The comparisons must only be comparing
2424 two different values, which will be stored in *CVAL1 and *CVAL2; if
2425 they are nonzero it means that some operands have already been found.
2426 No variables may be used anywhere else in the expression except in the
2427 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2428 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2430 If this is true, return 1. Otherwise, return zero. */
2433 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2435 enum tree_code code = TREE_CODE (arg);
2436 char class = TREE_CODE_CLASS (code);
2438 /* We can handle some of the 'e' cases here. */
2439 if (class == 'e' && code == TRUTH_NOT_EXPR)
2441 else if (class == 'e'
2442 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2443 || code == COMPOUND_EXPR))
2446 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2447 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2449 /* If we've already found a CVAL1 or CVAL2, this expression is
2450 too complex to handle. */
2451 if (*cval1 || *cval2)
2461 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2464 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2465 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2466 cval1, cval2, save_p));
2472 if (code == COND_EXPR)
2473 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2474 cval1, cval2, save_p)
2475 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2476 cval1, cval2, save_p)
2477 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2478 cval1, cval2, save_p));
2482 /* First see if we can handle the first operand, then the second. For
2483 the second operand, we know *CVAL1 can't be zero. It must be that
2484 one side of the comparison is each of the values; test for the
2485 case where this isn't true by failing if the two operands
2488 if (operand_equal_p (TREE_OPERAND (arg, 0),
2489 TREE_OPERAND (arg, 1), 0))
2493 *cval1 = TREE_OPERAND (arg, 0);
2494 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2496 else if (*cval2 == 0)
2497 *cval2 = TREE_OPERAND (arg, 0);
2498 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2503 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2505 else if (*cval2 == 0)
2506 *cval2 = TREE_OPERAND (arg, 1);
2507 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2519 /* ARG is a tree that is known to contain just arithmetic operations and
2520 comparisons. Evaluate the operations in the tree substituting NEW0 for
2521 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2525 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2527 tree type = TREE_TYPE (arg);
2528 enum tree_code code = TREE_CODE (arg);
2529 char class = TREE_CODE_CLASS (code);
2531 /* We can handle some of the 'e' cases here. */
2532 if (class == 'e' && code == TRUTH_NOT_EXPR)
2534 else if (class == 'e'
2535 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2541 return fold (build1 (code, type,
2542 eval_subst (TREE_OPERAND (arg, 0),
2543 old0, new0, old1, new1)));
2546 return fold (build (code, type,
2547 eval_subst (TREE_OPERAND (arg, 0),
2548 old0, new0, old1, new1),
2549 eval_subst (TREE_OPERAND (arg, 1),
2550 old0, new0, old1, new1)));
2556 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2559 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2562 return fold (build (code, type,
2563 eval_subst (TREE_OPERAND (arg, 0),
2564 old0, new0, old1, new1),
2565 eval_subst (TREE_OPERAND (arg, 1),
2566 old0, new0, old1, new1),
2567 eval_subst (TREE_OPERAND (arg, 2),
2568 old0, new0, old1, new1)));
2572 /* Fall through - ??? */
2576 tree arg0 = TREE_OPERAND (arg, 0);
2577 tree arg1 = TREE_OPERAND (arg, 1);
2579 /* We need to check both for exact equality and tree equality. The
2580 former will be true if the operand has a side-effect. In that
2581 case, we know the operand occurred exactly once. */
2583 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2585 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2588 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2590 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2593 return fold (build (code, type, arg0, arg1));
2601 /* Return a tree for the case when the result of an expression is RESULT
2602 converted to TYPE and OMITTED was previously an operand of the expression
2603 but is now not needed (e.g., we folded OMITTED * 0).
2605 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2606 the conversion of RESULT to TYPE. */
2609 omit_one_operand (tree type, tree result, tree omitted)
2611 tree t = fold_convert (type, result);
2613 if (TREE_SIDE_EFFECTS (omitted))
2614 return build (COMPOUND_EXPR, type, omitted, t);
2616 return non_lvalue (t);
2619 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2622 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2624 tree t = fold_convert (type, result);
2626 if (TREE_SIDE_EFFECTS (omitted))
2627 return build (COMPOUND_EXPR, type, omitted, t);
2629 return pedantic_non_lvalue (t);
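/* Illustrative sketch, not part of this file: at the C level the
   COMPOUND_EXPR built by omit_one_operand corresponds to the comma
   operator.  Folding "bump () * 0" must still evaluate bump () for its
   side effect, so the folded form is "(bump (), 0)".  The names counter,
   bump and folded_bump_times_zero are hypothetical.  */

static int counter;

static int
bump (void)
{
  return ++counter;		/* side effect that must be preserved */
}

static int
folded_bump_times_zero (void)
{
  /* What the fold produces for "bump () * 0": evaluate the omitted
     operand, then yield the constant result.  */
  return (bump (), 0);
}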
2632 /* Return a simplified tree node for the truth-negation of ARG. This
2633 never alters ARG itself. We assume that ARG is an operation that
2634 returns a truth value (0 or 1). */
2637 invert_truthvalue (tree arg)
2639 tree type = TREE_TYPE (arg);
2640 enum tree_code code = TREE_CODE (arg);
2642 if (code == ERROR_MARK)
2645 /* If this is a comparison, we can simply invert it, except for
2646 floating-point non-equality comparisons, in which case we just
2647 enclose a TRUTH_NOT_EXPR around what we have. */
2649 if (TREE_CODE_CLASS (code) == '<')
2651 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2652 && !flag_unsafe_math_optimizations
2655 return build1 (TRUTH_NOT_EXPR, type, arg);
2656 else if (code == UNORDERED_EXPR
2657 || code == ORDERED_EXPR
2658 || code == UNEQ_EXPR
2659 || code == UNLT_EXPR
2660 || code == UNLE_EXPR
2661 || code == UNGT_EXPR
2662 || code == UNGE_EXPR)
2663 return build1 (TRUTH_NOT_EXPR, type, arg);
2665 return build (invert_tree_comparison (code), type,
2666 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2672 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2674 case TRUTH_AND_EXPR:
2675 return build (TRUTH_OR_EXPR, type,
2676 invert_truthvalue (TREE_OPERAND (arg, 0)),
2677 invert_truthvalue (TREE_OPERAND (arg, 1)));
2680 return build (TRUTH_AND_EXPR, type,
2681 invert_truthvalue (TREE_OPERAND (arg, 0)),
2682 invert_truthvalue (TREE_OPERAND (arg, 1)));
2684 case TRUTH_XOR_EXPR:
2685 /* Here we can invert either operand. We invert the first operand
2686 unless the second operand is a TRUTH_NOT_EXPR in which case our
2687 result is the XOR of the first operand with the inside of the
2688 negation of the second operand. */
2690 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2691 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2692 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2694 return build (TRUTH_XOR_EXPR, type,
2695 invert_truthvalue (TREE_OPERAND (arg, 0)),
2696 TREE_OPERAND (arg, 1));
2698 case TRUTH_ANDIF_EXPR:
2699 return build (TRUTH_ORIF_EXPR, type,
2700 invert_truthvalue (TREE_OPERAND (arg, 0)),
2701 invert_truthvalue (TREE_OPERAND (arg, 1)));
2703 case TRUTH_ORIF_EXPR:
2704 return build (TRUTH_ANDIF_EXPR, type,
2705 invert_truthvalue (TREE_OPERAND (arg, 0)),
2706 invert_truthvalue (TREE_OPERAND (arg, 1)));
2708 case TRUTH_NOT_EXPR:
2709 return TREE_OPERAND (arg, 0);
2712 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2713 invert_truthvalue (TREE_OPERAND (arg, 1)),
2714 invert_truthvalue (TREE_OPERAND (arg, 2)));
2717 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2718 invert_truthvalue (TREE_OPERAND (arg, 1)));
2720 case WITH_RECORD_EXPR:
2721 return build (WITH_RECORD_EXPR, type,
2722 invert_truthvalue (TREE_OPERAND (arg, 0)),
2723 TREE_OPERAND (arg, 1));
2725 case NON_LVALUE_EXPR:
2726 return invert_truthvalue (TREE_OPERAND (arg, 0));
2731 return build1 (TREE_CODE (arg), type,
2732 invert_truthvalue (TREE_OPERAND (arg, 0)));
2735 if (!integer_onep (TREE_OPERAND (arg, 1)))
2737 return build (EQ_EXPR, type, arg,
2738 fold_convert (type, integer_zero_node));
2741 return build1 (TRUTH_NOT_EXPR, type, arg);
2743 case CLEANUP_POINT_EXPR:
2744 return build1 (CLEANUP_POINT_EXPR, type,
2745 invert_truthvalue (TREE_OPERAND (arg, 0)));
2750 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2752 return build1 (TRUTH_NOT_EXPR, type, arg);
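/* Illustrative sketch, not part of this file: two of the integer-only
   rewrites performed by invert_truthvalue, written as plain C.  A
   TRUTH_ANDIF_EXPR inverts to a TRUTH_ORIF_EXPR of the inverted operands
   (De Morgan), and an integer comparison inverts to the opposite
   comparison.  Function names are hypothetical.  */

static int
inverted_andif (int a, int b)
{
  /* !(a && b) becomes (!a || !b).  */
  return !a || !b;
}

static int
inverted_less_than (int a, int b)
{
  /* !(a < b) becomes (a >= b).  This is valid for integers; the code
     above declines to do it for floating point without
     -funsafe-math-optimizations because of NaNs.  */
  return a >= b;
}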
2755 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2756 operands are another bit-wise operation with a common input. If so,
2757 distribute the bit operations to save an operation and possibly two if
2758 constants are involved. For example, convert
2759 (A | B) & (A | C) into A | (B & C)
2760 Further simplification will occur if B and C are constants.
2762 If this optimization cannot be done, 0 will be returned. */
2765 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2770 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2771 || TREE_CODE (arg0) == code
2772 || (TREE_CODE (arg0) != BIT_AND_EXPR
2773 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2776 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2778 common = TREE_OPERAND (arg0, 0);
2779 left = TREE_OPERAND (arg0, 1);
2780 right = TREE_OPERAND (arg1, 1);
2782 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2784 common = TREE_OPERAND (arg0, 0);
2785 left = TREE_OPERAND (arg0, 1);
2786 right = TREE_OPERAND (arg1, 0);
2788 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2790 common = TREE_OPERAND (arg0, 1);
2791 left = TREE_OPERAND (arg0, 0);
2792 right = TREE_OPERAND (arg1, 1);
2794 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2796 common = TREE_OPERAND (arg0, 1);
2797 left = TREE_OPERAND (arg0, 0);
2798 right = TREE_OPERAND (arg1, 0);
2803 return fold (build (TREE_CODE (arg0), type, common,
2804 fold (build (code, type, left, right))));
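/* Illustrative sketch, not part of this file: the distributive identity
   used by distribute_bit_expr, on plain unsigned ints.  Both functions
   compute the same value for all inputs; the second needs one fewer
   bit-wise operation, and B and C can then fold together when they are
   constants.  Function names are hypothetical.  */

static unsigned int
before_distribute (unsigned int a, unsigned int b, unsigned int c)
{
  return (a | b) & (a | c);
}

static unsigned int
after_distribute (unsigned int a, unsigned int b, unsigned int c)
{
  return a | (b & c);
}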
2807 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2808 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2811 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2814 tree result = build (BIT_FIELD_REF, type, inner,
2815 size_int (bitsize), bitsize_int (bitpos));
2817 TREE_UNSIGNED (result) = unsignedp;
2822 /* Optimize a bit-field compare.
2824 There are two cases: First is a compare against a constant and the
2825 second is a comparison of two items where the fields are at the same
2826 bit position relative to the start of a chunk (byte, halfword, word)
2827 large enough to contain it. In these cases we can avoid the shift
2828 implicit in bitfield extractions.
2830 For constants, we emit a compare of the shifted constant with the
2831 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2832 compared. For two fields at the same position, we do the ANDs with the
2833 similar mask and compare the result of the ANDs.
2835 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2836 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2837 are the left and right operands of the comparison, respectively.
2839 If the optimization described above can be done, we return the resulting
2840 tree. Otherwise we return zero. */
2843 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2846 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2847 tree type = TREE_TYPE (lhs);
2848 tree signed_type, unsigned_type;
2849 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2850 enum machine_mode lmode, rmode, nmode;
2851 int lunsignedp, runsignedp;
2852 int lvolatilep = 0, rvolatilep = 0;
2853 tree linner, rinner = NULL_TREE;
2857 /* Get all the information about the extractions being done. If the bit size
2858 is the same as the size of the underlying object, we aren't doing an
2859 extraction at all and so can do nothing. We also don't want to
2860 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2861 then will no longer be able to replace it. */
2862 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2863 &lunsignedp, &lvolatilep);
2864 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2865 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2870 /* If this is not a constant, we can only do something if bit positions,
2871 sizes, and signedness are the same. */
2872 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2873 &runsignedp, &rvolatilep);
2875 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2876 || lunsignedp != runsignedp || offset != 0
2877 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2881 /* See if we can find a mode to refer to this field. We should be able to,
2882 but fail if we can't. */
2883 nmode = get_best_mode (lbitsize, lbitpos,
2884 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2885 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2886 TYPE_ALIGN (TREE_TYPE (rinner))),
2887 word_mode, lvolatilep || rvolatilep);
2888 if (nmode == VOIDmode)
2891 /* Set signed and unsigned types of the precision of this mode for the extraction. */
2893 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2894 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2896 /* Compute the bit position and size for the new reference and our offset
2897 within it. If the new reference is the same size as the original, we
2898 won't optimize anything, so return zero. */
2899 nbitsize = GET_MODE_BITSIZE (nmode);
2900 nbitpos = lbitpos & ~ (nbitsize - 1);
2902 if (nbitsize == lbitsize)
2905 if (BYTES_BIG_ENDIAN)
2906 lbitpos = nbitsize - lbitsize - lbitpos;
2908 /* Make the mask to be used against the extracted field. */
2909 mask = build_int_2 (~0, ~0);
2910 TREE_TYPE (mask) = unsigned_type;
2911 force_fit_type (mask, 0);
2912 mask = fold_convert (unsigned_type, mask);
2913 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2914 mask = const_binop (RSHIFT_EXPR, mask,
2915 size_int (nbitsize - lbitsize - lbitpos), 0);
2918 /* If not comparing with constant, just rework the comparison and return. */
2920 return build (code, compare_type,
2921 build (BIT_AND_EXPR, unsigned_type,
2922 make_bit_field_ref (linner, unsigned_type,
2923 nbitsize, nbitpos, 1),
2925 build (BIT_AND_EXPR, unsigned_type,
2926 make_bit_field_ref (rinner, unsigned_type,
2927 nbitsize, nbitpos, 1),
2930 /* Otherwise, we are handling the constant case. See if the constant is too
2931 big for the field. Warn and return a tree for 0 (false) if so. We do
2932 this not only for its own sake, but to avoid having to test for this
2933 error case below. If we didn't, we might generate wrong code.
2935 For unsigned fields, the constant shifted right by the field length should
2936 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
2941 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2942 fold_convert (unsigned_type, rhs),
2943 size_int (lbitsize), 0)))
2945 warning ("comparison is always %d due to width of bit-field",
2947 return fold_convert (compare_type,
2949 ? integer_one_node : integer_zero_node));
2954 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2955 size_int (lbitsize - 1), 0);
2956 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2958 warning ("comparison is always %d due to width of bit-field",
2960 return fold_convert (compare_type,
2962 ? integer_one_node : integer_zero_node));
2966 /* Single-bit compares should always be against zero. */
2967 if (lbitsize == 1 && ! integer_zerop (rhs))
2969 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2970 rhs = fold_convert (type, integer_zero_node);
2973 /* Make a new bitfield reference, shift the constant over the
2974 appropriate number of bits and mask it with the computed mask
2975 (in case this was a signed field). If we changed it, make a new one. */
2976 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2979 TREE_SIDE_EFFECTS (lhs) = 1;
2980 TREE_THIS_VOLATILE (lhs) = 1;
2983 rhs = fold (const_binop (BIT_AND_EXPR,
2984 const_binop (LSHIFT_EXPR,
2985 fold_convert (unsigned_type, rhs),
2986 size_int (lbitpos), 0),
2989 return build (code, compare_type,
2990 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
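/* Illustrative sketch, not part of this file: the constant case of
   optimize_bit_field_compare on a plain unsigned char.  A 3-bit field at
   bit position 2 is compared against the constant 5 by masking the
   containing byte and comparing it with the constant shifted into place,
   avoiding the extract-and-shift sequence.  The names and the
   little-endian bit numbering are hypothetical.  */

static int
field_equals_5_by_extraction (unsigned char byte)
{
  return ((byte >> 2) & 0x7) == 5;
}

static int
field_equals_5_by_masking (unsigned char byte)
{
  /* Mask is 0x7 << 2 == 0x1c, constant is 5 << 2 == 0x14; one AND and
     one compare, no shift at run time.  */
  return (byte & 0x1c) == 0x14;
}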
2994 /* Subroutine for fold_truthop: decode a field reference.
2996 If EXP is a comparison reference, we return the innermost reference.
2998 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2999 set to the starting bit number.
3001 If the innermost field can be completely contained in a mode-sized
3002 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3004 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3005 otherwise it is not changed.
3007 *PUNSIGNEDP is set to the signedness of the field.
3009 *PMASK is set to the mask used. This is either contained in a
3010 BIT_AND_EXPR or derived from the width of the field.
3012 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3014 Return 0 if this is not a component reference or is one that we can't
3015 do anything with. */
3018 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3019 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3020 int *punsignedp, int *pvolatilep,
3021 tree *pmask, tree *pand_mask)
3023 tree outer_type = 0;
3025 tree mask, inner, offset;
3027 unsigned int precision;
3029 /* All the optimizations using this function assume integer fields.
3030 There are problems with FP fields since the type_for_size call
3031 below can fail for, e.g., XFmode. */
3032 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3035 /* We are interested in the bare arrangement of bits, so strip everything
3036 that doesn't affect the machine mode. However, record the type of the
3037 outermost expression if it may matter below. */
3038 if (TREE_CODE (exp) == NOP_EXPR
3039 || TREE_CODE (exp) == CONVERT_EXPR
3040 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3041 outer_type = TREE_TYPE (exp);
3044 if (TREE_CODE (exp) == BIT_AND_EXPR)
3046 and_mask = TREE_OPERAND (exp, 1);
3047 exp = TREE_OPERAND (exp, 0);
3048 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3049 if (TREE_CODE (and_mask) != INTEGER_CST)
3053 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3054 punsignedp, pvolatilep);
3055 if ((inner == exp && and_mask == 0)
3056 || *pbitsize < 0 || offset != 0
3057 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3060 /* If the number of bits in the reference is the same as the bitsize of
3061 the outer type, then the outer type gives the signedness. Otherwise
3062 (in case of a small bitfield) the signedness is unchanged. */
3063 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3064 *punsignedp = TREE_UNSIGNED (outer_type);
3066 /* Compute the mask to access the bitfield. */
3067 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3068 precision = TYPE_PRECISION (unsigned_type);
3070 mask = build_int_2 (~0, ~0);
3071 TREE_TYPE (mask) = unsigned_type;
3072 force_fit_type (mask, 0);
3073 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3074 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3076 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3078 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3079 fold_convert (unsigned_type, and_mask), mask));
3082 *pand_mask = and_mask;
3086 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3090 all_ones_mask_p (tree mask, int size)
3092 tree type = TREE_TYPE (mask);
3093 unsigned int precision = TYPE_PRECISION (type);
3096 tmask = build_int_2 (~0, ~0);
3097 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3098 force_fit_type (tmask, 0);
3100 tree_int_cst_equal (mask,
3101 const_binop (RSHIFT_EXPR,
3102 const_binop (LSHIFT_EXPR, tmask,
3103 size_int (precision - size),
3105 size_int (precision - size), 0));
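/* Illustrative sketch, not part of this file: the same left-then-right
   shift trick all_ones_mask_p checks against, used here to build SIZE
   low-order one bits in a host unsigned long without ever shifting by the
   full width (which C leaves undefined).  Assumes 1 <= size <= the number
   of bits in unsigned long; the function name is hypothetical.  */

#include <limits.h>

static unsigned long
low_order_ones (int size)
{
  int precision = (int) (sizeof (unsigned long) * CHAR_BIT);

  /* ~0UL shifted left and then right by (precision - size) leaves exactly
     SIZE one bits in the low-order positions; size == precision gives a
     shift count of zero, so the full mask survives unchanged.  */
  return (~0UL << (precision - size)) >> (precision - size);
}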
3108 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3109 represents the sign bit of EXP's type. If EXP represents a sign
3110 or zero extension, also test VAL against the unextended type.
3111 The return value is the (sub)expression whose sign bit is VAL,
3112 or NULL_TREE otherwise. */
3115 sign_bit_p (tree exp, tree val)
3117 unsigned HOST_WIDE_INT mask_lo, lo;
3118 HOST_WIDE_INT mask_hi, hi;
3122 /* Tree EXP must have an integral type. */
3123 t = TREE_TYPE (exp);
3124 if (! INTEGRAL_TYPE_P (t))
3127 /* Tree VAL must be an integer constant. */
3128 if (TREE_CODE (val) != INTEGER_CST
3129 || TREE_CONSTANT_OVERFLOW (val))
3132 width = TYPE_PRECISION (t);
3133 if (width > HOST_BITS_PER_WIDE_INT)
3135 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3138 mask_hi = ((unsigned HOST_WIDE_INT) -1
3139 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3145 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3148 mask_lo = ((unsigned HOST_WIDE_INT) -1
3149 >> (HOST_BITS_PER_WIDE_INT - width));
3152 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3153 treat VAL as if it were unsigned. */
3154 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3155 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3158 /* Handle extension from a narrower type. */
3159 if (TREE_CODE (exp) == NOP_EXPR
3160 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3161 return sign_bit_p (TREE_OPERAND (exp, 0), val);
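/* Illustrative sketch, not part of this file: the host-arithmetic analogue
   of sign_bit_p for values no wider than unsigned long.  VAL represents
   the sign bit of a WIDTH-bit type exactly when, masked to WIDTH bits, it
   equals 1 << (WIDTH - 1).  The function name is hypothetical.  */

#include <limits.h>

static int
is_sign_bit (unsigned long val, int width)
{
  unsigned long sign, mask;

  if (width <= 0 || width > (int) (sizeof (unsigned long) * CHAR_BIT))
    return 0;

  sign = 1UL << (width - 1);
  /* Mask off bits beyond WIDTH so VAL is treated as unsigned, as the
     comment above describes.  (For the full width, sign << 1 wraps to
     zero and the mask becomes all ones.)  */
  mask = (sign << 1) - 1;
  return (val & mask) == sign;
}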
3166 /* Subroutine for fold_truthop: determine if an operand is simple enough
3167 to be evaluated unconditionally. */
3170 simple_operand_p (tree exp)
3172 /* Strip any conversions that don't change the machine mode. */
3173 while ((TREE_CODE (exp) == NOP_EXPR
3174 || TREE_CODE (exp) == CONVERT_EXPR)
3175 && (TYPE_MODE (TREE_TYPE (exp))
3176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3177 exp = TREE_OPERAND (exp, 0);
3179 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3181 && ! TREE_ADDRESSABLE (exp)
3182 && ! TREE_THIS_VOLATILE (exp)
3183 && ! DECL_NONLOCAL (exp)
3184 /* Don't regard global variables as simple. They may be
3185 allocated in ways unknown to the compiler (shared memory,
3186 #pragma weak, etc). */
3187 && ! TREE_PUBLIC (exp)
3188 && ! DECL_EXTERNAL (exp)
3189 /* Loading a static variable is unduly expensive, but global
3190 registers aren't expensive. */
3191 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3194 /* The following functions are subroutines to fold_range_test and allow it to
3195 try to change a logical combination of comparisons into a range test.
3198 X == 2 || X == 3 || X == 4 || X == 5
3202 (unsigned) (X - 2) <= 3
3204 We describe each set of comparisons as being either inside or outside
3205 a range, using a variable named like IN_P, and then describe the
3206 range with a lower and upper bound. If one of the bounds is omitted,
3207 it represents either the highest or lowest value of the type.
3209 In the comments below, we represent a range by two numbers in brackets
3210 preceded by a "+" to designate being inside that range, or a "-" to
3211 designate being outside that range, so the condition can be inverted by
3212 flipping the prefix. An omitted bound is represented by a "-". For
3213 example, "- [-, 10]" means being outside the range starting at the lowest
3214 possible value and ending at 10, in other words, being greater than 10.
3215 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3218 We set up things so that the missing bounds are handled in a consistent
3219 manner so neither a missing bound nor "true" and "false" need to be
3220 handled using a special case. */
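/* Illustrative sketch, not part of this file: the example from the comment
   above written out in plain C.  Both functions return the same value for
   every X; the second replaces a chain of tests with one subtraction and a
   single unsigned comparison.  Function names are hypothetical.  */

static int
in_2_to_5_by_cases (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
in_2_to_5_by_range_test (int x)
{
  /* Doing the subtraction in unsigned arithmetic sends values below 2 far
     above 3 by wrap-around, so one comparison covers both bounds.  */
  return (unsigned int) x - 2u <= 3u;
}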
3222 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3223 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3224 and UPPER1_P are nonzero if the respective argument is an upper bound
3225 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3226 must be specified for a comparison. ARG1 will be converted to ARG0's
3227 type if both are specified. */
3230 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3231 tree arg1, int upper1_p)
3237 /* If neither arg represents infinity, do the normal operation.
3238 Else, if not a comparison, return infinity. Else handle the special
3239 comparison rules. Note that most of the cases below won't occur, but
3240 are handled for consistency. */
3242 if (arg0 != 0 && arg1 != 0)
3244 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3245 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3247 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3250 if (TREE_CODE_CLASS (code) != '<')
3253 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3254 for neither. In real maths, we cannot assume open ended ranges are
3255 the same. But, this is computer arithmetic, where numbers are finite.
3256 We can therefore make the transformation of any unbounded range with
3257 the value Z, Z being greater than any representable number. This permits
3258 us to treat unbounded ranges as equal. */
3259 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3260 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3264 result = sgn0 == sgn1;
3267 result = sgn0 != sgn1;
3270 result = sgn0 < sgn1;
3273 result = sgn0 <= sgn1;
3276 result = sgn0 > sgn1;
3279 result = sgn0 >= sgn1;
3285 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3288 /* Given EXP, a logical expression, set the range it is testing into
3289 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3290 actually being tested. *PLOW and *PHIGH will be made of the same type
3291 as the returned expression. If EXP is not a comparison, we will most
3292 likely not be returning a useful value and range. */
3295 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3297 enum tree_code code;
3298 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3299 tree orig_type = NULL_TREE;
3301 tree low, high, n_low, n_high;
3303 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3304 and see if we can refine the range. Some of the cases below may not
3305 happen, but it doesn't seem worth worrying about this. We "continue"
3306 the outer loop when we've changed something; otherwise we "break"
3307 the switch, which will "break" the while. */
3310 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3314 code = TREE_CODE (exp);
3316 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3318 if (first_rtl_op (code) > 0)
3319 arg0 = TREE_OPERAND (exp, 0);
3320 if (TREE_CODE_CLASS (code) == '<'
3321 || TREE_CODE_CLASS (code) == '1'
3322 || TREE_CODE_CLASS (code) == '2')
3323 type = TREE_TYPE (arg0);
3324 if (TREE_CODE_CLASS (code) == '2'
3325 || TREE_CODE_CLASS (code) == '<'
3326 || (TREE_CODE_CLASS (code) == 'e'
3327 && TREE_CODE_LENGTH (code) > 1))
3328 arg1 = TREE_OPERAND (exp, 1);
3331 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3332 lose a cast by accident. */
3333 if (type != NULL_TREE && orig_type == NULL_TREE)
3338 case TRUTH_NOT_EXPR:
3339 in_p = ! in_p, exp = arg0;
3342 case EQ_EXPR: case NE_EXPR:
3343 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3344 /* We can only do something if the range is testing for zero
3345 and if the second operand is an integer constant. Note that
3346 saying something is "in" the range we make is done by
3347 complementing IN_P since it will set in the initial case of
3348 being not equal to zero; "out" is leaving it alone. */
3349 if (low == 0 || high == 0
3350 || ! integer_zerop (low) || ! integer_zerop (high)
3351 || TREE_CODE (arg1) != INTEGER_CST)
3356 case NE_EXPR: /* - [c, c] */
3359 case EQ_EXPR: /* + [c, c] */
3360 in_p = ! in_p, low = high = arg1;
3362 case GT_EXPR: /* - [-, c] */
3363 low = 0, high = arg1;
3365 case GE_EXPR: /* + [c, -] */
3366 in_p = ! in_p, low = arg1, high = 0;
3368 case LT_EXPR: /* - [c, -] */
3369 low = arg1, high = 0;
3371 case LE_EXPR: /* + [-, c] */
3372 in_p = ! in_p, low = 0, high = arg1;
3380 /* If this is an unsigned comparison, we also know that EXP is
3381 greater than or equal to zero. We base the range tests we make
3382 on that fact, so we record it here so we can parse existing range tests. */
3384 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3386 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3387 1, fold_convert (type, integer_zero_node),
3391 in_p = n_in_p, low = n_low, high = n_high;
3393 /* If the high bound is missing, but we have a nonzero low
3394 bound, reverse the range so it goes from zero to the low bound minus 1. */
3396 if (high == 0 && low && ! integer_zerop (low))
3399 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3400 integer_one_node, 0);
3401 low = fold_convert (type, integer_zero_node);
3407 /* (-x) IN [a,b] -> x in [-b, -a] */
3408 n_low = range_binop (MINUS_EXPR, type,
3409 fold_convert (type, integer_zero_node),
3411 n_high = range_binop (MINUS_EXPR, type,
3412 fold_convert (type, integer_zero_node),
3414 low = n_low, high = n_high;
3420 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3421 fold_convert (type, integer_one_node));
3424 case PLUS_EXPR: case MINUS_EXPR:
3425 if (TREE_CODE (arg1) != INTEGER_CST)
3428 /* If EXP is signed, any overflow in the computation is undefined,
3429 so we don't worry about it so long as our computations on
3430 the bounds don't overflow. For unsigned, overflow is defined
3431 and this is exactly the right thing. */
3432 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3433 type, low, 0, arg1, 0);
3434 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3435 type, high, 1, arg1, 0);
3436 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3437 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3440 /* Check for an unsigned range which has wrapped around the maximum
3441 value thus making n_high < n_low, and normalize it. */
3442 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3444 low = range_binop (PLUS_EXPR, type, n_high, 0,
3445 integer_one_node, 0);
3446 high = range_binop (MINUS_EXPR, type, n_low, 0,
3447 integer_one_node, 0);
3449 /* If the range is of the form +/- [ x+1, x ], we won't
3450 be able to normalize it. But then, it represents the
3451 whole range or the empty set, so make it +/- [ -, - ]. */
3453 if (tree_int_cst_equal (n_low, low)
3454 && tree_int_cst_equal (n_high, high))
3460 low = n_low, high = n_high;
3465 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3466 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3469 if (! INTEGRAL_TYPE_P (type)
3470 || (low != 0 && ! int_fits_type_p (low, type))
3471 || (high != 0 && ! int_fits_type_p (high, type)))
3474 n_low = low, n_high = high;
3477 n_low = fold_convert (type, n_low);
3480 n_high = fold_convert (type, n_high);
3482 /* If we're converting from an unsigned to a signed type,
3483 we will be doing the comparison as unsigned. The tests above
3484 have already verified that LOW and HIGH are both positive.
3486 So we have to make sure that the original unsigned value will
3487 be interpreted as positive. */
3488 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3490 tree equiv_type = lang_hooks.types.type_for_mode
3491 (TYPE_MODE (type), 1);
3494 /* A range without an upper bound is, naturally, unbounded.
3495 Since convert would have cropped a very large value, use
3496 the max value for the destination type. */
3498 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3499 : TYPE_MAX_VALUE (type);
3501 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3502 high_positive = fold (build (RSHIFT_EXPR, type,
3506 integer_one_node)));
3508 /* If the low bound is specified, "and" the range with the
3509 range for which the original unsigned value will be positive. */
3513 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3514 1, n_low, n_high, 1,
3515 fold_convert (type, integer_zero_node),
3519 in_p = (n_in_p == in_p);
3523 /* Otherwise, "or" the range with the range of the input
3524 that will be interpreted as negative. */
3525 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3526 0, n_low, n_high, 1,
3527 fold_convert (type, integer_zero_node),
3531 in_p = (in_p != n_in_p);
3536 low = n_low, high = n_high;
3546 /* If EXP is a constant, we can evaluate whether this is true or false. */
3547 if (TREE_CODE (exp) == INTEGER_CST)
3549 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3551 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3557 *pin_p = in_p, *plow = low, *phigh = high;
3561 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3562 type, TYPE, return an expression to test if EXP is in (or out of, depending
3563 on IN_P) the range. */
3566 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3568 tree etype = TREE_TYPE (exp);
3572 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3573 return invert_truthvalue (value);
3575 if (low == 0 && high == 0)
3576 return fold_convert (type, integer_one_node);
3579 return fold (build (LE_EXPR, type, exp, high));
3582 return fold (build (GE_EXPR, type, exp, low));
3584 if (operand_equal_p (low, high, 0))
3585 return fold (build (EQ_EXPR, type, exp, low));
3587 if (integer_zerop (low))
3589 if (! TREE_UNSIGNED (etype))
3591 etype = lang_hooks.types.unsigned_type (etype);
3592 high = fold_convert (etype, high);
3593 exp = fold_convert (etype, exp);
3595 return build_range_check (type, exp, 1, 0, high);
3598 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3599 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3601 unsigned HOST_WIDE_INT lo;
3605 prec = TYPE_PRECISION (etype);
3606 if (prec <= HOST_BITS_PER_WIDE_INT)
3609 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3613 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3614 lo = (unsigned HOST_WIDE_INT) -1;
3617 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3619 if (TREE_UNSIGNED (etype))
3621 etype = lang_hooks.types.signed_type (etype);
3622 exp = fold_convert (etype, exp);
3624 return fold (build (GT_EXPR, type, exp,
3625 fold_convert (etype, integer_zero_node)));
3629 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3630 && ! TREE_OVERFLOW (value))
3631 return build_range_check (type,
3632 fold (build (MINUS_EXPR, etype, exp, low)),
3633 1, fold_convert (etype, integer_zero_node),
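/* Illustrative sketch, not part of this file: the special case noted above
   for a range of the form [1, signed-maximum].  For an unsigned char C,
   "C >= 1 && C <= 127" holds exactly when (signed char) C > 0, assuming an
   8-bit, two's-complement signed char.  Function names are hypothetical.  */

static int
in_1_to_127_by_bounds (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
in_1_to_127_by_sign_test (unsigned char c)
{
  /* Values 128..255 convert to negative signed chars, values 1..127 stay
     positive, and 0 stays 0, so one signed comparison suffices.  */
  return (signed char) c > 0;
}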
3639 /* Given two ranges, see if we can merge them into one. Return 1 if we
3640 can, 0 if we can't. Set the output range into the specified parameters. */
3643 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3644 tree high0, int in1_p, tree low1, tree high1)
3652 int lowequal = ((low0 == 0 && low1 == 0)
3653 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3654 low0, 0, low1, 0)));
3655 int highequal = ((high0 == 0 && high1 == 0)
3656 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3657 high0, 1, high1, 1)));
3659 /* Make range 0 be the range that starts first, or ends last if they
3660 start at the same value. Swap them if it isn't. */
3661 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3664 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3665 high1, 1, high0, 1))))
3667 temp = in0_p, in0_p = in1_p, in1_p = temp;
3668 tem = low0, low0 = low1, low1 = tem;
3669 tem = high0, high0 = high1, high1 = tem;
3672 /* Now flag two cases, whether the ranges are disjoint or whether the
3673 second range is totally subsumed in the first. Note that the tests
3674 below are simplified by the ones above. */
3675 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3676 high0, 1, low1, 0));
3677 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3678 high1, 1, high0, 1));
3680 /* We now have four cases, depending on whether we are including or
3681 excluding the two ranges. */
3684 /* If they don't overlap, the result is false. If the second range
3685 is a subset it is the result. Otherwise, the range is from the start
3686 of the second to the end of the first. */
3688 in_p = 0, low = high = 0;
3690 in_p = 1, low = low1, high = high1;
3692 in_p = 1, low = low1, high = high0;
3695 else if (in0_p && ! in1_p)
3697 /* If they don't overlap, the result is the first range. If they are
3698 equal, the result is false. If the second range is a subset of the
3699 first, and the ranges begin at the same place, we go from just after
3700 the end of the first range to the end of the second. If the second
3701 range is not a subset of the first, or if it is a subset and both
3702 ranges end at the same place, the range starts at the start of the
3703 first range and ends just before the second range.
3704 Otherwise, we can't describe this as a single range. */
3706 in_p = 1, low = low0, high = high0;
3707 else if (lowequal && highequal)
3708 in_p = 0, low = high = 0;
3709 else if (subset && lowequal)
3711 in_p = 1, high = high0;
3712 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3713 integer_one_node, 0);
3715 else if (! subset || highequal)
3717 in_p = 1, low = low0;
3718 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3719 integer_one_node, 0);
3725 else if (! in0_p && in1_p)
3727 /* If they don't overlap, the result is the second range. If the second
3728 is a subset of the first, the result is false. Otherwise,
3729 the range starts just after the first range and ends at the
3730 end of the second. */
3732 in_p = 1, low = low1, high = high1;
3733 else if (subset || highequal)
3734 in_p = 0, low = high = 0;
3737 in_p = 1, high = high1;
3738 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3739 integer_one_node, 0);
3745 /* The case where we are excluding both ranges. Here the complex case
3746 is if they don't overlap. In that case, the only time we have a
3747 range is if they are adjacent. If the second is a subset of the
3748 first, the result is the first. Otherwise, the range to exclude
3749 starts at the beginning of the first range and ends at the end of the second. */
3753 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3754 range_binop (PLUS_EXPR, NULL_TREE,
3756 integer_one_node, 1),
3758 in_p = 0, low = low0, high = high1;
3763 in_p = 0, low = low0, high = high0;
3765 in_p = 0, low = low0, high = high1;
3768 *pin_p = in_p, *plow = low, *phigh = high;
3772 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3773 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3776 /* EXP is some logical combination of boolean tests. See if we can
3777 merge it into some range test. Return the new tree if so. */
3780 fold_range_test (tree exp)
3782 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3783 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3784 int in0_p, in1_p, in_p;
3785 tree low0, low1, low, high0, high1, high;
3786 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3787 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3790 /* If this is an OR operation, invert both sides; we will invert
3791 again at the end. */
3793 in0_p = ! in0_p, in1_p = ! in1_p;
3795 /* If both expressions are the same, if we can merge the ranges, and we
3796 can build the range test, return it or it inverted. If one of the
3797 ranges is always true or always false, consider it to be the same
3798 expression as the other. */
3799 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3800 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3802 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3804 : rhs != 0 ? rhs : integer_zero_node,
3806 return or_op ? invert_truthvalue (tem) : tem;
3808 /* On machines where the branch cost is expensive, if this is a
3809 short-circuited branch and the underlying object on both sides
3810 is the same, make a non-short-circuit operation. */
3811 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3812 && lhs != 0 && rhs != 0
3813 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3814 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3815 && operand_equal_p (lhs, rhs, 0))
3817 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3818 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3819 which cases we can't do this. */
3820 if (simple_operand_p (lhs))
3821 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3822 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3823 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3824 TREE_OPERAND (exp, 1));
3826 else if (lang_hooks.decls.global_bindings_p () == 0
3827 && ! CONTAINS_PLACEHOLDER_P (lhs))
3829 tree common = save_expr (lhs);
3831 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3832 or_op ? ! in0_p : in0_p,
3834 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3835 or_op ? ! in1_p : in1_p,
3837 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3838 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3839 TREE_TYPE (exp), lhs, rhs);
3846 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3847 bit value. Arrange things so the extra bits will be set to zero if and
3848 only if C is signed-extended to its full width. If MASK is nonzero,
3849 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3852 unextend (tree c, int p, int unsignedp, tree mask)
3854 tree type = TREE_TYPE (c);
3855 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3858 if (p == modesize || unsignedp)
3861 /* We work by getting just the sign bit into the low-order bit, then
3862 into the high-order bit, then sign-extend. We then XOR that value with C. */
3864 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3865 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3867 /* We must use a signed type in order to get an arithmetic right shift.
3868 However, we must also avoid introducing accidental overflows, so that
3869 a subsequent call to integer_zerop will work. Hence we must
3870 do the type conversion here. At this point, the constant is either
3871 zero or one, and the conversion to a signed type can never overflow.
3872 We could get an overflow if this conversion is done anywhere else. */
3873 if (TREE_UNSIGNED (type))
3874 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3876 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3877 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3879 temp = const_binop (BIT_AND_EXPR, temp,
3880 fold_convert (TREE_TYPE (c), mask), 0);
3881 /* If necessary, convert the type back to match the type of C. */
3882 if (TREE_UNSIGNED (type))
3883 temp = fold_convert (type, temp);
3885 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
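/* Illustrative sketch, not part of this file: a portable way to sign-extend
   a P-bit value, the operation that unextend's shift-and-XOR sequence above
   is built around.  The low P bits of VALUE are reinterpreted as a signed
   P-bit quantity: XORing with the sign-bit position and subtracting it
   propagates the sign.  Assumes two's complement and 1 <= p <= the number
   of bits in long; the function name is hypothetical.  */

static long
sign_extend_low_bits (unsigned long value, int p)
{
  unsigned long sign = 1UL << (p - 1);

  value &= (sign << 1) - 1;	/* keep only the low P bits */
  return (long) ((value ^ sign) - sign);
}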
3888 /* Find ways of folding logical expressions of LHS and RHS:
3889 Try to merge two comparisons to the same innermost item.
3890 Look for range tests like "ch >= '0' && ch <= '9'".
3891 Look for combinations of simple terms on machines with expensive branches
3892 and evaluate the RHS unconditionally.
3894 For example, if we have p->a == 2 && p->b == 4 and we can make an
3895 object large enough to span both A and B, we can do this with a comparison
3896 against the object ANDed with a mask.
3898 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3899 operations to do this with one comparison.
3901 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3902 function and the one above.
3904 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3905 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3907 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
3910 We return the simplified tree or 0 if no optimization is possible. */
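/* Illustrative sketch, not part of this file: the "p->a == 2 && p->b == 4"
   merge described above, with two 3-bit fields packed by hand into one
   byte (field A in bits 0-2, field B in bits 3-5).  Both functions return
   the same value; the second does one load, one AND and one compare.  The
   names and field layout are hypothetical.  */

static int
both_fields_by_two_compares (unsigned char byte)
{
  return (byte & 0x07) == 2 && ((byte >> 3) & 0x07) == 4;
}

static int
both_fields_by_one_compare (unsigned char byte)
{
  /* Combined mask 0x3f covers both fields; combined constant is
     2 | (4 << 3) == 0x22.  */
  return (byte & 0x3f) == 0x22;
}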
3913 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3915 /* If this is the "or" of two comparisons, we can do something if
3916 the comparisons are NE_EXPR. If this is the "and", we can do something
3917 if the comparisons are EQ_EXPR. I.e.,
3918 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3920 WANTED_CODE is this operation code. For single bit fields, we can
3921 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3922 comparison for one-bit fields. */
3924 enum tree_code wanted_code;
3925 enum tree_code lcode, rcode;
3926 tree ll_arg, lr_arg, rl_arg, rr_arg;
3927 tree ll_inner, lr_inner, rl_inner, rr_inner;
3928 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3929 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3930 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3931 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3932 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3933 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3934 enum machine_mode lnmode, rnmode;
3935 tree ll_mask, lr_mask, rl_mask, rr_mask;
3936 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3937 tree l_const, r_const;
3938 tree lntype, rntype, result;
3939 int first_bit, end_bit;
3942 /* Start by getting the comparison codes. Fail if anything is volatile.
3943 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3944 it were surrounded with a NE_EXPR. */
3946 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3949 lcode = TREE_CODE (lhs);
3950 rcode = TREE_CODE (rhs);
3952 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3953 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3955 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3956 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3958 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3961 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3964 ll_arg = TREE_OPERAND (lhs, 0);
3965 lr_arg = TREE_OPERAND (lhs, 1);
3966 rl_arg = TREE_OPERAND (rhs, 0);
3967 rr_arg = TREE_OPERAND (rhs, 1);
3969 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3970 if (simple_operand_p (ll_arg)
3971 && simple_operand_p (lr_arg)
3972 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3976 if (operand_equal_p (ll_arg, rl_arg, 0)
3977 && operand_equal_p (lr_arg, rr_arg, 0))
3979 int lcompcode, rcompcode;
3981 lcompcode = comparison_to_compcode (lcode);
3982 rcompcode = comparison_to_compcode (rcode);
3983 compcode = (code == TRUTH_AND_EXPR)
3984 ? lcompcode & rcompcode
3985 : lcompcode | rcompcode;
3987 else if (operand_equal_p (ll_arg, rr_arg, 0)
3988 && operand_equal_p (lr_arg, rl_arg, 0))
3990 int lcompcode, rcompcode;
3992 rcode = swap_tree_comparison (rcode);
3993 lcompcode = comparison_to_compcode (lcode);
3994 rcompcode = comparison_to_compcode (rcode);
3995 compcode = (code == TRUTH_AND_EXPR)
3996 ? lcompcode & rcompcode
3997 : lcompcode | rcompcode;
4002 if (compcode == COMPCODE_TRUE)
4003 return fold_convert (truth_type, integer_one_node);
4004 else if (compcode == COMPCODE_FALSE)
4005 return fold_convert (truth_type, integer_zero_node);
4006 else if (compcode != -1)
4007 return build (compcode_to_comparison (compcode),
4008 truth_type, ll_arg, lr_arg);
4011 /* If the RHS can be evaluated unconditionally and its operands are
4012 simple, it wins to evaluate the RHS unconditionally on machines
4013 with expensive branches. In this case, this isn't a comparison
4014 that can be merged. Avoid doing this if the RHS is a floating-point
4015 comparison since those can trap. */
4017 if (BRANCH_COST >= 2
4018 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4019 && simple_operand_p (rl_arg)
4020 && simple_operand_p (rr_arg))
4022 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4023 if (code == TRUTH_OR_EXPR
4024 && lcode == NE_EXPR && integer_zerop (lr_arg)
4025 && rcode == NE_EXPR && integer_zerop (rr_arg)
4026 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4027 return build (NE_EXPR, truth_type,
4028 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4032 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4033 if (code == TRUTH_AND_EXPR
4034 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4035 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4036 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4037 return build (EQ_EXPR, truth_type,
4038 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4042 return build (code, truth_type, lhs, rhs);
4045 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4048 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4049 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4053 ll_inner = decode_field_reference (ll_arg,
4054 &ll_bitsize, &ll_bitpos, &ll_mode,
4055 &ll_unsignedp, &volatilep, &ll_mask,
4057 lr_inner = decode_field_reference (lr_arg,
4058 &lr_bitsize, &lr_bitpos, &lr_mode,
4059 &lr_unsignedp, &volatilep, &lr_mask,
4061 rl_inner = decode_field_reference (rl_arg,
4062 &rl_bitsize, &rl_bitpos, &rl_mode,
4063 &rl_unsignedp, &volatilep, &rl_mask,
4065 rr_inner = decode_field_reference (rr_arg,
4066 &rr_bitsize, &rr_bitpos, &rr_mode,
4067 &rr_unsignedp, &volatilep, &rr_mask,
4070 /* It must be true that the inner operation on the lhs of each
4071 comparison must be the same if we are to be able to do anything.
4072 Then see if we have constants. If not, the same must be true for the rhs. */
4074 if (volatilep || ll_inner == 0 || rl_inner == 0
4075 || ! operand_equal_p (ll_inner, rl_inner, 0))
4078 if (TREE_CODE (lr_arg) == INTEGER_CST
4079 && TREE_CODE (rr_arg) == INTEGER_CST)
4080 l_const = lr_arg, r_const = rr_arg;
4081 else if (lr_inner == 0 || rr_inner == 0
4082 || ! operand_equal_p (lr_inner, rr_inner, 0))
4085 l_const = r_const = 0;
4087 /* If either comparison code is not correct for our logical operation,
4088 fail. However, we can convert a one-bit comparison against zero into
4089 the opposite comparison against that bit being set in the field. */
4091 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4092 if (lcode != wanted_code)
4094 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4096 /* Make the left operand unsigned, since we are only interested
4097 in the value of one bit. Otherwise we are doing the wrong thing below. */
4106 /* This is analogous to the code for l_const above. */
4107 if (rcode != wanted_code)
4109 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4118 /* After this point all optimizations will generate bit-field
4119 references, which we might not want. */
4120 if (! lang_hooks.can_use_bit_fields_p ())
4123 /* See if we can find a mode that contains both fields being compared on
4124 the left. If we can't, fail. Otherwise, update all constants and masks
4125 to be relative to a field of that size. */
4126 first_bit = MIN (ll_bitpos, rl_bitpos);
4127 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4128 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4129 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4131 if (lnmode == VOIDmode)
4134 lnbitsize = GET_MODE_BITSIZE (lnmode);
4135 lnbitpos = first_bit & ~ (lnbitsize - 1);
4136 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4137 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4139 if (BYTES_BIG_ENDIAN)
4141 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4142 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4145 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4146 size_int (xll_bitpos), 0);
4147 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4148 size_int (xrl_bitpos), 0);
4152 l_const = fold_convert (lntype, l_const);
4153 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4154 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4155 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4156 fold (build1 (BIT_NOT_EXPR,
4160 warning ("comparison is always %d", wanted_code == NE_EXPR);
4162 return fold_convert (truth_type,
4163 wanted_code == NE_EXPR
4164 ? integer_one_node : integer_zero_node);
4169 r_const = fold_convert (lntype, r_const);
4170 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4171 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4172 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4173 fold (build1 (BIT_NOT_EXPR,
4177 warning ("comparison is always %d", wanted_code == NE_EXPR);
4179 return fold_convert (truth_type,
4180 wanted_code == NE_EXPR
4181 ? integer_one_node : integer_zero_node);
4185 /* If the right sides are not constant, do the same for it. Also,
4186 disallow this optimization if a size or signedness mismatch occurs
4187 between the left and right sides. */
4190 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4191 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4192 /* Make sure the two fields on the right
4193 correspond to the left without being swapped. */
4194 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4197 first_bit = MIN (lr_bitpos, rr_bitpos);
4198 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4199 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4200 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4202 if (rnmode == VOIDmode)
4205 rnbitsize = GET_MODE_BITSIZE (rnmode);
4206 rnbitpos = first_bit & ~ (rnbitsize - 1);
4207 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4208 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4210 if (BYTES_BIG_ENDIAN)
4212 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4213 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4216 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4217 size_int (xlr_bitpos), 0);
4218 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4219 size_int (xrr_bitpos), 0);
4221 /* Make a mask that corresponds to both fields being compared.
4222 Do this for both items being compared. If the operands are the
4223 same size and the bits being compared are in the same position
4224 then we can do this by masking both and comparing the masked
4226 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4227 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4228 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4230 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4231 ll_unsignedp || rl_unsignedp);
4232 if (! all_ones_mask_p (ll_mask, lnbitsize))
4233 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4235 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4236 lr_unsignedp || rr_unsignedp);
4237 if (! all_ones_mask_p (lr_mask, rnbitsize))
4238 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4240 return build (wanted_code, truth_type, lhs, rhs);
4243 /* There is still another way we can do something: If both pairs of
4244 fields being compared are adjacent, we may be able to make a wider
4245 field containing them both.
4247 Note that we still must mask the lhs/rhs expressions. Furthermore,
4248 the mask must be shifted to account for the shift done by
4249 make_bit_field_ref. */
4250 if ((ll_bitsize + ll_bitpos == rl_bitpos
4251 && lr_bitsize + lr_bitpos == rr_bitpos)
4252 || (ll_bitpos == rl_bitpos + rl_bitsize
4253 && lr_bitpos == rr_bitpos + rr_bitsize))
4257 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4258 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4259 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4260 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4262 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4263 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4264 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4265 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4267 /* Convert to the smaller type before masking out unwanted bits. */
4269 if (lntype != rntype)
4271 if (lnbitsize > rnbitsize)
4273 lhs = fold_convert (rntype, lhs);
4274 ll_mask = fold_convert (rntype, ll_mask);
4277 else if (lnbitsize < rnbitsize)
4279 rhs = fold_convert (lntype, rhs);
4280 lr_mask = fold_convert (lntype, lr_mask);
4285 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4286 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4288 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4289 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4291 return build (wanted_code, truth_type, lhs, rhs);
4297 /* Handle the case of comparisons with constants. If there is something in
4298 common between the masks, those bits of the constants must be the same.
4299 If not, the condition is always false. Test for this to avoid generating
4300 incorrect code below. */
4301 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4302 if (! integer_zerop (result)
4303 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4304 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4306 if (wanted_code == NE_EXPR)
4308 warning ("`or' of unmatched not-equal tests is always 1");
4309 return fold_convert (truth_type, integer_one_node);
4313 warning ("`and' of mutually exclusive equal-tests is always 0");
4314 return fold_convert (truth_type, integer_zero_node);
4318 /* Construct the expression we will return. First get the component
4319 reference we will make. Unless the mask is all ones the width of
4320 that field, perform the mask operation. Then compare with the
4322 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4323 ll_unsignedp || rl_unsignedp);
4325 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4326 if (! all_ones_mask_p (ll_mask, lnbitsize))
4327 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4329 return build (wanted_code, truth_type, result,
4330 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
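/* Added annotation for this excerpt (not part of the original sources):
   an illustrative example of the merged comparison built above.  Assuming
   a hypothetical little-endian layout such as

       struct s { unsigned a : 4; unsigned b : 4; } x;

   a condition like `x.a == 3 && x.b == 5' can be rewritten by the code
   above into roughly

       (<8-bit field covering both a and b> & 0xff) == 0x53

   i.e. a single bit-field reference, one mask and one comparison instead
   of two of each.  The exact mask, constant and field width depend on the
   bit positions, the mode chosen by get_best_mode and the endianness.  */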
4333 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4337 optimize_minmax_comparison (tree t)
4339 tree type = TREE_TYPE (t);
4340 tree arg0 = TREE_OPERAND (t, 0);
4341 enum tree_code op_code;
4342 tree comp_const = TREE_OPERAND (t, 1);
4344 int consts_equal, consts_lt;
4347 STRIP_SIGN_NOPS (arg0);
4349 op_code = TREE_CODE (arg0);
4350 minmax_const = TREE_OPERAND (arg0, 1);
4351 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4352 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4353 inner = TREE_OPERAND (arg0, 0);
4355 /* If something does not permit us to optimize, return the original tree. */
4356 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4357 || TREE_CODE (comp_const) != INTEGER_CST
4358 || TREE_CONSTANT_OVERFLOW (comp_const)
4359 || TREE_CODE (minmax_const) != INTEGER_CST
4360 || TREE_CONSTANT_OVERFLOW (minmax_const))
4363 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4364 and GT_EXPR, doing the rest with recursive calls using logical
4366 switch (TREE_CODE (t))
4368 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4370 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4374 fold (build (TRUTH_ORIF_EXPR, type,
4375 optimize_minmax_comparison
4376 (build (EQ_EXPR, type, arg0, comp_const)),
4377 optimize_minmax_comparison
4378 (build (GT_EXPR, type, arg0, comp_const))));
4381 if (op_code == MAX_EXPR && consts_equal)
4382 /* MAX (X, 0) == 0 -> X <= 0 */
4383 return fold (build (LE_EXPR, type, inner, comp_const));
4385 else if (op_code == MAX_EXPR && consts_lt)
4386 /* MAX (X, 0) == 5 -> X == 5 */
4387 return fold (build (EQ_EXPR, type, inner, comp_const));
4389 else if (op_code == MAX_EXPR)
4390 /* MAX (X, 0) == -1 -> false */
4391 return omit_one_operand (type, integer_zero_node, inner);
4393 else if (consts_equal)
4394 /* MIN (X, 0) == 0 -> X >= 0 */
4395 return fold (build (GE_EXPR, type, inner, comp_const));
4398 /* MIN (X, 0) == 5 -> false */
4399 return omit_one_operand (type, integer_zero_node, inner);
4402 /* MIN (X, 0) == -1 -> X == -1 */
4403 return fold (build (EQ_EXPR, type, inner, comp_const));
4406 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4407 /* MAX (X, 0) > 0 -> X > 0
4408 MAX (X, 0) > 5 -> X > 5 */
4409 return fold (build (GT_EXPR, type, inner, comp_const));
4411 else if (op_code == MAX_EXPR)
4412 /* MAX (X, 0) > -1 -> true */
4413 return omit_one_operand (type, integer_one_node, inner);
4415 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4416 /* MIN (X, 0) > 0 -> false
4417 MIN (X, 0) > 5 -> false */
4418 return omit_one_operand (type, integer_zero_node, inner);
4421 /* MIN (X, 0) > -1 -> X > -1 */
4422 return fold (build (GT_EXPR, type, inner, comp_const));
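/* Added annotation (not in the original file): worked instances of the
   rewrites above, for the hypothetical source expression MAX (x, 4)
   compared against a constant:

       MAX (x, 4) == 4  ->  x <= 4      (consts_equal)
       MAX (x, 4) == 9  ->  x == 9      (consts_lt)
       MAX (x, 4) == 2  ->  0           (always false)
       MAX (x, 4) >  4  ->  x > 4
       MAX (x, 4) >  2  ->  1           (always true)

   LT_EXPR, LE_EXPR and NE_EXPR are handled by folding the inverted
   comparison and inverting the result; GE_EXPR is split into an
   EQ_EXPR / GT_EXPR pair, as the recursive calls above show.  */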
4429 /* T is an integer expression that is being multiplied, divided, or taken a
4430 modulus (CODE says which and what kind of divide or modulus) by a
4431 constant C. See if we can eliminate that operation by folding it with
4432 other operations already in T. WIDE_TYPE, if non-null, is a type that
4433 should be used for the computation if wider than our type.
4435 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4436 (X * 2) + (Y * 4). We must, however, be assured that either the original
4437 expression would not overflow or that overflow is undefined for the type
4438 in the language in question.
4440 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4441 the machine has a multiply-accumulate insn or that this is part of an
4442 addressing calculation.
4444 If we return a non-null expression, it is an equivalent form of the
4445 original computation, but need not be in the original type. */
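/* Added annotation (not in the original file): a few more worked examples
   of what extract_muldiv below can do, all subject to the overflow caveat
   above.  The hypothetical operands x and y are signed integers:

       (x * 12) / 4   ->  x * 3     (the inner multiply cancels against
                                     the division)
       (x * 8) % 4    ->  0         (the special modulus case noted below)
       (x + 7) * 4    ->  x * 4 + 28   (the canonicalization mentioned
                                        above)                             */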
4448 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4450 /* To avoid exponential search depth, refuse to allow recursion past
4451 three levels. Beyond that (1) it's highly unlikely that we'll find
4452 something interesting and (2) we've probably processed it before
4453 when we built the inner expression. */
4462 ret = extract_muldiv_1 (t, c, code, wide_type);
4469 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4471 tree type = TREE_TYPE (t);
4472 enum tree_code tcode = TREE_CODE (t);
4473 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4474 > GET_MODE_SIZE (TYPE_MODE (type)))
4475 ? wide_type : type);
4477 int same_p = tcode == code;
4478 tree op0 = NULL_TREE, op1 = NULL_TREE;
4480 /* Don't deal with constants of zero here; they confuse the code below. */
4481 if (integer_zerop (c))
4484 if (TREE_CODE_CLASS (tcode) == '1')
4485 op0 = TREE_OPERAND (t, 0);
4487 if (TREE_CODE_CLASS (tcode) == '2')
4488 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4490 /* Note that we need not handle conditional operations here since fold
4491 already handles those cases. So just do arithmetic here. */
4495 /* For a constant, we can always simplify if we are a multiply
4496 or (for divide and modulus) if it is a multiple of our constant. */
4497 if (code == MULT_EXPR
4498 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4499 return const_binop (code, fold_convert (ctype, t),
4500 fold_convert (ctype, c), 0);
4503 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4504 /* If op0 is an expression ... */
4505 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4506 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4507 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4508 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4509 /* ... and is unsigned, and its type is smaller than ctype,
4510 then we cannot pass through as widening. */
4511 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4512 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4513 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4514 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4515 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4516 /* ... or its type is larger than ctype,
4517 then we cannot pass through this truncation. */
4518 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4519 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4520 /* ... or signedness changes for division or modulus,
4521 then we cannot pass through this conversion. */
4522 || (code != MULT_EXPR
4523 && (TREE_UNSIGNED (ctype)
4524 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4527 /* Pass the constant down and see if we can make a simplification. If
4528 we can, replace this expression with the inner simplification for
4529 possible later conversion to our or some other type. */
4530 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4531 && TREE_CODE (t2) == INTEGER_CST
4532 && ! TREE_CONSTANT_OVERFLOW (t2)
4533 && (0 != (t1 = extract_muldiv (op0, t2, code,
4535 ? ctype : NULL_TREE))))
4539 case NEGATE_EXPR: case ABS_EXPR:
4540 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4541 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4544 case MIN_EXPR: case MAX_EXPR:
4545 /* If widening the type changes the signedness, then we can't perform
4546 this optimization as that changes the result. */
4547 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4550 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4551 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4552 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4554 if (tree_int_cst_sgn (c) < 0)
4555 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4557 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4558 fold_convert (ctype, t2)));
4562 case WITH_RECORD_EXPR:
4563 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4564 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4565 TREE_OPERAND (t, 1));
4568 case LSHIFT_EXPR: case RSHIFT_EXPR:
4569 /* If the second operand is constant, this is a multiplication
4570 or floor division by a power of two, so we can treat it that
4571 way unless the multiplier or divisor overflows. */
4572 if (TREE_CODE (op1) == INTEGER_CST
4573 /* const_binop may not detect overflow correctly,
4574 so check for it explicitly here. */
4575 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4576 && TREE_INT_CST_HIGH (op1) == 0
4577 && 0 != (t1 = fold_convert (ctype,
4578 const_binop (LSHIFT_EXPR,
4581 && ! TREE_OVERFLOW (t1))
4582 return extract_muldiv (build (tcode == LSHIFT_EXPR
4583 ? MULT_EXPR : FLOOR_DIV_EXPR,
4584 ctype, fold_convert (ctype, op0), t1),
4585 c, code, wide_type);
4588 case PLUS_EXPR: case MINUS_EXPR:
4589 /* See if we can eliminate the operation on both sides. If we can, we
4590 can return a new PLUS or MINUS. If we can't, the only remaining
4591 cases where we can do anything are if the second operand is a constant. */
4593 t1 = extract_muldiv (op0, c, code, wide_type);
4594 t2 = extract_muldiv (op1, c, code, wide_type);
4595 if (t1 != 0 && t2 != 0
4596 && (code == MULT_EXPR
4597 /* If not multiplication, we can only do this if both operands
4598 are divisible by c. */
4599 || (multiple_of_p (ctype, op0, c)
4600 && multiple_of_p (ctype, op1, c))))
4601 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4602 fold_convert (ctype, t2)));
4604 /* If this was a subtraction, negate OP1 and set it to be an addition.
4605 This simplifies the logic below. */
4606 if (tcode == MINUS_EXPR)
4607 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4609 if (TREE_CODE (op1) != INTEGER_CST)
4612 /* If either OP1 or C is negative, this optimization is not safe for
4613 some of the division and remainder types while for others we need
4614 to change the code. */
4615 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4617 if (code == CEIL_DIV_EXPR)
4618 code = FLOOR_DIV_EXPR;
4619 else if (code == FLOOR_DIV_EXPR)
4620 code = CEIL_DIV_EXPR;
4621 else if (code != MULT_EXPR
4622 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4626 /* If it's a multiply or a division/modulus operation of a multiple
4627 of our constant, do the operation and verify it doesn't overflow. */
4628 if (code == MULT_EXPR
4629 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4631 op1 = const_binop (code, fold_convert (ctype, op1),
4632 fold_convert (ctype, c), 0);
4633 /* We allow the constant to overflow with wrapping semantics. */
4635 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4641 /* If we have an unsigned type that is not a sizetype, we cannot widen
4642 the operation since it will change the result if the original
4643 computation overflowed. */
4644 if (TREE_UNSIGNED (ctype)
4645 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4649 /* If we were able to eliminate our operation from the first side,
4650 apply our operation to the second side and reform the PLUS. */
4651 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4652 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4654 /* The last case is if we are a multiply. In that case, we can
4655 apply the distributive law to commute the multiply and addition
4656 if the multiplication of the constants doesn't overflow. */
4657 if (code == MULT_EXPR)
4658 return fold (build (tcode, ctype,
4659 fold (build (code, ctype,
4660 fold_convert (ctype, op0),
4661 fold_convert (ctype, c))),
4667 /* We have a special case here if we are doing something like
4668 (C * 8) % 4 since we know that's zero. */
4669 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4670 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4671 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4672 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4673 return omit_one_operand (type, integer_zero_node, op0);
4675 /* ... fall through ... */
4677 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4678 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4679 /* If we can extract our operation from the LHS, do so and return a
4680 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4681 do something only if the second operand is a constant. */
4683 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4684 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4685 fold_convert (ctype, op1)));
4686 else if (tcode == MULT_EXPR && code == MULT_EXPR
4687 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4688 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4689 fold_convert (ctype, t1)));
4690 else if (TREE_CODE (op1) != INTEGER_CST)
4693 /* If these are the same operation types, we can associate them
4694 assuming no overflow. */
4696 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4697 fold_convert (ctype, c), 0))
4698 && ! TREE_OVERFLOW (t1))
4699 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4701 /* If these operations "cancel" each other, we have the main
4702 optimizations of this pass, which occur when either constant is a
4703 multiple of the other, in which case we replace this with either an
4704 operation of CODE or TCODE.
4706 If we have an unsigned type that is not a sizetype, we cannot do
4707 this since it will change the result if the original computation overflowed. */
4709 if ((! TREE_UNSIGNED (ctype)
4710 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4712 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4713 || (tcode == MULT_EXPR
4714 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4715 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4717 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4718 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4719 fold_convert (ctype,
4720 const_binop (TRUNC_DIV_EXPR,
4722 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4723 return fold (build (code, ctype, fold_convert (ctype, op0),
4724 fold_convert (ctype,
4725 const_binop (TRUNC_DIV_EXPR,
4737 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4738 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4739 that we may sometimes modify the tree. */
4742 strip_compound_expr (tree t, tree s)
4744 enum tree_code code = TREE_CODE (t);
4746 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4747 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4748 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4749 return TREE_OPERAND (t, 1);
4751 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4752 don't bother handling any other types. */
4753 else if (code == COND_EXPR)
4755 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4756 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4757 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4759 else if (TREE_CODE_CLASS (code) == '1')
4760 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4761 else if (TREE_CODE_CLASS (code) == '<'
4762 || TREE_CODE_CLASS (code) == '2')
4764 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4765 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4771 /* Return a node which has the indicated constant VALUE (either 0 or
4772 1), and is of the indicated TYPE. */
4775 constant_boolean_node (int value, tree type)
4777 if (type == integer_type_node)
4778 return value ? integer_one_node : integer_zero_node;
4779 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4780 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4781 : integer_zero_node);
4784 tree t = build_int_2 (value, 0);
4786 TREE_TYPE (t) = type;
4791 /* Utility function for the following routine, to see how complex a nesting of
4792 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4793 we don't care (to avoid spending too much time on complex expressions).
4796 count_cond (tree expr, int lim)
4800 if (TREE_CODE (expr) != COND_EXPR)
4805 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4806 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4807 return MIN (lim, 1 + ctrue + cfalse);
4810 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4811 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4812 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4813 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4814 COND is the first argument to CODE; otherwise (as in the example
4815 given here), it is the second argument. TYPE is the type of the
4816 original expression. */
4819 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4820 tree cond, tree arg, int cond_first_p)
4822 tree test, true_value, false_value;
4823 tree lhs = NULL_TREE;
4824 tree rhs = NULL_TREE;
4825 /* In the end, we'll produce a COND_EXPR. Both arms of the
4826 conditional expression will be binary operations. The left-hand
4827 side of the expression to be executed if the condition is true
4828 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4829 of the expression to be executed if the condition is true will be
4830 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4831 but apply to the expression to be executed if the conditional is
4837 /* These are the codes to use for the left-hand side and right-hand
4838 side of the COND_EXPR. Normally, they are the same as CODE. */
4839 enum tree_code lhs_code = code;
4840 enum tree_code rhs_code = code;
4841 /* And these are the types of the expressions. */
4842 tree lhs_type = type;
4843 tree rhs_type = type;
4848 true_rhs = false_rhs = &arg;
4849 true_lhs = &true_value;
4850 false_lhs = &false_value;
4854 true_lhs = false_lhs = &arg;
4855 true_rhs = &true_value;
4856 false_rhs = &false_value;
4859 if (TREE_CODE (cond) == COND_EXPR)
4861 test = TREE_OPERAND (cond, 0);
4862 true_value = TREE_OPERAND (cond, 1);
4863 false_value = TREE_OPERAND (cond, 2);
4864 /* If this operand is a throw expression (or anything else of void type), then it does not make
4865 sense to try to perform a logical or arithmetic operation
4866 involving it. Instead of building `a + throw 3' for example,
4867 we simply build `a, throw 3'. */
4868 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4872 lhs_code = COMPOUND_EXPR;
4873 lhs_type = void_type_node;
4878 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4882 rhs_code = COMPOUND_EXPR;
4883 rhs_type = void_type_node;
4891 tree testtype = TREE_TYPE (cond);
4893 true_value = fold_convert (testtype, integer_one_node);
4894 false_value = fold_convert (testtype, integer_zero_node);
4897 /* If ARG is complex we want to make sure we only evaluate it once. Though
4898 this is only required if it is volatile, it might be more efficient even
4899 if it is not. However, if we succeed in folding one part to a constant,
4900 we do not need to make this SAVE_EXPR. Since we do this optimization
4901 primarily to see if we do end up with a constant and this SAVE_EXPR
4902 interferes with later optimizations, suppressing it when we can is
4905 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4906 do so. Don't try to see if the result is a constant if an arm is a
4907 COND_EXPR since we get exponential behavior in that case. */
4909 if (saved_expr_p (arg))
4911 else if (lhs == 0 && rhs == 0
4912 && !TREE_CONSTANT (arg)
4913 && lang_hooks.decls.global_bindings_p () == 0
4914 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4915 || TREE_SIDE_EFFECTS (arg)))
4917 if (TREE_CODE (true_value) != COND_EXPR)
4918 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4920 if (TREE_CODE (false_value) != COND_EXPR)
4921 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4923 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4924 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4926 arg = save_expr (arg);
4928 save = saved_expr_p (arg);
4933 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4935 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4937 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4939 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4940 ahead of the COND_EXPR we made. Otherwise we would have it only
4941 evaluated in one branch, with the other branch using the result
4942 but missing the evaluation code. Beware that the save_expr call
4943 above might not return a SAVE_EXPR, so testing the TREE_CODE
4944 of ARG is not enough to decide here. */
4946 return build (COMPOUND_EXPR, type,
4947 fold_convert (void_type_node, arg),
4948 strip_compound_expr (test, arg));
4950 return fold_convert (type, test);
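/* Added annotation (not in the original file): an illustrative source-level
   view of the transformation performed above.  For `a + (b ? x : y)' the
   result is `b ? a + x : a + y'; when `a' is a call such as f () and
   neither arm folds to a constant, it is wrapped in a SAVE_EXPR first,
   giving roughly

       (t = f (), b ? t + x : t + y)

   where `t' stands for the SAVE_EXPR, and the COMPOUND_EXPR built just
   above guarantees the call is evaluated once, before the COND_EXPR.  */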
4954 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4956 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4957 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4958 ADDEND is the same as X.
4960 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4961 and finite. The problematic cases are when X is zero, and its mode
4962 has signed zeros. In the case of rounding towards -infinity,
4963 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4964 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4967 fold_real_zero_addition_p (tree type, tree addend, int negate)
4969 if (!real_zerop (addend))
4972 /* Don't allow the fold with -fsignaling-nans. */
4973 if (HONOR_SNANS (TYPE_MODE (type)))
4976 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4977 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4980 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4981 if (TREE_CODE (addend) == REAL_CST
4982 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4985 /* The mode has signed zeros, and we have to honor their sign.
4986 In this situation, there is only one case we can return true for.
4987 X - 0 is the same as X unless rounding towards -infinity is
4989 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
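/* Added annotation (not in the original file): a concrete illustration of
   the signed-zero and rounding-mode hazards described above.  The
   hypothetical host-side test below is not part of GCC and would need to
   be compiled with something like -frounding-math so the compiler does
   not fold it away:

       #include <fenv.h>
       #include <stdio.h>

       int
       main (void)
       {
         volatile double m0 = -0.0, p0 = 0.0;
         printf ("%g\n", m0 + p0);   // prints 0: folding x + 0.0 to x
                                     // would have preserved the -0.0
         fesetround (FE_DOWNWARD);
         printf ("%g\n", p0 - p0);   // prints -0: folding x - 0.0 to x
                                     // would have given +0.0
         return 0;
       }
   */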
4992 /* Subroutine of fold() that checks comparisons of built-in math
4993 functions against real constants.
4995 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4996 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4997 is the type of the result and ARG0 and ARG1 are the operands of the
4998 comparison. ARG1 must be a TREE_REAL_CST.
5000 The function returns the constant folded tree if a simplification
5001 can be made, and NULL_TREE otherwise. */
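/* Added annotation (not in the original file): worked examples of the
   sqrt comparisons folded below, assuming `double' and the default
   (NaN- and Inf-honoring) semantics:

       sqrt (x) > 3.0      ->  x > 9.0
       sqrt (x) < 3.0      ->  x >= 0.0 && x < 9.0
       sqrt (x) < -1.0     ->  0                        (always false)
       sqrt (x) > -1.0     ->  x >= 0.0
       sqrt (x) < 1.0e300  ->  x >= 0.0 && x != +Inf    (c*c overflows
                                                         to +Inf)

   With -ffast-math style flags several of the guards against NaNs and
   infinities drop out and the simpler forms are used instead.  */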
5004 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5005 tree type, tree arg0, tree arg1)
5009 if (BUILTIN_SQRT_P (fcode))
5011 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5012 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5014 c = TREE_REAL_CST (arg1);
5015 if (REAL_VALUE_NEGATIVE (c))
5017 /* sqrt(x) < y is always false, if y is negative. */
5018 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5019 return omit_one_operand (type,
5020 fold_convert (type, integer_zero_node),
5023 /* sqrt(x) > y is always true, if y is negative and we
5024 don't care about NaNs, i.e. negative values of x. */
5025 if (code == NE_EXPR || !HONOR_NANS (mode))
5026 return omit_one_operand (type,
5027 fold_convert (type, integer_one_node),
5030 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5031 return fold (build (GE_EXPR, type, arg,
5032 build_real (TREE_TYPE (arg), dconst0)));
5034 else if (code == GT_EXPR || code == GE_EXPR)
5038 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5039 real_convert (&c2, mode, &c2);
5041 if (REAL_VALUE_ISINF (c2))
5043 /* sqrt(x) > y is x == +Inf, when y is very large. */
5044 if (HONOR_INFINITIES (mode))
5045 return fold (build (EQ_EXPR, type, arg,
5046 build_real (TREE_TYPE (arg), c2)));
5048 /* sqrt(x) > y is always false, when y is very large
5049 and we don't care about infinities. */
5050 return omit_one_operand (type,
5051 fold_convert (type, integer_zero_node),
5055 /* sqrt(x) > c is the same as x > c*c. */
5056 return fold (build (code, type, arg,
5057 build_real (TREE_TYPE (arg), c2)));
5059 else if (code == LT_EXPR || code == LE_EXPR)
5063 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5064 real_convert (&c2, mode, &c2);
5066 if (REAL_VALUE_ISINF (c2))
5068 /* sqrt(x) < y is always true, when y is a very large
5069 value and we don't care about NaNs or Infinities. */
5070 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5071 return omit_one_operand (type,
5072 fold_convert (type, integer_one_node),
5075 /* sqrt(x) < y is x != +Inf when y is very large and we
5076 don't care about NaNs. */
5077 if (! HONOR_NANS (mode))
5078 return fold (build (NE_EXPR, type, arg,
5079 build_real (TREE_TYPE (arg), c2)));
5081 /* sqrt(x) < y is x >= 0 when y is very large and we
5082 don't care about Infinities. */
5083 if (! HONOR_INFINITIES (mode))
5084 return fold (build (GE_EXPR, type, arg,
5085 build_real (TREE_TYPE (arg), dconst0)));
5087 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5088 if (lang_hooks.decls.global_bindings_p () != 0
5089 || CONTAINS_PLACEHOLDER_P (arg))
5092 arg = save_expr (arg);
5093 return fold (build (TRUTH_ANDIF_EXPR, type,
5094 fold (build (GE_EXPR, type, arg,
5095 build_real (TREE_TYPE (arg),
5097 fold (build (NE_EXPR, type, arg,
5098 build_real (TREE_TYPE (arg),
5102 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5103 if (! HONOR_NANS (mode))
5104 return fold (build (code, type, arg,
5105 build_real (TREE_TYPE (arg), c2)));
5107 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5108 if (lang_hooks.decls.global_bindings_p () == 0
5109 && ! CONTAINS_PLACEHOLDER_P (arg))
5111 arg = save_expr (arg);
5112 return fold (build (TRUTH_ANDIF_EXPR, type,
5113 fold (build (GE_EXPR, type, arg,
5114 build_real (TREE_TYPE (arg),
5116 fold (build (code, type, arg,
5117 build_real (TREE_TYPE (arg),
5126 /* Subroutine of fold() that optimizes comparisons against Infinities,
5127 either +Inf or -Inf.
5129 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5130 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5131 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5133 The function returns the constant folded tree if a simplification
5134 can be made, and NULL_TREE otherwise. */
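/* Added annotation (not in the original file): worked examples for the
   function below, assuming `double' with the default IEEE semantics:

       x >  +Inf  ->  0                (unless signaling NaNs are honored)
       x == +Inf  ->  x > DBL_MAX      (likewise for x >= +Inf)
       x <= +Inf  ->  x == x           (false only for NaN)
       x <  +Inf  ->  x <= DBL_MAX
       x != +Inf  ->  !(x > DBL_MAX)   (or x <= DBL_MAX if NaNs are ignored)

   Comparisons against -Inf swap the comparison sense and use -DBL_MAX.  */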
5137 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5139 enum machine_mode mode;
5140 REAL_VALUE_TYPE max;
5144 mode = TYPE_MODE (TREE_TYPE (arg0));
5146 /* For negative infinity swap the sense of the comparison. */
5147 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5149 code = swap_tree_comparison (code);
5154 /* x > +Inf is always false, if we ignore sNaNs. */
5155 if (HONOR_SNANS (mode))
5157 return omit_one_operand (type,
5158 fold_convert (type, integer_zero_node),
5162 /* x <= +Inf is always true, if we don't care about NaNs. */
5163 if (! HONOR_NANS (mode))
5164 return omit_one_operand (type,
5165 fold_convert (type, integer_one_node),
5168 /* x <= +Inf is the same as x == x, i.e. x is not a NaN. */
5169 if (lang_hooks.decls.global_bindings_p () == 0
5170 && ! CONTAINS_PLACEHOLDER_P (arg0))
5172 arg0 = save_expr (arg0);
5173 return fold (build (EQ_EXPR, type, arg0, arg0));
5179 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5180 real_maxval (&max, neg, mode);
5181 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5182 arg0, build_real (TREE_TYPE (arg0), max)));
5185 /* x < +Inf is always equal to x <= DBL_MAX. */
5186 real_maxval (&max, neg, mode);
5187 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5188 arg0, build_real (TREE_TYPE (arg0), max)));
5191 /* x != +Inf is always equal to !(x > DBL_MAX). */
5192 real_maxval (&max, neg, mode);
5193 if (! HONOR_NANS (mode))
5194 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5195 arg0, build_real (TREE_TYPE (arg0), max)));
5196 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5197 arg0, build_real (TREE_TYPE (arg0), max)));
5198 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5207 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5208 equality/inequality test, then return a simplified form of
5209 the test using shifts and logical operations. Otherwise return
5210 NULL. TYPE is the desired result type. */
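/* Added annotation (not in the original file): for a hypothetical 32-bit
   unsigned x, the function below rewrites, e.g.,

       (x & 8) != 0           ->  (x >> 3) & 1
       (x & 8) == 0           ->  ((x >> 3) ^ 1) & 1
       (x & 0x80000000) != 0  ->  (int) x < 0

   The intermediate type actually used for the shift depends on
   LOAD_EXTEND_OP, and the AND is emitted last so it can combine with
   surrounding expressions, as the comments in the code note.  */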
5213 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5216 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5218 if (code == TRUTH_NOT_EXPR)
5220 code = TREE_CODE (arg0);
5221 if (code != NE_EXPR && code != EQ_EXPR)
5224 /* Extract the arguments of the EQ/NE. */
5225 arg1 = TREE_OPERAND (arg0, 1);
5226 arg0 = TREE_OPERAND (arg0, 0);
5228 /* This requires us to invert the code. */
5229 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5232 /* If this is testing a single bit, we can optimize the test. */
5233 if ((code == NE_EXPR || code == EQ_EXPR)
5234 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5235 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5237 tree inner = TREE_OPERAND (arg0, 0);
5238 tree type = TREE_TYPE (arg0);
5239 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5240 enum machine_mode operand_mode = TYPE_MODE (type);
5242 tree signed_type, unsigned_type, intermediate_type;
5245 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5246 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5247 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5248 if (arg00 != NULL_TREE)
5250 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5251 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5252 fold_convert (stype, arg00),
5253 fold_convert (stype, integer_zero_node)));
5256 /* At this point, we know that arg0 is not testing the sign bit. */
5257 if (TYPE_PRECISION (type) - 1 == bitnum)
5260 /* Otherwise we have (A & C) != 0 where C is a single bit,
5261 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5262 Similarly for (A & C) == 0. */
5264 /* If INNER is a right shift of a constant and it plus BITNUM does
5265 not overflow, adjust BITNUM and INNER. */
5266 if (TREE_CODE (inner) == RSHIFT_EXPR
5267 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5268 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5269 && bitnum < TYPE_PRECISION (type)
5270 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5271 bitnum - TYPE_PRECISION (type)))
5273 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5274 inner = TREE_OPERAND (inner, 0);
5277 /* If we are going to be able to omit the AND below, we must do our
5278 operations as unsigned. If we must use the AND, we have a choice.
5279 Normally unsigned is faster, but for some machines signed is. */
5280 #ifdef LOAD_EXTEND_OP
5281 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5286 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5287 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5288 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5289 inner = fold_convert (intermediate_type, inner);
5292 inner = build (RSHIFT_EXPR, intermediate_type,
5293 inner, size_int (bitnum));
5295 if (code == EQ_EXPR)
5296 inner = build (BIT_XOR_EXPR, intermediate_type,
5297 inner, integer_one_node);
5299 /* Put the AND last so it can combine with more things. */
5300 inner = build (BIT_AND_EXPR, intermediate_type,
5301 inner, integer_one_node);
5303 /* Make sure to return the proper type. */
5304 inner = fold_convert (result_type, inner);
5311 /* Check whether we are allowed to reorder operands arg0 and arg1,
5312 such that the evaluation of arg1 occurs before arg0. */
5315 reorder_operands_p (tree arg0, tree arg1)
5317 if (! flag_evaluation_order)
5319 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5321 return ! TREE_SIDE_EFFECTS (arg0)
5322 && ! TREE_SIDE_EFFECTS (arg1);
5325 /* Test whether it is preferable to swap two operands, ARG0 and
5326 ARG1, for example because ARG0 is an integer constant and ARG1
5327 isn't. If REORDER is true, only recommend swapping if we can
5328 evaluate the operands in reverse order. */
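/* Added annotation (not in the original file): together with the
   commutativity check in fold, the routine below is what canonicalizes
   constants into the second operand position, e.g. `5 + x' is rebuilt as
   `x + 5' and `3.0 * y' as `y * 3.0', so the pattern matching in fold
   only has to look for constants in arg1.  */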
5331 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5333 STRIP_SIGN_NOPS (arg0);
5334 STRIP_SIGN_NOPS (arg1);
5336 if (TREE_CODE (arg1) == INTEGER_CST)
5338 if (TREE_CODE (arg0) == INTEGER_CST)
5341 if (TREE_CODE (arg1) == REAL_CST)
5343 if (TREE_CODE (arg0) == REAL_CST)
5346 if (TREE_CODE (arg1) == COMPLEX_CST)
5348 if (TREE_CODE (arg0) == COMPLEX_CST)
5351 if (TREE_CONSTANT (arg1))
5353 if (TREE_CONSTANT (arg0))
5359 if (reorder && flag_evaluation_order
5360 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5371 /* Perform constant folding and related simplification of EXPR.
5372 The related simplifications include x*1 => x, x*0 => 0, etc.,
5373 and application of the associative law.
5374 NOP_EXPR conversions may be removed freely (as long as we
5375 are careful not to change the C type of the overall expression)
5376 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5377 but we can constant-fold them if they have constant operands. */
5379 #ifdef ENABLE_FOLD_CHECKING
5380 # define fold(x) fold_1 (x)
5381 static tree fold_1 (tree);
5387 tree t = expr, orig_t;
5388 tree t1 = NULL_TREE;
5390 tree type = TREE_TYPE (expr);
5391 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5392 enum tree_code code = TREE_CODE (t);
5393 int kind = TREE_CODE_CLASS (code);
5395 /* WINS will be nonzero when the switch is done
5396 if all operands are constant. */
5399 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5400 Likewise for a SAVE_EXPR that's already been evaluated. */
5401 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5404 /* Return right away if a constant. */
5410 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5414 /* Special case for conversion ops that can have fixed point args. */
5415 arg0 = TREE_OPERAND (t, 0);
5417 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5419 STRIP_SIGN_NOPS (arg0);
5421 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5422 subop = TREE_REALPART (arg0);
5426 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5427 && TREE_CODE (subop) != REAL_CST)
5428 /* Note that TREE_CONSTANT isn't enough:
5429 static var addresses are constant but we can't
5430 do arithmetic on them. */
5433 else if (IS_EXPR_CODE_CLASS (kind))
5435 int len = first_rtl_op (code);
5437 for (i = 0; i < len; i++)
5439 tree op = TREE_OPERAND (t, i);
5443 continue; /* Valid for CALL_EXPR, at least. */
5445 /* Strip any conversions that don't change the mode. This is
5446 safe for every expression, except for a comparison expression
5447 because its signedness is derived from its operands. So, in
5448 the latter case, only strip conversions that don't change the
5451 Note that this is done as an internal manipulation within the
5452 constant folder, in order to find the simplest representation
5453 of the arguments so that their form can be studied. In any
5454 case, the appropriate type conversions should be put back in
5455 the tree that will get out of the constant folder. */
5457 STRIP_SIGN_NOPS (op);
5461 if (TREE_CODE (op) == COMPLEX_CST)
5462 subop = TREE_REALPART (op);
5466 if (TREE_CODE (subop) != INTEGER_CST
5467 && TREE_CODE (subop) != REAL_CST)
5468 /* Note that TREE_CONSTANT isn't enough:
5469 static var addresses are constant but we can't
5470 do arithmetic on them. */
5480 /* If this is a commutative operation, and ARG0 is a constant, move it
5481 to ARG1 to reduce the number of tests below. */
5482 if (commutative_tree_code (code)
5483 && tree_swap_operands_p (arg0, arg1, true))
5484 return fold (build (code, type, TREE_OPERAND (t, 1),
5485 TREE_OPERAND (t, 0)));
5487 /* Now WINS is set as described above,
5488 ARG0 is the first operand of EXPR,
5489 and ARG1 is the second operand (if it has more than one operand).
5491 First check for cases where an arithmetic operation is applied to a
5492 compound, conditional, or comparison operation. Push the arithmetic
5493 operation inside the compound or conditional to see if any folding
5494 can then be done. Convert comparison to conditional for this purpose.
5495 This also optimizes non-constant cases that used to be done in
5498 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5499 one of the operands is a comparison and the other is a comparison, a
5500 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5501 code below would make the expression more complex. Change it to a
5502 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5503 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5505 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5506 || code == EQ_EXPR || code == NE_EXPR)
5507 && ((truth_value_p (TREE_CODE (arg0))
5508 && (truth_value_p (TREE_CODE (arg1))
5509 || (TREE_CODE (arg1) == BIT_AND_EXPR
5510 && integer_onep (TREE_OPERAND (arg1, 1)))))
5511 || (truth_value_p (TREE_CODE (arg1))
5512 && (truth_value_p (TREE_CODE (arg0))
5513 || (TREE_CODE (arg0) == BIT_AND_EXPR
5514 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5516 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5517 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5521 if (code == EQ_EXPR)
5522 t = invert_truthvalue (t);
5527 if (TREE_CODE_CLASS (code) == '1')
5529 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5530 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5531 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5532 else if (TREE_CODE (arg0) == COND_EXPR)
5534 tree arg01 = TREE_OPERAND (arg0, 1);
5535 tree arg02 = TREE_OPERAND (arg0, 2);
5536 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5537 arg01 = fold (build1 (code, type, arg01));
5538 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5539 arg02 = fold (build1 (code, type, arg02));
5540 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5543 /* If this was a conversion, and all we did was to move it
5544 inside the COND_EXPR, bring it back out. But leave it if
5545 it is a conversion from integer to integer and the
5546 result precision is no wider than a word since such a
5547 conversion is cheap and may be optimized away by combine,
5548 while it couldn't if it were outside the COND_EXPR. Then return
5549 so we don't get into an infinite recursion loop taking the
5550 conversion out and then back in. */
5552 if ((code == NOP_EXPR || code == CONVERT_EXPR
5553 || code == NON_LVALUE_EXPR)
5554 && TREE_CODE (t) == COND_EXPR
5555 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5556 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5557 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5558 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5559 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5560 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5561 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5563 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5564 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5565 t = build1 (code, type,
5567 TREE_TYPE (TREE_OPERAND
5568 (TREE_OPERAND (t, 1), 0)),
5569 TREE_OPERAND (t, 0),
5570 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5571 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5574 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5575 return fold (build (COND_EXPR, type, arg0,
5576 fold (build1 (code, type, integer_one_node)),
5577 fold (build1 (code, type, integer_zero_node))));
5579 else if (TREE_CODE_CLASS (code) == '<'
5580 && TREE_CODE (arg0) == COMPOUND_EXPR)
5581 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5582 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5583 else if (TREE_CODE_CLASS (code) == '<'
5584 && TREE_CODE (arg1) == COMPOUND_EXPR)
5585 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5586 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5587 else if (TREE_CODE_CLASS (code) == '2'
5588 || TREE_CODE_CLASS (code) == '<')
5590 if (TREE_CODE (arg1) == COMPOUND_EXPR
5591 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5592 && ! TREE_SIDE_EFFECTS (arg0))
5593 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5594 fold (build (code, type,
5595 arg0, TREE_OPERAND (arg1, 1))));
5596 else if ((TREE_CODE (arg1) == COND_EXPR
5597 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5598 && TREE_CODE_CLASS (code) != '<'))
5599 && (TREE_CODE (arg0) != COND_EXPR
5600 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5601 && (! TREE_SIDE_EFFECTS (arg0)
5602 || (lang_hooks.decls.global_bindings_p () == 0
5603 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5605 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5606 /*cond_first_p=*/0);
5607 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5608 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5609 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5610 else if ((TREE_CODE (arg0) == COND_EXPR
5611 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5612 && TREE_CODE_CLASS (code) != '<'))
5613 && (TREE_CODE (arg1) != COND_EXPR
5614 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5615 && (! TREE_SIDE_EFFECTS (arg1)
5616 || (lang_hooks.decls.global_bindings_p () == 0
5617 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5619 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5620 /*cond_first_p=*/1);
5634 return fold (DECL_INITIAL (t));
5639 case FIX_TRUNC_EXPR:
5641 case FIX_FLOOR_EXPR:
5642 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5643 return TREE_OPERAND (t, 0);
5645 /* Handle cases of two conversions in a row. */
5646 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5647 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5649 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5650 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5651 tree final_type = TREE_TYPE (t);
5652 int inside_int = INTEGRAL_TYPE_P (inside_type);
5653 int inside_ptr = POINTER_TYPE_P (inside_type);
5654 int inside_float = FLOAT_TYPE_P (inside_type);
5655 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5656 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5657 int inter_int = INTEGRAL_TYPE_P (inter_type);
5658 int inter_ptr = POINTER_TYPE_P (inter_type);
5659 int inter_float = FLOAT_TYPE_P (inter_type);
5660 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5661 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5662 int final_int = INTEGRAL_TYPE_P (final_type);
5663 int final_ptr = POINTER_TYPE_P (final_type);
5664 int final_float = FLOAT_TYPE_P (final_type);
5665 unsigned int final_prec = TYPE_PRECISION (final_type);
5666 int final_unsignedp = TREE_UNSIGNED (final_type);
5668 /* In addition to the cases of two conversions in a row
5669 handled below, if we are converting something to its own
5670 type via an object of identical or wider precision, neither
5671 conversion is needed. */
5672 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5673 && ((inter_int && final_int) || (inter_float && final_float))
5674 && inter_prec >= final_prec)
5675 return fold (build1 (code, final_type,
5676 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5678 /* Likewise, if the intermediate and final types are either both
5679 float or both integer, we don't need the middle conversion if
5680 it is wider than the final type and doesn't change the signedness
5681 (for integers). Avoid this if the final type is a pointer
5682 since then we sometimes need the inner conversion. Likewise if
5683 the outer has a precision not equal to the size of its mode. */
5684 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5685 || (inter_float && inside_float))
5686 && inter_prec >= inside_prec
5687 && (inter_float || inter_unsignedp == inside_unsignedp)
5688 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5689 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5691 return fold (build1 (code, final_type,
5692 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5694 /* If we have a sign-extension of a zero-extended value, we can
5695 replace that by a single zero-extension. */
5696 if (inside_int && inter_int && final_int
5697 && inside_prec < inter_prec && inter_prec < final_prec
5698 && inside_unsignedp && !inter_unsignedp)
5699 return fold (build1 (code, final_type,
5700 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5702 /* Two conversions in a row are not needed unless:
5703 - some conversion is floating-point (overstrict for now), or
5704 - the intermediate type is narrower than both initial and final, or
5706 - the intermediate type and innermost type differ in signedness,
5707 and the outermost type is wider than the intermediate, or
5708 - the initial type is a pointer type and the precisions of the
5709 intermediate and final types differ, or
5710 - the final type is a pointer type and the precisions of the
5711 initial and intermediate types differ. */
5712 if (! inside_float && ! inter_float && ! final_float
5713 && (inter_prec > inside_prec || inter_prec > final_prec)
5714 && ! (inside_int && inter_int
5715 && inter_unsignedp != inside_unsignedp
5716 && inter_prec < final_prec)
5717 && ((inter_unsignedp && inter_prec > inside_prec)
5718 == (final_unsignedp && final_prec > inter_prec))
5719 && ! (inside_ptr && inter_prec != final_prec)
5720 && ! (final_ptr && inside_prec != inter_prec)
5721 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5722 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5724 return fold (build1 (code, final_type,
5725 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5728 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5729 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5730 /* Detect assigning a bitfield. */
5731 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5732 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5734 /* Don't leave an assignment inside a conversion
5735 unless assigning a bitfield. */
5736 tree prev = TREE_OPERAND (t, 0);
5739 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5740 /* First do the assignment, then return converted constant. */
5741 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5742 TREE_NO_UNUSED_WARNING (t) = 1;
5747 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5748 constant (if x has signed type, the sign bit cannot be set
5749 in c). This folds extension into the BIT_AND_EXPR. */
5750 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5751 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5752 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5753 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5755 tree and = TREE_OPERAND (t, 0);
5756 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5759 if (TREE_UNSIGNED (TREE_TYPE (and))
5760 || (TYPE_PRECISION (TREE_TYPE (t))
5761 <= TYPE_PRECISION (TREE_TYPE (and))))
5763 else if (TYPE_PRECISION (TREE_TYPE (and1))
5764 <= HOST_BITS_PER_WIDE_INT
5765 && host_integerp (and1, 1))
5767 unsigned HOST_WIDE_INT cst;
5769 cst = tree_low_cst (and1, 1);
5770 cst &= (HOST_WIDE_INT) -1
5771 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5772 change = (cst == 0);
5773 #ifdef LOAD_EXTEND_OP
5775 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5778 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5779 and0 = fold_convert (uns, and0);
5780 and1 = fold_convert (uns, and1);
5785 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5786 fold_convert (TREE_TYPE (t), and0),
5787 fold_convert (TREE_TYPE (t), and1)));
5790 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5791 return tem ? tem : t;
5793 case VIEW_CONVERT_EXPR:
5794 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5795 return build1 (VIEW_CONVERT_EXPR, type,
5796 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5800 if (TREE_CODE (arg0) == CONSTRUCTOR
5801 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5803 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5810 if (TREE_CONSTANT (t) != wins)
5814 TREE_CONSTANT (t) = wins;
5819 if (negate_expr_p (arg0))
5820 return fold_convert (type, negate_expr (arg0));
5825 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5826 return fold_abs_const (arg0, type);
5827 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5828 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5829 /* Convert fabs((double)float) into (double)fabsf(float). */
5830 else if (TREE_CODE (arg0) == NOP_EXPR
5831 && TREE_CODE (type) == REAL_TYPE)
5833 tree targ0 = strip_float_extensions (arg0);
5835 return fold_convert (type, fold (build1 (ABS_EXPR,
5839 else if (tree_expr_nonnegative_p (arg0))
5844 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5845 return fold_convert (type, arg0);
5846 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5847 return build (COMPLEX_EXPR, type,
5848 TREE_OPERAND (arg0, 0),
5849 negate_expr (TREE_OPERAND (arg0, 1)));
5850 else if (TREE_CODE (arg0) == COMPLEX_CST)
5851 return build_complex (type, TREE_REALPART (arg0),
5852 negate_expr (TREE_IMAGPART (arg0)));
5853 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5854 return fold (build (TREE_CODE (arg0), type,
5855 fold (build1 (CONJ_EXPR, type,
5856 TREE_OPERAND (arg0, 0))),
5857 fold (build1 (CONJ_EXPR,
5858 type, TREE_OPERAND (arg0, 1)))));
5859 else if (TREE_CODE (arg0) == CONJ_EXPR)
5860 return TREE_OPERAND (arg0, 0);
5866 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5867 ~ TREE_INT_CST_HIGH (arg0));
5868 TREE_TYPE (t) = type;
5869 force_fit_type (t, 0);
5870 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5871 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5873 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5874 return TREE_OPERAND (arg0, 0);
5878 /* A + (-B) -> A - B */
5879 if (TREE_CODE (arg1) == NEGATE_EXPR)
5880 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5881 /* (-A) + B -> B - A */
5882 if (TREE_CODE (arg0) == NEGATE_EXPR)
5883 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5884 else if (! FLOAT_TYPE_P (type))
5886 if (integer_zerop (arg1))
5887 return non_lvalue (fold_convert (type, arg0));
5889 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5890 with a constant, and the two constants have no bits in common,
5891 we should treat this as a BIT_IOR_EXPR since this may produce more
5893 if (TREE_CODE (arg0) == BIT_AND_EXPR
5894 && TREE_CODE (arg1) == BIT_AND_EXPR
5895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5896 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5897 && integer_zerop (const_binop (BIT_AND_EXPR,
5898 TREE_OPERAND (arg0, 1),
5899 TREE_OPERAND (arg1, 1), 0)))
5901 code = BIT_IOR_EXPR;
5905 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5906 (plus (plus (mult) (mult)) (foo)) so that we can
5907 take advantage of the factoring cases below. */
5908 if ((TREE_CODE (arg0) == PLUS_EXPR
5909 && TREE_CODE (arg1) == MULT_EXPR)
5910 || (TREE_CODE (arg1) == PLUS_EXPR
5911 && TREE_CODE (arg0) == MULT_EXPR))
5913 tree parg0, parg1, parg, marg;
5915 if (TREE_CODE (arg0) == PLUS_EXPR)
5916 parg = arg0, marg = arg1;
5918 parg = arg1, marg = arg0;
5919 parg0 = TREE_OPERAND (parg, 0);
5920 parg1 = TREE_OPERAND (parg, 1);
5924 if (TREE_CODE (parg0) == MULT_EXPR
5925 && TREE_CODE (parg1) != MULT_EXPR)
5926 return fold (build (PLUS_EXPR, type,
5927 fold (build (PLUS_EXPR, type,
5928 fold_convert (type, parg0),
5929 fold_convert (type, marg))),
5930 fold_convert (type, parg1)));
5931 if (TREE_CODE (parg0) != MULT_EXPR
5932 && TREE_CODE (parg1) == MULT_EXPR)
5933 return fold (build (PLUS_EXPR, type,
5934 fold (build (PLUS_EXPR, type,
5935 fold_convert (type, parg1),
5936 fold_convert (type, marg))),
5937 fold_convert (type, parg0)));
5940 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5942 tree arg00, arg01, arg10, arg11;
5943 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5945 /* (A * C) + (B * C) -> (A+B) * C.
5946 We are most concerned about the case where C is a constant,
5947 but other combinations show up during loop reduction. Since
5948 it is not difficult, try all four possibilities. */
5950 arg00 = TREE_OPERAND (arg0, 0);
5951 arg01 = TREE_OPERAND (arg0, 1);
5952 arg10 = TREE_OPERAND (arg1, 0);
5953 arg11 = TREE_OPERAND (arg1, 1);
5956 if (operand_equal_p (arg01, arg11, 0))
5957 same = arg01, alt0 = arg00, alt1 = arg10;
5958 else if (operand_equal_p (arg00, arg10, 0))
5959 same = arg00, alt0 = arg01, alt1 = arg11;
5960 else if (operand_equal_p (arg00, arg11, 0))
5961 same = arg00, alt0 = arg01, alt1 = arg10;
5962 else if (operand_equal_p (arg01, arg10, 0))
5963 same = arg01, alt0 = arg00, alt1 = arg11;
5965 /* No identical multiplicands; see if we can find a common
5966 power-of-two factor in non-power-of-two multiplies. This
5967 can help in multi-dimensional array access. */
5968 else if (TREE_CODE (arg01) == INTEGER_CST
5969 && TREE_CODE (arg11) == INTEGER_CST
5970 && TREE_INT_CST_HIGH (arg01) == 0
5971 && TREE_INT_CST_HIGH (arg11) == 0)
5973 HOST_WIDE_INT int01, int11, tmp;
5974 int01 = TREE_INT_CST_LOW (arg01);
5975 int11 = TREE_INT_CST_LOW (arg11);
5977 /* Move min of absolute values to int11. */
5978 if ((int01 >= 0 ? int01 : -int01)
5979 < (int11 >= 0 ? int11 : -int11))
5981 tmp = int01, int01 = int11, int11 = tmp;
5982 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5983 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5986 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5988 alt0 = fold (build (MULT_EXPR, type, arg00,
5989 build_int_2 (int01 / int11, 0)));
5996 return fold (build (MULT_EXPR, type,
5997 fold (build (PLUS_EXPR, type, alt0, alt1)),
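/* For example (a sketch, assuming "int" is 4 bytes): indexing "int a[n][5]"
   as a[i][j] produces an offset of the form i * 20 + j * 4; since 4 is a
   power of two and 20 % 4 == 0, the offset is rewritten as (i * 5 + j) * 4,
   exposing the common factor.  */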
6003 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6004 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6005 return non_lvalue (fold_convert (type, arg0));
6007 /* Likewise if the operands are reversed. */
6008 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6009 return non_lvalue (fold_convert (type, arg1));
6011 /* Convert x+x into x*2.0. */
6012 if (operand_equal_p (arg0, arg1, 0)
6013 && SCALAR_FLOAT_TYPE_P (type))
6014 return fold (build (MULT_EXPR, type, arg0,
6015 build_real (type, dconst2)));
6017 /* Convert x*c+x into x*(c+1). */
6018 if (flag_unsafe_math_optimizations
6019 && TREE_CODE (arg0) == MULT_EXPR
6020 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6021 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6022 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6026 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6027 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6028 return fold (build (MULT_EXPR, type, arg1,
6029 build_real (type, c)));
6032 /* Convert x+x*c into x*(c+1). */
6033 if (flag_unsafe_math_optimizations
6034 && TREE_CODE (arg1) == MULT_EXPR
6035 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6036 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6037 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6041 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6042 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6043 return fold (build (MULT_EXPR, type, arg0,
6044 build_real (type, c)));
6047 /* Convert x*c1+x*c2 into x*(c1+c2). */
6048 if (flag_unsafe_math_optimizations
6049 && TREE_CODE (arg0) == MULT_EXPR
6050 && TREE_CODE (arg1) == MULT_EXPR
6051 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6052 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6053 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6054 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6055 && operand_equal_p (TREE_OPERAND (arg0, 0),
6056 TREE_OPERAND (arg1, 0), 0))
6058 REAL_VALUE_TYPE c1, c2;
6060 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6061 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6062 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6063 return fold (build (MULT_EXPR, type,
6064 TREE_OPERAND (arg0, 0),
6065 build_real (type, c1)));
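/* For example, with -funsafe-math-optimizations, x*2.0 + x becomes x*3.0
   and x*2.0 + x*0.5 becomes x*2.5.  The combined multiply can round
   differently from the original sum, hence the flag.  */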
6070 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits, provided A is
6071 unsigned and C1+C2 is the size of A.  */
6072 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits, provided A is
6073 unsigned and Z is the size of A.  */
6075 enum tree_code code0, code1;
6076 code0 = TREE_CODE (arg0);
6077 code1 = TREE_CODE (arg1);
6078 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6079 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6080 && operand_equal_p (TREE_OPERAND (arg0, 0),
6081 TREE_OPERAND (arg1, 0), 0)
6082 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6084 tree tree01, tree11;
6085 enum tree_code code01, code11;
6087 tree01 = TREE_OPERAND (arg0, 1);
6088 tree11 = TREE_OPERAND (arg1, 1);
6089 STRIP_NOPS (tree01);
6090 STRIP_NOPS (tree11);
6091 code01 = TREE_CODE (tree01);
6092 code11 = TREE_CODE (tree11);
6093 if (code01 == INTEGER_CST
6094 && code11 == INTEGER_CST
6095 && TREE_INT_CST_HIGH (tree01) == 0
6096 && TREE_INT_CST_HIGH (tree11) == 0
6097 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6098 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6099 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6100 code0 == LSHIFT_EXPR ? tree01 : tree11);
6101 else if (code11 == MINUS_EXPR)
6103 tree tree110, tree111;
6104 tree110 = TREE_OPERAND (tree11, 0);
6105 tree111 = TREE_OPERAND (tree11, 1);
6106 STRIP_NOPS (tree110);
6107 STRIP_NOPS (tree111);
6108 if (TREE_CODE (tree110) == INTEGER_CST
6109 && 0 == compare_tree_int (tree110,
6111 (TREE_TYPE (TREE_OPERAND
6113 && operand_equal_p (tree01, tree111, 0))
6114 return build ((code0 == LSHIFT_EXPR
6117 type, TREE_OPERAND (arg0, 0), tree01);
6119 else if (code01 == MINUS_EXPR)
6121 tree tree010, tree011;
6122 tree010 = TREE_OPERAND (tree01, 0);
6123 tree011 = TREE_OPERAND (tree01, 1);
6124 STRIP_NOPS (tree010);
6125 STRIP_NOPS (tree011);
6126 if (TREE_CODE (tree010) == INTEGER_CST
6127 && 0 == compare_tree_int (tree010,
6129 (TREE_TYPE (TREE_OPERAND
6131 && operand_equal_p (tree11, tree011, 0))
6132 return build ((code0 != LSHIFT_EXPR
6135 type, TREE_OPERAND (arg0, 0), tree11);
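/* A sketch of the source-level pattern this recognizes, assuming a target
   where unsigned int is 32 bits wide and 0 < b < 32:

	unsigned int rotl (unsigned int a, int b)
	{
	  return (a << b) + (a >> (32 - b));   -- folded to a rotate of a by b
	}

   The same applies when the two shift counts are constants whose sum is the
   width of the type.  */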
6141 /* In most languages, we can't associate operations on floats through
6142 parentheses. Rather than remember where the parentheses were, we
6143 don't associate floats at all, unless the user has specified
6144 -funsafe-math-optimizations. */
6147 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6149 tree var0, con0, lit0, minus_lit0;
6150 tree var1, con1, lit1, minus_lit1;
6152 /* Split both trees into variables, constants, and literals. Then
6153 associate each group together, the constants with literals,
6154 then the result with variables. This increases the chances of
6155 literals being recombined later and of generating relocatable
6156 expressions for the sum of a constant and literal. */
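/* For example, (x + 5) + (y + 7) splits into the variables x and y and the
   literals 5 and 7; re-associating gives (x + y) + 12, combining the two
   literals into a single constant.  */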
6157 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6158 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6159 code == MINUS_EXPR);
6161 /* Only do something if we found more than two objects. Otherwise,
6162 nothing has changed and we risk infinite recursion. */
6163 if (2 < ((var0 != 0) + (var1 != 0)
6164 + (con0 != 0) + (con1 != 0)
6165 + (lit0 != 0) + (lit1 != 0)
6166 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6168 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6169 if (code == MINUS_EXPR)
6172 var0 = associate_trees (var0, var1, code, type);
6173 con0 = associate_trees (con0, con1, code, type);
6174 lit0 = associate_trees (lit0, lit1, code, type);
6175 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6177 /* Preserve the MINUS_EXPR if the negative part of the literal is
6178 greater than the positive part. Otherwise, the multiplicative
6179 folding code (i.e. extract_muldiv) may be fooled in case
6180 unsigned constants are subtracted, like in the following
6181 example: ((X*2 + 4) - 8U)/2. */
6182 if (minus_lit0 && lit0)
6184 if (TREE_CODE (lit0) == INTEGER_CST
6185 && TREE_CODE (minus_lit0) == INTEGER_CST
6186 && tree_int_cst_lt (lit0, minus_lit0))
6188 minus_lit0 = associate_trees (minus_lit0, lit0,
6194 lit0 = associate_trees (lit0, minus_lit0,
6202 return fold_convert (type,
6203 associate_trees (var0, minus_lit0,
6207 con0 = associate_trees (con0, minus_lit0,
6209 return fold_convert (type,
6210 associate_trees (var0, con0,
6215 con0 = associate_trees (con0, lit0, code, type);
6216 return fold_convert (type, associate_trees (var0, con0,
6223 t1 = const_binop (code, arg0, arg1, 0);
6224 if (t1 != NULL_TREE)
6226 /* The return value should always have
6227 the same type as the original expression. */
6228 if (TREE_TYPE (t1) != TREE_TYPE (t))
6229 t1 = fold_convert (TREE_TYPE (t), t1);
6236 /* A - (-B) -> A + B */
6237 if (TREE_CODE (arg1) == NEGATE_EXPR)
6238 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6239 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6240 if (TREE_CODE (arg0) == NEGATE_EXPR
6241 && (FLOAT_TYPE_P (type)
6242 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6243 && negate_expr_p (arg1)
6244 && reorder_operands_p (arg0, arg1))
6245 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6246 TREE_OPERAND (arg0, 0)));
6248 if (! FLOAT_TYPE_P (type))
6250 if (! wins && integer_zerop (arg0))
6251 return negate_expr (fold_convert (type, arg1));
6252 if (integer_zerop (arg1))
6253 return non_lvalue (fold_convert (type, arg0));
6255 /* Fold A - (A & B) into ~B & A. */
6256 if (!TREE_SIDE_EFFECTS (arg0)
6257 && TREE_CODE (arg1) == BIT_AND_EXPR)
6259 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6260 return fold (build (BIT_AND_EXPR, type,
6261 fold (build1 (BIT_NOT_EXPR, type,
6262 TREE_OPERAND (arg1, 0))),
6264 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6265 return fold (build (BIT_AND_EXPR, type,
6266 fold (build1 (BIT_NOT_EXPR, type,
6267 TREE_OPERAND (arg1, 1))),
6271 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6272 any power of 2 minus 1. */
6273 if (TREE_CODE (arg0) == BIT_AND_EXPR
6274 && TREE_CODE (arg1) == BIT_AND_EXPR
6275 && operand_equal_p (TREE_OPERAND (arg0, 0),
6276 TREE_OPERAND (arg1, 0), 0))
6278 tree mask0 = TREE_OPERAND (arg0, 1);
6279 tree mask1 = TREE_OPERAND (arg1, 1);
6280 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6282 if (operand_equal_p (tem, mask1, 0))
6284 tem = fold (build (BIT_XOR_EXPR, type,
6285 TREE_OPERAND (arg0, 0), mask1));
6286 return fold (build (MINUS_EXPR, type, tem, mask1));
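/* For example, with B == 7 (a power of 2 minus 1), (a & ~7) - (a & 7) is
   rewritten as (a ^ 7) - 7: both forms leave the high bits of A alone and
   map the low three bits m to -m.  */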
6291 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6292 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6293 return non_lvalue (fold_convert (type, arg0));
6295 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6296 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6297 (-ARG1 + ARG0) reduces to -ARG1. */
6298 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6299 return negate_expr (fold_convert (type, arg1));
6301 /* Fold &x - &x. This can happen from &x.foo - &x.
6302 This is unsafe for certain floats even in non-IEEE formats.
6303 In IEEE, it is unsafe because it does wrong for NaNs.
6304 Also note that operand_equal_p is always false if an operand is volatile.  */
6307 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6308 && operand_equal_p (arg0, arg1, 0))
6309 return fold_convert (type, integer_zero_node);
6311 /* A - B -> A + (-B) if B is easily negatable. */
6312 if (!wins && negate_expr_p (arg1)
6313 && (FLOAT_TYPE_P (type)
6314 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6315 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6317 if (TREE_CODE (arg0) == MULT_EXPR
6318 && TREE_CODE (arg1) == MULT_EXPR
6319 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6321 /* (A * C) - (B * C) -> (A-B) * C. */
6322 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6323 TREE_OPERAND (arg1, 1), 0))
6324 return fold (build (MULT_EXPR, type,
6325 fold (build (MINUS_EXPR, type,
6326 TREE_OPERAND (arg0, 0),
6327 TREE_OPERAND (arg1, 0))),
6328 TREE_OPERAND (arg0, 1)));
6329 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6330 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6331 TREE_OPERAND (arg1, 0), 0))
6332 return fold (build (MULT_EXPR, type,
6333 TREE_OPERAND (arg0, 0),
6334 fold (build (MINUS_EXPR, type,
6335 TREE_OPERAND (arg0, 1),
6336 TREE_OPERAND (arg1, 1)))));
6342 /* (-A) * (-B) -> A * B */
6343 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6344 return fold (build (MULT_EXPR, type,
6345 TREE_OPERAND (arg0, 0),
6346 negate_expr (arg1)));
6347 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6348 return fold (build (MULT_EXPR, type,
6350 TREE_OPERAND (arg1, 0)));
6352 if (! FLOAT_TYPE_P (type))
6354 if (integer_zerop (arg1))
6355 return omit_one_operand (type, arg1, arg0);
6356 if (integer_onep (arg1))
6357 return non_lvalue (fold_convert (type, arg0));
6359 /* (a * (1 << b)) is (a << b) */
6360 if (TREE_CODE (arg1) == LSHIFT_EXPR
6361 && integer_onep (TREE_OPERAND (arg1, 0)))
6362 return fold (build (LSHIFT_EXPR, type, arg0,
6363 TREE_OPERAND (arg1, 1)));
6364 if (TREE_CODE (arg0) == LSHIFT_EXPR
6365 && integer_onep (TREE_OPERAND (arg0, 0)))
6366 return fold (build (LSHIFT_EXPR, type, arg1,
6367 TREE_OPERAND (arg0, 1)));
6369 if (TREE_CODE (arg1) == INTEGER_CST
6370 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6371 fold_convert (type, arg1),
6373 return fold_convert (type, tem);
6378 /* Maybe fold x * 0 to 0. The expressions aren't the same
6379 when x is NaN, since x * 0 is also NaN. Nor are they the
6380 same in modes with signed zeros, since multiplying a
6381 negative value by 0 gives -0, not +0. */
6382 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6383 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6384 && real_zerop (arg1))
6385 return omit_one_operand (type, arg1, arg0);
6386 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6387 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6388 && real_onep (arg1))
6389 return non_lvalue (fold_convert (type, arg0));
6391 /* Transform x * -1.0 into -x. */
6392 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6393 && real_minus_onep (arg1))
6394 return fold (build1 (NEGATE_EXPR, type, arg0));
6396 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6397 if (flag_unsafe_math_optimizations
6398 && TREE_CODE (arg0) == RDIV_EXPR
6399 && TREE_CODE (arg1) == REAL_CST
6400 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6402 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6405 return fold (build (RDIV_EXPR, type, tem,
6406 TREE_OPERAND (arg0, 1)));
6409 if (flag_unsafe_math_optimizations)
6411 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6412 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6414 /* Optimizations of sqrt(...)*sqrt(...). */
6415 if (fcode0 == fcode1 && BUILTIN_SQRT_P (fcode0))
6417 tree sqrtfn, arg, arglist;
6418 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6419 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6421 /* Optimize sqrt(x)*sqrt(x) as x. */
6422 if (operand_equal_p (arg00, arg10, 0)
6423 && ! HONOR_SNANS (TYPE_MODE (type)))
6426 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6427 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6428 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6429 arglist = build_tree_list (NULL_TREE, arg);
6430 return build_function_call_expr (sqrtfn, arglist);
6433 /* Optimize expN(x)*expN(y) as expN(x+y). */
6434 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6436 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6437 tree arg = build (PLUS_EXPR, type,
6438 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6439 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6440 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6441 return build_function_call_expr (expfn, arglist);
6444 /* Optimizations of pow(...)*pow(...). */
6445 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6446 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6447 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6449 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6450 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6452 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6453 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6456 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6457 if (operand_equal_p (arg01, arg11, 0))
6459 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6460 tree arg = build (MULT_EXPR, type, arg00, arg10);
6461 tree arglist = tree_cons (NULL_TREE, fold (arg),
6462 build_tree_list (NULL_TREE,
6464 return build_function_call_expr (powfn, arglist);
6467 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6468 if (operand_equal_p (arg00, arg10, 0))
6470 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6471 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6472 tree arglist = tree_cons (NULL_TREE, arg00,
6473 build_tree_list (NULL_TREE,
6475 return build_function_call_expr (powfn, arglist);
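/* For example, with -funsafe-math-optimizations, pow (x, y) * pow (z, y)
   becomes pow (x * z, y) and pow (x, y) * pow (x, z) becomes pow (x, y + z).  */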
6479 /* Optimize tan(x)*cos(x) as sin(x). */
6480 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6481 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6482 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6483 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6484 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6485 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6486 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6487 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6495 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6499 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6503 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6509 if (sinfn != NULL_TREE)
6510 return build_function_call_expr (sinfn,
6511 TREE_OPERAND (arg0, 1));
6514 /* Optimize x*pow(x,c) as pow(x,c+1). */
6515 if (fcode1 == BUILT_IN_POW
6516 || fcode1 == BUILT_IN_POWF
6517 || fcode1 == BUILT_IN_POWL)
6519 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6520 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6522 if (TREE_CODE (arg11) == REAL_CST
6523 && ! TREE_CONSTANT_OVERFLOW (arg11)
6524 && operand_equal_p (arg0, arg10, 0))
6526 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6530 c = TREE_REAL_CST (arg11);
6531 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6532 arg = build_real (type, c);
6533 arglist = build_tree_list (NULL_TREE, arg);
6534 arglist = tree_cons (NULL_TREE, arg0, arglist);
6535 return build_function_call_expr (powfn, arglist);
6539 /* Optimize pow(x,c)*x as pow(x,c+1). */
6540 if (fcode0 == BUILT_IN_POW
6541 || fcode0 == BUILT_IN_POWF
6542 || fcode0 == BUILT_IN_POWL)
6544 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6545 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6547 if (TREE_CODE (arg01) == REAL_CST
6548 && ! TREE_CONSTANT_OVERFLOW (arg01)
6549 && operand_equal_p (arg1, arg00, 0))
6551 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6555 c = TREE_REAL_CST (arg01);
6556 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6557 arg = build_real (type, c);
6558 arglist = build_tree_list (NULL_TREE, arg);
6559 arglist = tree_cons (NULL_TREE, arg1, arglist);
6560 return build_function_call_expr (powfn, arglist);
6564 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6566 && operand_equal_p (arg0, arg1, 0))
6570 if (type == double_type_node)
6571 powfn = implicit_built_in_decls[BUILT_IN_POW];
6572 else if (type == float_type_node)
6573 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6574 else if (type == long_double_type_node)
6575 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6581 tree arg = build_real (type, dconst2);
6582 tree arglist = build_tree_list (NULL_TREE, arg);
6583 arglist = tree_cons (NULL_TREE, arg0, arglist);
6584 return build_function_call_expr (powfn, arglist);
6593 if (integer_all_onesp (arg1))
6594 return omit_one_operand (type, arg1, arg0);
6595 if (integer_zerop (arg1))
6596 return non_lvalue (fold_convert (type, arg0));
6597 if (operand_equal_p (arg0, arg1, 0))
6598 return non_lvalue (fold_convert (type, arg0));
6599 t1 = distribute_bit_expr (code, type, arg0, arg1);
6600 if (t1 != NULL_TREE)
6603 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6605 This results in more efficient code for machines without a NAND
6606 instruction. Combine will canonicalize to the first form
6607 which will allow use of NAND instructions provided by the
6608 backend if they exist. */
6609 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6610 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6612 return fold (build1 (BIT_NOT_EXPR, type,
6613 build (BIT_AND_EXPR, type,
6614 TREE_OPERAND (arg0, 0),
6615 TREE_OPERAND (arg1, 0))));
6618 /* See if this can be simplified into a rotate first. If that
6619 is unsuccessful continue in the association code. */
6623 if (integer_zerop (arg1))
6624 return non_lvalue (fold_convert (type, arg0));
6625 if (integer_all_onesp (arg1))
6626 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6627 if (operand_equal_p (arg0, arg1, 0))
6628 return omit_one_operand (type, integer_zero_node, arg0);
6630 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6631 with a constant, and the two constants have no bits in common,
6632 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
6634 if (TREE_CODE (arg0) == BIT_AND_EXPR
6635 && TREE_CODE (arg1) == BIT_AND_EXPR
6636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6637 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6638 && integer_zerop (const_binop (BIT_AND_EXPR,
6639 TREE_OPERAND (arg0, 1),
6640 TREE_OPERAND (arg1, 1), 0)))
6642 code = BIT_IOR_EXPR;
6646 /* See if this can be simplified into a rotate first. If that
6647 is unsuccessful continue in the association code. */
6651 if (integer_all_onesp (arg1))
6652 return non_lvalue (fold_convert (type, arg0));
6653 if (integer_zerop (arg1))
6654 return omit_one_operand (type, arg1, arg0);
6655 if (operand_equal_p (arg0, arg1, 0))
6656 return non_lvalue (fold_convert (type, arg0));
6657 t1 = distribute_bit_expr (code, type, arg0, arg1);
6658 if (t1 != NULL_TREE)
6660 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6661 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6662 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6665 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6667 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6668 && (~TREE_INT_CST_LOW (arg1)
6669 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6670 return fold_convert (type, TREE_OPERAND (arg0, 0));
6673 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6675 This results in more efficient code for machines without a NOR
6676 instruction. Combine will canonicalize to the first form
6677 which will allow use of NOR instructions provided by the
6678 backend if they exist. */
6679 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6680 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6682 return fold (build1 (BIT_NOT_EXPR, type,
6683 build (BIT_IOR_EXPR, type,
6684 TREE_OPERAND (arg0, 0),
6685 TREE_OPERAND (arg1, 0))));
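/* For example, ~a & ~b becomes ~(a | b), matching the BIT_IOR_EXPR case
   above that rewrites ~a | ~b as ~(a & b).  */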
6691 /* Don't touch a floating-point divide by zero unless the mode
6692 of the constant can represent infinity. */
6693 if (TREE_CODE (arg1) == REAL_CST
6694 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6695 && real_zerop (arg1))
6698 /* (-A) / (-B) -> A / B */
6699 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6700 return fold (build (RDIV_EXPR, type,
6701 TREE_OPERAND (arg0, 0),
6702 negate_expr (arg1)));
6703 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6704 return fold (build (RDIV_EXPR, type,
6706 TREE_OPERAND (arg1, 0)));
6708 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6709 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6710 && real_onep (arg1))
6711 return non_lvalue (fold_convert (type, arg0));
6713 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6714 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6715 && real_minus_onep (arg1))
6716 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6718 /* If ARG1 is a constant, we can convert this to a multiply by the
6719 reciprocal. This does not have the same rounding properties,
6720 so only do this if -funsafe-math-optimizations. We can actually
6721 always safely do it if ARG1 is a power of two, but it's hard to
6722 tell if it is or not in a portable manner. */
6723 if (TREE_CODE (arg1) == REAL_CST)
6725 if (flag_unsafe_math_optimizations
6726 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6728 return fold (build (MULT_EXPR, type, arg0, tem));
6729 /* Find the reciprocal if optimizing and the result is exact. */
6733 r = TREE_REAL_CST (arg1);
6734 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6736 tem = build_real (type, r);
6737 return fold (build (MULT_EXPR, type, arg0, tem));
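/* For example, under -funsafe-math-optimizations x / 3.0 becomes
   x * (1.0/3.0); without it, x / 4.0 still becomes x * 0.25 when optimizing,
   because the reciprocal of a power of two is exact.  */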
6741 /* Convert A/B/C to A/(B*C). */
6742 if (flag_unsafe_math_optimizations
6743 && TREE_CODE (arg0) == RDIV_EXPR)
6744 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6745 fold (build (MULT_EXPR, type,
6746 TREE_OPERAND (arg0, 1), arg1))));
6748 /* Convert A/(B/C) to (A/B)*C. */
6749 if (flag_unsafe_math_optimizations
6750 && TREE_CODE (arg1) == RDIV_EXPR)
6751 return fold (build (MULT_EXPR, type,
6752 fold (build (RDIV_EXPR, type, arg0,
6753 TREE_OPERAND (arg1, 0))),
6754 TREE_OPERAND (arg1, 1)));
6756 /* Convert C1/(X*C2) into (C1/C2)/X. */
6757 if (flag_unsafe_math_optimizations
6758 && TREE_CODE (arg1) == MULT_EXPR
6759 && TREE_CODE (arg0) == REAL_CST
6760 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6762 tree tem = const_binop (RDIV_EXPR, arg0,
6763 TREE_OPERAND (arg1, 1), 0);
6765 return fold (build (RDIV_EXPR, type, tem,
6766 TREE_OPERAND (arg1, 0)));
6769 if (flag_unsafe_math_optimizations)
6771 enum built_in_function fcode = builtin_mathfn_code (arg1);
6772 /* Optimize x/expN(y) into x*expN(-y). */
6773 if (BUILTIN_EXPONENT_P (fcode))
6775 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6776 tree arg = build1 (NEGATE_EXPR, type,
6777 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6778 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6779 arg1 = build_function_call_expr (expfn, arglist);
6780 return fold (build (MULT_EXPR, type, arg0, arg1));
6783 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6784 if (fcode == BUILT_IN_POW
6785 || fcode == BUILT_IN_POWF
6786 || fcode == BUILT_IN_POWL)
6788 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6789 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6790 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6791 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6792 tree arglist = tree_cons(NULL_TREE, arg10,
6793 build_tree_list (NULL_TREE, neg11));
6794 arg1 = build_function_call_expr (powfn, arglist);
6795 return fold (build (MULT_EXPR, type, arg0, arg1));
6799 if (flag_unsafe_math_optimizations)
6801 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6802 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6804 /* Optimize sin(x)/cos(x) as tan(x). */
6805 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6806 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6807 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6808 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6809 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6813 if (fcode0 == BUILT_IN_SIN)
6814 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6815 else if (fcode0 == BUILT_IN_SINF)
6816 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6817 else if (fcode0 == BUILT_IN_SINL)
6818 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6822 if (tanfn != NULL_TREE)
6823 return build_function_call_expr (tanfn,
6824 TREE_OPERAND (arg0, 1));
6827 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6828 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6829 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6830 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6831 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6832 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6836 if (fcode0 == BUILT_IN_COS)
6837 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6838 else if (fcode0 == BUILT_IN_COSF)
6839 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6840 else if (fcode0 == BUILT_IN_COSL)
6841 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6845 if (tanfn != NULL_TREE)
6847 tree tmp = TREE_OPERAND (arg0, 1);
6848 tmp = build_function_call_expr (tanfn, tmp);
6849 return fold (build (RDIV_EXPR, type,
6850 build_real (type, dconst1),
6855 /* Optimize pow(x,c)/x as pow(x,c-1). */
6856 if (fcode0 == BUILT_IN_POW
6857 || fcode0 == BUILT_IN_POWF
6858 || fcode0 == BUILT_IN_POWL)
6860 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6861 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6862 if (TREE_CODE (arg01) == REAL_CST
6863 && ! TREE_CONSTANT_OVERFLOW (arg01)
6864 && operand_equal_p (arg1, arg00, 0))
6866 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6870 c = TREE_REAL_CST (arg01);
6871 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6872 arg = build_real (type, c);
6873 arglist = build_tree_list (NULL_TREE, arg);
6874 arglist = tree_cons (NULL_TREE, arg1, arglist);
6875 return build_function_call_expr (powfn, arglist);
6881 case TRUNC_DIV_EXPR:
6882 case ROUND_DIV_EXPR:
6883 case FLOOR_DIV_EXPR:
6885 case EXACT_DIV_EXPR:
6886 if (integer_onep (arg1))
6887 return non_lvalue (fold_convert (type, arg0));
6888 if (integer_zerop (arg1))
6891 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6892 operation, EXACT_DIV_EXPR.
6894 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6895 At one time others generated faster code, but it's not clear if they
6896 still do after the last round of changes to the DIV code in expmed.c.  */
6897 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6898 && multiple_of_p (type, arg0, arg1))
6899 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
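/* For example, a size computation that floor-divides n * 12 by 4 is known to
   be exact (12 % 4 == 0), so it is rewritten to use EXACT_DIV_EXPR, the
   cheapest division variant to expand.  */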
6901 if (TREE_CODE (arg1) == INTEGER_CST
6902 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6904 return fold_convert (type, tem);
6909 case FLOOR_MOD_EXPR:
6910 case ROUND_MOD_EXPR:
6911 case TRUNC_MOD_EXPR:
6912 if (integer_onep (arg1))
6913 return omit_one_operand (type, integer_zero_node, arg0);
6914 if (integer_zerop (arg1))
6917 if (TREE_CODE (arg1) == INTEGER_CST
6918 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6920 return fold_convert (type, tem);
6926 if (integer_all_onesp (arg0))
6927 return omit_one_operand (type, arg0, arg1);
6931 /* Optimize -1 >> x for arithmetic right shifts. */
6932 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6933 return omit_one_operand (type, arg0, arg1);
6934 /* ... fall through ... */
6938 if (integer_zerop (arg1))
6939 return non_lvalue (fold_convert (type, arg0));
6940 if (integer_zerop (arg0))
6941 return omit_one_operand (type, arg0, arg1);
6943 /* Since a negative shift count is not well-defined,
6944 don't try to compute it in the compiler. */
6945 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6947 /* Rewrite an LROTATE_EXPR by a constant into an
6948 RROTATE_EXPR by a new constant. */
6949 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6951 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6952 tem = fold_convert (TREE_TYPE (arg1), tem);
6953 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6954 return fold (build (RROTATE_EXPR, type, arg0, tem));
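/* For example, on a 32-bit type a rotate left by 3 is rewritten as a rotate
   right by 29, so only one canonical rotate direction by a constant needs to
   be handled later.  */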
6957 /* If we have a rotate of a bit operation with the rotate count and
6958 the second operand of the bit operation both constant,
6959 permute the two operations. */
6960 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6961 && (TREE_CODE (arg0) == BIT_AND_EXPR
6962 || TREE_CODE (arg0) == BIT_IOR_EXPR
6963 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6965 return fold (build (TREE_CODE (arg0), type,
6966 fold (build (code, type,
6967 TREE_OPERAND (arg0, 0), arg1)),
6968 fold (build (code, type,
6969 TREE_OPERAND (arg0, 1), arg1))));
6971 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
6973 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6974 && TREE_CODE (arg0) == RROTATE_EXPR
6975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6976 && TREE_INT_CST_HIGH (arg1) == 0
6977 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6978 && ((TREE_INT_CST_LOW (arg1)
6979 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6980 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6981 return TREE_OPERAND (arg0, 0);
6986 if (operand_equal_p (arg0, arg1, 0))
6987 return omit_one_operand (type, arg0, arg1);
6988 if (INTEGRAL_TYPE_P (type)
6989 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6990 return omit_one_operand (type, arg1, arg0);
6994 if (operand_equal_p (arg0, arg1, 0))
6995 return omit_one_operand (type, arg0, arg1);
6996 if (INTEGRAL_TYPE_P (type)
6997 && TYPE_MAX_VALUE (type)
6998 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6999 return omit_one_operand (type, arg1, arg0);
7002 case TRUTH_NOT_EXPR:
7003 /* Note that the operand of this must be an int
7004 and its values must be 0 or 1.
7005 ("true" is a fixed value perhaps depending on the language,
7006 but we don't handle values other than 1 correctly yet.) */
7007 tem = invert_truthvalue (arg0);
7008 /* Avoid infinite recursion. */
7009 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7011 tem = fold_single_bit_test (code, arg0, arg1, type);
7016 return fold_convert (type, tem);
7018 case TRUTH_ANDIF_EXPR:
7019 /* Note that the operands of this must be ints
7020 and their values must be 0 or 1.
7021 ("true" is a fixed value perhaps depending on the language.) */
7022 /* If first arg is constant zero, return it. */
7023 if (integer_zerop (arg0))
7024 return fold_convert (type, arg0);
7025 case TRUTH_AND_EXPR:
7026 /* If either arg is constant true, drop it. */
7027 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7028 return non_lvalue (fold_convert (type, arg1));
7029 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7030 /* Preserve sequence points. */
7031 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7032 return non_lvalue (fold_convert (type, arg0));
7033 /* If second arg is constant zero, result is zero, but first arg
7034 must be evaluated. */
7035 if (integer_zerop (arg1))
7036 return omit_one_operand (type, arg1, arg0);
7037 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7038 case will be handled here. */
7039 if (integer_zerop (arg0))
7040 return omit_one_operand (type, arg0, arg1);
7043 /* We only do these simplifications if we are optimizing. */
7047 /* Check for things like (A || B) && (A || C). We can convert this
7048 to A || (B && C). Note that either operator can be any of the four
7049 truth and/or operations and the transformation will still be
7050 valid. Also note that we only care about order for the
7051 ANDIF and ORIF operators. If B contains side effects, this
7052 might change the truth-value of A. */
7053 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7054 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7055 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7056 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7057 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7058 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7060 tree a00 = TREE_OPERAND (arg0, 0);
7061 tree a01 = TREE_OPERAND (arg0, 1);
7062 tree a10 = TREE_OPERAND (arg1, 0);
7063 tree a11 = TREE_OPERAND (arg1, 1);
7064 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7065 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7066 && (code == TRUTH_AND_EXPR
7067 || code == TRUTH_OR_EXPR));
7069 if (operand_equal_p (a00, a10, 0))
7070 return fold (build (TREE_CODE (arg0), type, a00,
7071 fold (build (code, type, a01, a11))));
7072 else if (commutative && operand_equal_p (a00, a11, 0))
7073 return fold (build (TREE_CODE (arg0), type, a00,
7074 fold (build (code, type, a01, a10))));
7075 else if (commutative && operand_equal_p (a01, a10, 0))
7076 return fold (build (TREE_CODE (arg0), type, a01,
7077 fold (build (code, type, a00, a11))));
7079 /* This case is tricky because we must either have commutative
7080 operators or else A10 must not have side-effects. */
7082 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7083 && operand_equal_p (a01, a11, 0))
7084 return fold (build (TREE_CODE (arg0), type,
7085 fold (build (code, type, a00, a10)),
7089 /* See if we can build a range comparison. */
7090 if (0 != (tem = fold_range_test (t)))
7093 /* Check for the possibility of merging component references. If our
7094 lhs is another similar operation, try to merge its rhs with our
7095 rhs. Then try to merge our lhs and rhs. */
7096 if (TREE_CODE (arg0) == code
7097 && 0 != (tem = fold_truthop (code, type,
7098 TREE_OPERAND (arg0, 1), arg1)))
7099 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7101 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7106 case TRUTH_ORIF_EXPR:
7107 /* Note that the operands of this must be ints
7108 and their values must be 0 or true.
7109 ("true" is a fixed value perhaps depending on the language.) */
7110 /* If first arg is constant true, return it. */
7111 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7112 return fold_convert (type, arg0);
7114 /* If either arg is constant zero, drop it. */
7115 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7116 return non_lvalue (fold_convert (type, arg1));
7117 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7118 /* Preserve sequence points. */
7119 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7120 return non_lvalue (fold_convert (type, arg0));
7121 /* If second arg is constant true, result is true, but we must
7122 evaluate first arg. */
7123 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7124 return omit_one_operand (type, arg1, arg0);
7125 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
7127 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7128 return omit_one_operand (type, arg0, arg1);
7131 case TRUTH_XOR_EXPR:
7132 /* If either arg is constant zero, drop it. */
7133 if (integer_zerop (arg0))
7134 return non_lvalue (fold_convert (type, arg1));
7135 if (integer_zerop (arg1))
7136 return non_lvalue (fold_convert (type, arg0));
7137 /* If either arg is constant true, this is a logical inversion. */
7138 if (integer_onep (arg0))
7139 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7140 if (integer_onep (arg1))
7141 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7150 /* If one arg is a real or integer constant, put it last. */
7151 if (tree_swap_operands_p (arg0, arg1, true))
7152 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7154 /* If this is an equality comparison of the address of a non-weak
7155 object against zero, then we know the result. */
7156 if ((code == EQ_EXPR || code == NE_EXPR)
7157 && TREE_CODE (arg0) == ADDR_EXPR
7158 && DECL_P (TREE_OPERAND (arg0, 0))
7159 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7160 && integer_zerop (arg1))
7162 if (code == EQ_EXPR)
7163 return integer_zero_node;
7165 return integer_one_node;
7168 /* If this is an equality comparison of the address of two non-weak,
7169 unaliased symbols neither of which are extern (since we do not
7170 have access to attributes for externs), then we know the result. */
7171 if ((code == EQ_EXPR || code == NE_EXPR)
7172 && TREE_CODE (arg0) == ADDR_EXPR
7173 && DECL_P (TREE_OPERAND (arg0, 0))
7174 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7175 && ! lookup_attribute ("alias",
7176 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7177 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7178 && TREE_CODE (arg1) == ADDR_EXPR
7179 && DECL_P (TREE_OPERAND (arg1, 0))
7180 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7181 && ! lookup_attribute ("alias",
7182 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7183 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7185 if (code == EQ_EXPR)
7186 return (operand_equal_p (arg0, arg1, 0)
7187 ? integer_one_node : integer_zero_node);
7189 return (operand_equal_p (arg0, arg1, 0)
7190 ? integer_zero_node : integer_one_node);
7193 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7195 tree targ0 = strip_float_extensions (arg0);
7196 tree targ1 = strip_float_extensions (arg1);
7197 tree newtype = TREE_TYPE (targ0);
7199 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7200 newtype = TREE_TYPE (targ1);
7202 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7203 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7204 return fold (build (code, type, fold_convert (newtype, targ0),
7205 fold_convert (newtype, targ1)));
7207 /* (-a) CMP (-b) -> b CMP a */
7208 if (TREE_CODE (arg0) == NEGATE_EXPR
7209 && TREE_CODE (arg1) == NEGATE_EXPR)
7210 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7211 TREE_OPERAND (arg0, 0)));
7213 if (TREE_CODE (arg1) == REAL_CST)
7215 REAL_VALUE_TYPE cst;
7216 cst = TREE_REAL_CST (arg1);
7218 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7219 if (TREE_CODE (arg0) == NEGATE_EXPR)
7221 fold (build (swap_tree_comparison (code), type,
7222 TREE_OPERAND (arg0, 0),
7223 build_real (TREE_TYPE (arg1),
7224 REAL_VALUE_NEGATE (cst))));
7226 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7227 /* a CMP (-0) -> a CMP 0 */
7228 if (REAL_VALUE_MINUS_ZERO (cst))
7229 return fold (build (code, type, arg0,
7230 build_real (TREE_TYPE (arg1), dconst0)));
7232 /* x != NaN is always true, other ops are always false. */
7233 if (REAL_VALUE_ISNAN (cst)
7234 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7236 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7237 return omit_one_operand (type, fold_convert (type, t), arg0);
7240 /* Fold comparisons against infinity. */
7241 if (REAL_VALUE_ISINF (cst))
7243 tem = fold_inf_compare (code, type, arg0, arg1);
7244 if (tem != NULL_TREE)
7249 /* If this is a comparison of a real constant with a PLUS_EXPR
7250 or a MINUS_EXPR of a real constant, we can convert it into a
7251 comparison with a revised real constant as long as no overflow
7252 occurs when unsafe_math_optimizations are enabled. */
7253 if (flag_unsafe_math_optimizations
7254 && TREE_CODE (arg1) == REAL_CST
7255 && (TREE_CODE (arg0) == PLUS_EXPR
7256 || TREE_CODE (arg0) == MINUS_EXPR)
7257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7258 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7259 ? MINUS_EXPR : PLUS_EXPR,
7260 arg1, TREE_OPERAND (arg0, 1), 0))
7261 && ! TREE_CONSTANT_OVERFLOW (tem))
7262 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7264 /* Likewise, we can simplify a comparison of a real constant with
7265 a MINUS_EXPR whose first operand is also a real constant, i.e.
7266 (c1 - x) < c2 becomes x > c1-c2. */
7267 if (flag_unsafe_math_optimizations
7268 && TREE_CODE (arg1) == REAL_CST
7269 && TREE_CODE (arg0) == MINUS_EXPR
7270 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7271 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7273 && ! TREE_CONSTANT_OVERFLOW (tem))
7274 return fold (build (swap_tree_comparison (code), type,
7275 TREE_OPERAND (arg0, 1), tem));
7277 /* Fold comparisons against built-in math functions. */
7278 if (TREE_CODE (arg1) == REAL_CST
7279 && flag_unsafe_math_optimizations
7280 && ! flag_errno_math)
7282 enum built_in_function fcode = builtin_mathfn_code (arg0);
7284 if (fcode != END_BUILTINS)
7286 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7287 if (tem != NULL_TREE)
7293 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7294 if (TREE_CONSTANT (arg1)
7295 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7296 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7297 /* This optimization is invalid for ordered comparisons
7298 if CONST+INCR overflows or if foo+incr might overflow.
7299 This optimization is invalid for floating point due to rounding.
7300 For pointer types we assume overflow doesn't happen. */
7301 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7302 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7303 && (code == EQ_EXPR || code == NE_EXPR))))
7305 tree varop, newconst;
7307 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7309 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7310 arg1, TREE_OPERAND (arg0, 1)));
7311 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7312 TREE_OPERAND (arg0, 0),
7313 TREE_OPERAND (arg0, 1));
7317 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7318 arg1, TREE_OPERAND (arg0, 1)));
7319 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7320 TREE_OPERAND (arg0, 0),
7321 TREE_OPERAND (arg0, 1));
7325 /* If VAROP is a reference to a bitfield, we must mask
7326 the constant by the width of the field. */
7327 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7328 && DECL_BIT_FIELD(TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7330 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7331 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7332 tree folded_compare;
7335 /* First check whether the comparison would come out
7336 always the same. If we don't do that we would
7337 change the meaning with the masking. */
7338 folded_compare = fold (build2 (code, type,
7339 TREE_OPERAND (varop, 0),
7341 if (integer_zerop (folded_compare)
7342 || integer_onep (folded_compare))
7343 return omit_one_operand (type, folded_compare, varop);
7345 if (size < HOST_BITS_PER_WIDE_INT)
7347 unsigned HOST_WIDE_INT lo = ((unsigned HOST_WIDE_INT) 1
7349 mask = build_int_2 (lo, 0);
7351 else if (size < 2 * HOST_BITS_PER_WIDE_INT)
7353 HOST_WIDE_INT hi = ((HOST_WIDE_INT) 1
7354 << (size - HOST_BITS_PER_WIDE_INT)) - 1;
7355 mask = build_int_2 (~0, hi);
7360 mask = fold_convert (TREE_TYPE (varop), mask);
7361 newconst = fold (build2 (BIT_AND_EXPR, TREE_TYPE (varop),
7366 return fold (build2 (code, type, varop, newconst));
7369 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7370 This transformation affects the cases which are handled in later
7371 optimizations involving comparisons with non-negative constants. */
7372 if (TREE_CODE (arg1) == INTEGER_CST
7373 && TREE_CODE (arg0) != INTEGER_CST
7374 && tree_int_cst_sgn (arg1) > 0)
7379 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7380 return fold (build (GT_EXPR, type, arg0, arg1));
7383 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7384 return fold (build (LE_EXPR, type, arg0, arg1));
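/* For example, x >= 5 becomes x > 4 and x < 5 becomes x <= 4, putting the
   constant into the form the later optimizations expect.  */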
7391 /* Comparisons with the highest or lowest possible integer of
7392 the specified size will have known values. */
7394 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7396 if (TREE_CODE (arg1) == INTEGER_CST
7397 && ! TREE_CONSTANT_OVERFLOW (arg1)
7398 && width <= HOST_BITS_PER_WIDE_INT
7399 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7400 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7402 unsigned HOST_WIDE_INT signed_max;
7403 unsigned HOST_WIDE_INT max, min;
7405 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7407 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7409 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7415 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7418 if (TREE_INT_CST_HIGH (arg1) == 0
7419 && TREE_INT_CST_LOW (arg1) == max)
7423 return omit_one_operand (type,
7428 return fold (build (EQ_EXPR, type, arg0, arg1));
7431 return omit_one_operand (type,
7436 return fold (build (NE_EXPR, type, arg0, arg1));
7438 /* The GE_EXPR and LT_EXPR cases above are not normally
7439 reached because of previous transformations. */
7444 else if (TREE_INT_CST_HIGH (arg1) == 0
7445 && TREE_INT_CST_LOW (arg1) == max - 1)
7449 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7450 return fold (build (EQ_EXPR, type, arg0, arg1));
7452 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7453 return fold (build (NE_EXPR, type, arg0, arg1));
7457 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7458 && TREE_INT_CST_LOW (arg1) == min)
7462 return omit_one_operand (type,
7467 return fold (build (EQ_EXPR, type, arg0, arg1));
7470 return omit_one_operand (type,
7475 return fold (build (NE_EXPR, type, arg0, arg1));
7480 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7481 && TREE_INT_CST_LOW (arg1) == min + 1)
7485 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7486 return fold (build (NE_EXPR, type, arg0, arg1));
7488 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7489 return fold (build (EQ_EXPR, type, arg0, arg1));
7494 else if (TREE_INT_CST_HIGH (arg1) == 0
7495 && TREE_INT_CST_LOW (arg1) == signed_max
7496 && TREE_UNSIGNED (TREE_TYPE (arg1))
7497 /* signed_type does not work on pointer types. */
7498 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7500 /* The following case also applies to X < signed_max+1
7501 and X >= signed_max+1 because of previous transformations.  */
7502 if (code == LE_EXPR || code == GT_EXPR)
7505 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7506 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7508 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7509 type, fold_convert (st0, arg0),
7510 fold_convert (st1, integer_zero_node)));
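/* For example, for a 32-bit unsigned x, x <= 2147483647 becomes
   (int) x >= 0 and x > 2147483647 becomes (int) x < 0, which are simple
   sign tests.  */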
7516 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7517 a MINUS_EXPR of a constant, we can convert it into a comparison with
7518 a revised constant as long as no overflow occurs. */
7519 if ((code == EQ_EXPR || code == NE_EXPR)
7520 && TREE_CODE (arg1) == INTEGER_CST
7521 && (TREE_CODE (arg0) == PLUS_EXPR
7522 || TREE_CODE (arg0) == MINUS_EXPR)
7523 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7524 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7525 ? MINUS_EXPR : PLUS_EXPR,
7526 arg1, TREE_OPERAND (arg0, 1), 0))
7527 && ! TREE_CONSTANT_OVERFLOW (tem))
7528 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7530 /* Similarly for a NEGATE_EXPR. */
7531 else if ((code == EQ_EXPR || code == NE_EXPR)
7532 && TREE_CODE (arg0) == NEGATE_EXPR
7533 && TREE_CODE (arg1) == INTEGER_CST
7534 && 0 != (tem = negate_expr (arg1))
7535 && TREE_CODE (tem) == INTEGER_CST
7536 && ! TREE_CONSTANT_OVERFLOW (tem))
7537 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7539 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7540 for !=. Don't do this for ordered comparisons due to overflow. */
7541 else if ((code == NE_EXPR || code == EQ_EXPR)
7542 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7543 return fold (build (code, type,
7544 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7546 /* If we are widening one operand of an integer comparison,
7547 see if the other operand is similarly being widened. Perhaps we
7548 can do the comparison in the narrower type. */
7549 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7550 && TREE_CODE (arg0) == NOP_EXPR
7551 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7552 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7553 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7554 || (TREE_CODE (t1) == INTEGER_CST
7555 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7556 return fold (build (code, type, tem,
7557 fold_convert (TREE_TYPE (tem), t1)));
7559 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7560 constant, we can simplify it. */
7561 else if (TREE_CODE (arg1) == INTEGER_CST
7562 && (TREE_CODE (arg0) == MIN_EXPR
7563 || TREE_CODE (arg0) == MAX_EXPR)
7564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7565 return optimize_minmax_comparison (t);
7567 /* If we are comparing an ABS_EXPR with a constant, we can
7568 convert all the cases into explicit comparisons, but they may
7569 well not be faster than doing the ABS and one comparison.
7570 But ABS (X) <= C is a range comparison, which becomes a subtraction
7571 and a comparison, and is probably faster. */
7572 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7573 && TREE_CODE (arg0) == ABS_EXPR
7574 && ! TREE_SIDE_EFFECTS (arg0)
7575 && (0 != (tem = negate_expr (arg1)))
7576 && TREE_CODE (tem) == INTEGER_CST
7577 && ! TREE_CONSTANT_OVERFLOW (tem))
7578 return fold (build (TRUTH_ANDIF_EXPR, type,
7579 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7580 build (LE_EXPR, type,
7581 TREE_OPERAND (arg0, 0), arg1)));
7583 /* If this is an EQ or NE comparison with zero and ARG0 is
7584 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7585 two operations, but the latter can be done in one less insn
7586 on machines that have only two-operand insns or on which a
7587 constant cannot be the first operand. */
7588 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7589 && TREE_CODE (arg0) == BIT_AND_EXPR)
7591 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7592 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7594 fold (build (code, type,
7595 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7597 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7598 TREE_OPERAND (arg0, 1),
7599 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7600 fold_convert (TREE_TYPE (arg0),
7603 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7604 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7606 fold (build (code, type,
7607 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7609 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7610 TREE_OPERAND (arg0, 0),
7611 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7612 fold_convert (TREE_TYPE (arg0),
7617 /* If this is an NE or EQ comparison of zero against the result of a
7618 signed MOD operation whose second operand is a power of 2, make
7619 the MOD operation unsigned since it is simpler and equivalent. */
7620 if ((code == NE_EXPR || code == EQ_EXPR)
7621 && integer_zerop (arg1)
7622 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7623 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7624 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7625 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7626 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7627 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7629 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7630 tree newmod = build (TREE_CODE (arg0), newtype,
7631 fold_convert (newtype,
7632 TREE_OPERAND (arg0, 0)),
7633 fold_convert (newtype,
7634 TREE_OPERAND (arg0, 1)));
7636 return build (code, type, newmod, fold_convert (newtype, arg1));
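/* For example, for signed x, x % 8 == 0 is rewritten as
   (unsigned) x % 8 == 0; the unsigned remainder is a simple mask, while the
   signed one needs extra code for negative values.  */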
7639 /* If this is an NE comparison of zero with an AND of one, remove the
7640 comparison since the AND will give the correct value. */
7641 if (code == NE_EXPR && integer_zerop (arg1)
7642 && TREE_CODE (arg0) == BIT_AND_EXPR
7643 && integer_onep (TREE_OPERAND (arg0, 1)))
7644 return fold_convert (type, arg0);
7646 /* If we have (A & C) == C where C is a power of 2, convert this into
7647 (A & C) != 0. Similarly for NE_EXPR. */
7648 if ((code == EQ_EXPR || code == NE_EXPR)
7649 && TREE_CODE (arg0) == BIT_AND_EXPR
7650 && integer_pow2p (TREE_OPERAND (arg0, 1))
7651 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7652 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7653 arg0, integer_zero_node));
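/* For example, (flags & 8) == 8 becomes (flags & 8) != 0, and
   (flags & 8) != 8 becomes (flags & 8) == 0, since the mask is a single
   bit.  */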
7655 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7656 2, then fold the expression into shifts and logical operations. */
7657 tem = fold_single_bit_test (code, arg0, arg1, type);
7661 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7662 Similarly for NE_EXPR. */
7663 if ((code == EQ_EXPR || code == NE_EXPR)
7664 && TREE_CODE (arg0) == BIT_AND_EXPR
7665 && TREE_CODE (arg1) == INTEGER_CST
7666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7669 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7670 arg1, build1 (BIT_NOT_EXPR,
7671 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7672 TREE_OPERAND (arg0, 1))));
7673 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7674 if (integer_nonzerop (dandnotc))
7675 return omit_one_operand (type, rslt, arg0);
7678 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7679 Similarly for NE_EXPR. */
7680 if ((code == EQ_EXPR || code == NE_EXPR)
7681 && TREE_CODE (arg0) == BIT_IOR_EXPR
7682 && TREE_CODE (arg1) == INTEGER_CST
7683 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7686 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7687 TREE_OPERAND (arg0, 1),
7688 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7689 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7690 if (integer_nonzerop (candnotd))
7691 return omit_one_operand (type, rslt, arg0);
7694 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7695 and similarly for >= into !=. */
7696 if ((code == LT_EXPR || code == GE_EXPR)
7697 && TREE_UNSIGNED (TREE_TYPE (arg0))
7698 && TREE_CODE (arg1) == LSHIFT_EXPR
7699 && integer_onep (TREE_OPERAND (arg1, 0)))
7700 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7701 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7702 TREE_OPERAND (arg1, 1)),
7703 fold_convert (TREE_TYPE (arg0), integer_zero_node));
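/* For example, for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0.  */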
7705 else if ((code == LT_EXPR || code == GE_EXPR)
7706 && TREE_UNSIGNED (TREE_TYPE (arg0))
7707 && (TREE_CODE (arg1) == NOP_EXPR
7708 || TREE_CODE (arg1) == CONVERT_EXPR)
7709 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7710 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7712 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7713 fold_convert (TREE_TYPE (arg0),
7714 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7715 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7717 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7719 /* Simplify comparison of something with itself. (For IEEE
7720 floating-point, we can only do some of these simplifications.) */
7721 if (operand_equal_p (arg0, arg1, 0))
7726 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7727 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7728 return constant_boolean_node (1, type);
7733 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7734 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7735 return constant_boolean_node (1, type);
7736 return fold (build (EQ_EXPR, type, arg0, arg1));
7739 /* For NE, we can only do this simplification if integer
7740 or we don't honor IEEE floating point NaNs. */
7741 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7742 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7744 /* ... fall through ... */
7747 return constant_boolean_node (0, type);
7753 /* If we are comparing an expression that just has comparisons
7754 of two integer values, arithmetic expressions of those comparisons,
7755 and constants, we can simplify it. There are only three cases
7756 to check: the two values can either be equal, the first can be
7757 greater, or the second can be greater. Fold the expression for
7758 those three values. Since each value must be 0 or 1, we have
7759 eight possibilities, each of which corresponds to the constant 0
7760 or 1 or one of the six possible comparisons.
7762 This handles common cases like (a > b) == 0 but also handles
7763 expressions like ((x > y) - (y > x)) > 0, which supposedly
7764 occur in macroized code. */
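      /* Worked example (hypothetical integers x and y, not part of the
	 original source): the expression

	   ((x > y) - (y > x)) > 0

	 is evaluated below for the three orderings x > y, x == y and x < y,
	 giving the results 1, 0 and 0, i.e. the 3-bit mask 4, which the
	 switch below maps back to the single comparison x > y.  */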
7766 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7768 tree cval1 = 0, cval2 = 0;
7771 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7772 /* Don't handle degenerate cases here; they should already
7773 have been handled anyway. */
7774 && cval1 != 0 && cval2 != 0
7775 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7776 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7777 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7778 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7779 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7780 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7781 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7783 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7784 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7786 /* We can't just pass T to eval_subst in case cval1 or cval2
7787 was the same as ARG1. */
7790 = fold (build (code, type,
7791 eval_subst (arg0, cval1, maxval, cval2, minval),
7794 = fold (build (code, type,
7795 eval_subst (arg0, cval1, maxval, cval2, maxval),
7798 = fold (build (code, type,
7799 eval_subst (arg0, cval1, minval, cval2, maxval),
7802 /* All three of these results should be 0 or 1. Confirm they
7803 are.  Then use those values to select the proper code to return.  */
7806 if ((integer_zerop (high_result)
7807 || integer_onep (high_result))
7808 && (integer_zerop (equal_result)
7809 || integer_onep (equal_result))
7810 && (integer_zerop (low_result)
7811 || integer_onep (low_result)))
7813 /* Make a 3-bit mask with the high-order bit being the
7814 value for `>', the next for '=', and the low for '<'.  */
7815 switch ((integer_onep (high_result) * 4)
7816 + (integer_onep (equal_result) * 2)
7817 + integer_onep (low_result))
7821 return omit_one_operand (type, integer_zero_node, arg0);
7842 return omit_one_operand (type, integer_one_node, arg0);
7845 t = build (code, type, cval1, cval2);
7847 return save_expr (t);
7854 /* If this is a comparison of a field, we may be able to simplify it. */
7855 if (((TREE_CODE (arg0) == COMPONENT_REF
7856 && lang_hooks.can_use_bit_fields_p ())
7857 || TREE_CODE (arg0) == BIT_FIELD_REF)
7858 && (code == EQ_EXPR || code == NE_EXPR)
7859 /* Handle the constant case even without -O
7860 to make sure the warnings are given. */
7861 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7863 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7868 /* If this is a comparison of complex values and either or both sides
7869 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7870 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7871 This may prevent needless evaluations. */
7872 if ((code == EQ_EXPR || code == NE_EXPR)
7873 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7874 && (TREE_CODE (arg0) == COMPLEX_EXPR
7875 || TREE_CODE (arg1) == COMPLEX_EXPR
7876 || TREE_CODE (arg0) == COMPLEX_CST
7877 || TREE_CODE (arg1) == COMPLEX_CST))
7879 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7880 tree real0, imag0, real1, imag1;
7882 arg0 = save_expr (arg0);
7883 arg1 = save_expr (arg1);
7884 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7885 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7886 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7887 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7889 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7892 fold (build (code, type, real0, real1)),
7893 fold (build (code, type, imag0, imag1))));
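      /* Illustrative sketch (hypothetical _Complex double z, not part of the
	 original source): the code above turns

	   z == w   into   __real z == __real w && __imag z == __imag w

	 (and uses || for !=), so a mismatch in the real parts can skip
	 evaluating the imaginary-part comparison entirely.  */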
7896 /* Optimize comparisons of strlen vs zero to a compare of the
7897 first character of the string vs zero. To wit,
7898 strlen(ptr) == 0 => *ptr == 0
7899 strlen(ptr) != 0 => *ptr != 0
7900 Other cases should reduce to one of these two (or a constant)
7901 due to the return value of strlen being unsigned. */
7902 if ((code == EQ_EXPR || code == NE_EXPR)
7903 && integer_zerop (arg1)
7904 && TREE_CODE (arg0) == CALL_EXPR)
7906 tree fndecl = get_callee_fndecl (arg0);
7910 && DECL_BUILT_IN (fndecl)
7911 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7912 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7913 && (arglist = TREE_OPERAND (arg0, 1))
7914 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7915 && ! TREE_CHAIN (arglist))
7916 return fold (build (code, type,
7917 build1 (INDIRECT_REF, char_type_node,
7918 TREE_VALUE (arglist)),
7919 integer_zero_node));
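      /* Illustrative sketch (hypothetical char *s, not part of the original
	 source): the fold above rewrites

	   strlen (s) == 0   into   *s == 0
	   strlen (s) != 0   into   *s != 0

	 avoiding a full scan of the string when only emptiness matters.  */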
7922 /* From here on, the only cases we handle are when the result is
7923 known to be a constant.
7925 To compute GT, swap the arguments and do LT.
7926 To compute GE, do LT and invert the result.
7927 To compute LE, swap the arguments, do LT and invert the result.
7928 To compute NE, do EQ and invert the result.
7930 Therefore, the code below must handle only EQ and LT. */
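  /* Illustrative sketch (not part of the original source): a constant
     comparison such as 3 > 2 is handled by swapping it to 2 < 3, and 3 >= 2
     by computing 3 < 2 and inverting the 0/1 result, so only the EQ_EXPR
     and LT_EXPR cases below need constant-folding logic of their own.  */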
7932 if (code == LE_EXPR || code == GT_EXPR)
7934 tem = arg0, arg0 = arg1, arg1 = tem;
7935 code = swap_tree_comparison (code);
7938 /* Note that it is safe to invert for real values here because we
7939 will check below in the one case that it matters. */
7943 if (code == NE_EXPR || code == GE_EXPR)
7946 code = invert_tree_comparison (code);
7949 /* Compute a result for LT or EQ if args permit;
7950 otherwise return T. */
7951 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7953 if (code == EQ_EXPR)
7954 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7956 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7957 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7958 : INT_CST_LT (arg0, arg1)),
7962 #if 0 /* This is no longer useful, but breaks some real code. */
7963 /* Assume a nonexplicit constant cannot equal an explicit one,
7964 since such code would be undefined anyway.
7965 Exception: on sysvr4, using #pragma weak,
7966 a label can come out as 0. */
7967 else if (TREE_CODE (arg1) == INTEGER_CST
7968 && !integer_zerop (arg1)
7969 && TREE_CONSTANT (arg0)
7970 && TREE_CODE (arg0) == ADDR_EXPR
7972 t1 = build_int_2 (0, 0);
7974 /* Two real constants can be compared explicitly. */
7975 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7977 /* If either operand is a NaN, the result is false with two
7978 exceptions: First, an NE_EXPR is true on NaNs, but that case
7979 is already handled correctly since we will be inverting the
7980 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7981 or a GE_EXPR into a LT_EXPR, we must return true so that it
7982 will be inverted into false. */
7984 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7985 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7986 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7988 else if (code == EQ_EXPR)
7989 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7990 TREE_REAL_CST (arg1)),
7993 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7994 TREE_REAL_CST (arg1)),
7998 if (t1 == NULL_TREE)
8002 TREE_INT_CST_LOW (t1) ^= 1;
8004 TREE_TYPE (t1) = type;
8005 if (TREE_CODE (type) == BOOLEAN_TYPE)
8006 return lang_hooks.truthvalue_conversion (t1);
8010 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8011 so all simple results must be passed through pedantic_non_lvalue. */
8012 if (TREE_CODE (arg0) == INTEGER_CST)
8014 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8015 /* Only optimize constant conditions when the selected branch
8016 has the same type as the COND_EXPR. This avoids optimizing
8017 away "c ? x : throw", where the throw has a void type. */
8018 if (! VOID_TYPE_P (TREE_TYPE (tem))
8019 || VOID_TYPE_P (TREE_TYPE (t)))
8020 return pedantic_non_lvalue (tem);
8023 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
8024 return pedantic_omit_one_operand (type, arg1, arg0);
8026 /* If we have A op B ? A : C, we may be able to convert this to a
8027 simpler expression, depending on the operation and the values
8028 of B and C. Signed zeros prevent all of these transformations,
8029 for reasons given above each one. */
8031 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8032 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8033 arg1, TREE_OPERAND (arg0, 1))
8034 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8036 tree arg2 = TREE_OPERAND (t, 2);
8037 enum tree_code comp_code = TREE_CODE (arg0);
8041 /* If we have A op 0 ? A : -A, consider applying the following
8044 A == 0? A : -A same as -A
8045 A != 0? A : -A same as A
8046 A >= 0? A : -A same as abs (A)
8047 A > 0? A : -A same as abs (A)
8048 A <= 0? A : -A same as -abs (A)
8049 A < 0? A : -A same as -abs (A)
8051 None of these transformations work for modes with signed
8052 zeros. If A is +/-0, the first two transformations will
8053 change the sign of the result (from +0 to -0, or vice
8054 versa). The last four will fix the sign of the result,
8055 even though the original expressions could be positive or
8056 negative, depending on the sign of A.
8058 Note that all these transformations are correct if A is
8059 NaN, since the two alternatives (A and -A) are also NaNs. */
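	  /* Worked example (hypothetical double d, not part of the original
	     source): when signed zeros need not be honored for the mode, the
	     rules above fold

	       d >= 0 ? d : -d   into   fabs (d)
	       d <  0 ? d : -d   into   -fabs (d)
	       d != 0 ? d : -d   into   d

	     With signed zeros honored none of these fire, since for d == -0.0
	     the rewrites could produce a result with the wrong sign.  */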
8060 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8061 ? real_zerop (TREE_OPERAND (arg0, 1))
8062 : integer_zerop (TREE_OPERAND (arg0, 1)))
8063 && TREE_CODE (arg2) == NEGATE_EXPR
8064 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8068 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8069 tem = fold_convert (type, negate_expr (tem));
8070 return pedantic_non_lvalue (tem);
8072 return pedantic_non_lvalue (fold_convert (type, arg1));
8075 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8076 arg1 = fold_convert (lang_hooks.types.signed_type
8077 (TREE_TYPE (arg1)), arg1);
8078 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8079 return pedantic_non_lvalue (fold_convert (type, arg1));
8082 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8083 arg1 = fold_convert (lang_hooks.types.signed_type
8084 (TREE_TYPE (arg1)), arg1);
8085 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8086 arg1 = negate_expr (fold_convert (type, arg1));
8087 return pedantic_non_lvalue (arg1);
8092 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8093 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8094 both transformations are correct when A is NaN: A != 0
8095 is then true, and A == 0 is false. */
8097 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8099 if (comp_code == NE_EXPR)
8100 return pedantic_non_lvalue (fold_convert (type, arg1));
8101 else if (comp_code == EQ_EXPR)
8102 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8105 /* Try some transformations of A op B ? A : B.
8107 A == B? A : B same as B
8108 A != B? A : B same as A
8109 A >= B? A : B same as max (A, B)
8110 A > B? A : B same as max (B, A)
8111 A <= B? A : B same as min (A, B)
8112 A < B? A : B same as min (B, A)
8114 As above, these transformations don't work in the presence
8115 of signed zeros. For example, if A and B are zeros of
8116 opposite sign, the first two transformations will change
8117 the sign of the result. In the last four, the original
8118 expressions give different results for (A=+0, B=-0) and
8119 (A=-0, B=+0), but the transformed expressions do not.
8121 The first two transformations are correct if either A or B
8122 is a NaN. In the first transformation, the condition will
8123 be false, and B will indeed be chosen. In the case of the
8124 second transformation, the condition A != B will be true,
8125 and A will be chosen.
8127 The conversions to max() and min() are not correct if B is
8128 a number and A is not. The conditions in the original
8129 expressions will be false, so all four give B. The min()
8130 and max() versions would give a NaN instead. */
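	  /* Worked example (hypothetical integers a and b, not part of the
	     original source): the rules above give

	       a <= b ? a : b   ->   MIN_EXPR (a, b)
	       a >  b ? a : b   ->   MAX_EXPR (b, a)
	       a == b ? a : b   ->   b

	     The MIN/MAX forms are only emitted when NaNs need not be
	     honored, which is trivially true for integer operands.  */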
8131 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8132 arg2, TREE_OPERAND (arg0, 0)))
8134 tree comp_op0 = TREE_OPERAND (arg0, 0);
8135 tree comp_op1 = TREE_OPERAND (arg0, 1);
8136 tree comp_type = TREE_TYPE (comp_op0);
8138 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8139 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8149 return pedantic_non_lvalue (fold_convert (type, arg2));
8151 return pedantic_non_lvalue (fold_convert (type, arg1));
8154 /* In C++ a ?: expression can be an lvalue, so put the
8155 operand which will be used if they are equal first
8156 so that we can convert this back to the
8157 corresponding COND_EXPR. */
8158 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8159 return pedantic_non_lvalue (fold_convert
8160 (type, fold (build (MIN_EXPR, comp_type,
8161 (comp_code == LE_EXPR
8162 ? comp_op0 : comp_op1),
8163 (comp_code == LE_EXPR
8164 ? comp_op1 : comp_op0)))));
8168 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8169 return pedantic_non_lvalue (fold_convert
8170 (type, fold (build (MAX_EXPR, comp_type,
8171 (comp_code == GE_EXPR
8172 ? comp_op0 : comp_op1),
8173 (comp_code == GE_EXPR
8174 ? comp_op1 : comp_op0)))));
8181 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8182 we might still be able to simplify this. For example,
8183 if C1 is one less or one more than C2, this might have started
8184 out as a MIN or MAX and been transformed by this function.
8185 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8187 if (INTEGRAL_TYPE_P (type)
8188 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8189 && TREE_CODE (arg2) == INTEGER_CST)
8193 /* We can replace A with C1 in this case. */
8194 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8195 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8196 TREE_OPERAND (t, 2)));
8199 /* If C1 is C2 + 1, this is min(A, C2). */
8200 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8201 && operand_equal_p (TREE_OPERAND (arg0, 1),
8202 const_binop (PLUS_EXPR, arg2,
8203 integer_one_node, 0), 1))
8204 return pedantic_non_lvalue
8205 (fold (build (MIN_EXPR, type, arg1, arg2)));
8209 /* If C1 is C2 - 1, this is min(A, C2). */
8210 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8211 && operand_equal_p (TREE_OPERAND (arg0, 1),
8212 const_binop (MINUS_EXPR, arg2,
8213 integer_one_node, 0), 1))
8214 return pedantic_non_lvalue
8215 (fold (build (MIN_EXPR, type, arg1, arg2)));
8219 /* If C1 is C2 - 1, this is max(A, C2). */
8220 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8221 && operand_equal_p (TREE_OPERAND (arg0, 1),
8222 const_binop (MINUS_EXPR, arg2,
8223 integer_one_node, 0), 1))
8224 return pedantic_non_lvalue
8225 (fold (build (MAX_EXPR, type, arg1, arg2)));
8229 /* If C1 is C2 + 1, this is max(A, C2). */
8230 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8231 && operand_equal_p (TREE_OPERAND (arg0, 1),
8232 const_binop (PLUS_EXPR, arg2,
8233 integer_one_node, 0), 1))
8234 return pedantic_non_lvalue
8235 (fold (build (MAX_EXPR, type, arg1, arg2)));
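	      /* Worked example (hypothetical int a, not part of the original
		 source):

		   a < 4 ? a : 3   becomes   MIN_EXPR (a, 3)   (C1 == C2 + 1)
		   a > 3 ? a : 4   becomes   MAX_EXPR (a, 4)   (C1 == C2 - 1)

		 recovering a MIN/MAX that earlier folding had rewritten into
		 a comparison against an adjacent constant.  */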
8244 /* If the second operand is simpler than the third, swap them
8245 since that produces better jump optimization results. */
8246 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8247 TREE_OPERAND (t, 2), false))
8249 /* See if this can be inverted. If it can't, possibly because
8250 it was a floating-point inequality comparison, don't do anything.  */
8252 tem = invert_truthvalue (arg0);
8254 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8255 return fold (build (code, type, tem,
8256 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8259 /* Convert A ? 1 : 0 to simply A. */
8260 if (integer_onep (TREE_OPERAND (t, 1))
8261 && integer_zerop (TREE_OPERAND (t, 2))
8262 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8263 call to fold will try to move the conversion inside
8264 a COND, which will recurse. In that case, the COND_EXPR
8265 is probably the best choice, so leave it alone. */
8266 && type == TREE_TYPE (arg0))
8267 return pedantic_non_lvalue (arg0);
8269 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8270 over COND_EXPR in cases such as floating point comparisons. */
8271 if (integer_zerop (TREE_OPERAND (t, 1))
8272 && integer_onep (TREE_OPERAND (t, 2))
8273 && truth_value_p (TREE_CODE (arg0)))
8274 return pedantic_non_lvalue (fold_convert (type,
8275 invert_truthvalue (arg0)));
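      /* Illustrative sketch (hypothetical truth value a < b, not part of the
	 original source):

	   (a < b) ? 1 : 0   simplifies to   a < b
	   (a < b) ? 0 : 1   simplifies to   !(a < b)

	 the second form being preferred over keeping the COND_EXPR, e.g.
	 for floating-point comparisons.  */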
8277 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8278 operation is simply A & 2. */
8280 if (integer_zerop (TREE_OPERAND (t, 2))
8281 && TREE_CODE (arg0) == NE_EXPR
8282 && integer_zerop (TREE_OPERAND (arg0, 1))
8283 && integer_pow2p (arg1)
8284 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8285 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8287 return pedantic_non_lvalue (fold_convert (type,
8288 TREE_OPERAND (arg0, 0)));
8290 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8291 if (integer_zerop (TREE_OPERAND (t, 2))
8292 && truth_value_p (TREE_CODE (arg0))
8293 && truth_value_p (TREE_CODE (arg1)))
8294 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8297 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8298 if (integer_onep (TREE_OPERAND (t, 2))
8299 && truth_value_p (TREE_CODE (arg0))
8300 && truth_value_p (TREE_CODE (arg1)))
8302 /* Only perform transformation if ARG0 is easily inverted. */
8303 tem = invert_truthvalue (arg0);
8304 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8305 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8312 /* When pedantic, a compound expression can be neither an lvalue
8313 nor an integer constant expression. */
8314 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8316 /* Don't let (0, 0) be null pointer constant. */
8317 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8318 : fold_convert (type, arg1);
8319 return pedantic_non_lvalue (tem);
8323 return build_complex (type, arg0, arg1);
8327 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8329 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8330 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8331 TREE_OPERAND (arg0, 1));
8332 else if (TREE_CODE (arg0) == COMPLEX_CST)
8333 return TREE_REALPART (arg0);
8334 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8335 return fold (build (TREE_CODE (arg0), type,
8336 fold (build1 (REALPART_EXPR, type,
8337 TREE_OPERAND (arg0, 0))),
8338 fold (build1 (REALPART_EXPR,
8339 type, TREE_OPERAND (arg0, 1)))));
8343 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8344 return fold_convert (type, integer_zero_node);
8345 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8346 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8347 TREE_OPERAND (arg0, 0));
8348 else if (TREE_CODE (arg0) == COMPLEX_CST)
8349 return TREE_IMAGPART (arg0);
8350 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8351 return fold (build (TREE_CODE (arg0), type,
8352 fold (build1 (IMAGPART_EXPR, type,
8353 TREE_OPERAND (arg0, 0))),
8354 fold (build1 (IMAGPART_EXPR, type,
8355 TREE_OPERAND (arg0, 1)))));
8358 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where appropriate.  */
8360 case CLEANUP_POINT_EXPR:
8361 if (! has_cleanups (arg0))
8362 return TREE_OPERAND (t, 0);
8365 enum tree_code code0 = TREE_CODE (arg0);
8366 int kind0 = TREE_CODE_CLASS (code0);
8367 tree arg00 = TREE_OPERAND (arg0, 0);
8370 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8371 return fold (build1 (code0, type,
8372 fold (build1 (CLEANUP_POINT_EXPR,
8373 TREE_TYPE (arg00), arg00))));
8375 if (kind0 == '<' || kind0 == '2'
8376 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8377 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8378 || code0 == TRUTH_XOR_EXPR)
8380 arg01 = TREE_OPERAND (arg0, 1);
8382 if (TREE_CONSTANT (arg00)
8383 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8384 && ! has_cleanups (arg00)))
8385 return fold (build (code0, type, arg00,
8386 fold (build1 (CLEANUP_POINT_EXPR,
8387 TREE_TYPE (arg01), arg01))));
8389 if (TREE_CONSTANT (arg01))
8390 return fold (build (code0, type,
8391 fold (build1 (CLEANUP_POINT_EXPR,
8392 TREE_TYPE (arg00), arg00)),
8400 /* Check for a built-in function. */
8401 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8402 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8404 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8406 tree tmp = fold_builtin (expr);
8414 } /* switch (code) */
8417 #ifdef ENABLE_FOLD_CHECKING
8420 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8421 static void fold_check_failed (tree, tree);
8422 void print_fold_checksum (tree);
8424 /* When --enable-checking=fold, compute a digest of expr before
8425 and after the actual fold call to verify that fold did not accidentally
8426 change the original expr.  */
8433 unsigned char checksum_before[16], checksum_after[16];
8436 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8437 md5_init_ctx (&ctx);
8438 fold_checksum_tree (expr, &ctx, ht);
8439 md5_finish_ctx (&ctx, checksum_before);
8442 ret = fold_1 (expr);
8444 md5_init_ctx (&ctx);
8445 fold_checksum_tree (expr, &ctx, ht);
8446 md5_finish_ctx (&ctx, checksum_after);
8449 if (memcmp (checksum_before, checksum_after, 16))
8450 fold_check_failed (expr, ret);
8456 print_fold_checksum (tree expr)
8459 unsigned char checksum[16], cnt;
8462 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8463 md5_init_ctx (&ctx);
8464 fold_checksum_tree (expr, &ctx, ht);
8465 md5_finish_ctx (&ctx, checksum);
8467 for (cnt = 0; cnt < 16; ++cnt)
8468 fprintf (stderr, "%02x", checksum[cnt]);
8469 putc ('\n', stderr);
8473 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8475 internal_error ("fold check: original tree changed by fold");
8479 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8482 enum tree_code code;
8483 char buf[sizeof (struct tree_decl)];
8486 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8487 > sizeof (struct tree_decl)
8488 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8492 slot = htab_find_slot (ht, expr, INSERT);
8496 code = TREE_CODE (expr);
8497 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8499 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8500 memcpy (buf, expr, tree_size (expr));
8502 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8504 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8506 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8507 memcpy (buf, expr, tree_size (expr));
8509 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8511 else if (TREE_CODE_CLASS (code) == 't'
8512 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8514 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8515 memcpy (buf, expr, tree_size (expr));
8517 TYPE_POINTER_TO (expr) = NULL;
8518 TYPE_REFERENCE_TO (expr) = NULL;
8520 md5_process_bytes (expr, tree_size (expr), ctx);
8521 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8522 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8523 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8524 len = TREE_CODE_LENGTH (code);
8525 switch (TREE_CODE_CLASS (code))
8531 md5_process_bytes (TREE_STRING_POINTER (expr),
8532 TREE_STRING_LENGTH (expr), ctx);
8535 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8536 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8539 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8549 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8550 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8553 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8554 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8563 case SAVE_EXPR: len = 2; break;
8564 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8565 case RTL_EXPR: len = 0; break;
8566 case WITH_CLEANUP_EXPR: len = 2; break;
8575 for (i = 0; i < len; ++i)
8576 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8579 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8580 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8581 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8582 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8583 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8584 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8585 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8586 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8587 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8588 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8589 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8592 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8593 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8594 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8595 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8596 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8597 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8598 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8599 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8600 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8601 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8610 /* Perform constant folding and related simplification of initializer
8611 expression EXPR. This behaves identically to "fold" but ignores
8612 potential run-time traps and exceptions that fold must preserve. */
8615 fold_initializer (tree expr)
8617 int saved_signaling_nans = flag_signaling_nans;
8618 int saved_trapping_math = flag_trapping_math;
8619 int saved_trapv = flag_trapv;
8622 flag_signaling_nans = 0;
8623 flag_trapping_math = 0;
8626 result = fold (expr);
8628 flag_signaling_nans = saved_signaling_nans;
8629 flag_trapping_math = saved_trapping_math;
8630 flag_trapv = saved_trapv;
8635 /* Determine if first argument is a multiple of second argument. Return 0 if
8636 it is not, or we cannot easily determine it to be.
8638 An example of the sort of thing we care about (at this point; this routine
8639 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8640 fold cases do now) is discovering that
8642 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8648 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8650 This code also handles discovering that
8652 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8654 is a multiple of 8 so we don't have to worry about dealing with a
8657 Note that we *look* inside a SAVE_EXPR only to determine how it was
8658 calculated; it is not safe for fold to do much of anything else with the
8659 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8660 at run time. For example, the latter example above *cannot* be implemented
8661 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8662 evaluation time of the original SAVE_EXPR is not necessarily the same at
8663 the time the new expression is evaluated. The only optimization of this
8664 sort that would be valid is changing
8666 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8670 SAVE_EXPR (I) * SAVE_EXPR (J)
8672 (where the same SAVE_EXPR (J) is used in the original and the
8673 transformed version). */
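/* For instance (added illustration with hypothetical index expressions, not
   part of the original source): multiple_of_p can conclude that

     SAVE_EXPR (i) * SAVE_EXPR (j * 8)

   is a multiple of 8, since the MULT_EXPR case below only needs one factor
   to be a multiple of BOTTOM, and the constant 8 inside the second
   SAVE_EXPR provides it.  */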
8676 multiple_of_p (tree type, tree top, tree bottom)
8678 if (operand_equal_p (top, bottom, 0))
8681 if (TREE_CODE (type) != INTEGER_TYPE)
8684 switch (TREE_CODE (top))
8687 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8688 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8692 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8693 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8696 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8700 op1 = TREE_OPERAND (top, 1);
8701 /* const_binop may not detect overflow correctly,
8702 so check for it explicitly here. */
8703 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8704 > TREE_INT_CST_LOW (op1)
8705 && TREE_INT_CST_HIGH (op1) == 0
8706 && 0 != (t1 = fold_convert (type,
8707 const_binop (LSHIFT_EXPR,
8710 && ! TREE_OVERFLOW (t1))
8711 return multiple_of_p (type, t1, bottom);
8716 /* Can't handle conversions from non-integral or wider integral type. */
8717 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8718 || (TYPE_PRECISION (type)
8719 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8722 /* .. fall through ... */
8725 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8728 if (TREE_CODE (bottom) != INTEGER_CST
8729 || (TREE_UNSIGNED (type)
8730 && (tree_int_cst_sgn (top) < 0
8731 || tree_int_cst_sgn (bottom) < 0)))
8733 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8741 /* Return true if `t' is known to be non-negative. */
8744 tree_expr_nonnegative_p (tree t)
8746 switch (TREE_CODE (t))
8752 return tree_int_cst_sgn (t) >= 0;
8755 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8758 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8759 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8760 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8762 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8763 both unsigned and at least 2 bits shorter than the result. */
8764 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8765 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8766 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8768 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8769 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8770 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8771 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8773 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8774 TYPE_PRECISION (inner2)) + 1;
8775 return prec < TYPE_PRECISION (TREE_TYPE (t));
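      /* Illustrative sketch (hypothetical unsigned char a, b, not part of
	 the original source): the sum

	   (int) a + (int) b

	 is at most 255 + 255 and so needs only 9 bits; with prec = 9 < 32
	 the test above reports it non-negative even though int is signed.  */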
8781 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8783 /* x * x for floating point x is always non-negative. */
8784 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8786 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8787 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8790 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8791 both unsigned and their total bits is shorter than the result. */
8792 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8793 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8794 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8796 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8797 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8798 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8799 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8800 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8801 < TYPE_PRECISION (TREE_TYPE (t));
8805 case TRUNC_DIV_EXPR:
8807 case FLOOR_DIV_EXPR:
8808 case ROUND_DIV_EXPR:
8809 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8810 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8812 case TRUNC_MOD_EXPR:
8814 case FLOOR_MOD_EXPR:
8815 case ROUND_MOD_EXPR:
8816 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8819 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8820 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8824 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8825 tree outer_type = TREE_TYPE (t);
8827 if (TREE_CODE (outer_type) == REAL_TYPE)
8829 if (TREE_CODE (inner_type) == REAL_TYPE)
8830 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8831 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8833 if (TREE_UNSIGNED (inner_type))
8835 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8838 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8840 if (TREE_CODE (inner_type) == REAL_TYPE)
8841 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8842 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8843 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8844 && TREE_UNSIGNED (inner_type);
8850 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8851 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8853 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8855 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8856 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8858 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8859 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8861 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8865 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8866 case NON_LVALUE_EXPR:
8867 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8869 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8871 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8875 tree fndecl = get_callee_fndecl (t);
8876 tree arglist = TREE_OPERAND (t, 1);
8878 && DECL_BUILT_IN (fndecl)
8879 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8880 switch (DECL_FUNCTION_CODE (fndecl))
8882 #define CASE_BUILTIN_F(BUILT_IN_FN) \
8883 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
8884 #define CASE_BUILTIN_I(BUILT_IN_FN) \
8885 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
8887 CASE_BUILTIN_F (BUILT_IN_ACOS)
8888 CASE_BUILTIN_F (BUILT_IN_ACOSH)
8889 CASE_BUILTIN_F (BUILT_IN_CABS)
8890 CASE_BUILTIN_F (BUILT_IN_COSH)
8891 CASE_BUILTIN_F (BUILT_IN_ERFC)
8892 CASE_BUILTIN_F (BUILT_IN_EXP)
8893 CASE_BUILTIN_F (BUILT_IN_EXP10)
8894 CASE_BUILTIN_F (BUILT_IN_EXP2)
8895 CASE_BUILTIN_F (BUILT_IN_FABS)
8896 CASE_BUILTIN_F (BUILT_IN_FDIM)
8897 CASE_BUILTIN_F (BUILT_IN_FREXP)
8898 CASE_BUILTIN_F (BUILT_IN_HYPOT)
8899 CASE_BUILTIN_F (BUILT_IN_POW10)
8900 CASE_BUILTIN_F (BUILT_IN_SQRT)
8901 CASE_BUILTIN_I (BUILT_IN_FFS)
8902 CASE_BUILTIN_I (BUILT_IN_PARITY)
8903 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
8907 CASE_BUILTIN_F (BUILT_IN_ASINH)
8908 CASE_BUILTIN_F (BUILT_IN_ATAN)
8909 CASE_BUILTIN_F (BUILT_IN_ATANH)
8910 CASE_BUILTIN_F (BUILT_IN_CBRT)
8911 CASE_BUILTIN_F (BUILT_IN_CEIL)
8912 CASE_BUILTIN_F (BUILT_IN_ERF)
8913 CASE_BUILTIN_F (BUILT_IN_EXPM1)
8914 CASE_BUILTIN_F (BUILT_IN_FLOOR)
8915 CASE_BUILTIN_F (BUILT_IN_FMOD)
8916 CASE_BUILTIN_F (BUILT_IN_LDEXP)
8917 CASE_BUILTIN_F (BUILT_IN_LLRINT)
8918 CASE_BUILTIN_F (BUILT_IN_LLROUND)
8919 CASE_BUILTIN_F (BUILT_IN_LRINT)
8920 CASE_BUILTIN_F (BUILT_IN_LROUND)
8921 CASE_BUILTIN_F (BUILT_IN_MODF)
8922 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
8923 CASE_BUILTIN_F (BUILT_IN_POW)
8924 CASE_BUILTIN_F (BUILT_IN_RINT)
8925 CASE_BUILTIN_F (BUILT_IN_ROUND)
8926 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
8927 CASE_BUILTIN_F (BUILT_IN_SINH)
8928 CASE_BUILTIN_F (BUILT_IN_TANH)
8929 CASE_BUILTIN_F (BUILT_IN_TRUNC)
8930 /* True if the 1st argument is nonnegative. */
8931 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8933 CASE_BUILTIN_F(BUILT_IN_FMAX)
8934 /* True if the 1st OR 2nd arguments are nonnegative. */
8935 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8936 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8938 CASE_BUILTIN_F(BUILT_IN_FMIN)
8939 /* True if the 1st AND 2nd arguments are nonnegative. */
8940 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8941 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8943 CASE_BUILTIN_F(BUILT_IN_COPYSIGN)
8944 /* True if the 2nd argument is nonnegative. */
8945 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8949 #undef CASE_BUILTIN_F
8950 #undef CASE_BUILTIN_I
8954 /* ... fall through ... */
8957 if (truth_value_p (TREE_CODE (t)))
8958 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8962 /* We don't know the sign of `t', so be conservative and return false.  */
8966 /* Return true if `r' is known to be non-negative.
8967 Only handles constants at the moment. */
8970 rtl_expr_nonnegative_p (rtx r)
8972 switch (GET_CODE (r))
8975 return INTVAL (r) >= 0;
8978 if (GET_MODE (r) == VOIDmode)
8979 return CONST_DOUBLE_HIGH (r) >= 0;
8987 units = CONST_VECTOR_NUNITS (r);
8989 for (i = 0; i < units; ++i)
8991 elt = CONST_VECTOR_ELT (r, i);
8992 if (!rtl_expr_nonnegative_p (elt))
9001 /* These are always nonnegative. */
9009 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9010 an integer constant or real constant.
9012 TYPE is the type of the result. */
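/* For example (added illustration, not part of the original source):
   negating the INTEGER_CST INT_MIN in a signed 32-bit type wraps back to
   INT_MIN, so neg_double reports overflow and fold_negate_const sets
   TREE_OVERFLOW on the result; negating a REAL_CST simply flips the sign of
   its REAL_VALUE_TYPE.  */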
9015 fold_negate_const (tree arg0, tree type)
9019 if (TREE_CODE (arg0) == INTEGER_CST)
9021 unsigned HOST_WIDE_INT low;
9023 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9024 TREE_INT_CST_HIGH (arg0),
9026 t = build_int_2 (low, high);
9027 TREE_TYPE (t) = type;
9029 = (TREE_OVERFLOW (arg0)
9030 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
9031 TREE_CONSTANT_OVERFLOW (t)
9032 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9034 else if (TREE_CODE (arg0) == REAL_CST)
9035 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9036 #ifdef ENABLE_CHECKING
9044 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9045 an integer constant or real constant.
9047 TYPE is the type of the result. */
9050 fold_abs_const (tree arg0, tree type)
9054 if (TREE_CODE (arg0) == INTEGER_CST)
9056 /* If the value is unsigned, then the absolute value is
9057 the same as the ordinary value. */
9058 if (TREE_UNSIGNED (type))
9060 /* Similarly, if the value is non-negative. */
9061 else if (INT_CST_LT (integer_minus_one_node, arg0))
9063 /* If the value is negative, then the absolute value is its negation.  */
9067 unsigned HOST_WIDE_INT low;
9069 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9070 TREE_INT_CST_HIGH (arg0),
9072 t = build_int_2 (low, high);
9073 TREE_TYPE (t) = type;
9075 = (TREE_OVERFLOW (arg0)
9076 | force_fit_type (t, overflow));
9077 TREE_CONSTANT_OVERFLOW (t)
9078 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9082 else if (TREE_CODE (arg0) == REAL_CST)
9084 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9085 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9089 #ifdef ENABLE_CHECKING
9097 #include "gt-fold-const.h"