1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
141 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
142 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
143 and SUM1. Then this yields nonzero if overflow occurred during the
146 Overflow occurs if A and B have the same sign, but A and SUM differ in
147 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
149 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
151 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
152 We do that by representing the two-word integer in 4 words, with only
153 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
154 number. The value of the word is LOWPART + HIGHPART * BASE. */
157 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
158 #define HIGHPART(x) \
159 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
160 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

/* NOTE(review): several lines of this function are missing from this
   copy (braces, the declarations of HIGH and PREC, early returns, and
   some assignments).  The code below is reproduced as-is; confirm
   against a pristine fold-const.c before editing.  */
force_fit_type (tree t, int overflow)
  unsigned HOST_WIDE_INT low;

  if (TREE_CODE (t) == REAL_CST)
    /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
       Consider doing it via real_convert now. */
  else if (TREE_CODE (t) != INTEGER_CST)

  /* Remember the original value so the overflow test at the end can
     compare the masked value against it.  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)

  prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision. */
  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));

    TREE_INT_CST_HIGH (t) = 0;
    if (prec < HOST_BITS_PER_WIDE_INT)
      TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);

  /* Unsigned types do not suffer sign extension or overflow unless they */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))

  /* If the value's sign bit is set, extend the sign. */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))

      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision. */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));

	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);

  /* Return nonzero if signed overflow occurred.  Any bit that changed
     relative to the saved LOW/HIGH, or a prior OVERFLOW, counts.  */
  ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
269 /* Add two doubleword integers with doubleword result.
270 Each argument is given as two `HOST_WIDE_INT' pieces.
271 One argument is L1 and H1; the other, L2 and H2.
272 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
275 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
276 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
277 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
279 unsigned HOST_WIDE_INT l;
283 h = h1 + h2 + (l < l1);
287 return OVERFLOW_SUM_SIGN (h1, h2, h);
290 /* Negate a doubleword integer with doubleword result.
291 Return nonzero if the operation overflows, assuming it's signed.
292 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
293 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
296 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
303 return (*hv & h1) < 0;
313 /* Multiply two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows, assuming it's signed.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): lines are missing from this copy (the `int' return
   type, braces, the declarations of I, J and K, the inner carry
   setup, and the `if (h2 != h1sign)' style sign tests around the
   overflow correction).  Reproduced as-is.  */
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  /* Eight half-word digits: the full 4x4 product.  */
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into four half-word digits (see encode).  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Schoolbook multiplication over the half-word digits.  */
  for (i = 0; i < 4; i++)
      for (j = 0; j < 4; j++)
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit. */
  decode (prod + 4, &toplow, &tophigh);
      /* Correct the unsigned top half for each negative input
	 (subtract the other operand), then test that the result is
	 just the sign-extension of the low half.  */
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
371 /* Shift the doubleword integer in L1, H1 left by COUNT places
372 keeping only PREC bits of result.
373 Shift right if COUNT is negative.
374 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
375 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): lines are missing from this copy (the `void' return
   type, braces, the `if (count < 0)' guard before the rshift_double
   delegation, the count-truncation statement, the *lv assignments in
   each shift branch, and the early `return').  Reproduced as-is.  */
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
  unsigned HOST_WIDE_INT signmask;

  /* A negative COUNT delegates to a right shift.  */
      rshift_double (l1, h1, -count, prec, lv, hv, arith);

  if (SHIFT_COUNT_TRUNCATED)

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case. */
  else if (count >= HOST_BITS_PER_WIDE_INT)
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      /* The double right shift below avoids shifting by exactly the
	 host word width (undefined) when COUNT is zero.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));

  /* Sign extend all bits that are beyond the precision. */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec >= HOST_BITS_PER_WIDE_INT)
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
434 /* Shift the doubleword integer in L1, H1 right by COUNT places
435 keeping only PREC bits of result. COUNT must be positive.
436 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
437 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): lines are missing from this copy (the `void' return
   type, braces, the ARITH parameter's final line, the count-truncation
   statement, the *lv/*hv assignments in the extreme-shift branches,
   and the signmask stores in the extension code).  Reproduced as-is.  */
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  unsigned HOST_WIDE_INT signmask;

  /* For an arithmetic shift the fill is the sign of H1; for a logical
     shift it is zero (the visible expression below is the ARITH arm).  */
	  ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))

  if (SHIFT_COUNT_TRUNCATED)

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case. */
  else if (count >= HOST_BITS_PER_WIDE_INT)
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      /* The double left shift avoids shifting by exactly the host word
	 width (undefined) when COUNT is zero.  */
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));

  /* Zero / sign extend all bits that are beyond the precision. */

  if (count >= (HOST_WIDE_INT)prec)
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
495 /* Rotate the doubleword integer in L1, H1 left by COUNT places
496 keeping only PREC bits of result.
497 Rotate right if COUNT is negative.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
501 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
502 HOST_WIDE_INT count, unsigned int prec,
503 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
505 unsigned HOST_WIDE_INT s1l, s2l;
506 HOST_WIDE_INT s1h, s2h;
512 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
513 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
518 /* Rotate the doubleword integer in L1, H1 left by COUNT places
519 keeping only PREC bits of result. COUNT must be positive.
520 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
523 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
524 HOST_WIDE_INT count, unsigned int prec,
525 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
527 unsigned HOST_WIDE_INT s1l, s2l;
528 HOST_WIDE_INT s1h, s2h;
534 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
535 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
540 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
541 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
542 CODE is a tree code for a kind of division, one of
543 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
545 It controls how the quotient is rounded to an integer.
546 Return nonzero if the operation overflows.
547 UNS nonzero says do unsigned division. */
/* NOTE(review): this copy is missing many lines of this function (the
   `int' return type, braces, the declarations of I/J/OVERFLOW/QUO_NEG
   and *HREM's parameter line, the sign computation, the single-precision
   quotient/remainder statements, several `break's, the rounding `switch'
   header and the final `return overflow;').  Code reproduced as-is;
   this is Knuth's Algorithm D over half-word digits.  */
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
  HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
  HOST_WIDE_INT den[4], quo[4];
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;

  /* Division by zero: flag overflow and divide by 1 instead of trapping.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned. */
	  /* (minimum integer) / (-1) is the only overflow case. */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	  neg_double (lden, hden, &lden, &hden);

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      /* This unsigned division rounds toward zero. */
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked. */

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE. */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
      /* hnum != 0 already checked. */
      for (i = 4 - 1; i >= 0; i--)
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit. */
      for (i = 4 - 1;; i--)

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm. */

      scale = BASE / (den[den_hi_sig] + 1);
	{			/* scale divisor and dividend */
	  for (i = 0; i <= 4 - 1; i++)
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);

	  for (i = 0; i <= 4 - 1; i++)
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;

      /* Main quotient-digit loop, most significant digit first.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high. */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];

	  /* Refine quo_est so it's usually correct, and at most one high. */
	  tmp = work - quo_est * den[den_hi_sig];
		 && (den[den_hi_sig - 1] * quo_est
		     > (tmp * BASE + num[num_hi_sig - 2])))

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit. */

	  for (j = 0; j <= den_hi_sig; j++)
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things. */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);

	      num [num_hi_sig] += carry;

	  /* Store the quotient digit. */

  decode (quo, lquo, hquo);

  /* If result is negative, make it so. */
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder: rem = num - (quo * den) */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */

    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,

    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,

    case ROUND_MOD_EXPR:	/* round to closest integer */
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values. */
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    /* NOTE(review): "<wice" below is HTML-entity damage;
		       the original argument list is
		       "labs_rem, habs_rem, &ltwice, &htwice".  */
		    labs_rem, habs_rem, <wice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,

  /* Compute true remainder: rem = num - (quo * den) */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
818 /* Return true if built-in mathematical function specified by CODE
819 preserves the sign of it argument, i.e. -f(x) == f(-x). */
822 negate_mathfn_p (enum built_in_function code)
846 /* Determine whether an expression T can be cheaply negated using
847 the function negate_expr. */
/* NOTE(review): this copy is missing lines of this function (the `bool'
   return type, braces, the declarations of TYPE and PREC, all the
   `case' labels of the switch, several `break'/`return' statements and
   the final default return).  Reproduced as-is.  */
negate_expr_p (tree t)
  unsigned HOST_WIDE_INT val;

  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      /* Integer constants: negating an unsigned value, or any value
	 when -ftrapv is off, is always cheap.  */
      if (TYPE_UNSIGNED (type) || ! flag_trapv)

      /* Check that -CST will not overflow type. */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
	  if (TREE_INT_CST_LOW (t) != 0)
	  prec -= HOST_BITS_PER_WIDE_INT;
	  val = TREE_INT_CST_HIGH (t);
	val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
	val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      /* The only value whose negation overflows is the type minimum,
	 whose magnitude is exactly 1 << (prec - 1).  */
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
      /* -(A + B) -> (-B) - A. */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
      /* -(A + B) -> (-A) - B. */
      return negate_expr_p (TREE_OPERAND (t, 0));

      /* We can't turn -(A-B) into B-A when we honor signed zeros. */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

      if (TYPE_UNSIGNED (TREE_TYPE (t)))

      /* Division/multiplication: either operand may absorb the negation,
	 provided sign-dependent rounding is not honored.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));

      /* Negate -((double)float) as (double)(-float). */
      if (TREE_CODE (type) == REAL_TYPE)
	  tree tem = strip_float_extensions (t);
	    return negate_expr_p (tem);

      /* Negate -f(x) as f(-x). */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

/* NOTE(review): the signature line of this function
   (`static tree negate_expr (tree t)'), its opening brace, the null
   check, the declarations of TYPE and TEM, the `case' labels and many
   `break' statements are missing from this copy.  Reproduced as-is.  */
  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      /* INTEGER_CST: fold the negation; accept it unless it overflowed
	 a signed type with trapping arithmetic.  */
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)

      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow. */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);

	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);

      /* Double negation cancels.  */
      return fold_convert (type, TREE_OPERAND (t, 0));

      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  /* -(A + B) -> (-B) - A. */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 0)));
	      return fold_convert (type, tem);

	  /* -(A + B) -> (-A) - B. */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 1)));
	      return fold_convert (type, tem);

      /* - (A - B) -> B - A */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (t),
					   TREE_OPERAND (t, 1),
					   TREE_OPERAND (t, 0))));

      if (TYPE_UNSIGNED (TREE_TYPE (t)))

      /* Push the negation into whichever mul/div operand accepts it.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 0),
					       negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 1))));

      /* Convert -((double)float) into (double)(-float). */
      if (TREE_CODE (type) == REAL_TYPE)
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));

      /* Negate -f(x) as f(-x). */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	      /* Flip signedness so the shift becomes logical (or
		 arithmetic), then shift and convert back.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
	      return fold_convert (type, temp);

  /* Fallback: build an explicit NEGATE_EXPR and fold it.  */
  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
1112 /* Split a tree IN into a constant, literal and variable parts that could be
1113 combined with CODE to make IN. "constant" means an expression with
1114 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1115 commutative arithmetic operation. Store the constant part into *CONP,
1116 the literal in *LITP and return the variable part. If a part isn't
1117 present, set it to null. If the tree does not decompose in this way,
1118 return the entire tree as the variable part and the other parts as null.
1120 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1121 case, we negate an operand that was subtracted. Except if it is a
1122 literal for which we use *MINUS_LITP instead.
1124 If NEGATE_P is true, we are negating all of IN, again except a literal
1125 for which we use *MINUS_LITP instead.
1127 If IN is itself a literal or constant, return it as appropriate.
1129 Note that we do not guarantee that any of the three values will be the
1130 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): this copy is missing lines of this function (the
   `static tree' line's position is intact but the opening brace, the
   declaration of VAR, the initial *CONP/*LITP/*MINUS_LITP clearing,
   several branch bodies and the final `return var;' are absent).
   Reproduced as-is.  */
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)

  /* Strip any conversions that don't change the machine mode or signedness. */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* neg1_p: the second operand is subtracted, so it carries an
	 implicit negation.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant. */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any. */
      if (op0 != 0 && op1 != 0)
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations. */
	  *minus_litp = *litp, *litp = 0;
	  *conp = negate_expr (*conp);
	  var = negate_expr (var);
  else if (TREE_CONSTANT (in))

      /* Negating the whole of IN: flip which literal slot is used and
	 negate the constant and variable parts.  */
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
1207 /* Re-associate trees split by the above function. T1 and T2 are either
1208 expressions to associate or null. Return the new expression, if any. If
1209 we build an operation, do it in TYPE and with CODE. */
1212 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1219 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1220 try to fold this since we will have infinite recursion. But do
1221 deal with any NEGATE_EXPRs. */
1222 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1223 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1225 if (code == PLUS_EXPR)
1227 if (TREE_CODE (t1) == NEGATE_EXPR)
1228 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1229 fold_convert (type, TREE_OPERAND (t1, 0)));
1230 else if (TREE_CODE (t2) == NEGATE_EXPR)
1231 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1232 fold_convert (type, TREE_OPERAND (t2, 0)));
1234 return build2 (code, type, fold_convert (type, t1),
1235 fold_convert (type, t2));
1238 return fold (build2 (code, type, fold_convert (type, t1),
1239 fold_convert (type, t2)));
1242 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1243 to produce a new constant.
1245 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1248 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Each INTEGER_CST is a double-word value: a low and a high
   HOST_WIDE_INT (see TREE_INT_CST_LOW/HIGH below). */
1250 unsigned HOST_WIDE_INT int1l, int2l;
1251 HOST_WIDE_INT int1h, int2h;
1252 unsigned HOST_WIDE_INT low;
1254 unsigned HOST_WIDE_INT garbagel;
1255 HOST_WIDE_INT garbageh;
1257 tree type = TREE_TYPE (arg1);
1258 int uns = TYPE_UNSIGNED (type);
1260 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1262 int no_overflow = 0;
1264 int1l = TREE_INT_CST_LOW (arg1);
1265 int1h = TREE_INT_CST_HIGH (arg1);
1266 int2l = TREE_INT_CST_LOW (arg2);
1267 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops operate on each word independently; no overflow possible. */
1272 low = int1l | int2l, hi = int1h | int2h;
1276 low = int1l ^ int2l, hi = int1h ^ int2h;
1280 low = int1l & int2l, hi = int1h & int2h;
1286 /* It's unclear from the C standard whether shifts can overflow.
1287 The following code ignores overflow; perhaps a C standard
1288 interpretation ruling is needed. */
1289 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1297 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1302 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation. */
1306 neg_double (int2l, int2h, &low, &hi);
1307 add_double (int1l, int1h, low, hi, &low, &hi);
1308 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1312 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1315 case TRUNC_DIV_EXPR:
1316 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1317 case EXACT_DIV_EXPR:
1318 /* This is a shortcut for a common special case. */
1319 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1320 && ! TREE_CONSTANT_OVERFLOW (arg1)
1321 && ! TREE_CONSTANT_OVERFLOW (arg2)
1322 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1324 if (code == CEIL_DIV_EXPR)
/* Single-word nonnegative operands: plain C division suffices. */
1327 low = int1l / int2l, hi = 0;
1331 /* ... fall through ... */
1333 case ROUND_DIV_EXPR:
1334 if (int2h == 0 && int2l == 1)
1336 low = int1l, hi = int1h;
1339 if (int1l == int2l && int1h == int2h
1340 && ! (int1l == 0 && int1h == 0))
1345 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1346 &low, &hi, &garbagel, &garbageh);
1349 case TRUNC_MOD_EXPR:
1350 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1351 /* This is a shortcut for a common special case. */
1352 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1353 && ! TREE_CONSTANT_OVERFLOW (arg1)
1354 && ! TREE_CONSTANT_OVERFLOW (arg2)
1355 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1357 if (code == CEIL_MOD_EXPR)
1359 low = int1l % int2l, hi = 0;
1363 /* ... fall through ... */
1365 case ROUND_MOD_EXPR:
/* For MOD the quotient is the "garbage" output and the remainder is
   the wanted one -- note the swapped output pointers vs. DIV above. */
1366 overflow = div_and_round_double (code, uns,
1367 int1l, int1h, int2l, int2h,
1368 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare the double-word values, unsigned or signed per UNS,
   high word first, then pick the appropriate operand. */
1374 low = (((unsigned HOST_WIDE_INT) int1h
1375 < (unsigned HOST_WIDE_INT) int2h)
1376 || (((unsigned HOST_WIDE_INT) int1h
1377 == (unsigned HOST_WIDE_INT) int2h)
1380 low = (int1h < int2h
1381 || (int1h == int2h && int1l < int2l));
1383 if (low == (code == MIN_EXPR))
1384 low = int1l, hi = int1h;
1386 low = int2l, hi = int2h;
1393 /* If this is for a sizetype, can be represented as one (signed)
1394 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1397 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1398 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1399 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1400 return size_int_type_wide (low, type);
1403 t = build_int_2 (low, hi);
1404 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Overflow bookkeeping: propagate overflow from the operation itself
   (sign-sensitive, and always counted for sizetypes) and from the
   operands; force_fit_type may also report truncation. */
1409 ? (!uns || is_sizetype) && overflow
1410 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1412 | TREE_OVERFLOW (arg1)
1413 | TREE_OVERFLOW (arg2));
1415 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1416 So check if force_fit_type truncated the value. */
1418 && ! TREE_OVERFLOW (t)
1419 && (TREE_INT_CST_HIGH (t) != hi
1420 || TREE_INT_CST_LOW (t) != low))
1421 TREE_OVERFLOW (t) = 1;
1423 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1424 | TREE_CONSTANT_OVERFLOW (arg1)
1425 | TREE_CONSTANT_OVERFLOW (arg2));
1429 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1430 constant. We assume ARG1 and ARG2 have the same data type, or at least
1431 are the same kind of constant and the same machine mode.
1433 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1436 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants are delegated entirely to int_const_binop. */
1441 if (TREE_CODE (arg1) == INTEGER_CST)
1442 return int_const_binop (code, arg1, arg2, notrunc);
1444 if (TREE_CODE (arg1) == REAL_CST)
1446 enum machine_mode mode;
1449 REAL_VALUE_TYPE value;
1452 d1 = TREE_REAL_CST (arg1);
1453 d2 = TREE_REAL_CST (arg2);
1455 type = TREE_TYPE (arg1);
1456 mode = TYPE_MODE (type);
1458 /* Don't perform operation if we honor signaling NaNs and
1459 either operand is a NaN. */
1460 if (HONOR_SNANS (mode)
1461 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1464 /* Don't perform operation if it would raise a division
1465 by zero exception. */
1466 if (code == RDIV_EXPR
1467 && REAL_VALUES_EQUAL (d2, dconst0)
1468 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1471 /* If either operand is a NaN, just return it. Otherwise, set up
1472 for floating-point trap; we return an overflow. */
1473 if (REAL_VALUE_ISNAN (d1))
1475 else if (REAL_VALUE_ISNAN (d2))
1478 REAL_ARITHMETIC (value, code, d1, d2);
/* Round the soft-float result to the target mode's precision. */
1480 t = build_real (type, real_value_truncate (mode, value));
1483 = (force_fit_type (t, 0)
1484 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1485 TREE_CONSTANT_OVERFLOW (t)
1487 | TREE_CONSTANT_OVERFLOW (arg1)
1488 | TREE_CONSTANT_OVERFLOW (arg2);
1491 if (TREE_CODE (arg1) == COMPLEX_CST)
1493 tree type = TREE_TYPE (arg1);
1494 tree r1 = TREE_REALPART (arg1);
1495 tree i1 = TREE_IMAGPART (arg1);
1496 tree r2 = TREE_REALPART (arg2);
1497 tree i2 = TREE_IMAGPART (arg2);
/* Complex add/subtract: component-wise. */
1503 t = build_complex (type,
1504 const_binop (PLUS_EXPR, r1, r2, notrunc),
1505 const_binop (PLUS_EXPR, i1, i2, notrunc));
1509 t = build_complex (type,
1510 const_binop (MINUS_EXPR, r1, r2, notrunc),
1511 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1 + i1*I)*(r2 + i2*I)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*I. */
1515 t = build_complex (type,
1516 const_binop (MINUS_EXPR,
1517 const_binop (MULT_EXPR,
1519 const_binop (MULT_EXPR,
1522 const_binop (PLUS_EXPR,
1523 const_binop (MULT_EXPR,
1525 const_binop (MULT_EXPR,
/* Complex divide by the textbook formula: multiply numerator and
   denominator by the conjugate of the divisor; MAGSQUARED below is
   r2*r2 + i2*i2, the squared magnitude of the divisor. */
1533 = const_binop (PLUS_EXPR,
1534 const_binop (MULT_EXPR, r2, r2, notrunc),
1535 const_binop (MULT_EXPR, i2, i2, notrunc),
1538 t = build_complex (type,
1540 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1541 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1542 const_binop (PLUS_EXPR,
1543 const_binop (MULT_EXPR, r1, r2,
1545 const_binop (MULT_EXPR, i1, i2,
1548 magsquared, notrunc),
1550 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1551 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1552 const_binop (MINUS_EXPR,
1553 const_binop (MULT_EXPR, i1, r2,
1555 const_binop (MULT_EXPR, r1, i2,
1558 magsquared, notrunc));
1570 /* These are the hash table functions for the hash table of INTEGER_CST
1571 nodes of a sizetype. */
1573 /* Return the hash code for X, an INTEGER_CST tree node. */
1576 size_htab_hash (const void *x)
/* Mix value, type identity, and overflow flag so that constants that
   differ in any of these hash differently. */
1580 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1581 ^ htab_hash_pointer (TREE_TYPE (t))
1582 ^ (TREE_OVERFLOW (t) << 20));
1585 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1586 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1589 size_htab_eq (const void *x, const void *y)
/* Must compare exactly the fields that size_htab_hash mixes in. */
1594 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1595 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1596 && TREE_TYPE (xt) == TREE_TYPE (yt)
1597 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1600 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1601 bits are given by NUMBER and of the sizetype represented by KIND. */
/* Thin wrapper: map KIND to the corresponding sizetype and delegate. */
1604 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1606 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1609 /* Likewise, but the desired type is specified explicitly. */
/* NEW_CONST is a scratch INTEGER_CST reused across calls; SIZE_HTAB
   caches sizetype constants so equal values share one tree node.
   Both are GC roots (GTY). */
1611 static GTY (()) tree new_const;
1612 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1616 size_int_type_wide (HOST_WIDE_INT number, tree type)
/* Lazily create the hash table and the first scratch node. */
1622 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1623 new_const = make_node (INTEGER_CST);
1626 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1627 hash table, we return the value from the hash table. Otherwise, we
1628 place that in the hash table and make a new node for the next time. */
1629 TREE_INT_CST_LOW (new_const) = number;
1630 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1631 TREE_TYPE (new_const) = type;
1632 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1633 = force_fit_type (new_const, 0);
1635 slot = htab_find_slot (size_htab, new_const, INSERT);
1641 new_const = make_node (INTEGER_CST);
1645 return (tree) *slot;
1648 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1649 is a tree code. The type of the result is taken from the operands.
1650 Both operands must have the same integer type and it must be a size type.
1651 If the operands are constant, so is the result. */
1654 size_binop (enum tree_code code, tree arg0, tree arg1)
1656 tree type = TREE_TYPE (arg0);
/* Enforce the precondition: identical sizetype on both operands. */
1658 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1659 || type != TREE_TYPE (arg1))
1662 /* Handle the special case of two integer constants faster. */
1663 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1665 /* And some specific cases even faster than that. */
1666 if (code == PLUS_EXPR && integer_zerop (arg0))
1668 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1669 && integer_zerop (arg1))
1671 else if (code == MULT_EXPR && integer_onep (arg0))
1674 /* Handle general case of two integer constants. */
1675 return int_const_binop (code, arg0, arg1, 0);
1678 if (arg0 == error_mark_node || arg1 == error_mark_node)
1679 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify. */
1681 return fold (build2 (code, type, arg0, arg1));
1684 /* Given two values, either both of sizetype or both of bitsizetype,
1685 compute the difference between the two values. Return the value
1686 in signed type corresponding to the type of the operands. */
1689 size_diffop (tree arg0, tree arg1)
1691 tree type = TREE_TYPE (arg0);
1694 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1695 || type != TREE_TYPE (arg1))
1698 /* If the type is already signed, just do the simple thing. */
1699 if (!TYPE_UNSIGNED (type))
1700 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operands' (bit)sizetype. */
1702 ctype = (type == bitsizetype || type == ubitsizetype
1703 ? sbitsizetype : ssizetype);
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract. The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1709 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1710 fold_convert (ctype, arg1));
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either). Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0, arg1))
1717 return fold_convert (ctype, integer_zero_node);
1718 else if (tree_int_cst_lt (arg1, arg0))
1719 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1721 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1722 fold_convert (ctype, size_binop (MINUS_EXPR,
1727 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1728 type TYPE. If no simplification can be done return NULL_TREE. */
1731 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Trivial case: already the requested type. */
1736 if (TREE_TYPE (arg1) == type)
1739 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1741 if (TREE_CODE (arg1) == INTEGER_CST)
1743 /* If we would build a constant wider than GCC supports,
1744 leave the conversion unfolded. */
1745 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1748 /* If we are trying to make a sizetype for a small integer, use
1749 size_int to pick up cached types to reduce duplicate nodes. */
1750 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1751 && !TREE_CONSTANT_OVERFLOW (arg1)
1752 && compare_tree_int (arg1, 10000) < 0)
1753 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1755 /* Given an integer constant, make new constant with new type,
1756 appropriately sign-extended or truncated. */
1757 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1758 TREE_INT_CST_HIGH (arg1));
1759 TREE_TYPE (t) = type;
1760 /* Indicate an overflow if (1) ARG1 already overflowed,
1761 or (2) force_fit_type indicates an overflow.
1762 Tell force_fit_type that an overflow has already occurred
1763 if ARG1 is a too-large unsigned value and T is signed.
1764 But don't indicate an overflow if converting a pointer. */
1766 = ((force_fit_type (t,
1767 (TREE_INT_CST_HIGH (arg1) < 0
1768 && (TYPE_UNSIGNED (type)
1769 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1770 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1771 || TREE_OVERFLOW (arg1));
1772 TREE_CONSTANT_OVERFLOW (t)
1773 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1776 else if (TREE_CODE (arg1) == REAL_CST)
1778 /* The following code implements the floating point to integer
1779 conversion rules required by the Java Language Specification,
1780 that IEEE NaNs are mapped to zero and values that overflow
1781 the target precision saturate, i.e. values greater than
1782 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1783 are mapped to INT_MIN. These semantics are allowed by the
1784 C and C++ standards that simply state that the behavior of
1785 FP-to-integer conversion is unspecified upon overflow. */
1787 HOST_WIDE_INT high, low;
1790 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding direction implied by CODE before converting. */
1794 case FIX_TRUNC_EXPR:
1795 real_trunc (&r, VOIDmode, &x);
1799 real_ceil (&r, VOIDmode, &x);
1802 case FIX_FLOOR_EXPR:
1803 real_floor (&r, VOIDmode, &x);
1806 case FIX_ROUND_EXPR:
1807 real_round (&r, VOIDmode, &x);
1814 /* If R is NaN, return zero and show we have an overflow. */
1815 if (REAL_VALUE_ISNAN (r))
1822 /* See if R is less than the lower bound or greater than the
/* Saturate to TYPE's minimum value when R underflows the range. */
1827 tree lt = TYPE_MIN_VALUE (type);
1828 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1829 if (REAL_VALUES_LESS (r, l))
1832 high = TREE_INT_CST_HIGH (lt);
1833 low = TREE_INT_CST_LOW (lt);
/* Likewise saturate to TYPE's maximum value on overflow. */
1839 tree ut = TYPE_MAX_VALUE (type);
1842 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1843 if (REAL_VALUES_LESS (u, r))
1846 high = TREE_INT_CST_HIGH (ut);
1847 low = TREE_INT_CST_LOW (ut);
1853 REAL_VALUE_TO_INT (&low, &high, r);
1855 t = build_int_2 (low, high);
1856 TREE_TYPE (t) = type;
1858 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1859 TREE_CONSTANT_OVERFLOW (t)
1860 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1864 else if (TREE_CODE (type) == REAL_TYPE)
1866 if (TREE_CODE (arg1) == INTEGER_CST)
1867 return build_real_from_int_cst (type, arg1);
1868 if (TREE_CODE (arg1) == REAL_CST)
1870 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1872 /* We make a copy of ARG1 so that we don't modify an
1873 existing constant tree. */
1874 t = copy_node (arg1);
1875 TREE_TYPE (t) = type;
/* Non-NaN real-to-real: re-round the value to the target mode. */
1879 t = build_real (type,
1880 real_value_truncate (TYPE_MODE (type),
1881 TREE_REAL_CST (arg1)));
1884 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1885 TREE_CONSTANT_OVERFLOW (t)
1886 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1893 /* Convert expression ARG to type TYPE. Used by the middle-end for
1894 simple conversions in preference to calling the front-end's convert. */
1897 fold_convert (tree type, tree arg)
1899 tree orig = TREE_TYPE (arg);
1905 if (TREE_CODE (arg) == ERROR_MARK
1906 || TREE_CODE (type) == ERROR_MARK
1907 || TREE_CODE (orig) == ERROR_MARK)
1908 return error_mark_node;
/* Same (or language-compatible) type: a plain NOP conversion. */
1910 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1911 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1912 TYPE_MAIN_VARIANT (orig)))
1913 return fold (build1 (NOP_EXPR, type, arg));
1915 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1916 || TREE_CODE (type) == OFFSET_TYPE)
1918 if (TREE_CODE (arg) == INTEGER_CST)
1920 tem = fold_convert_const (NOP_EXPR, type, arg);
1921 if (tem != NULL_TREE)
1924 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1925 || TREE_CODE (orig) == OFFSET_TYPE)
1926 return fold (build1 (NOP_EXPR, type, arg));
/* Complex-to-scalar: convert the real part only. */
1927 if (TREE_CODE (orig) == COMPLEX_TYPE)
1929 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1930 return fold_convert (type, tem);
1932 if (TREE_CODE (orig) == VECTOR_TYPE
1933 && GET_MODE_SIZE (TYPE_MODE (type))
1934 == GET_MODE_SIZE (TYPE_MODE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1937 else if (TREE_CODE (type) == REAL_TYPE)
/* Try constant folding first; fall back to building a conversion. */
1939 if (TREE_CODE (arg) == INTEGER_CST)
1941 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1945 else if (TREE_CODE (arg) == REAL_CST)
1947 tem = fold_convert_const (NOP_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1952 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 return fold (build1 (FLOAT_EXPR, type, arg));
1954 if (TREE_CODE (orig) == REAL_TYPE)
1955 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 return fold_convert (type, tem);
1963 else if (TREE_CODE (type) == COMPLEX_TYPE)
/* Scalar-to-complex: the scalar becomes the real part, zero the
   imaginary part. */
1965 if (INTEGRAL_TYPE_P (orig)
1966 || POINTER_TYPE_P (orig)
1967 || TREE_CODE (orig) == REAL_TYPE)
1968 return build2 (COMPLEX_EXPR, type,
1969 fold_convert (TREE_TYPE (type), arg),
1970 fold_convert (TREE_TYPE (type), integer_zero_node));
1971 if (TREE_CODE (orig) == COMPLEX_TYPE)
1975 if (TREE_CODE (arg) == COMPLEX_EXPR)
1977 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1978 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* General complex-to-complex: wrap ARG in a SAVE_EXPR since it is
   referenced twice (real and imaginary part extraction). */
1982 arg = save_expr (arg);
1983 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1984 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1985 rpart = fold_convert (TREE_TYPE (type), rpart);
1986 ipart = fold_convert (TREE_TYPE (type), ipart);
1987 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1990 else if (TREE_CODE (type) == VECTOR_TYPE)
/* Vector conversions require equal mode sizes (bit-for-bit view). */
1992 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1993 && GET_MODE_SIZE (TYPE_MODE (type))
1994 == GET_MODE_SIZE (TYPE_MODE (orig)))
1995 return fold (build1 (NOP_EXPR, type, arg));
1996 if (TREE_CODE (orig) == VECTOR_TYPE
1997 && GET_MODE_SIZE (TYPE_MODE (type))
1998 == GET_MODE_SIZE (TYPE_MODE (orig)))
1999 return fold (build1 (NOP_EXPR, type, arg));
2001 else if (VOID_TYPE_P (type))
2002 return fold (build1 (CONVERT_EXPR, type, arg));
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): the function header line itself is elided in this
   excerpt; from the comment and body this is presumably non_lvalue --
   confirm against the full file. */
2011 /* We only need to wrap lvalue tree codes. */
2012 switch (TREE_CODE (x))
2024 case ARRAY_RANGE_REF:
2031 case PREINCREMENT_EXPR:
2032 case PREDECREMENT_EXPR:
2035 case TRY_CATCH_EXPR:
2036 case WITH_CLEANUP_EXPR:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap in NON_LVALUE_EXPR so the result cannot be assigned to. */
2053 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2059 int pedantic_lvalues;
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2065 pedantic_non_lvalue (tree x)
/* Delegates to non_lvalue only when the pedantic flag is set. */
2067 if (pedantic_lvalues)
2068 return non_lvalue (x);
2073 /* Given a tree comparison code, return the code that is the logical inverse
2074 of the given code. It is not safe to do this for floating-point
2075 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2076 indicating whether NaNs must be honored: if reversing the comparison
is unsafe, return ERROR_MARK. */
2078 static enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math, inverting an ordered comparison into an
   unordered one would change which inputs trap -- refuse. */
2081 if (honor_nans && flag_trapping_math)
/* When NaNs are possible, the inverse of an ordered comparison is the
   corresponding unordered one (e.g. !(a > b) is a UNLE b). */
2091 return honor_nans ? UNLE_EXPR : LE_EXPR;
2093 return honor_nans ? UNLT_EXPR : LT_EXPR;
2095 return honor_nans ? UNGE_EXPR : GE_EXPR;
2097 return honor_nans ? UNGT_EXPR : GT_EXPR;
2111 return UNORDERED_EXPR;
2112 case UNORDERED_EXPR:
2113 return ORDERED_EXPR;
2119 /* Similar, but return the comparison that results if the operands are
2120 swapped. This is safe for floating-point. */
/* (Body elided in this excerpt.) */
2123 swap_tree_comparison (enum tree_code code)
2144 /* Convert a comparison tree code from an enum tree_code representation
2145 into a compcode bit-based encoding. This function is the inverse of
2146 compcode_to_comparison. */
2148 static enum comparison_code
2149 comparison_to_compcode (enum tree_code code)
/* One-to-one mapping; the bit encoding lets combine_comparisons use
   plain & and | on comparison codes. */
2166 return COMPCODE_ORD;
2167 case UNORDERED_EXPR:
2168 return COMPCODE_UNORD;
2170 return COMPCODE_UNLT;
2172 return COMPCODE_UNEQ;
2174 return COMPCODE_UNLE;
2176 return COMPCODE_UNGT;
2178 return COMPCODE_LTGT;
2180 return COMPCODE_UNGE;
2186 /* Convert a compcode bit-based encoding of a comparison operator back
2187 to GCC's enum tree_code representation. This function is the
2188 inverse of comparison_to_compcode. */
2190 static enum tree_code
2191 compcode_to_comparison (enum comparison_code code)
2208 return ORDERED_EXPR;
2209 case COMPCODE_UNORD:
2210 return UNORDERED_EXPR;
2228 /* Return a tree for the comparison which is the combination of
2229 doing the AND or OR (depending on CODE) of the two operations LCODE
2230 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2231 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2232 if this makes the transformation invalid. */
2235 combine_comparisons (enum tree_code code, enum tree_code lcode,
2236 enum tree_code rcode, tree truth_type,
2237 tree ll_arg, tree lr_arg)
2239 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2240 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2241 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2242 enum comparison_code compcode;
/* The bit encoding turns logical AND/OR of comparisons into plain
   bitwise & / | of their compcodes. */
2246 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2247 compcode = lcompcode & rcompcode;
2250 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2251 compcode = lcompcode | rcompcode;
2260 /* Eliminate unordered comparisons, as well as LTGT and ORD
2261 which are not used unless the mode has NaNs. */
2262 compcode &= ~COMPCODE_UNORD;
2263 if (compcode == COMPCODE_LTGT)
2264 compcode = COMPCODE_NE;
2265 else if (compcode == COMPCODE_ORD)
2266 compcode = COMPCODE_TRUE;
2268 else if (flag_trapping_math)
2270 /* Check that the original operation and the optimized ones will trap
2271 under the same condition. */
2272 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2273 && (lcompcode != COMPCODE_EQ)
2274 && (lcompcode != COMPCODE_ORD);
2275 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2276 && (rcompcode != COMPCODE_EQ)
2277 && (rcompcode != COMPCODE_ORD);
2278 bool trap = (compcode & COMPCODE_UNORD) == 0
2279 && (compcode != COMPCODE_EQ)
2280 && (compcode != COMPCODE_ORD);
2282 /* In a short-circuited boolean expression the LHS might be
2283 such that the RHS, if evaluated, will never trap. For
2284 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2285 if neither x nor y is NaN. (This is a mixed blessing: for
2286 example, the expression above will never trap, hence
2287 optimizing it to x < y would be invalid). */
2288 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2289 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2292 /* If the comparison was short-circuited, and only the RHS
2293 trapped, we may now generate a spurious trap. */
2295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2298 /* If we changed the conditions that cause a trap, we lose. */
2299 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to a constant boolean. */
2303 if (compcode == COMPCODE_TRUE)
2304 return constant_boolean_node (true, truth_type);
2305 else if (compcode == COMPCODE_FALSE)
2306 return constant_boolean_node (false, truth_type);
2308 return fold (build2 (compcode_to_comparison (compcode),
2309 truth_type, ll_arg, lr_arg));
2312 /* Return nonzero if CODE is a tree code that represents a truth value. */
2315 truth_value_p (enum tree_code code)
/* True for any comparison ('<' class) and the TRUTH_* connectives. */
2317 return (TREE_CODE_CLASS (code) == '<'
2318 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2319 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2320 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2350 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2356 /* If both types don't have the same signedness, then we can't consider
2357 them equal. We must check this before the STRIP_NOPS calls
2358 because they may change the signedness of the arguments. */
2359 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2365 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2366 /* This is needed for conversions and for COMPONENT_REF.
2367 Might as well play it safe and always test this. */
2368 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2369 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2370 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2373 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2374 We don't care about side effects in that case because the SAVE_EXPR
2375 takes care of that for us. In all other cases, two expressions are
2376 equal if they have no side effects. If we have two identical
2377 expressions with side effects that should be treated the same due
2378 to the only side effects being identical SAVE_EXPR's, that will
2379 be detected in the recursive calls below. */
2380 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2381 && (TREE_CODE (arg0) == SAVE_EXPR
2382 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2385 /* Next handle constant cases, those for which we can return 1 even
2386 if ONLY_CONST is set. */
2387 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2388 switch (TREE_CODE (arg0))
2391 return (! TREE_CONSTANT_OVERFLOW (arg0)
2392 && ! TREE_CONSTANT_OVERFLOW (arg1)
2393 && tree_int_cst_equal (arg0, arg1));
/* Reals compare bit-identically (REAL_VALUES_IDENTICAL), so -0.0
   and 0.0 are distinguished -- see the head comment. */
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2399 TREE_REAL_CST (arg1)));
2405 if (TREE_CONSTANT_OVERFLOW (arg0)
2406 || TREE_CONSTANT_OVERFLOW (arg1))
/* Vector constants: walk both element lists in lockstep. */
2409 v1 = TREE_VECTOR_CST_ELTS (arg0);
2410 v2 = TREE_VECTOR_CST_ELTS (arg1);
2413 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2416 v1 = TREE_CHAIN (v1);
2417 v2 = TREE_CHAIN (v2);
2424 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2426 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2430 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2431 && ! memcmp (TREE_STRING_POINTER (arg0),
2432 TREE_STRING_POINTER (arg1),
2433 TREE_STRING_LENGTH (arg0)));
2436 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2442 if (flags & OEP_ONLY_CONST)
2445 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2448 /* Two conversions are equal only if signedness and modes match. */
2449 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2450 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2454 return operand_equal_p (TREE_OPERAND (arg0, 0),
2455 TREE_OPERAND (arg1, 0), flags);
2459 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2460 TREE_OPERAND (arg1, 0), flags)
2461 && operand_equal_p (TREE_OPERAND (arg0, 1),
2462 TREE_OPERAND (arg1, 1), flags))
2465 /* For commutative ops, allow the other order. */
2466 return (commutative_tree_code (TREE_CODE (arg0))
2467 && operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 1), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 0), flags));
2473 /* If either of the pointer (or reference) expressions we are
2474 dereferencing contain a side effect, these cannot be equal. */
2475 if (TREE_SIDE_EFFECTS (arg0)
2476 || TREE_SIDE_EFFECTS (arg1))
2479 switch (TREE_CODE (arg0))
2482 return operand_equal_p (TREE_OPERAND (arg0, 0),
2483 TREE_OPERAND (arg1, 0), flags);
2487 case ARRAY_RANGE_REF:
2488 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2489 TREE_OPERAND (arg1, 0), flags)
2490 && operand_equal_p (TREE_OPERAND (arg0, 1),
2491 TREE_OPERAND (arg1, 1), flags));
2494 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2495 TREE_OPERAND (arg1, 0), flags)
2496 && operand_equal_p (TREE_OPERAND (arg0, 1),
2497 TREE_OPERAND (arg1, 1), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 2),
2499 TREE_OPERAND (arg1, 2), flags));
2505 switch (TREE_CODE (arg0))
2508 case TRUTH_NOT_EXPR:
2509 return operand_equal_p (TREE_OPERAND (arg0, 0),
2510 TREE_OPERAND (arg1, 0), flags);
2512 case TRUTH_ANDIF_EXPR:
2513 case TRUTH_ORIF_EXPR:
/* Short-circuit forms: evaluation order matters, so do NOT allow
   the commuted-operand match used for the non-short-circuit forms
   below. */
2514 return operand_equal_p (TREE_OPERAND (arg0, 0),
2515 TREE_OPERAND (arg1, 0), flags)
2516 && operand_equal_p (TREE_OPERAND (arg0, 1),
2517 TREE_OPERAND (arg1, 1), flags);
2519 case TRUTH_AND_EXPR:
2521 case TRUTH_XOR_EXPR:
2522 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2523 TREE_OPERAND (arg1, 0), flags)
2524 && operand_equal_p (TREE_OPERAND (arg0, 1),
2525 TREE_OPERAND (arg1, 1), flags))
2526 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2527 TREE_OPERAND (arg1, 1), flags)
2528 && operand_equal_p (TREE_OPERAND (arg0, 1),
2529 TREE_OPERAND (arg1, 0), flags));
2532 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2535 /* If the CALL_EXPRs call different functions, then they
2536 clearly can not be equal. */
2537 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2538 TREE_OPERAND (arg1, 0), flags))
2542 unsigned int cef = call_expr_flags (arg0);
2543 if (flags & OEP_PURE_SAME)
2544 cef &= ECF_CONST | ECF_PURE;
2551 /* Now see if all the arguments are the same. operand_equal_p
2552 does not handle TREE_LIST, so we walk the operands here
2553 feeding them to operand_equal_p. */
2554 arg0 = TREE_OPERAND (arg0, 1);
2555 arg1 = TREE_OPERAND (arg1, 1);
2556 while (arg0 && arg1)
2558 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2562 arg0 = TREE_CHAIN (arg0);
2563 arg1 = TREE_CHAIN (arg1);
2566 /* If we get here and both argument lists are exhausted
2567 then the CALL_EXPRs are equal. */
2568 return ! (arg0 || arg1);
2575 /* Consider __builtin_sqrt equal to sqrt. */
2576 return (TREE_CODE (arg0) == FUNCTION_DECL
2577 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2578 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2579 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2586 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2587 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2589 When in doubt, return 0. */
2592 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2594 int unsignedp1, unsignedpo;
2595 tree primarg0, primarg1, primother;
2596 unsigned int correct_width;
/* Exact equality is the easy win. */
2598 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare reconstruction below only applies to integers. */
2601 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2602 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2605 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2606 and see if the inner values are the same. This removes any
2607 signedness comparison, which doesn't matter here. */
2608 primarg0 = arg0, primarg1 = arg1;
2609 STRIP_NOPS (primarg0);
2610 STRIP_NOPS (primarg1);
2611 if (operand_equal_p (primarg0, primarg1, 0))
2614 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2615 actual comparison operand, ARG0.
2617 First throw away any conversions to wider types
2618 already present in the operands. */
2620 primarg1 = get_narrower (arg1, &unsignedp1)
2621 primother = get_narrower (other, &unsignedpo);
2623 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2624 if (unsignedp1 == unsignedpo
2625 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2626 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2628 tree type = TREE_TYPE (arg0);
2630 /* Make sure shorter operand is extended the right way
2631 to match the longer operand. */
2632 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2633 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2635 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2642 /* See if ARG is an expression that is either a comparison or is performing
2643 arithmetic on comparisons. The comparisons must only be comparing
2644 two different values, which will be stored in *CVAL1 and *CVAL2; if
2645 they are nonzero it means that some operands have already been found.
2646 No variables may be used anywhere else in the expression except in the
2647 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2648 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2650 If this is true, return 1. Otherwise, return zero. */
2653 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2655 enum tree_code code = TREE_CODE (arg);
2656 char class = TREE_CODE_CLASS (code);
2658 /* We can handle some of the 'e' cases here. */
2659 if (class == 'e' && code == TRUTH_NOT_EXPR)
2661 else if (class == 'e'
2662 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2663 || code == COMPOUND_EXPR))
/* A SAVE_EXPR without RTL and without side effects in its operand can
   be looked through; presumably *SAVE_P is set so the caller re-wraps
   CVAL1/CVAL2 in save_exprs -- TODO confirm against the elided lines.  */
2666 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2667 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2669 /* If we've already found a CVAL1 or CVAL2, this expression is
2670 too complex to handle. */
2671 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
2681 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must themselves qualify.  */
2684 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2685 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2686 cval1, cval2, save_p));
2692 if (code == COND_EXPR)
2693 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2694 cval1, cval2, save_p)
2695 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2696 cval1, cval2, save_p)
2697 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2698 cval1, cval2, save_p));
2702 /* First see if we can handle the first operand, then the second. For
2703 the second operand, we know *CVAL1 can't be zero. It must be that
2704 one side of the comparison is each of the values; test for the
2705 case where this isn't true by failing if the two operands
2708 if (operand_equal_p (TREE_OPERAND (arg, 0),
2709 TREE_OPERAND (arg, 1), 0))
/* Record the first operand into whichever of *CVAL1/*CVAL2 is free,
   or accept it if it matches one already recorded.  */
2713 *cval1 = TREE_OPERAND (arg, 0);
2714 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2716 else if (*cval2 == 0)
2717 *cval2 = TREE_OPERAND (arg, 0);
2718 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for the second operand of the comparison.  */
2723 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2725 else if (*cval2 == 0)
2726 *cval2 = TREE_OPERAND (arg, 1);
2727 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2739 /* ARG is a tree that is known to contain just arithmetic operations and
2740 comparisons. Evaluate the operations in the tree substituting NEW0 for
2741 any occurrence of OLD0 as an operand of a comparison and likewise for
2745 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2747 tree type = TREE_TYPE (arg);
2748 enum tree_code code = TREE_CODE (arg);
2749 char class = TREE_CODE_CLASS (code);
2751 /* We can handle some of the 'e' cases here. */
2752 if (class == 'e' && code == TRUTH_NOT_EXPR)
2754 else if (class == 'e'
2755 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary expression: rebuild with the substituted operand and re-fold.  */
2761 return fold (build1 (code, type,
2762 eval_subst (TREE_OPERAND (arg, 0),
2763 old0, new0, old1, new1)));
/* Binary expression: substitute in both operands, rebuild, re-fold.  */
2766 return fold (build2 (code, type,
2767 eval_subst (TREE_OPERAND (arg, 0),
2768 old0, new0, old1, new1),
2769 eval_subst (TREE_OPERAND (arg, 1),
2770 old0, new0, old1, new1)));
/* These two cases forward to a single operand; presumably they strip a
   wrapper node such as COMPOUND_EXPR -- the case labels are elided here.  */
2776 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2779 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary expression (e.g. COND_EXPR): substitute in all three operands.  */
2782 return fold (build3 (code, type,
2783 eval_subst (TREE_OPERAND (arg, 0),
2784 old0, new0, old1, new1),
2785 eval_subst (TREE_OPERAND (arg, 1),
2786 old0, new0, old1, new1),
2787 eval_subst (TREE_OPERAND (arg, 2),
2788 old0, new0, old1, new1)));
2792 /* Fall through - ??? */
2796 tree arg0 = TREE_OPERAND (arg, 0);
2797 tree arg1 = TREE_OPERAND (arg, 1);
2799 /* We need to check both for exact equality and tree equality. The
2800 former will be true if the operand has a side-effect. In that
2801 case, we know the operand occurred exactly once. */
/* Replace either comparison operand matching OLD0 with NEW0, and any
   matching OLD1 with NEW1 (the assignment lines are elided here).  */
2803 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2805 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2808 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2810 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2813 return fold (build2 (code, type, arg0, arg1));
2821 /* Return a tree for the case when the result of an expression is RESULT
2822 converted to TYPE and OMITTED was previously an operand of the expression
2823 but is now not needed (e.g., we folded OMITTED * 0).
2825 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2826 the conversion of RESULT to TYPE. */
2829 omit_one_operand (tree type, tree result, tree omitted)
2831 tree t = fold_convert (type, result);
/* OMITTED must still be evaluated for its side effects, so sequence it
   before the converted result with a COMPOUND_EXPR.  */
2833 if (TREE_SIDE_EFFECTS (omitted))
2834 return build2 (COMPOUND_EXPR, type, omitted, t);
/* Otherwise just drop OMITTED; mark the result as not an lvalue.  */
2836 return non_lvalue (t);
2839 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.
   Identical to omit_one_operand except for the final wrapper.  */
2842 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2844 tree t = fold_convert (type, result);
/* Keep OMITTED's side effects by sequencing it before the result.  */
2846 if (TREE_SIDE_EFFECTS (omitted))
2847 return build2 (COMPOUND_EXPR, type, omitted, t);
2849 return pedantic_non_lvalue (t);
2852 /* Return a tree for the case when the result of an expression is RESULT
2853 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2854 of the expression but are now not needed.
2856 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2857 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2858 evaluated before OMITTED2. Otherwise, if neither has side effects,
2859 just do the conversion of RESULT to TYPE. */
2862 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2864 tree t = fold_convert (type, result);
/* Wrap innermost-last so that OMITTED1 ends up outermost and is
   therefore evaluated before OMITTED2.  */
2866 if (TREE_SIDE_EFFECTS (omitted2))
2867 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2868 if (TREE_SIDE_EFFECTS (omitted1))
2869 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only apply non_lvalue when no COMPOUND_EXPR wrapper was added.  */
2871 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2875 /* Return a simplified tree node for the truth-negation of ARG. This
2876 never alters ARG itself. We assume that ARG is an operation that
2877 returns a truth value (0 or 1).
2879 FIXME: one would think we would fold the result, but it causes
2880 problems with the dominator optimizer. */
2882 invert_truthvalue (tree arg)
2884 tree type = TREE_TYPE (arg);
2885 enum tree_code code = TREE_CODE (arg);
2887 if (code == ERROR_MARK)
2890 /* If this is a comparison, we can simply invert it, except for
2891 floating-point non-equality comparisons, in which case we just
2892 enclose a TRUTH_NOT_EXPR around what we have. */
2894 if (TREE_CODE_CLASS (code) == '<')
2896 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With trapping math, inverting an ordered FP comparison could change
   which inputs trap, so keep an explicit TRUTH_NOT_EXPR instead.  */
2897 if (FLOAT_TYPE_P (op_type)
2898 && flag_trapping_math
2899 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2900 && code != NE_EXPR && code != EQ_EXPR)
2901 return build1 (TRUTH_NOT_EXPR, type, arg);
2904 code = invert_tree_comparison (code,
2905 HONOR_NANS (TYPE_MODE (op_type)));
/* invert_tree_comparison signals "no invertible code" with ERROR_MARK.  */
2906 if (code == ERROR_MARK)
2907 return build1 (TRUTH_NOT_EXPR, type, arg);
2909 return build2 (code, type,
2910 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: the negation of a constant truth value.  */
2917 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2919 case TRUTH_AND_EXPR:
/* De Morgan: !(a & b) == !a | !b.  */
2920 return build2 (TRUTH_OR_EXPR, type,
2921 invert_truthvalue (TREE_OPERAND (arg, 0)),
2922 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a | b) == !a & !b.  */
2925 return build2 (TRUTH_AND_EXPR, type,
2926 invert_truthvalue (TREE_OPERAND (arg, 0)),
2927 invert_truthvalue (TREE_OPERAND (arg, 1)));
2929 case TRUTH_XOR_EXPR:
2930 /* Here we can invert either operand. We invert the first operand
2931 unless the second operand is a TRUTH_NOT_EXPR in which case our
2932 result is the XOR of the first operand with the inside of the
2933 negation of the second operand. */
2935 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2936 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2939 return build2 (TRUTH_XOR_EXPR, type,
2940 invert_truthvalue (TREE_OPERAND (arg, 0)),
2941 TREE_OPERAND (arg, 1));
2943 case TRUTH_ANDIF_EXPR:
/* Short-circuit De Morgan: !(a && b) == !a || !b.  */
2944 return build2 (TRUTH_ORIF_EXPR, type,
2945 invert_truthvalue (TREE_OPERAND (arg, 0)),
2946 invert_truthvalue (TREE_OPERAND (arg, 1)));
2948 case TRUTH_ORIF_EXPR:
2949 return build2 (TRUTH_ANDIF_EXPR, type,
2950 invert_truthvalue (TREE_OPERAND (arg, 0)),
2951 invert_truthvalue (TREE_OPERAND (arg, 1)));
2953 case TRUTH_NOT_EXPR:
/* Double negation: !!a == a.  */
2954 return TREE_OPERAND (arg, 0);
/* COND_EXPR: push the inversion into both arms.  */
2957 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2958 invert_truthvalue (TREE_OPERAND (arg, 1)),
2959 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* COMPOUND_EXPR: only the second operand carries the truth value.  */
2962 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2963 invert_truthvalue (TREE_OPERAND (arg, 1)));
2965 case NON_LVALUE_EXPR:
2966 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions of a boolean can have the inversion pushed inside.  */
2969 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2974 return build1 (TREE_CODE (arg), type,
2975 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Presumably a BIT_AND_EXPR with mask 1: (a & 1) inverts to (a & 1) == 0
   -- the case label is elided here; confirm against full source.  */
2978 if (!integer_onep (TREE_OPERAND (arg, 1)))
2980 return build2 (EQ_EXPR, type, arg,
2981 fold_convert (type, integer_zero_node));
2984 return build1 (TRUTH_NOT_EXPR, type, arg);
2986 case CLEANUP_POINT_EXPR:
2987 return build1 (CLEANUP_POINT_EXPR, type,
2988 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: anything else must already be boolean-typed.  */
2993 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2995 return build1 (TRUTH_NOT_EXPR, type, arg);
2998 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2999 operands are another bit-wise operation with a common input. If so,
3000 distribute the bit operations to save an operation and possibly two if
3001 constants are involved. For example, convert
3002 (A | B) & (A | C) into A | (B & C)
3003 Further simplification will occur if B and C are constants.
3005 If this optimization cannot be done, 0 will be returned. */
3008 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must share the same inner code, that code must differ
   from CODE, and it must be one of AND/IOR for distribution to apply.  */
3013 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3014 || TREE_CODE (arg0) == code
3015 || (TREE_CODE (arg0) != BIT_AND_EXPR
3016 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the common operand; try all four position combinations since
   the bit operations are commutative.  */
3019 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3021 common = TREE_OPERAND (arg0, 0);
3022 left = TREE_OPERAND (arg0, 1);
3023 right = TREE_OPERAND (arg1, 1);
3025 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3027 common = TREE_OPERAND (arg0, 0);
3028 left = TREE_OPERAND (arg0, 1);
3029 right = TREE_OPERAND (arg1, 0);
3031 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3033 common = TREE_OPERAND (arg0, 1);
3034 left = TREE_OPERAND (arg0, 0);
3035 right = TREE_OPERAND (arg1, 1);
3037 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3039 common = TREE_OPERAND (arg0, 1);
3040 left = TREE_OPERAND (arg0, 0);
3041 right = TREE_OPERAND (arg1, 0);
/* Rebuild as INNER (common, CODE (left, right)), folding both levels.  */
3046 return fold (build2 (TREE_CODE (arg0), type, common,
3047 fold (build2 (code, type, left, right))));
3050 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3051 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3054 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Operand 1 is the width, operand 2 the starting bit offset.  */
3057 tree result = build3 (BIT_FIELD_REF, type, inner,
3058 size_int (bitsize), bitsize_int (bitpos));
3060 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3065 /* Optimize a bit-field compare.
3067 There are two cases: First is a compare against a constant and the
3068 second is a comparison of two items where the fields are at the same
3069 bit position relative to the start of a chunk (byte, halfword, word)
3070 large enough to contain it. In these cases we can avoid the shift
3071 implicit in bitfield extractions.
3073 For constants, we emit a compare of the shifted constant with the
3074 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3075 compared. For two fields at the same position, we do the ANDs with the
3076 similar mask and compare the result of the ANDs.
3078 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3079 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3080 are the left and right operands of the comparison, respectively.
3082 If the optimization described above can be done, we return the resulting
3083 tree. Otherwise we return zero. */
3086 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3089 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3090 tree type = TREE_TYPE (lhs);
3091 tree signed_type, unsigned_type;
3092 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3093 enum machine_mode lmode, rmode, nmode;
3094 int lunsignedp, runsignedp;
3095 int lvolatilep = 0, rvolatilep = 0;
3096 tree linner, rinner = NULL_TREE;
3100 /* Get all the information about the extractions being done. If the bit size
3101 is the same as the size of the underlying object, we aren't doing an
3102 extraction at all and so can do nothing. We also don't want to
3103 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3104 then will no longer be able to replace it. */
3105 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3106 &lunsignedp, &lvolatilep);
3107 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3108 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3113 /* If this is not a constant, we can only do something if bit positions,
3114 sizes, and signedness are the same. */
3115 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3116 &runsignedp, &rvolatilep);
3118 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3119 || lunsignedp != runsignedp || offset != 0
3120 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3124 /* See if we can find a mode to refer to this field. We should be able to,
3125 but fail if we can't. */
3126 nmode = get_best_mode (lbitsize, lbitpos,
3127 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3128 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3129 TYPE_ALIGN (TREE_TYPE (rinner))),
3130 word_mode, lvolatilep || rvolatilep);
3131 if (nmode == VOIDmode)
3134 /* Set signed and unsigned types of the precision of this mode for the
3136 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3137 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3139 /* Compute the bit position and size for the new reference and our offset
3140 within it. If the new reference is the same size as the original, we
3141 won't optimize anything, so return zero. */
3142 nbitsize = GET_MODE_BITSIZE (nmode);
/* Round the bit position down to a chunk boundary of the chosen mode.  */
3143 nbitpos = lbitpos & ~ (nbitsize - 1);
3145 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 is at the opposite end of the chunk.  */
3148 if (BYTES_BIG_ENDIAN)
3149 lbitpos = nbitsize - lbitsize - lbitpos;
3151 /* Make the mask to be used against the extracted field. */
3152 mask = build_int_2 (~0, ~0);
3153 TREE_TYPE (mask) = unsigned_type;
3154 force_fit_type (mask, 0);
3155 mask = fold_convert (unsigned_type, mask);
/* Shift left then right to leave LBITSIZE ones positioned at LBITPOS.  */
3156 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3157 mask = const_binop (RSHIFT_EXPR, mask,
3158 size_int (nbitsize - lbitsize - lbitpos), 0);
3161 /* If not comparing with constant, just rework the comparison
3163 return build2 (code, compare_type,
3164 build2 (BIT_AND_EXPR, unsigned_type,
3165 make_bit_field_ref (linner, unsigned_type,
3166 nbitsize, nbitpos, 1),
3168 build2 (BIT_AND_EXPR, unsigned_type,
3169 make_bit_field_ref (rinner, unsigned_type,
3170 nbitsize, nbitpos, 1),
3173 /* Otherwise, we are handling the constant case. See if the constant is too
3174 big for the field. Warn and return a tree for 0 (false) if so. We do
3175 this not only for its own sake, but to avoid having to test for this
3176 error case below. If we didn't, we might generate wrong code.
3178 For unsigned fields, the constant shifted right by the field length should
3179 be all zero. For signed fields, the high-order bits should agree with
3184 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3185 fold_convert (unsigned_type, rhs),
3186 size_int (lbitsize), 0)))
3188 warning ("comparison is always %d due to width of bit-field",
/* The whole comparison degenerates to a constant truth value.  */
3190 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: bits above the field must all equal the sign bit.  */
3195 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3196 size_int (lbitsize - 1), 0);
3197 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3199 warning ("comparison is always %d due to width of bit-field",
3201 return constant_boolean_node (code == NE_EXPR, compare_type);
3205 /* Single-bit compares should always be against zero. */
3206 if (lbitsize == 1 && ! integer_zerop (rhs))
3208 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3209 rhs = fold_convert (type, integer_zero_node);
3212 /* Make a new bitfield reference, shift the constant over the
3213 appropriate number of bits and mask it with the computed mask
3214 (in case this was a signed field). If we changed it, make a new one. */
3215 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Presumably guarded by a volatility check in the elided lines.  */
3218 TREE_SIDE_EFFECTS (lhs) = 1;
3219 TREE_THIS_VOLATILE (lhs) = 1;
/* Shift the constant into field position and mask it.  */
3222 rhs = fold (const_binop (BIT_AND_EXPR,
3223 const_binop (LSHIFT_EXPR,
3224 fold_convert (unsigned_type, rhs),
3225 size_int (lbitpos), 0),
3228 return build2 (code, compare_type,
3229 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3233 /* Subroutine for fold_truthop: decode a field reference.
3235 If EXP is a comparison reference, we return the innermost reference.
3237 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3238 set to the starting bit number.
3240 If the innermost field can be completely contained in a mode-sized
3241 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3243 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3244 otherwise it is not changed.
3246 *PUNSIGNEDP is set to the signedness of the field.
3248 *PMASK is set to the mask used. This is either contained in a
3249 BIT_AND_EXPR or derived from the width of the field.
3251 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3253 Return 0 if this is not a component reference or is one that we can't
3254 do anything with. */
3257 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3258 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3259 int *punsignedp, int *pvolatilep,
3260 tree *pmask, tree *pand_mask)
3262 tree outer_type = 0;
3264 tree mask, inner, offset;
3266 unsigned int precision;
3268 /* All the optimizations using this function assume integer fields.
3269 There are problems with FP fields since the type_for_size call
3270 below can fail for, e.g., XFmode. */
3271 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3274 /* We are interested in the bare arrangement of bits, so strip everything
3275 that doesn't affect the machine mode. However, record the type of the
3276 outermost expression if it may matter below. */
3277 if (TREE_CODE (exp) == NOP_EXPR
3278 || TREE_CODE (exp) == CONVERT_EXPR
3279 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3280 outer_type = TREE_TYPE (exp);
/* Peel off an explicit BIT_AND_EXPR mask, if present and constant.  */
3283 if (TREE_CODE (exp) == BIT_AND_EXPR)
3285 and_mask = TREE_OPERAND (exp, 1);
3286 exp = TREE_OPERAND (exp, 0);
3287 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3288 if (TREE_CODE (and_mask) != INTEGER_CST)
3292 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3293 punsignedp, pvolatilep);
/* Bail out if nothing was stripped (and no mask found), if the size is
   unknown, if a variable offset is involved, or on PLACEHOLDER_EXPR.  */
3294 if ((inner == exp && and_mask == 0)
3295 || *pbitsize < 0 || offset != 0
3296 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3299 /* If the number of bits in the reference is the same as the bitsize of
3300 the outer type, then the outer type gives the signedness. Otherwise
3301 (in case of a small bitfield) the signedness is unchanged. */
3302 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3303 *punsignedp = TYPE_UNSIGNED (outer_type);
3305 /* Compute the mask to access the bitfield. */
3306 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3307 precision = TYPE_PRECISION (unsigned_type);
/* Build an all-ones constant, then shift left and back right to leave
   exactly *PBITSIZE low-order one bits.  */
3309 mask = build_int_2 (~0, ~0);
3310 TREE_TYPE (mask) = unsigned_type;
3311 force_fit_type (mask, 0);
3312 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3313 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3315 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3317 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3318 fold_convert (unsigned_type, and_mask), mask));
3321 *pand_mask = and_mask;
3325 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type.  */
3329 all_ones_mask_p (tree mask, int size)
3331 tree type = TREE_TYPE (mask);
3332 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed variant of TYPE ...  */
3335 tmask = build_int_2 (~0, ~0);
3336 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3337 force_fit_type (tmask, 0);
/* ... then shift out all but the low SIZE bits and compare with MASK.  */
3339 tree_int_cst_equal (mask,
3340 const_binop (RSHIFT_EXPR,
3341 const_binop (LSHIFT_EXPR, tmask,
3342 size_int (precision - size),
3344 size_int (precision - size), 0));
3347 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3348 represents the sign bit of EXP's type. If EXP represents a sign
3349 or zero extension, also test VAL against the unextended type.
3350 The return value is the (sub)expression whose sign bit is VAL,
3351 or NULL_TREE otherwise. */
3354 sign_bit_p (tree exp, tree val)
3356 unsigned HOST_WIDE_INT mask_lo, lo;
3357 HOST_WIDE_INT mask_hi, hi;
3361 /* Tree EXP must have an integral type. */
3362 t = TREE_TYPE (exp);
3363 if (! INTEGRAL_TYPE_P (t))
3366 /* Tree VAL must be an integer constant. */
3367 if (TREE_CODE (val) != INTEGER_CST
3368 || TREE_CONSTANT_OVERFLOW (val))
3371 width = TYPE_PRECISION (t);
/* Wide case: the sign bit lives in the high HOST_WIDE_INT word.  */
3372 if (width > HOST_BITS_PER_WIDE_INT)
3374 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3377 mask_hi = ((unsigned HOST_WIDE_INT) -1
3378 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case: sign bit and mask fit entirely in the low word.  */
3384 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3387 mask_lo = ((unsigned HOST_WIDE_INT) -1
3388 >> (HOST_BITS_PER_WIDE_INT - width));
3391 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3392 treat VAL as if it were unsigned. */
3393 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3394 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3397 /* Handle extension from a narrower type. */
3398 if (TREE_CODE (exp) == NOP_EXPR
3399 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3400 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3405 /* Subroutine for fold_truthop: determine if an operand is simple enough
3406 to be evaluated unconditionally.  Returns nonzero for constants and
   cheap, non-volatile, local declarations. */
3409 simple_operand_p (tree exp)
3411 /* Strip any conversions that don't change the machine mode. */
3412 while ((TREE_CODE (exp) == NOP_EXPR
3413 || TREE_CODE (exp) == CONVERT_EXPR)
3414 && (TYPE_MODE (TREE_TYPE (exp))
3415 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3416 exp = TREE_OPERAND (exp, 0);
/* Accept constants ('c' class) or declarations that are cheap and safe
   to evaluate speculatively.  */
3418 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3420 && ! TREE_ADDRESSABLE (exp)
3421 && ! TREE_THIS_VOLATILE (exp)
3422 && ! DECL_NONLOCAL (exp)
3423 /* Don't regard global variables as simple. They may be
3424 allocated in ways unknown to the compiler (shared memory,
3425 #pragma weak, etc). */
3426 && ! TREE_PUBLIC (exp)
3427 && ! DECL_EXTERNAL (exp)
3428 /* Loading a static variable is unduly expensive, but global
3429 registers aren't expensive. */
3430 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3433 /* The following functions are subroutines to fold_range_test and allow it to
3434 try to change a logical combination of comparisons into a range test.
3437 X == 2 || X == 3 || X == 4 || X == 5
3441 (unsigned) (X - 2) <= 3
3443 We describe each set of comparisons as being either inside or outside
3444 a range, using a variable named like IN_P, and then describe the
3445 range with a lower and upper bound. If one of the bounds is omitted,
3446 it represents either the highest or lowest value of the type.
3448 In the comments below, we represent a range by two numbers in brackets
3449 preceded by a "+" to designate being inside that range, or a "-" to
3450 designate being outside that range, so the condition can be inverted by
3451 flipping the prefix. An omitted bound is represented by a "-". For
3452 example, "- [-, 10]" means being outside the range starting at the lowest
3453 possible value and ending at 10, in other words, being greater than 10.
3454 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3457 We set up things so that the missing bounds are handled in a consistent
3458 manner so neither a missing bound nor "true" and "false" need to be
3459 handled using a special case. */
3461 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3462 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3463 and UPPER1_P are nonzero if the respective argument is an upper bound
3464 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3465 must be specified for a comparison. ARG1 will be converted to ARG0's
3466 type if both are specified. */
3469 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3470 tree arg1, int upper1_p)
3476 /* If neither arg represents infinity, do the normal operation.
3477 Else, if not a comparison, return infinity. Else handle the special
3478 comparison rules. Note that most of the cases below won't occur, but
3479 are handled for consistency. */
3481 if (arg0 != 0 && arg1 != 0)
3483 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3484 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
/* Only a fully-folded constant result is useful to the caller.  */
3486 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3489 if (TREE_CODE_CLASS (code) != '<')
3492 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3493 for neither. In real maths, we cannot assume open ended ranges are
3494 the same. But, this is computer arithmetic, where numbers are finite.
3495 We can therefore make the transformation of any unbounded range with
3496 the value Z, Z being greater than any representable number. This permits
3497 us to treat unbounded ranges as equal. */
3498 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3499 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the two (possibly infinite) bounds via their sign codes; the
   switch case labels for each comparison code are elided here.  */
3503 result = sgn0 == sgn1;
3506 result = sgn0 != sgn1;
3509 result = sgn0 < sgn1;
3512 result = sgn0 <= sgn1;
3515 result = sgn0 > sgn1;
3518 result = sgn0 >= sgn1;
3524 return constant_boolean_node (result, type);
3527 /* Given EXP, a logical expression, set the range it is testing into
3528 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3529 actually being tested. *PLOW and *PHIGH will be made of the same type
3530 as the returned expression. If EXP is not a comparison, we will most
3531 likely not be returning a useful value and range. */
3534 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3536 enum tree_code code;
3537 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3538 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3540 tree low, high, n_low, n_high;
3542 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3543 and see if we can refine the range. Some of the cases below may not
3544 happen, but it doesn't seem worth worrying about this. We "continue"
3545 the outer loop when we've changed something; otherwise we "break"
3546 the switch, which will "break" the while. */
3549 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3553 code = TREE_CODE (exp);
3554 exp_type = TREE_TYPE (exp);
3556 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3558 if (first_rtl_op (code) > 0)
3559 arg0 = TREE_OPERAND (exp, 0);
3560 if (TREE_CODE_CLASS (code) == '<'
3561 || TREE_CODE_CLASS (code) == '1'
3562 || TREE_CODE_CLASS (code) == '2')
3563 arg0_type = TREE_TYPE (arg0);
3564 if (TREE_CODE_CLASS (code) == '2'
3565 || TREE_CODE_CLASS (code) == '<'
3566 || (TREE_CODE_CLASS (code) == 'e'
3567 && TREE_CODE_LENGTH (code) > 1))
3568 arg1 = TREE_OPERAND (exp, 1);
3573 case TRUTH_NOT_EXPR:
3574 in_p = ! in_p, exp = arg0;
3577 case EQ_EXPR: case NE_EXPR:
3578 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3579 /* We can only do something if the range is testing for zero
3580 and if the second operand is an integer constant. Note that
3581 saying something is "in" the range we make is done by
3582 complementing IN_P since it will set in the initial case of
3583 being not equal to zero; "out" is leaving it alone. */
3584 if (low == 0 || high == 0
3585 || ! integer_zerop (low) || ! integer_zerop (high)
3586 || TREE_CODE (arg1) != INTEGER_CST)
3591 case NE_EXPR: /* - [c, c] */
3594 case EQ_EXPR: /* + [c, c] */
3595 in_p = ! in_p, low = high = arg1;
3597 case GT_EXPR: /* - [-, c] */
3598 low = 0, high = arg1;
3600 case GE_EXPR: /* + [c, -] */
3601 in_p = ! in_p, low = arg1, high = 0;
3603 case LT_EXPR: /* - [c, -] */
3604 low = arg1, high = 0;
3606 case LE_EXPR: /* + [-, c] */
3607 in_p = ! in_p, low = 0, high = arg1;
3613 /* If this is an unsigned comparison, we also know that EXP is
3614 greater than or equal to zero. We base the range tests we make
3615 on that fact, so we record it here so we can parse existing
3616 range tests. We test arg0_type since often the return type
3617 of, e.g. EQ_EXPR, is boolean. */
3618 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3620 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3621 1, fold_convert (arg0_type, integer_zero_node),
3625 in_p = n_in_p, low = n_low, high = n_high;
3627 /* If the high bound is missing, but we have a nonzero low
3628 bound, reverse the range so it goes from zero to the low bound
3630 if (high == 0 && low && ! integer_zerop (low))
3633 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3634 integer_one_node, 0);
3635 low = fold_convert (arg0_type, integer_zero_node);
3643 /* (-x) IN [a,b] -> x in [-b, -a] */
3644 n_low = range_binop (MINUS_EXPR, exp_type,
3645 fold_convert (exp_type, integer_zero_node),
3647 n_high = range_binop (MINUS_EXPR, exp_type,
3648 fold_convert (exp_type, integer_zero_node),
3650 low = n_low, high = n_high;
3656 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3657 fold_convert (exp_type, integer_one_node));
3660 case PLUS_EXPR: case MINUS_EXPR:
3661 if (TREE_CODE (arg1) != INTEGER_CST)
3664 /* If EXP is signed, any overflow in the computation is undefined,
3665 so we don't worry about it so long as our computations on
3666 the bounds don't overflow. For unsigned, overflow is defined
3667 and this is exactly the right thing. */
3668 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3669 arg0_type, low, 0, arg1, 0);
3670 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3671 arg0_type, high, 1, arg1, 0);
3672 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3673 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3676 /* Check for an unsigned range which has wrapped around the maximum
3677 value thus making n_high < n_low, and normalize it. */
3678 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3680 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3681 integer_one_node, 0);
3682 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3683 integer_one_node, 0);
3685 /* If the range is of the form +/- [ x+1, x ], we won't
3686 be able to normalize it. But then, it represents the
3687 whole range or the empty set, so make it
3689 if (tree_int_cst_equal (n_low, low)
3690 && tree_int_cst_equal (n_high, high))
3696 low = n_low, high = n_high;
3701 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3702 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3705 if (! INTEGRAL_TYPE_P (arg0_type)
3706 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3707 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3710 n_low = low, n_high = high;
3713 n_low = fold_convert (arg0_type, n_low);
3716 n_high = fold_convert (arg0_type, n_high);
3719 /* If we're converting arg0 from an unsigned type, to exp,
3720 a signed type, we will be doing the comparison as unsigned.
3721 The tests above have already verified that LOW and HIGH
3724 So we have to ensure that we will handle large unsigned
3725 values the same way that the current signed bounds treat
3728 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3731 tree equiv_type = lang_hooks.types.type_for_mode
3732 (TYPE_MODE (arg0_type), 1);
3734 /* A range without an upper bound is, naturally, unbounded.
3735 Since convert would have cropped a very large value, use
3736 the max value for the destination type. */
3738 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3739 : TYPE_MAX_VALUE (arg0_type);
3741 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3742 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3743 fold_convert (arg0_type,
3745 fold_convert (arg0_type,
3746 integer_one_node)));
3748 /* If the low bound is specified, "and" the range with the
3749 range for which the original unsigned value will be
3753 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3754 1, n_low, n_high, 1,
3755 fold_convert (arg0_type, integer_zero_node),
3759 in_p = (n_in_p == in_p);
3763 /* Otherwise, "or" the range with the range of the input
3764 that will be interpreted as negative. */
3765 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3766 0, n_low, n_high, 1,
3767 fold_convert (arg0_type, integer_zero_node),
3771 in_p = (in_p != n_in_p);
3776 low = n_low, high = n_high;
3786 /* If EXP is a constant, we can evaluate whether this is true or false. */
3787 if (TREE_CODE (exp) == INTEGER_CST)
3789 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3791 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3797 *pin_p = in_p, *plow = low, *phigh = high;
3801 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3802 type, TYPE, return an expression to test if EXP is in (or out of, depending
3803 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): the leading numbers are original source line numbers; gaps in
   them mean lines (guards, braces, case labels) are elided from this excerpt.
   All code tokens below are reproduced verbatim from the listing.  */
3806 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3808 tree etype = TREE_TYPE (exp);
/* An "out of range" test is built as the inverted "in range" test.  */
3813 value = build_range_check (type, exp, 1, low, high);
3815 return invert_truthvalue (value);
/* Neither bound present: the test is vacuously true.  */
3820 if (low == 0 && high == 0)
3821 return fold_convert (type, integer_one_node);
/* One-sided bounds reduce to a single <= or >= comparison (the guards
   selecting between these two returns are elided in this excerpt).  */
3824 return fold (build2 (LE_EXPR, type, exp, high));
3827 return fold (build2 (GE_EXPR, type, exp, low));
/* A single-point range [c, c] is an equality test.  */
3829 if (operand_equal_p (low, high, 0))
3830 return fold (build2 (EQ_EXPR, type, exp, low));
/* [0, HIGH] in a signed type: redo the check in the unsigned variant so
   one unsigned comparison against HIGH suffices.  */
3832 if (integer_zerop (low))
3834 if (! TYPE_UNSIGNED (etype))
3836 etype = lang_hooks.types.unsigned_type (etype);
3837 high = fold_convert (etype, high);
3838 exp = fold_convert (etype, exp);
3840 return build_range_check (type, exp, 1, 0, high);
3843 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3844 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3846 unsigned HOST_WIDE_INT lo;
3850 prec = TYPE_PRECISION (etype);
3851 if (prec <= HOST_BITS_PER_WIDE_INT)
/* HI/LO together encode 2**(prec-1) - 1, the signed maximum for ETYPE's
   precision (split across two words when prec > HOST_BITS_PER_WIDE_INT).  */
3854 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3858 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3859 lo = (unsigned HOST_WIDE_INT) -1;
3862 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3864 if (TYPE_UNSIGNED (etype))
3866 etype = lang_hooks.types.signed_type (etype);
3867 exp = fold_convert (etype, exp);
3869 return fold (build2 (GT_EXPR, type, exp,
3870 fold_convert (etype, integer_zero_node)));
/* General case: test EXP - LOW against the shifted range [0, HIGH - LOW].  */
3874 value = const_binop (MINUS_EXPR, high, low, 0);
/* If HIGH - LOW overflowed in the signed type, retry the subtraction in the
   matching unsigned type, where the wraparound is well-defined -- but only
   after verifying the modular-arithmetic assumption below.  */
3875 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3877 tree utype, minv, maxv;
3879 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3880 for the type in question, as we rely on this here. */
3881 switch (TREE_CODE (etype))
3886 utype = lang_hooks.types.unsigned_type (etype);
3887 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3888 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3889 integer_one_node, 1);
3890 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3891 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3895 high = fold_convert (etype, high);
3896 low = fold_convert (etype, low);
3897 exp = fold_convert (etype, exp);
3898 value = const_binop (MINUS_EXPR, high, low, 0);
/* Recurse with the zero-based range; the trailing argument (HIGH - LOW,
   original line 3910) is elided from this excerpt.  */
3906 if (value != 0 && ! TREE_OVERFLOW (value))
3907 return build_range_check (type,
3908 fold (build2 (MINUS_EXPR, etype, exp, low)),
3909 1, fold_convert (etype, integer_zero_node),
3915 /* Given two ranges, see if we can merge them into one. Return 1 if we
3916 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): numeric prefixes are original source line numbers; gaps in
   them mean lines (braces, else/if headers, case labels) are elided from this
   excerpt.  Code tokens below are verbatim.  */
3919 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3920 tree high0, int in1_p, tree low1, tree high1)
/* A null bound means "unbounded".  Two corresponding bounds are equal when
   both are absent, or when range_binop says they compare equal.  */
3928 int lowequal = ((low0 == 0 && low1 == 0)
3929 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3930 low0, 0, low1, 0)));
3931 int highequal = ((high0 == 0 && high1 == 0)
3932 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3933 high0, 1, high1, 1)));
3935 /* Make range 0 be the range that starts first, or ends last if they
3936 start at the same value. Swap them if it isn't. */
3937 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3940 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3941 high1, 1, high0, 1))))
3943 temp = in0_p, in0_p = in1_p, in1_p = temp;
3944 tem = low0, low0 = low1, low1 = tem;
3945 tem = high0, high0 = high1, high1 = tem;
3948 /* Now flag two cases, whether the ranges are disjoint or whether the
3949 second range is totally subsumed in the first. Note that the tests
3950 below are simplified by the ones above. */
3951 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3952 high0, 1, low1, 0));
3953 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3954 high1, 1, high0, 1));
3956 /* We now have four cases, depending on whether we are including or
3957 excluding the two ranges. */
/* Case: both ranges included (the `if (in0_p && in1_p)' header is elided
   here).  */
3960 /* If they don't overlap, the result is false. If the second range
3961 is a subset it is the result. Otherwise, the range is from the start
3962 of the second to the end of the first. */
3964 in_p = 0, low = high = 0;
3966 in_p = 1, low = low1, high = high1;
3968 in_p = 1, low = low1, high = high0;
3971 else if (in0_p && ! in1_p)
3973 /* If they don't overlap, the result is the first range. If they are
3974 equal, the result is false. If the second range is a subset of the
3975 first, and the ranges begin at the same place, we go from just after
3976 the end of the first range to the end of the second. If the second
3977 range is not a subset of the first, or if it is a subset and both
3978 ranges end at the same place, the range starts at the start of the
3979 first range and ends just before the second range.
3980 Otherwise, we can't describe this as a single range. */
3982 in_p = 1, low = low0, high = high0;
3983 else if (lowequal && highequal)
3984 in_p = 0, low = high = 0;
3985 else if (subset && lowequal)
3987 in_p = 1, high = high0;
3988 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3989 integer_one_node, 0);
3991 else if (! subset || highequal)
3993 in_p = 1, low = low0;
3994 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3995 integer_one_node, 0);
4001 else if (! in0_p && in1_p)
4003 /* If they don't overlap, the result is the second range. If the second
4004 is a subset of the first, the result is false. Otherwise,
4005 the range starts just after the first range and ends at the
4006 end of the second. */
4008 in_p = 1, low = low1, high = high1;
4009 else if (subset || highequal)
4010 in_p = 0, low = high = 0;
4013 in_p = 1, high = high1;
4014 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4015 integer_one_node, 0);
4021 /* The case where we are excluding both ranges. Here the complex case
4022 is if they don't overlap. In that case, the only time we have a
4023 range is if they are adjacent. If the second is a subset of the
4024 first, the result is the first. Otherwise, the range to exclude
4025 starts at the beginning of the first range and ends at the end of the
/* Adjacency check: high0 + 1 == low1 (middle arguments elided here).  */
4029 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4030 range_binop (PLUS_EXPR, NULL_TREE,
4032 integer_one_node, 1),
4034 in_p = 0, low = low0, high = high1;
4037 /* Canonicalize - [min, x] into - [-, x]. */
4038 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4039 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only safe when the type's precision fills its machine mode; otherwise
   values outside the type's range are representable in the mode.  */
4042 if (TYPE_PRECISION (TREE_TYPE (low0))
4043 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4048 if (tree_int_cst_equal (low0,
4049 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4053 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4054 && integer_zerop (low0))
4061 /* Canonicalize - [x, max] into - [x, -]. */
4062 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4063 switch (TREE_CODE (TREE_TYPE (high1)))
4066 if (TYPE_PRECISION (TREE_TYPE (high1))
4067 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4072 if (tree_int_cst_equal (high1,
4073 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4077 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4078 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4080 integer_one_node, 1)))
4087 /* The ranges might be also adjacent between the maximum and
4088 minimum values of the given type. For
4089 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4090 return + [x + 1, y - 1]. */
4091 if (low0 == 0 && high1 == 0)
4093 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4094 integer_one_node, 1);
4095 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4096 integer_one_node, 0);
4097 if (low == 0 || high == 0)
4107 in_p = 0, low = low0, high = high0;
4109 in_p = 0, low = low0, high = high1;
/* Write the merged range back through the output parameters.  */
4112 *pin_p = in_p, *plow = low, *phigh = high;
4117 /* Subroutine of fold, looking inside expressions of the form
4118 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4119 of the COND_EXPR. This function is being used also to optimize
4120 A op B ? C : A, by reversing the comparison first.
4122 Return a folded expression whose code is not a COND_EXPR
4123 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): numeric prefixes are original source line numbers; gaps in
   them mean lines (case labels, braces, guards) are elided from this
   excerpt.  Code tokens below are verbatim.  */
4126 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; ARG00/ARG01 are its two operands.  */
4128 enum tree_code comp_code = TREE_CODE (arg0);
4129 tree arg00 = TREE_OPERAND (arg0, 0);
4130 tree arg01 = TREE_OPERAND (arg0, 1);
4131 tree arg1_type = TREE_TYPE (arg1);
4137 /* If we have A op 0 ? A : -A, consider applying the following
4140 A == 0? A : -A same as -A
4141 A != 0? A : -A same as A
4142 A >= 0? A : -A same as abs (A)
4143 A > 0? A : -A same as abs (A)
4144 A <= 0? A : -A same as -abs (A)
4145 A < 0? A : -A same as -abs (A)
4147 None of these transformations work for modes with signed
4148 zeros. If A is +/-0, the first two transformations will
4149 change the sign of the result (from +0 to -0, or vice
4150 versa). The last four will fix the sign of the result,
4151 even though the original expressions could be positive or
4152 negative, depending on the sign of A.
4154 Note that all these transformations are correct if A is
4155 NaN, since the two alternatives (A and -A) are also NaNs. */
4156 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4157 ? real_zerop (arg01)
4158 : integer_zerop (arg01))
4159 && TREE_CODE (arg2) == NEGATE_EXPR
4160 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* EQ case: result is -A; NE case just below: result is A.  The switch on
   comp_code and its case labels are elided in this excerpt.  */
4164 tem = fold_convert (arg1_type, arg1);
4165 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4167 return pedantic_non_lvalue (fold_convert (type, arg1));
/* GE/GT arm: build abs (A).  ABS_EXPR is built in the signed variant of
   arg1's type.  */
4170 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4171 arg1 = fold_convert (lang_hooks.types.signed_type
4172 (TREE_TYPE (arg1)), arg1);
4173 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4174 return pedantic_non_lvalue (fold_convert (type, tem));
/* LE/LT arm: same as above but negated, giving -abs (A).  */
4177 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4178 arg1 = fold_convert (lang_hooks.types.signed_type
4179 (TREE_TYPE (arg1)), arg1);
4180 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4181 return negate_expr (fold_convert (type, tem));
4186 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4187 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4188 both transformations are correct when A is NaN: A != 0
4189 is then true, and A == 0 is false. */
4191 if (integer_zerop (arg01) && integer_zerop (arg2))
4193 if (comp_code == NE_EXPR)
4194 return pedantic_non_lvalue (fold_convert (type, arg1));
4195 else if (comp_code == EQ_EXPR)
4196 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
4199 /* Try some transformations of A op B ? A : B.
4201 A == B? A : B same as B
4202 A != B? A : B same as A
4203 A >= B? A : B same as max (A, B)
4204 A > B? A : B same as max (B, A)
4205 A <= B? A : B same as min (A, B)
4206 A < B? A : B same as min (B, A)
4208 As above, these transformations don't work in the presence
4209 of signed zeros. For example, if A and B are zeros of
4210 opposite sign, the first two transformations will change
4211 the sign of the result. In the last four, the original
4212 expressions give different results for (A=+0, B=-0) and
4213 (A=-0, B=+0), but the transformed expressions do not.
4215 The first two transformations are correct if either A or B
4216 is a NaN. In the first transformation, the condition will
4217 be false, and B will indeed be chosen. In the case of the
4218 second transformation, the condition A != B will be true,
4219 and A will be chosen.
4221 The conversions to max() and min() are not correct if B is
4222 a number and A is not. The conditions in the original
4223 expressions will be false, so all four give B. The min()
4224 and max() versions would give a NaN instead. */
4225 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4227 tree comp_op0 = arg00;
4228 tree comp_op1 = arg01;
4229 tree comp_type = TREE_TYPE (comp_op0);
4231 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4232 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* EQ arm returns B (arg2); NE arm returns A (arg1).  The case labels are
   elided in this excerpt.  */
4242 return pedantic_non_lvalue (fold_convert (type, arg2));
4244 return pedantic_non_lvalue (fold_convert (type, arg1));
4247 /* In C++ a ?: expression can be an lvalue, so put the
4248 operand which will be used if they are equal first
4249 so that we can convert this back to the
4250 corresponding COND_EXPR. */
4251 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4252 return pedantic_non_lvalue (
4253 fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
4254 (comp_code == LE_EXPR
4255 ? comp_op0 : comp_op1),
4256 (comp_code == LE_EXPR
4257 ? comp_op1 : comp_op0)))));
/* GE/GT arms: mirror of the MIN_EXPR case above, building MAX_EXPR.  */
4261 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4262 return pedantic_non_lvalue (
4263 fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
4264 (comp_code == GE_EXPR
4265 ? comp_op0 : comp_op1),
4266 (comp_code == GE_EXPR
4267 ? comp_op1 : comp_op0)))));
4274 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4275 we might still be able to simplify this. For example,
4276 if C1 is one less or one more than C2, this might have started
4277 out as a MIN or MAX and been transformed by this function.
4278 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4280 if (INTEGRAL_TYPE_P (type)
4281 && TREE_CODE (arg01) == INTEGER_CST
4282 && TREE_CODE (arg2) == INTEGER_CST)
4286 /* We can replace A with C1 in this case. */
4287 arg1 = fold_convert (type, arg01);
4288 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4291 /* If C1 is C2 + 1, this is min(A, C2). */
4292 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4294 && operand_equal_p (arg01,
4295 const_binop (PLUS_EXPR, arg2,
4296 integer_one_node, 0),
4298 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4299 type, arg1, arg2)));
4303 /* If C1 is C2 - 1, this is min(A, C2). */
4304 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4306 && operand_equal_p (arg01,
4307 const_binop (MINUS_EXPR, arg2,
4308 integer_one_node, 0),
4310 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4311 type, arg1, arg2)));
4315 /* If C1 is C2 - 1, this is max(A, C2). */
4316 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4318 && operand_equal_p (arg01,
4319 const_binop (MINUS_EXPR, arg2,
4320 integer_one_node, 0),
4322 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4323 type, arg1, arg2)));
4327 /* If C1 is C2 + 1, this is max(A, C2). */
4328 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4330 && operand_equal_p (arg01,
4331 const_binop (PLUS_EXPR, arg2,
4332 integer_one_node, 0),
4334 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4335 type, arg1, arg2)));
4348 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4349 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4352 /* EXP is some logical combination of boolean tests. See if we can
4353 merge it into some range test. Return the new tree if so. */
/* NOTE(review): numeric prefixes are original source line numbers; gaps in
   them mean lines (braces, return statements) are elided from this excerpt.
   Code tokens below are verbatim.  */
4356 fold_range_test (tree exp)
/* Decompose both operands of the AND/OR into (in_p, low, high) ranges via
   make_range.  */
4358 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4359 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4360 int in0_p, in1_p, in_p;
4361 tree low0, low1, low, high0, high1, high;
4362 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4363 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4366 /* If this is an OR operation, invert both sides; we will invert
4367 again at the end. */
4369 in0_p = ! in0_p, in1_p = ! in1_p;
4371 /* If both expressions are the same, if we can merge the ranges, and we
4372 can build the range test, return it or it inverted. If one of the
4373 ranges is always true or always false, consider it to be the same
4374 expression as the other. */
4375 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4376 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4378 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4380 : rhs != 0 ? rhs : integer_zero_node,
4382 return or_op ? invert_truthvalue (tem) : tem;
4384 /* On machines where the branch cost is expensive, if this is a
4385 short-circuited branch and the underlying object on both sides
4386 is the same, make a non-short-circuit operation. */
4387 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4388 && lhs != 0 && rhs != 0
4389 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4390 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4391 && operand_equal_p (lhs, rhs, 0))
4393 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4394 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4395 which cases we can't do this. */
4396 if (simple_operand_p (lhs))
4397 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4398 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4399 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4400 TREE_OPERAND (exp, 1));
4402 else if (lang_hooks.decls.global_bindings_p () == 0
4403 && ! CONTAINS_PLACEHOLDER_P (lhs))
4405 tree common = save_expr (lhs);
/* Rebuild both sides as range checks over the shared SAVE_EXPR so the
   common operand is evaluated only once.  */
4407 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4408 or_op ? ! in0_p : in0_p,
4410 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4411 or_op ? ! in1_p : in1_p,
4413 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4414 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4415 TREE_TYPE (exp), lhs, rhs);
4422 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4423 bit value. Arrange things so the extra bits will be set to zero if and
4424 only if C is signed-extended to its full width. If MASK is nonzero,
4425 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): numeric prefixes are original source line numbers; gaps in
   them mean lines (braces, an early return) are elided from this excerpt.
   Code tokens below are verbatim.  */
4428 unextend (tree c, int p, int unsignedp, tree mask)
4430 tree type = TREE_TYPE (c);
4431 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* If C already occupies the full mode, or is unsigned, there are no extra
   sign bits to adjust (the return on the elided line follows).  */
4434 if (p == modesize || unsignedp)
4437 /* We work by getting just the sign bit into the low-order bit, then
4438 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) in the low-order
   position.  */
4440 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4441 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4443 /* We must use a signed type in order to get an arithmetic right shift.
4444 However, we must also avoid introducing accidental overflows, so that
4445 a subsequent call to integer_zerop will work. Hence we must
4446 do the type conversion here. At this point, the constant is either
4447 zero or one, and the conversion to a signed type can never overflow.
4448 We could get an overflow if this conversion is done anywhere else. */
4449 if (TYPE_UNSIGNED (type))
4450 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Move the bit to the top of the mode, then arithmetic-shift it back down
   to replicate it across all bits above position P-1.  */
4452 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4453 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4455 temp = const_binop (BIT_AND_EXPR, temp,
4456 fold_convert (TREE_TYPE (c), mask), 0);
4457 /* If necessary, convert the type back to match the type of C. */
4458 if (TYPE_UNSIGNED (type))
4459 temp = fold_convert (type, temp);
4461 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4464 /* Find ways of folding logical expressions of LHS and RHS:
4465 Try to merge two comparisons to the same innermost item.
4466 Look for range tests like "ch >= '0' && ch <= '9'".
4467 Look for combinations of simple terms on machines with expensive branches
4468 and evaluate the RHS unconditionally.
4470 For example, if we have p->a == 2 && p->b == 4 and we can make an
4471 object large enough to span both A and B, we can do this with a comparison
4472 against the object ANDed with the a mask.
4474 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4475 operations to do this with one comparison.
4477 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4478 function and the one above.
4480 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4481 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4483 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4486 We return the simplified tree or 0 if no optimization is possible. */
4489 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4491 /* If this is the "or" of two comparisons, we can do something if
4492 the comparisons are NE_EXPR. If this is the "and", we can do something
4493 if the comparisons are EQ_EXPR. I.e.,
4494 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4496 WANTED_CODE is this operation code. For single bit fields, we can
4497 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4498 comparison for one-bit fields. */
4500 enum tree_code wanted_code;
4501 enum tree_code lcode, rcode;
4502 tree ll_arg, lr_arg, rl_arg, rr_arg;
4503 tree ll_inner, lr_inner, rl_inner, rr_inner;
4504 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4505 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4506 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4507 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4508 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4509 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4510 enum machine_mode lnmode, rnmode;
4511 tree ll_mask, lr_mask, rl_mask, rr_mask;
4512 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4513 tree l_const, r_const;
4514 tree lntype, rntype, result;
4515 int first_bit, end_bit;
4518 /* Start by getting the comparison codes. Fail if anything is volatile.
4519 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4520 it were surrounded with a NE_EXPR. */
4522 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4525 lcode = TREE_CODE (lhs);
4526 rcode = TREE_CODE (rhs);
4528 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4530 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4534 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4536 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4540 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4543 ll_arg = TREE_OPERAND (lhs, 0);
4544 lr_arg = TREE_OPERAND (lhs, 1);
4545 rl_arg = TREE_OPERAND (rhs, 0);
4546 rr_arg = TREE_OPERAND (rhs, 1);
4548 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4549 if (simple_operand_p (ll_arg)
4550 && simple_operand_p (lr_arg))
4553 if (operand_equal_p (ll_arg, rl_arg, 0)
4554 && operand_equal_p (lr_arg, rr_arg, 0))
4556 result = combine_comparisons (code, lcode, rcode,
4557 truth_type, ll_arg, lr_arg);
4561 else if (operand_equal_p (ll_arg, rr_arg, 0)
4562 && operand_equal_p (lr_arg, rl_arg, 0))
4564 result = combine_comparisons (code, lcode,
4565 swap_tree_comparison (rcode),
4566 truth_type, ll_arg, lr_arg);
4572 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4573 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4575 /* If the RHS can be evaluated unconditionally and its operands are
4576 simple, it wins to evaluate the RHS unconditionally on machines
4577 with expensive branches. In this case, this isn't a comparison
4578 that can be merged. Avoid doing this if the RHS is a floating-point
4579 comparison since those can trap. */
4581 if (BRANCH_COST >= 2
4582 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4583 && simple_operand_p (rl_arg)
4584 && simple_operand_p (rr_arg))
4586 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4587 if (code == TRUTH_OR_EXPR
4588 && lcode == NE_EXPR && integer_zerop (lr_arg)
4589 && rcode == NE_EXPR && integer_zerop (rr_arg)
4590 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4591 return build2 (NE_EXPR, truth_type,
4592 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4594 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4596 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4597 if (code == TRUTH_AND_EXPR
4598 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4599 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4600 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4601 return build2 (EQ_EXPR, truth_type,
4602 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4604 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4606 return build2 (code, truth_type, lhs, rhs);
4609 /* See if the comparisons can be merged. Then get all the parameters for
4612 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4613 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4617 ll_inner = decode_field_reference (ll_arg,
4618 &ll_bitsize, &ll_bitpos, &ll_mode,
4619 &ll_unsignedp, &volatilep, &ll_mask,
4621 lr_inner = decode_field_reference (lr_arg,
4622 &lr_bitsize, &lr_bitpos, &lr_mode,
4623 &lr_unsignedp, &volatilep, &lr_mask,
4625 rl_inner = decode_field_reference (rl_arg,
4626 &rl_bitsize, &rl_bitpos, &rl_mode,
4627 &rl_unsignedp, &volatilep, &rl_mask,
4629 rr_inner = decode_field_reference (rr_arg,
4630 &rr_bitsize, &rr_bitpos, &rr_mode,
4631 &rr_unsignedp, &volatilep, &rr_mask,
4634 /* It must be true that the inner operation on the lhs of each
4635 comparison must be the same if we are to be able to do anything.
4636 Then see if we have constants. If not, the same must be true for
4638 if (volatilep || ll_inner == 0 || rl_inner == 0
4639 || ! operand_equal_p (ll_inner, rl_inner, 0))
4642 if (TREE_CODE (lr_arg) == INTEGER_CST
4643 && TREE_CODE (rr_arg) == INTEGER_CST)
4644 l_const = lr_arg, r_const = rr_arg;
4645 else if (lr_inner == 0 || rr_inner == 0
4646 || ! operand_equal_p (lr_inner, rr_inner, 0))
4649 l_const = r_const = 0;
4651 /* If either comparison code is not correct for our logical operation,
4652 fail. However, we can convert a one-bit comparison against zero into
4653 the opposite comparison against that bit being set in the field. */
4655 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4656 if (lcode != wanted_code)
4658 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4660 /* Make the left operand unsigned, since we are only interested
4661 in the value of one bit. Otherwise we are doing the wrong
4670 /* This is analogous to the code for l_const above. */
4671 if (rcode != wanted_code)
4673 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4682 /* After this point all optimizations will generate bit-field
4683 references, which we might not want. */
4684 if (! lang_hooks.can_use_bit_fields_p ())
4687 /* See if we can find a mode that contains both fields being compared on
4688 the left. If we can't, fail. Otherwise, update all constants and masks
4689 to be relative to a field of that size. */
4690 first_bit = MIN (ll_bitpos, rl_bitpos);
4691 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4692 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4693 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4695 if (lnmode == VOIDmode)
4698 lnbitsize = GET_MODE_BITSIZE (lnmode);
4699 lnbitpos = first_bit & ~ (lnbitsize - 1);
4700 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4701 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4703 if (BYTES_BIG_ENDIAN)
4705 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4706 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4709 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4710 size_int (xll_bitpos), 0);
4711 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4712 size_int (xrl_bitpos), 0);
4716 l_const = fold_convert (lntype, l_const);
4717 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4718 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4719 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4720 fold (build1 (BIT_NOT_EXPR,
4724 warning ("comparison is always %d", wanted_code == NE_EXPR);
4726 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4731 r_const = fold_convert (lntype, r_const);
4732 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4733 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4734 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4735 fold (build1 (BIT_NOT_EXPR,
4739 warning ("comparison is always %d", wanted_code == NE_EXPR);
4741 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4745 /* If the right sides are not constant, do the same for it. Also,
4746 disallow this optimization if a size or signedness mismatch occurs
4747 between the left and right sides. */
4750 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4751 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4752 /* Make sure the two fields on the right
4753 correspond to the left without being swapped. */
4754 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4757 first_bit = MIN (lr_bitpos, rr_bitpos);
4758 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4759 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4760 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4762 if (rnmode == VOIDmode)
4765 rnbitsize = GET_MODE_BITSIZE (rnmode);
4766 rnbitpos = first_bit & ~ (rnbitsize - 1);
4767 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4768 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4770 if (BYTES_BIG_ENDIAN)
4772 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4773 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4776 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4777 size_int (xlr_bitpos), 0);
4778 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4779 size_int (xrr_bitpos), 0);
4781 /* Make a mask that corresponds to both fields being compared.
4782 Do this for both items being compared. If the operands are the
4783 same size and the bits being compared are in the same position
4784 then we can do this by masking both and comparing the masked
4786 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4787 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4788 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4790 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4791 ll_unsignedp || rl_unsignedp);
4792 if (! all_ones_mask_p (ll_mask, lnbitsize))
4793 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4795 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4796 lr_unsignedp || rr_unsignedp);
4797 if (! all_ones_mask_p (lr_mask, rnbitsize))
4798 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4800 return build2 (wanted_code, truth_type, lhs, rhs);
4803 /* There is still another way we can do something: If both pairs of
4804 fields being compared are adjacent, we may be able to make a wider
4805 field containing them both.
4807 Note that we still must mask the lhs/rhs expressions. Furthermore,
4808 the mask must be shifted to account for the shift done by
4809 make_bit_field_ref. */
4810 if ((ll_bitsize + ll_bitpos == rl_bitpos
4811 && lr_bitsize + lr_bitpos == rr_bitpos)
4812 || (ll_bitpos == rl_bitpos + rl_bitsize
4813 && lr_bitpos == rr_bitpos + rr_bitsize))
4817 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4818 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4819 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4820 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4822 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4823 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4824 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4825 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4827 /* Convert to the smaller type before masking out unwanted bits. */
4829 if (lntype != rntype)
4831 if (lnbitsize > rnbitsize)
4833 lhs = fold_convert (rntype, lhs);
4834 ll_mask = fold_convert (rntype, ll_mask);
4837 else if (lnbitsize < rnbitsize)
4839 rhs = fold_convert (lntype, rhs);
4840 lr_mask = fold_convert (lntype, lr_mask);
4845 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4846 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4848 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4849 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4851 return build2 (wanted_code, truth_type, lhs, rhs);
4857 /* Handle the case of comparisons with constants. If there is something in
4858 common between the masks, those bits of the constants must be the same.
4859 If not, the condition is always false. Test for this to avoid generating
4860 incorrect code below. */
4861 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4862 if (! integer_zerop (result)
4863 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4864 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4866 if (wanted_code == NE_EXPR)
4868 warning ("`or' of unmatched not-equal tests is always 1");
4869 return constant_boolean_node (true, truth_type);
4873 warning ("`and' of mutually exclusive equal-tests is always 0");
4874 return constant_boolean_node (false, truth_type);
4878 /* Construct the expression we will return. First get the component
4879 reference we will make. Unless the mask is all ones the width of
4880 that field, perform the mask operation. Then compare with the
4882 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4883 ll_unsignedp || rl_unsignedp);
4885 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4886 if (! all_ones_mask_p (ll_mask, lnbitsize))
4887 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4889 return build2 (wanted_code, truth_type, result,
4890 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4893 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
     constant COMP_CONST.  Returns a simplified comparison against the
     inner operand of the MIN/MAX, a constant truth value, or the
     original tree when no simplification applies.
     NOTE(review): this excerpt elides interior lines of the original
     function; comments annotate only the visible code.  */
4897 optimize_minmax_comparison (tree t)
4899 tree type = TREE_TYPE (t);
4900 tree arg0 = TREE_OPERAND (t, 0);
4901 enum tree_code op_code;
4902 tree comp_const = TREE_OPERAND (t, 1);
4904 int consts_equal, consts_lt;
     /* Strip sign-preserving conversions so we see the MIN/MAX itself.  */
4907 STRIP_SIGN_NOPS (arg0);
4909 op_code = TREE_CODE (arg0);
4910 minmax_const = TREE_OPERAND (arg0, 1);
     /* The relation between the MIN/MAX constant and the comparison
     constant selects which rewrite applies below.  */
4911 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4912 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4913 inner = TREE_OPERAND (arg0, 0);
4915 /* If something does not permit us to optimize, return the original tree. */
4916 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4917 || TREE_CODE (comp_const) != INTEGER_CST
4918 || TREE_CONSTANT_OVERFLOW (comp_const)
4919 || TREE_CODE (minmax_const) != INTEGER_CST
4920 || TREE_CONSTANT_OVERFLOW (minmax_const))
4923 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4924 and GT_EXPR, doing the rest with recursive calls using logical
4926 switch (TREE_CODE (t))
4928 case NE_EXPR: case LT_EXPR: case LE_EXPR:
     /* NE/LT/LE are handled as the inversion of EQ/GE/GT: invert the
     comparison, recurse, then invert the result.  */
4930 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
     /* GE is decomposed as (arg0 == C) || (arg0 > C), each half handled
     recursively by the EQ_EXPR and GT_EXPR cases.  */
4934 fold (build2 (TRUTH_ORIF_EXPR, type,
4935 optimize_minmax_comparison
4936 (build2 (EQ_EXPR, type, arg0, comp_const)),
4937 optimize_minmax_comparison
4938 (build2 (GT_EXPR, type, arg0, comp_const))));
4941 if (op_code == MAX_EXPR && consts_equal)
4942 /* MAX (X, 0) == 0 -> X <= 0 */
4943 return fold (build2 (LE_EXPR, type, inner, comp_const));
4945 else if (op_code == MAX_EXPR && consts_lt)
4946 /* MAX (X, 0) == 5 -> X == 5 */
4947 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4949 else if (op_code == MAX_EXPR)
4950 /* MAX (X, 0) == -1 -> false */
4951 return omit_one_operand (type, integer_zero_node, inner);
4953 else if (consts_equal)
4954 /* MIN (X, 0) == 0 -> X >= 0 */
4955 return fold (build2 (GE_EXPR, type, inner, comp_const));
4958 /* MIN (X, 0) == 5 -> false */
4959 return omit_one_operand (type, integer_zero_node, inner);
4962 /* MIN (X, 0) == -1 -> X == -1 */
4963 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4966 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4967 /* MAX (X, 0) > 0 -> X > 0
4968 MAX (X, 0) > 5 -> X > 5 */
4969 return fold (build2 (GT_EXPR, type, inner, comp_const));
4971 else if (op_code == MAX_EXPR)
4972 /* MAX (X, 0) > -1 -> true */
4973 return omit_one_operand (type, integer_one_node, inner);
4975 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4976 /* MIN (X, 0) > 0 -> false
4977 MIN (X, 0) > 5 -> false */
4978 return omit_one_operand (type, integer_zero_node, inner);
4981 /* MIN (X, 0) > -1 -> X > -1 */
4982 return fold (build2 (GT_EXPR, type, inner, comp_const));
4989 /* T is an integer expression that is being multiplied, divided, or taken a
4990 modulus (CODE says which and what kind of divide or modulus) by a
4991 constant C. See if we can eliminate that operation by folding it with
4992 other operations already in T. WIDE_TYPE, if non-null, is a type that
4993 should be used for the computation if wider than our type.
4995 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4996 (X * 2) + (Y * 4). We must, however, be assured that either the original
4997 expression would not overflow or that overflow is undefined for the type
4998 in the language in question.
5000 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5001 the machine has a multiply-accumulate insn or that this is part of an
5002 addressing calculation.
5004 If we return a non-null expression, it is an equivalent form of the
5005 original computation, but need not be in the original type. */
     /* Depth-limited wrapper: all real work happens in extract_muldiv_1,
     which may recurse back through this entry point.
     NOTE(review): the lines maintaining the recursion-depth counter are
     elided in this excerpt — confirm against the full source.  */
5008 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5010 /* To avoid exponential search depth, refuse to allow recursion past
5011 three levels. Beyond that (1) it's highly unlikely that we'll find
5012 something interesting and (2) we've probably processed it before
5013 when we built the inner expression. */
5022 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: try to fold the operation CODE-by-C into the
   sub-expressions of T, dispatching on T's tree code.  Returns an
   equivalent expression (possibly in CTYPE, not T's type) or null when no
   simplification is found.
   NOTE(review): interior lines are elided in this excerpt; comments
   annotate only the visible code.  */
5029 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5031 tree type = TREE_TYPE (t);
5032 enum tree_code tcode = TREE_CODE (t);
     /* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
5033 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5034 > GET_MODE_SIZE (TYPE_MODE (type)))
5035 ? wide_type : type);
5037 int same_p = tcode == code;
5038 tree op0 = NULL_TREE, op1 = NULL_TREE;
5040 /* Don't deal with constants of zero here; they confuse the code below. */
5041 if (integer_zerop (c))
     /* Pull out the operands for unary ('1') and binary ('2') codes.  */
5044 if (TREE_CODE_CLASS (tcode) == '1')
5045 op0 = TREE_OPERAND (t, 0);
5047 if (TREE_CODE_CLASS (tcode) == '2')
5048 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5050 /* Note that we need not handle conditional operations here since fold
5051 already handles those cases. So just do arithmetic here. */
5055 /* For a constant, we can always simplify if we are a multiply
5056 or (for divide and modulus) if it is a multiple of our constant. */
5057 if (code == MULT_EXPR
5058 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5059 return const_binop (code, fold_convert (ctype, t),
5060 fold_convert (ctype, c), 0);
5063 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5064 /* If op0 is an expression ... */
5065 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5066 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5067 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5068 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5069 /* ... and is unsigned, and its type is smaller than ctype,
5070 then we cannot pass through as widening. */
5071 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5072 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5073 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5074 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5075 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5076 /* ... or its type is larger than ctype,
5077 then we cannot pass through this truncation. */
5078 || (GET_MODE_SIZE (TYPE_MODE (ctype))
5079 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5080 /* ... or signedness changes for division or modulus,
5081 then we cannot pass through this conversion. */
5082 || (code != MULT_EXPR
5083 && (TYPE_UNSIGNED (ctype)
5084 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5087 /* Pass the constant down and see if we can make a simplification. If
5088 we can, replace this expression with the inner simplification for
5089 possible later conversion to our or some other type. */
5090 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5091 && TREE_CODE (t2) == INTEGER_CST
5092 && ! TREE_CONSTANT_OVERFLOW (t2)
5093 && (0 != (t1 = extract_muldiv (op0, t2, code,
5095 ? ctype : NULL_TREE))))
5099 case NEGATE_EXPR: case ABS_EXPR:
     /* -X * C and |X| * C: fold into the operand and rebuild the unary op.  */
5100 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5101 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5104 case MIN_EXPR: case MAX_EXPR:
5105 /* If widening the type changes the signedness, then we can't perform
5106 this optimization as that changes the result. */
5107 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5110 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5111 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5112 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
     /* Multiplying or dividing by a negative constant reverses the
     ordering, so MIN becomes MAX and vice versa.  */
5114 if (tree_int_cst_sgn (c) < 0)
5115 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5117 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5118 fold_convert (ctype, t2)));
5122 case LSHIFT_EXPR: case RSHIFT_EXPR:
5123 /* If the second operand is constant, this is a multiplication
5124 or floor division, by a power of two, so we can treat it that
5125 way unless the multiplier or divisor overflows. */
5126 if (TREE_CODE (op1) == INTEGER_CST
5127 /* const_binop may not detect overflow correctly,
5128 so check for it explicitly here. */
5129 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5130 && TREE_INT_CST_HIGH (op1) == 0
5131 && 0 != (t1 = fold_convert (ctype,
5132 const_binop (LSHIFT_EXPR,
5135 && ! TREE_OVERFLOW (t1))
5136 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5137 ? MULT_EXPR : FLOOR_DIV_EXPR,
5138 ctype, fold_convert (ctype, op0), t1),
5139 c, code, wide_type);
5142 case PLUS_EXPR: case MINUS_EXPR:
5143 /* See if we can eliminate the operation on both sides. If we can, we
5144 can return a new PLUS or MINUS. If we can't, the only remaining
5145 cases where we can do anything are if the second operand is a
5147 t1 = extract_muldiv (op0, c, code, wide_type);
5148 t2 = extract_muldiv (op1, c, code, wide_type);
5149 if (t1 != 0 && t2 != 0
5150 && (code == MULT_EXPR
5151 /* If not multiplication, we can only do this if both operands
5152 are divisible by c. */
5153 || (multiple_of_p (ctype, op0, c)
5154 && multiple_of_p (ctype, op1, c))))
5155 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5156 fold_convert (ctype, t2)));
5158 /* If this was a subtraction, negate OP1 and set it to be an addition.
5159 This simplifies the logic below. */
5160 if (tcode == MINUS_EXPR)
5161 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5163 if (TREE_CODE (op1) != INTEGER_CST)
5166 /* If either OP1 or C are negative, this optimization is not safe for
5167 some of the division and remainder types while for others we need
5168 to change the code. */
5169 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5171 if (code == CEIL_DIV_EXPR)
5172 code = FLOOR_DIV_EXPR;
5173 else if (code == FLOOR_DIV_EXPR)
5174 code = CEIL_DIV_EXPR;
5175 else if (code != MULT_EXPR
5176 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5180 /* If it's a multiply or a division/modulus operation of a multiple
5181 of our constant, do the operation and verify it doesn't overflow. */
5182 if (code == MULT_EXPR
5183 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5185 op1 = const_binop (code, fold_convert (ctype, op1),
5186 fold_convert (ctype, c), 0);
5187 /* We allow the constant to overflow with wrapping semantics. */
5189 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5195 /* If we have an unsigned type that is not a sizetype, we cannot widen
5196 the operation since it will change the result if the original
5197 computation overflowed. */
5198 if (TYPE_UNSIGNED (ctype)
5199 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5203 /* If we were able to eliminate our operation from the first side,
5204 apply our operation to the second side and reform the PLUS. */
5205 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5206 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5208 /* The last case is if we are a multiply. In that case, we can
5209 apply the distributive law to commute the multiply and addition
5210 if the multiplication of the constants doesn't overflow. */
5211 if (code == MULT_EXPR)
5212 return fold (build2 (tcode, ctype,
5213 fold (build2 (code, ctype,
5214 fold_convert (ctype, op0),
5215 fold_convert (ctype, c))),
5221 /* We have a special case here if we are doing something like
5222 (C * 8) % 4 since we know that's zero. */
5223 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5224 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5225 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5226 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5227 return omit_one_operand (type, integer_zero_node, op0);
5229 /* ... fall through ... */
5231 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5232 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5233 /* If we can extract our operation from the LHS, do so and return a
5234 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5235 do something only if the second operand is a constant. */
5237 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5238 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5239 fold_convert (ctype, op1)));
5240 else if (tcode == MULT_EXPR && code == MULT_EXPR
5241 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5242 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5243 fold_convert (ctype, t1)));
5244 else if (TREE_CODE (op1) != INTEGER_CST)
5247 /* If these are the same operation types, we can associate them
5248 assuming no overflow. */
5250 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5251 fold_convert (ctype, c), 0))
5252 && ! TREE_OVERFLOW (t1))
5253 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5255 /* If these operations "cancel" each other, we have the main
5256 optimizations of this pass, which occur when either constant is a
5257 multiple of the other, in which case we replace this with either an
5258 operation of CODE or TCODE.
5260 If we have an unsigned type that is not a sizetype, we cannot do
5261 this since it will change the result if the original computation
5263 if ((! TYPE_UNSIGNED (ctype)
5264 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5266 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5267 || (tcode == MULT_EXPR
5268 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5269 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
     /* (X * OP1) op C with C | OP1: keep TCODE with constant OP1/C ...  */
5271 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5272 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5273 fold_convert (ctype,
5274 const_binop (TRUNC_DIV_EXPR,
     /* ... or with OP1 | C: keep CODE with constant C/OP1.  */
5276 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5277 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5278 fold_convert (ctype,
5279 const_binop (TRUNC_DIV_EXPR,
5291 /* Return a node which has the indicated constant VALUE (either 0 or
5292 1), and is of the indicated TYPE. */
5295 constant_boolean_node (int value, tree type)
     /* Reuse the shared constant nodes for the common types ...  */
5297 if (type == integer_type_node)
5298 return value ? integer_one_node : integer_zero_node;
5299 else if (type == boolean_type_node)
5300 return value ? boolean_true_node : boolean_false_node;
     /* ... let the frontend build the value for its own boolean types ...  */
5301 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5302 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5303 : integer_zero_node);
     /* ... otherwise construct a fresh INTEGER_CST of the requested type.  */
5306 tree t = build_int_2 (value, 0);
5308 TREE_TYPE (t) = type;
5313 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5314 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5315 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5316 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5317 COND is the first argument to CODE; otherwise (as in the example
5318 given here), it is the second argument. TYPE is the type of the
5319 original expression. Return NULL_TREE if no simplification is
5323 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5324 tree cond, tree arg, int cond_first_p)
5326 tree test, true_value, false_value;
5327 tree lhs = NULL_TREE;
5328 tree rhs = NULL_TREE;
5330 /* This transformation is only worthwhile if we don't have to wrap
5331 arg in a SAVE_EXPR, and the operation can be simplified on at least
5332 one of the branches once its pushed inside the COND_EXPR. */
5333 if (!TREE_CONSTANT (arg))
5336 if (TREE_CODE (cond) == COND_EXPR)
5338 test = TREE_OPERAND (cond, 0);
5339 true_value = TREE_OPERAND (cond, 1);
5340 false_value = TREE_OPERAND (cond, 2);
5341 /* If this operand throws an expression, then it does not make
5342 sense to try to perform a logical or arithmetic operation
5344 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5346 if (VOID_TYPE_P (TREE_TYPE (false_value)))
     /* COND is a comparison (the `x < y' form): treat it as selecting
     between boolean true and false.  */
5351 tree testtype = TREE_TYPE (cond);
5353 true_value = constant_boolean_node (true, testtype);
5354 false_value = constant_boolean_node (false, testtype);
     /* Push the operation into each arm, honoring operand order.  */
5358 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5359 : build2 (code, type, arg, true_value));
5361 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5362 : build2 (code, type, arg, false_value));
5364 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5365 return fold_convert (type, test);
5369 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5371 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5372 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5373 ADDEND is the same as X.
5375 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5376 and finite. The problematic cases are when X is zero, and its mode
5377 has signed zeros. In the case of rounding towards -infinity,
5378 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5379 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5382 fold_real_zero_addition_p (tree type, tree addend, int negate)
5384 if (!real_zerop (addend))
5387 /* Don't allow the fold with -fsignaling-nans. */
5388 if (HONOR_SNANS (TYPE_MODE (type)))
5391 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5392 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5395 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5396 if (TREE_CODE (addend) == REAL_CST
5397 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5400 /* The mode has signed zeros, and we have to honor their sign.
5401 In this situation, there is only one case we can return true for.
5402 X - 0 is the same as X unless rounding towards -infinity is
5404 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5407 /* Subroutine of fold() that checks comparisons of built-in math
5408 functions against real constants.
5410 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5411 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5412 is the type of the result and ARG0 and ARG1 are the operands of the
5413 comparison. ARG1 must be a TREE_REAL_CST.
5415 The function returns the constant folded tree if a simplification
5416 can be made, and NULL_TREE otherwise. */
5419 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5420 tree type, tree arg0, tree arg1)
     /* Only sqrt-family builtins are handled in the visible code; the
     simplifications below use sqrt's monotonicity on [0, +Inf).  */
5424 if (BUILTIN_SQRT_P (fcode))
5426 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5427 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5429 c = TREE_REAL_CST (arg1);
5430 if (REAL_VALUE_NEGATIVE (c))
5432 /* sqrt(x) < y is always false, if y is negative. */
5433 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5434 return omit_one_operand (type, integer_zero_node, arg);
5436 /* sqrt(x) > y is always true, if y is negative and we
5437 don't care about NaNs, i.e. negative values of x. */
5438 if (code == NE_EXPR || !HONOR_NANS (mode))
5439 return omit_one_operand (type, integer_one_node, arg);
5441 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5442 return fold (build2 (GE_EXPR, type, arg,
5443 build_real (TREE_TYPE (arg), dconst0)));
5445 else if (code == GT_EXPR || code == GE_EXPR)
     /* Square the bound; sqrt(x) > y <=> x > y*y for nonneg y, modulo
     overflow to infinity which is handled below.  */
5449 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5450 real_convert (&c2, mode, &c2);
5452 if (REAL_VALUE_ISINF (c2))
5454 /* sqrt(x) > y is x == +Inf, when y is very large. */
5455 if (HONOR_INFINITIES (mode))
5456 return fold (build2 (EQ_EXPR, type, arg,
5457 build_real (TREE_TYPE (arg), c2)));
5459 /* sqrt(x) > y is always false, when y is very large
5460 and we don't care about infinities. */
5461 return omit_one_operand (type, integer_zero_node, arg);
5464 /* sqrt(x) > c is the same as x > c*c. */
5465 return fold (build2 (code, type, arg,
5466 build_real (TREE_TYPE (arg), c2)));
5468 else if (code == LT_EXPR || code == LE_EXPR)
     /* Same squaring trick for the less-than direction.  */
5472 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5473 real_convert (&c2, mode, &c2);
5475 if (REAL_VALUE_ISINF (c2))
5477 /* sqrt(x) < y is always true, when y is a very large
5478 value and we don't care about NaNs or Infinities. */
5479 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5480 return omit_one_operand (type, integer_one_node, arg);
5482 /* sqrt(x) < y is x != +Inf when y is very large and we
5483 don't care about NaNs. */
5484 if (! HONOR_NANS (mode))
5485 return fold (build2 (NE_EXPR, type, arg,
5486 build_real (TREE_TYPE (arg), c2)));
5488 /* sqrt(x) < y is x >= 0 when y is very large and we
5489 don't care about Infinities. */
5490 if (! HONOR_INFINITIES (mode))
5491 return fold (build2 (GE_EXPR, type, arg,
5492 build_real (TREE_TYPE (arg), dconst0)));
5494 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5495 if (lang_hooks.decls.global_bindings_p () != 0
5496 || CONTAINS_PLACEHOLDER_P (arg))
     /* ARG is used twice below, so wrap it in a SAVE_EXPR to evaluate
     it only once.  */
5499 arg = save_expr (arg);
5500 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5501 fold (build2 (GE_EXPR, type, arg,
5502 build_real (TREE_TYPE (arg),
5504 fold (build2 (NE_EXPR, type, arg,
5505 build_real (TREE_TYPE (arg),
5509 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5510 if (! HONOR_NANS (mode))
5511 return fold (build2 (code, type, arg,
5512 build_real (TREE_TYPE (arg), c2)));
5514 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5515 if (lang_hooks.decls.global_bindings_p () == 0
5516 && ! CONTAINS_PLACEHOLDER_P (arg))
5518 arg = save_expr (arg);
5519 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5520 fold (build2 (GE_EXPR, type, arg,
5521 build_real (TREE_TYPE (arg),
5523 fold (build2 (code, type, arg,
5524 build_real (TREE_TYPE (arg),
5533 /* Subroutine of fold() that optimizes comparisons against Infinities,
5534 either +Inf or -Inf.
5536 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5537 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5538 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5540 The function returns the constant folded tree if a simplification
5541 can be made, and NULL_TREE otherwise. */
5544 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5546 enum machine_mode mode;
5547 REAL_VALUE_TYPE max;
5551 mode = TYPE_MODE (TREE_TYPE (arg0));
5553 /* For negative infinity swap the sense of the comparison. */
5554 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5556 code = swap_tree_comparison (code);
5561 /* x > +Inf is always false, if we ignore sNaNs. */
5562 if (HONOR_SNANS (mode))
5564 return omit_one_operand (type, integer_zero_node, arg0);
5567 /* x <= +Inf is always true, if we don't care about NaNs. */
5568 if (! HONOR_NANS (mode))
5569 return omit_one_operand (type, integer_one_node, arg0);
5571 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5572 if (lang_hooks.decls.global_bindings_p () == 0
5573 && ! CONTAINS_PLACEHOLDER_P (arg0))
     /* ARG0 appears on both sides of the EQ, so evaluate it once.  */
5575 arg0 = save_expr (arg0);
5576 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5582 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5583 real_maxval (&max, neg, mode);
5584 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5585 arg0, build_real (TREE_TYPE (arg0), max)));
5588 /* x < +Inf is always equal to x <= DBL_MAX. */
5589 real_maxval (&max, neg, mode);
5590 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5591 arg0, build_real (TREE_TYPE (arg0), max)));
5594 /* x != +Inf is always equal to !(x > DBL_MAX). */
5595 real_maxval (&max, neg, mode);
5596 if (! HONOR_NANS (mode))
5597 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5598 arg0, build_real (TREE_TYPE (arg0), max)));
5600 /* The transformation below creates non-gimple code and thus is
5601 not appropriate if we are in gimple form. */
5605 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5606 arg0, build_real (TREE_TYPE (arg0), max)));
5607 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5616 /* Subroutine of fold() that optimizes comparisons of a division by
5617 a nonzero integer constant against an integer constant, i.e.
5620 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5621 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5622 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5624 The function returns the constant folded tree if a simplification
5625 can be made, and NULL_TREE otherwise. */
5628 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5630 tree prod, tmp, hi, lo;
5631 tree arg00 = TREE_OPERAND (arg0, 0);
5632 tree arg01 = TREE_OPERAND (arg0, 1);
5633 unsigned HOST_WIDE_INT lpart;
5634 HOST_WIDE_INT hpart;
5637 /* We have to do this the hard way to detect unsigned overflow.
5638 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5639 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5640 TREE_INT_CST_HIGH (arg01),
5641 TREE_INT_CST_LOW (arg1),
5642 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5643 prod = build_int_2 (lpart, hpart);
5644 TREE_TYPE (prod) = TREE_TYPE (arg00);
5645 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5646 || TREE_INT_CST_HIGH (prod) != hpart
5647 || TREE_INT_CST_LOW (prod) != lpart;
5648 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
     /* Compute the range [LO, HI] of X for which X / ARG01 == ARG1.  */
5650 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5652 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5655 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5656 overflow = add_double (TREE_INT_CST_LOW (prod),
5657 TREE_INT_CST_HIGH (prod),
5658 TREE_INT_CST_LOW (tmp),
5659 TREE_INT_CST_HIGH (tmp),
5661 hi = build_int_2 (lpart, hpart);
5662 TREE_TYPE (hi) = TREE_TYPE (arg00);
5663 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5664 || TREE_INT_CST_HIGH (hi) != hpart
5665 || TREE_INT_CST_LOW (hi) != lpart
5666 || TREE_OVERFLOW (prod);
5667 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
     /* Signed division with a positive divisor: the range direction
     depends on the sign of ARG1 (truncation toward zero).  */
5669 else if (tree_int_cst_sgn (arg01) >= 0)
5671 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5672 switch (tree_int_cst_sgn (arg1))
5675 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5680 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5685 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
     /* Negative divisor: the roles of LO and HI are mirrored.  */
5695 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5696 switch (tree_int_cst_sgn (arg1))
5699 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5704 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5709 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
     /* Rewrite the comparison as a range check against [LO, HI],
     collapsing to a constant when an endpoint overflowed.  */
5721 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5722 return omit_one_operand (type, integer_zero_node, arg00);
5723 if (TREE_OVERFLOW (hi))
5724 return fold (build2 (GE_EXPR, type, arg00, lo));
5725 if (TREE_OVERFLOW (lo))
5726 return fold (build2 (LE_EXPR, type, arg00, hi));
5727 return build_range_check (type, arg00, 1, lo, hi);
5730 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5731 return omit_one_operand (type, integer_one_node, arg00);
5732 if (TREE_OVERFLOW (hi))
5733 return fold (build2 (LT_EXPR, type, arg00, lo));
5734 if (TREE_OVERFLOW (lo))
5735 return fold (build2 (GT_EXPR, type, arg00, hi));
5736 return build_range_check (type, arg00, 0, lo, hi);
5739 if (TREE_OVERFLOW (lo))
5740 return omit_one_operand (type, integer_zero_node, arg00);
5741 return fold (build2 (LT_EXPR, type, arg00, lo));
5744 if (TREE_OVERFLOW (hi))
5745 return omit_one_operand (type, integer_one_node, arg00);
5746 return fold (build2 (LE_EXPR, type, arg00, hi));
5749 if (TREE_OVERFLOW (hi))
5750 return omit_one_operand (type, integer_zero_node, arg00);
5751 return fold (build2 (GT_EXPR, type, arg00, hi));
5754 if (TREE_OVERFLOW (lo))
5755 return omit_one_operand (type, integer_one_node, arg00);
5756 return fold (build2 (GE_EXPR, type, arg00, lo));
5766 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5767 equality/inequality test, then return a simplified form of
5768 the test using shifts and logical operations. Otherwise return
5769 NULL. TYPE is the desired result type. */
5772 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
/* NOTE(review): the tail of the parameter list (the result type, used
   below as RESULT_TYPE) and the opening brace fall in lines elided from
   this listing.  */
5775 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5777 if (code == TRUTH_NOT_EXPR)
/* !(A == B) is folded by rewriting to the inverted comparison of the
   operand's own EQ/NE, then proceeding as usual below.  */
5779 code = TREE_CODE (arg0);
5780 if (code != NE_EXPR && code != EQ_EXPR)
5783 /* Extract the arguments of the EQ/NE. */
5784 arg1 = TREE_OPERAND (arg0, 1);
5785 arg0 = TREE_OPERAND (arg0, 0);
5787 /* This requires us to invert the code. */
5788 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5791 /* If this is testing a single bit, we can optimize the test. */
/* Pattern matched here: (A & C) ==/!= 0 with C a power of two, i.e. a
   test of exactly one bit of A.  */
5792 if ((code == NE_EXPR || code == EQ_EXPR)
5793 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5794 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5796 tree inner = TREE_OPERAND (arg0, 0);
5797 tree type = TREE_TYPE (arg0);
/* BITNUM is log2(C): the index of the single bit being tested.  */
5798 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5799 enum machine_mode operand_mode = TYPE_MODE (type);
5801 tree signed_type, unsigned_type, intermediate_type;
5804 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5805 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5806 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5807 if (arg00 != NULL_TREE
5808 /* This is only a win if casting to a signed type is cheap,
5809 i.e. when arg00's type is not a partial mode. */
5810 && TYPE_PRECISION (TREE_TYPE (arg00))
5811 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
/* Build the comparison in a signed copy of arg00's type so that the
   sign-bit test becomes an ordinary signed compare against zero.  */
5813 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5814 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5815 result_type, fold_convert (stype, arg00),
5816 fold_convert (stype, integer_zero_node)));
5819 /* Otherwise we have (A & C) != 0 where C is a single bit,
5820 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5821 Similarly for (A & C) == 0. */
5823 /* If INNER is a right shift of a constant and it plus BITNUM does
5824 not overflow, adjust BITNUM and INNER. */
/* I.e. ((A >> c) & (1 << b)) becomes a test of bit (b + c) of A,
   provided b + c still lies within A's precision.  */
5825 if (TREE_CODE (inner) == RSHIFT_EXPR
5826 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5827 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5828 && bitnum < TYPE_PRECISION (type)
5829 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5830 bitnum - TYPE_PRECISION (type)))
5832 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5833 inner = TREE_OPERAND (inner, 0);
5836 /* If we are going to be able to omit the AND below, we must do our
5837 operations as unsigned. If we must use the AND, we have a choice.
5838 Normally unsigned is faster, but for some machines signed is. */
5839 #ifdef LOAD_EXTEND_OP
/* Follow the target's preferred load extension: on sign-extending
   targets keep the operations signed, otherwise use unsigned.  */
5840 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5845 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5846 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5847 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5848 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0 ...  */
5851 inner = build2 (RSHIFT_EXPR, intermediate_type,
5852 inner, size_int (bitnum));
/* ... for EQ, flip it so the result is 1 when the bit was clear ...  */
5854 if (code == EQ_EXPR)
5855 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5856 inner, integer_one_node);
5858 /* Put the AND last so it can combine with more things. */
5859 inner = build2 (BIT_AND_EXPR, intermediate_type,
5860 inner, integer_one_node);
5862 /* Make sure to return the proper type. */
5863 inner = fold_convert (result_type, inner);
5870 /* Check whether we are allowed to reorder operands arg0 and arg1,
5871 such that the evaluation of arg1 occurs before arg0.  */
5874 reorder_operands_p (tree arg0, tree arg1)
/* When the front end does not require a fixed evaluation order,
   reordering is always permitted (the elided line returns true).  */
5876 if (! flag_evaluation_order)
/* A constant operand has no evaluation to order, so reordering around
   it is always safe (the elided line returns true).  */
5878 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise reordering is safe only if neither operand has side
   effects whose relative order could be observed.  */
5880 return ! TREE_SIDE_EFFECTS (arg0)
5881 && ! TREE_SIDE_EFFECTS (arg1);
5884 /* Test whether it is preferable to swap two operands, ARG0 and
5885 ARG1, for example because ARG0 is an integer constant and ARG1
5886 isn't.  If REORDER is true, only recommend swapping if we can
5887 evaluate the operands in reverse order. */
5890 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through conversions that preserve value and signedness so the
   underlying operand codes are what gets compared.  */
5892 STRIP_SIGN_NOPS (arg0);
5893 STRIP_SIGN_NOPS (arg1);
/* Canonical order puts constants second.  For each constant kind the
   (elided) bodies recommend no swap when the constant is already in
   ARG1 and a swap when it is in ARG0, checked from most specific
   (INTEGER_CST) to most general (TREE_CONSTANT).  */
5895 if (TREE_CODE (arg1) == INTEGER_CST)
5897 if (TREE_CODE (arg0) == INTEGER_CST)
5900 if (TREE_CODE (arg1) == REAL_CST)
5902 if (TREE_CODE (arg0) == REAL_CST)
5905 if (TREE_CODE (arg1) == COMPLEX_CST)
5907 if (TREE_CODE (arg0) == COMPLEX_CST)
5910 if (TREE_CONSTANT (arg1))
5912 if (TREE_CONSTANT (arg0))
/* With REORDER set and a mandated evaluation order, refuse to swap
   when either operand has side effects, since the swap would change
   the observable evaluation order.  */
5918 if (reorder && flag_evaluation_order
5919 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5927 if (reorder && flag_evaluation_order
5928 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5936 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5937 for commutative and comparison operators. Ensuring a canonical
5938 form allows the optimizers to find additional redundancies without
5939 having to explicitly check for both orderings. */
5940 if (TREE_CODE (arg0) == SSA_NAME
5941 && TREE_CODE (arg1) == SSA_NAME
5942 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5948 /* Perform constant folding and related simplification of EXPR.
5949 The related simplifications include x*1 => x, x*0 => 0, etc.,
5950 and application of the associative law.
5951 NOP_EXPR conversions may be removed freely (as long as we
5952 are careful not to change the type of the overall expression).
5953 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5954 but we can constant-fold them if they have constant operands. */
5956 #ifdef ENABLE_FOLD_CHECKING
5957 # define fold(x) fold_1 (x)
5958 static tree fold_1 (tree);
5964 const tree t = expr;
5965 const tree type = TREE_TYPE (expr);
5966 tree t1 = NULL_TREE;
5968 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5969 enum tree_code code = TREE_CODE (t);
5970 int kind = TREE_CODE_CLASS (code);
5972 /* WINS will be nonzero when the switch is done
5973 if all operands are constant. */
5976 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5977 Likewise for a SAVE_EXPR that's already been evaluated. */
5978 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5981 /* Return right away if a constant. */
5985 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5989 /* Special case for conversion ops that can have fixed point args. */
5990 arg0 = TREE_OPERAND (t, 0);
5992 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5994 STRIP_SIGN_NOPS (arg0);
5996 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5997 subop = TREE_REALPART (arg0);
6001 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6002 && TREE_CODE (subop) != REAL_CST)
6003 /* Note that TREE_CONSTANT isn't enough:
6004 static var addresses are constant but we can't
6005 do arithmetic on them. */
6008 else if (IS_EXPR_CODE_CLASS (kind))
6010 int len = first_rtl_op (code);
6012 for (i = 0; i < len; i++)
6014 tree op = TREE_OPERAND (t, i);
6018 continue; /* Valid for CALL_EXPR, at least. */
6020 /* Strip any conversions that don't change the mode. This is
6021 safe for every expression, except for a comparison expression
6022 because its signedness is derived from its operands. So, in
6023 the latter case, only strip conversions that don't change the
6026 Note that this is done as an internal manipulation within the
6027 constant folder, in order to find the simplest representation
6028 of the arguments so that their form can be studied. In any
6029 cases, the appropriate type conversions should be put back in
6030 the tree that will get out of the constant folder. */
6032 STRIP_SIGN_NOPS (op);
6036 if (TREE_CODE (op) == COMPLEX_CST)
6037 subop = TREE_REALPART (op);
6041 if (TREE_CODE (subop) != INTEGER_CST
6042 && TREE_CODE (subop) != REAL_CST)
6043 /* Note that TREE_CONSTANT isn't enough:
6044 static var addresses are constant but we can't
6045 do arithmetic on them. */
6055 /* If this is a commutative operation, and ARG0 is a constant, move it
6056 to ARG1 to reduce the number of tests below. */
6057 if (commutative_tree_code (code)
6058 && tree_swap_operands_p (arg0, arg1, true))
6059 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6060 TREE_OPERAND (t, 0)));
6062 /* Now WINS is set as described above,
6063 ARG0 is the first operand of EXPR,
6064 and ARG1 is the second operand (if it has more than one operand).
6066 First check for cases where an arithmetic operation is applied to a
6067 compound, conditional, or comparison operation. Push the arithmetic
6068 operation inside the compound or conditional to see if any folding
6069 can then be done. Convert comparison to conditional for this purpose.
6070 This also optimizes non-constant cases that used to be done in
6073 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6074 one of the operands is a comparison and the other is a comparison, a
6075 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6076 code below would make the expression more complex. Change it to a
6077 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6078 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6080 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6081 || code == EQ_EXPR || code == NE_EXPR)
6082 && ((truth_value_p (TREE_CODE (arg0))
6083 && (truth_value_p (TREE_CODE (arg1))
6084 || (TREE_CODE (arg1) == BIT_AND_EXPR
6085 && integer_onep (TREE_OPERAND (arg1, 1)))))
6086 || (truth_value_p (TREE_CODE (arg1))
6087 && (truth_value_p (TREE_CODE (arg0))
6088 || (TREE_CODE (arg0) == BIT_AND_EXPR
6089 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6091 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6092 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6094 type, fold_convert (boolean_type_node, arg0),
6095 fold_convert (boolean_type_node, arg1)));
6097 if (code == EQ_EXPR)
6098 tem = invert_truthvalue (tem);
6103 if (TREE_CODE_CLASS (code) == '1')
6105 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6106 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6107 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6108 else if (TREE_CODE (arg0) == COND_EXPR)
6110 tree arg01 = TREE_OPERAND (arg0, 1);
6111 tree arg02 = TREE_OPERAND (arg0, 2);
6112 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6113 arg01 = fold (build1 (code, type, arg01));
6114 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6115 arg02 = fold (build1 (code, type, arg02));
6116 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6119 /* If this was a conversion, and all we did was to move into
6120 inside the COND_EXPR, bring it back out. But leave it if
6121 it is a conversion from integer to integer and the
6122 result precision is no wider than a word since such a
6123 conversion is cheap and may be optimized away by combine,
6124 while it couldn't if it were outside the COND_EXPR. Then return
6125 so we don't get into an infinite recursion loop taking the
6126 conversion out and then back in. */
6128 if ((code == NOP_EXPR || code == CONVERT_EXPR
6129 || code == NON_LVALUE_EXPR)
6130 && TREE_CODE (tem) == COND_EXPR
6131 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6132 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6133 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6134 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6135 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6136 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6137 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6139 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6140 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6141 tem = build1 (code, type,
6143 TREE_TYPE (TREE_OPERAND
6144 (TREE_OPERAND (tem, 1), 0)),
6145 TREE_OPERAND (tem, 0),
6146 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6147 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6150 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6152 if (TREE_CODE (type) == BOOLEAN_TYPE)
6154 arg0 = copy_node (arg0);
6155 TREE_TYPE (arg0) = type;
6158 else if (TREE_CODE (type) != INTEGER_TYPE)
6159 return fold (build3 (COND_EXPR, type, arg0,
6160 fold (build1 (code, type,
6162 fold (build1 (code, type,
6163 integer_zero_node))));
6166 else if (TREE_CODE_CLASS (code) == '<'
6167 && TREE_CODE (arg0) == COMPOUND_EXPR)
6168 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6169 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6170 else if (TREE_CODE_CLASS (code) == '<'
6171 && TREE_CODE (arg1) == COMPOUND_EXPR)
6172 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6173 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6174 else if (TREE_CODE_CLASS (code) == '2'
6175 || TREE_CODE_CLASS (code) == '<')
6177 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6178 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6179 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6181 if (TREE_CODE (arg1) == COMPOUND_EXPR
6182 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6183 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6184 fold (build2 (code, type,
6185 arg0, TREE_OPERAND (arg1, 1))));
6187 if (TREE_CODE (arg0) == COND_EXPR
6188 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6190 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6191 /*cond_first_p=*/1);
6192 if (tem != NULL_TREE)
6196 if (TREE_CODE (arg1) == COND_EXPR
6197 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6199 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6200 /*cond_first_p=*/0);
6201 if (tem != NULL_TREE)
6209 return fold (DECL_INITIAL (t));
6214 case FIX_TRUNC_EXPR:
6216 case FIX_FLOOR_EXPR:
6217 case FIX_ROUND_EXPR:
6218 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6219 return TREE_OPERAND (t, 0);
6221 /* Handle cases of two conversions in a row. */
6222 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6223 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6225 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6226 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6227 int inside_int = INTEGRAL_TYPE_P (inside_type);
6228 int inside_ptr = POINTER_TYPE_P (inside_type);
6229 int inside_float = FLOAT_TYPE_P (inside_type);
6230 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6231 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6232 int inter_int = INTEGRAL_TYPE_P (inter_type);
6233 int inter_ptr = POINTER_TYPE_P (inter_type);
6234 int inter_float = FLOAT_TYPE_P (inter_type);
6235 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6236 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6237 int final_int = INTEGRAL_TYPE_P (type);
6238 int final_ptr = POINTER_TYPE_P (type);
6239 int final_float = FLOAT_TYPE_P (type);
6240 unsigned int final_prec = TYPE_PRECISION (type);
6241 int final_unsignedp = TYPE_UNSIGNED (type);
6243 /* In addition to the cases of two conversions in a row
6244 handled below, if we are converting something to its own
6245 type via an object of identical or wider precision, neither
6246 conversion is needed. */
6247 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6248 && ((inter_int && final_int) || (inter_float && final_float))
6249 && inter_prec >= final_prec)
6250 return fold (build1 (code, type,
6251 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6253 /* Likewise, if the intermediate and final types are either both
6254 float or both integer, we don't need the middle conversion if
6255 it is wider than the final type and doesn't change the signedness
6256 (for integers). Avoid this if the final type is a pointer
6257 since then we sometimes need the inner conversion. Likewise if
6258 the outer has a precision not equal to the size of its mode. */
6259 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6260 || (inter_float && inside_float))
6261 && inter_prec >= inside_prec
6262 && (inter_float || inter_unsignedp == inside_unsignedp)
6263 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6264 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6266 return fold (build1 (code, type,
6267 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6269 /* If we have a sign-extension of a zero-extended value, we can
6270 replace that by a single zero-extension. */
6271 if (inside_int && inter_int && final_int
6272 && inside_prec < inter_prec && inter_prec < final_prec
6273 && inside_unsignedp && !inter_unsignedp)
6274 return fold (build1 (code, type,
6275 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6277 /* Two conversions in a row are not needed unless:
6278 - some conversion is floating-point (overstrict for now), or
6279 - the intermediate type is narrower than both initial and
6281 - the intermediate type and innermost type differ in signedness,
6282 and the outermost type is wider than the intermediate, or
6283 - the initial type is a pointer type and the precisions of the
6284 intermediate and final types differ, or
6285 - the final type is a pointer type and the precisions of the
6286 initial and intermediate types differ. */
6287 if (! inside_float && ! inter_float && ! final_float
6288 && (inter_prec > inside_prec || inter_prec > final_prec)
6289 && ! (inside_int && inter_int
6290 && inter_unsignedp != inside_unsignedp
6291 && inter_prec < final_prec)
6292 && ((inter_unsignedp && inter_prec > inside_prec)
6293 == (final_unsignedp && final_prec > inter_prec))
6294 && ! (inside_ptr && inter_prec != final_prec)
6295 && ! (final_ptr && inside_prec != inter_prec)
6296 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6297 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6299 return fold (build1 (code, type,
6300 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6303 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6304 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6305 /* Detect assigning a bitfield. */
6306 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6307 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6309 /* Don't leave an assignment inside a conversion
6310 unless assigning a bitfield. */
6311 tree prev = TREE_OPERAND (t, 0);
6312 tem = copy_node (t);
6313 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6314 /* First do the assignment, then return converted constant. */
6315 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6316 TREE_NO_WARNING (tem) = 1;
6317 TREE_USED (tem) = 1;
6321 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6322 constants (if x has signed type, the sign bit cannot be set
6323 in c). This folds extension into the BIT_AND_EXPR. */
6324 if (INTEGRAL_TYPE_P (type)
6325 && TREE_CODE (type) != BOOLEAN_TYPE
6326 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6327 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6329 tree and = TREE_OPERAND (t, 0);
6330 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6333 if (TYPE_UNSIGNED (TREE_TYPE (and))
6334 || (TYPE_PRECISION (type)
6335 <= TYPE_PRECISION (TREE_TYPE (and))))
6337 else if (TYPE_PRECISION (TREE_TYPE (and1))
6338 <= HOST_BITS_PER_WIDE_INT
6339 && host_integerp (and1, 1))
6341 unsigned HOST_WIDE_INT cst;
6343 cst = tree_low_cst (and1, 1);
6344 cst &= (HOST_WIDE_INT) -1
6345 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6346 change = (cst == 0);
6347 #ifdef LOAD_EXTEND_OP
6349 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6352 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6353 and0 = fold_convert (uns, and0);
6354 and1 = fold_convert (uns, and1);
6359 return fold (build2 (BIT_AND_EXPR, type,
6360 fold_convert (type, and0),
6361 fold_convert (type, and1)));
6364 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6365 T2 being pointers to types of the same size. */
6366 if (POINTER_TYPE_P (TREE_TYPE (t))
6367 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6368 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6369 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6371 tree arg00 = TREE_OPERAND (arg0, 0);
6372 tree t0 = TREE_TYPE (t);
6373 tree t1 = TREE_TYPE (arg00);
6374 tree tt0 = TREE_TYPE (t0);
6375 tree tt1 = TREE_TYPE (t1);
6376 tree s0 = TYPE_SIZE (tt0);
6377 tree s1 = TYPE_SIZE (tt1);
6379 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6380 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6381 TREE_OPERAND (arg0, 1));
6384 tem = fold_convert_const (code, type, arg0);
6385 return tem ? tem : t;
6387 case VIEW_CONVERT_EXPR:
6388 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6389 return build1 (VIEW_CONVERT_EXPR, type,
6390 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6394 if (TREE_CODE (arg0) == CONSTRUCTOR
6395 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6397 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6399 return TREE_VALUE (m);
6404 if (TREE_CONSTANT (t) != wins)
6406 tem = copy_node (t);
6407 TREE_CONSTANT (tem) = wins;
6408 TREE_INVARIANT (tem) = wins;
6414 if (negate_expr_p (arg0))
6415 return fold_convert (type, negate_expr (arg0));
6419 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6420 return fold_abs_const (arg0, type);
6421 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6422 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6423 /* Convert fabs((double)float) into (double)fabsf(float). */
6424 else if (TREE_CODE (arg0) == NOP_EXPR
6425 && TREE_CODE (type) == REAL_TYPE)
6427 tree targ0 = strip_float_extensions (arg0);
6429 return fold_convert (type, fold (build1 (ABS_EXPR,
6433 else if (tree_expr_nonnegative_p (arg0))
6438 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6439 return fold_convert (type, arg0);
6440 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6441 return build2 (COMPLEX_EXPR, type,
6442 TREE_OPERAND (arg0, 0),
6443 negate_expr (TREE_OPERAND (arg0, 1)));
6444 else if (TREE_CODE (arg0) == COMPLEX_CST)
6445 return build_complex (type, TREE_REALPART (arg0),
6446 negate_expr (TREE_IMAGPART (arg0)));
6447 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6448 return fold (build2 (TREE_CODE (arg0), type,
6449 fold (build1 (CONJ_EXPR, type,
6450 TREE_OPERAND (arg0, 0))),
6451 fold (build1 (CONJ_EXPR, type,
6452 TREE_OPERAND (arg0, 1)))));
6453 else if (TREE_CODE (arg0) == CONJ_EXPR)
6454 return TREE_OPERAND (arg0, 0);
6458 if (TREE_CODE (arg0) == INTEGER_CST)
6459 return fold_not_const (arg0, type);
6460 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6461 return TREE_OPERAND (arg0, 0);
6465 /* A + (-B) -> A - B */
6466 if (TREE_CODE (arg1) == NEGATE_EXPR)
6467 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6468 /* (-A) + B -> B - A */
6469 if (TREE_CODE (arg0) == NEGATE_EXPR
6470 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6471 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6472 if (! FLOAT_TYPE_P (type))
6474 if (integer_zerop (arg1))
6475 return non_lvalue (fold_convert (type, arg0));
6477 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6478 with a constant, and the two constants have no bits in common,
6479 we should treat this as a BIT_IOR_EXPR since this may produce more
6481 if (TREE_CODE (arg0) == BIT_AND_EXPR
6482 && TREE_CODE (arg1) == BIT_AND_EXPR
6483 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6484 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6485 && integer_zerop (const_binop (BIT_AND_EXPR,
6486 TREE_OPERAND (arg0, 1),
6487 TREE_OPERAND (arg1, 1), 0)))
6489 code = BIT_IOR_EXPR;
6493 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6494 (plus (plus (mult) (mult)) (foo)) so that we can
6495 take advantage of the factoring cases below. */
6496 if ((TREE_CODE (arg0) == PLUS_EXPR
6497 && TREE_CODE (arg1) == MULT_EXPR)
6498 || (TREE_CODE (arg1) == PLUS_EXPR
6499 && TREE_CODE (arg0) == MULT_EXPR))
6501 tree parg0, parg1, parg, marg;
6503 if (TREE_CODE (arg0) == PLUS_EXPR)
6504 parg = arg0, marg = arg1;
6506 parg = arg1, marg = arg0;
6507 parg0 = TREE_OPERAND (parg, 0);
6508 parg1 = TREE_OPERAND (parg, 1);
6512 if (TREE_CODE (parg0) == MULT_EXPR
6513 && TREE_CODE (parg1) != MULT_EXPR)
6514 return fold (build2 (PLUS_EXPR, type,
6515 fold (build2 (PLUS_EXPR, type,
6516 fold_convert (type, parg0),
6517 fold_convert (type, marg))),
6518 fold_convert (type, parg1)));
6519 if (TREE_CODE (parg0) != MULT_EXPR
6520 && TREE_CODE (parg1) == MULT_EXPR)
6521 return fold (build2 (PLUS_EXPR, type,
6522 fold (build2 (PLUS_EXPR, type,
6523 fold_convert (type, parg1),
6524 fold_convert (type, marg))),
6525 fold_convert (type, parg0)));
6528 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6530 tree arg00, arg01, arg10, arg11;
6531 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6533 /* (A * C) + (B * C) -> (A+B) * C.
6534 We are most concerned about the case where C is a constant,
6535 but other combinations show up during loop reduction. Since
6536 it is not difficult, try all four possibilities. */
6538 arg00 = TREE_OPERAND (arg0, 0);
6539 arg01 = TREE_OPERAND (arg0, 1);
6540 arg10 = TREE_OPERAND (arg1, 0);
6541 arg11 = TREE_OPERAND (arg1, 1);
6544 if (operand_equal_p (arg01, arg11, 0))
6545 same = arg01, alt0 = arg00, alt1 = arg10;
6546 else if (operand_equal_p (arg00, arg10, 0))
6547 same = arg00, alt0 = arg01, alt1 = arg11;
6548 else if (operand_equal_p (arg00, arg11, 0))
6549 same = arg00, alt0 = arg01, alt1 = arg10;
6550 else if (operand_equal_p (arg01, arg10, 0))
6551 same = arg01, alt0 = arg00, alt1 = arg11;
6553 /* No identical multiplicands; see if we can find a common
6554 power-of-two factor in non-power-of-two multiplies. This
6555 can help in multi-dimensional array access. */
6556 else if (TREE_CODE (arg01) == INTEGER_CST
6557 && TREE_CODE (arg11) == INTEGER_CST
6558 && TREE_INT_CST_HIGH (arg01) == 0
6559 && TREE_INT_CST_HIGH (arg11) == 0)
6561 HOST_WIDE_INT int01, int11, tmp;
6562 int01 = TREE_INT_CST_LOW (arg01);
6563 int11 = TREE_INT_CST_LOW (arg11);
6565 /* Move min of absolute values to int11. */
6566 if ((int01 >= 0 ? int01 : -int01)
6567 < (int11 >= 0 ? int11 : -int11))
6569 tmp = int01, int01 = int11, int11 = tmp;
6570 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6571 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6574 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6576 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6577 build_int_2 (int01 / int11, 0)));
6584 return fold (build2 (MULT_EXPR, type,
6585 fold (build2 (PLUS_EXPR, type,
6592 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6593 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6594 return non_lvalue (fold_convert (type, arg0));
6596 /* Likewise if the operands are reversed. */
6597 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6598 return non_lvalue (fold_convert (type, arg1));
6600 /* Convert x+x into x*2.0. */
6601 if (operand_equal_p (arg0, arg1, 0)
6602 && SCALAR_FLOAT_TYPE_P (type))
6603 return fold (build2 (MULT_EXPR, type, arg0,
6604 build_real (type, dconst2)));
6606 /* Convert x*c+x into x*(c+1). */
6607 if (flag_unsafe_math_optimizations
6608 && TREE_CODE (arg0) == MULT_EXPR
6609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6610 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6611 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6615 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6616 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6617 return fold (build2 (MULT_EXPR, type, arg1,
6618 build_real (type, c)));
6621 /* Convert x+x*c into x*(c+1). */
6622 if (flag_unsafe_math_optimizations
6623 && TREE_CODE (arg1) == MULT_EXPR
6624 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6625 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6626 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6630 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6631 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6632 return fold (build2 (MULT_EXPR, type, arg0,
6633 build_real (type, c)));
6636 /* Convert x*c1+x*c2 into x*(c1+c2). */
6637 if (flag_unsafe_math_optimizations
6638 && TREE_CODE (arg0) == MULT_EXPR
6639 && TREE_CODE (arg1) == MULT_EXPR
6640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6641 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6642 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6643 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6644 && operand_equal_p (TREE_OPERAND (arg0, 0),
6645 TREE_OPERAND (arg1, 0), 0))
6647 REAL_VALUE_TYPE c1, c2;
6649 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6650 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6651 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6652 return fold (build2 (MULT_EXPR, type,
6653 TREE_OPERAND (arg0, 0),
6654 build_real (type, c1)));
6656 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6657 if (flag_unsafe_math_optimizations
6658 && TREE_CODE (arg1) == PLUS_EXPR
6659 && TREE_CODE (arg0) != MULT_EXPR)
6661 tree tree10 = TREE_OPERAND (arg1, 0);
6662 tree tree11 = TREE_OPERAND (arg1, 1);
6663 if (TREE_CODE (tree11) == MULT_EXPR
6664 && TREE_CODE (tree10) == MULT_EXPR)
6667 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6668 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6671 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6672 if (flag_unsafe_math_optimizations
6673 && TREE_CODE (arg0) == PLUS_EXPR
6674 && TREE_CODE (arg1) != MULT_EXPR)
6676 tree tree00 = TREE_OPERAND (arg0, 0);
6677 tree tree01 = TREE_OPERAND (arg0, 1);
6678 if (TREE_CODE (tree01) == MULT_EXPR
6679 && TREE_CODE (tree00) == MULT_EXPR)
6682 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6683 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6689 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6690 is a rotate of A by C1 bits. */
6691 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6692 is a rotate of A by B bits. */
6694 enum tree_code code0, code1;
6695 code0 = TREE_CODE (arg0);
6696 code1 = TREE_CODE (arg1);
6697 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6698 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6699 && operand_equal_p (TREE_OPERAND (arg0, 0),
6700 TREE_OPERAND (arg1, 0), 0)
6701 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6703 tree tree01, tree11;
6704 enum tree_code code01, code11;
6706 tree01 = TREE_OPERAND (arg0, 1);
6707 tree11 = TREE_OPERAND (arg1, 1);
6708 STRIP_NOPS (tree01);
6709 STRIP_NOPS (tree11);
6710 code01 = TREE_CODE (tree01);
6711 code11 = TREE_CODE (tree11);
6712 if (code01 == INTEGER_CST
6713 && code11 == INTEGER_CST
6714 && TREE_INT_CST_HIGH (tree01) == 0
6715 && TREE_INT_CST_HIGH (tree11) == 0
6716 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6717 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6718 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6719 code0 == LSHIFT_EXPR ? tree01 : tree11);
6720 else if (code11 == MINUS_EXPR)
6722 tree tree110, tree111;
6723 tree110 = TREE_OPERAND (tree11, 0);
6724 tree111 = TREE_OPERAND (tree11, 1);
6725 STRIP_NOPS (tree110);
6726 STRIP_NOPS (tree111);
6727 if (TREE_CODE (tree110) == INTEGER_CST
6728 && 0 == compare_tree_int (tree110,
6730 (TREE_TYPE (TREE_OPERAND
6732 && operand_equal_p (tree01, tree111, 0))
6733 return build2 ((code0 == LSHIFT_EXPR
6736 type, TREE_OPERAND (arg0, 0), tree01);
6738 else if (code01 == MINUS_EXPR)
6740 tree tree010, tree011;
6741 tree010 = TREE_OPERAND (tree01, 0);
6742 tree011 = TREE_OPERAND (tree01, 1);
6743 STRIP_NOPS (tree010);
6744 STRIP_NOPS (tree011);
6745 if (TREE_CODE (tree010) == INTEGER_CST
6746 && 0 == compare_tree_int (tree010,
6748 (TREE_TYPE (TREE_OPERAND
6750 && operand_equal_p (tree11, tree011, 0))
6751 return build2 ((code0 != LSHIFT_EXPR
6754 type, TREE_OPERAND (arg0, 0), tree11);
6760 /* In most languages, can't associate operations on floats through
6761 parentheses. Rather than remember where the parentheses were, we
6762 don't associate floats at all, unless the user has specified
6763 -funsafe-math-optimizations. */
6766 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6768 tree var0, con0, lit0, minus_lit0;
6769 tree var1, con1, lit1, minus_lit1;
6771 /* Split both trees into variables, constants, and literals. Then
6772 associate each group together, the constants with literals,
6773 then the result with variables. This increases the chances of
6774 literals being recombined later and of generating relocatable
6775 expressions for the sum of a constant and literal. */
6776 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6777 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6778 code == MINUS_EXPR);
6780 /* Only do something if we found more than two objects. Otherwise,
6781 nothing has changed and we risk infinite recursion. */
6782 if (2 < ((var0 != 0) + (var1 != 0)
6783 + (con0 != 0) + (con1 != 0)
6784 + (lit0 != 0) + (lit1 != 0)
6785 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6787 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6788 if (code == MINUS_EXPR)
6791 var0 = associate_trees (var0, var1, code, type);
6792 con0 = associate_trees (con0, con1, code, type);
6793 lit0 = associate_trees (lit0, lit1, code, type);
6794 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6796 /* Preserve the MINUS_EXPR if the negative part of the literal is
6797 greater than the positive part. Otherwise, the multiplicative
6798 folding code (i.e extract_muldiv) may be fooled in case
6799 unsigned constants are subtracted, like in the following
6800 example: ((X*2 + 4) - 8U)/2. */
6801 if (minus_lit0 && lit0)
6803 if (TREE_CODE (lit0) == INTEGER_CST
6804 && TREE_CODE (minus_lit0) == INTEGER_CST
6805 && tree_int_cst_lt (lit0, minus_lit0))
6807 minus_lit0 = associate_trees (minus_lit0, lit0,
6813 lit0 = associate_trees (lit0, minus_lit0,
6821 return fold_convert (type,
6822 associate_trees (var0, minus_lit0,
6826 con0 = associate_trees (con0, minus_lit0,
6828 return fold_convert (type,
6829 associate_trees (var0, con0,
6834 con0 = associate_trees (con0, lit0, code, type);
6835 return fold_convert (type, associate_trees (var0, con0,
6842 t1 = const_binop (code, arg0, arg1, 0);
6843 if (t1 != NULL_TREE)
6845 /* The return value should always have
6846 the same type as the original expression. */
6847 if (TREE_TYPE (t1) != type)
6848 t1 = fold_convert (type, t1);
6855 /* A - (-B) -> A + B */
6856 if (TREE_CODE (arg1) == NEGATE_EXPR)
6857 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6858 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6859 if (TREE_CODE (arg0) == NEGATE_EXPR
6860 && (FLOAT_TYPE_P (type)
6861 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6862 && negate_expr_p (arg1)
6863 && reorder_operands_p (arg0, arg1))
6864 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6865 TREE_OPERAND (arg0, 0)));
6867 if (! FLOAT_TYPE_P (type))
6869 if (! wins && integer_zerop (arg0))
6870 return negate_expr (fold_convert (type, arg1));
6871 if (integer_zerop (arg1))
6872 return non_lvalue (fold_convert (type, arg0));
6874 /* Fold A - (A & B) into ~B & A. */
6875 if (!TREE_SIDE_EFFECTS (arg0)
6876 && TREE_CODE (arg1) == BIT_AND_EXPR)
6878 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6879 return fold (build2 (BIT_AND_EXPR, type,
6880 fold (build1 (BIT_NOT_EXPR, type,
6881 TREE_OPERAND (arg1, 0))),
6883 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6884 return fold (build2 (BIT_AND_EXPR, type,
6885 fold (build1 (BIT_NOT_EXPR, type,
6886 TREE_OPERAND (arg1, 1))),
6890 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6891 any power of 2 minus 1. */
6892 if (TREE_CODE (arg0) == BIT_AND_EXPR
6893 && TREE_CODE (arg1) == BIT_AND_EXPR
6894 && operand_equal_p (TREE_OPERAND (arg0, 0),
6895 TREE_OPERAND (arg1, 0), 0))
6897 tree mask0 = TREE_OPERAND (arg0, 1);
6898 tree mask1 = TREE_OPERAND (arg1, 1);
6899 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6901 if (operand_equal_p (tem, mask1, 0))
6903 tem = fold (build2 (BIT_XOR_EXPR, type,
6904 TREE_OPERAND (arg0, 0), mask1));
6905 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6910 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6911 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6912 return non_lvalue (fold_convert (type, arg0));
6914 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6915 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6916 (-ARG1 + ARG0) reduces to -ARG1. */
6917 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6918 return negate_expr (fold_convert (type, arg1));
6920 /* Fold &x - &x. This can happen from &x.foo - &x.
6921 This is unsafe for certain floats even in non-IEEE formats.
6922 In IEEE, it is unsafe because it does wrong for NaNs.
6923 Also note that operand_equal_p is always false if an operand
6926 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6927 && operand_equal_p (arg0, arg1, 0))
6928 return fold_convert (type, integer_zero_node);
6930 /* A - B -> A + (-B) if B is easily negatable. */
6931 if (!wins && negate_expr_p (arg1)
6932 && (FLOAT_TYPE_P (type)
6933 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6934 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6936 if (TREE_CODE (arg0) == MULT_EXPR
6937 && TREE_CODE (arg1) == MULT_EXPR
6938 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6940 /* (A * C) - (B * C) -> (A-B) * C. */
6941 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6942 TREE_OPERAND (arg1, 1), 0))
6943 return fold (build2 (MULT_EXPR, type,
6944 fold (build2 (MINUS_EXPR, type,
6945 TREE_OPERAND (arg0, 0),
6946 TREE_OPERAND (arg1, 0))),
6947 TREE_OPERAND (arg0, 1)));
6948 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6949 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6950 TREE_OPERAND (arg1, 0), 0))
6951 return fold (build2 (MULT_EXPR, type,
6952 TREE_OPERAND (arg0, 0),
6953 fold (build2 (MINUS_EXPR, type,
6954 TREE_OPERAND (arg0, 1),
6955 TREE_OPERAND (arg1, 1)))));
6961 /* (-A) * (-B) -> A * B */
6962 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6963 return fold (build2 (MULT_EXPR, type,
6964 TREE_OPERAND (arg0, 0),
6965 negate_expr (arg1)));
6966 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6967 return fold (build2 (MULT_EXPR, type,
6969 TREE_OPERAND (arg1, 0)));
6971 if (! FLOAT_TYPE_P (type))
6973 if (integer_zerop (arg1))
6974 return omit_one_operand (type, arg1, arg0);
6975 if (integer_onep (arg1))
6976 return non_lvalue (fold_convert (type, arg0));
6978 /* (a * (1 << b)) is (a << b) */
6979 if (TREE_CODE (arg1) == LSHIFT_EXPR
6980 && integer_onep (TREE_OPERAND (arg1, 0)))
6981 return fold (build2 (LSHIFT_EXPR, type, arg0,
6982 TREE_OPERAND (arg1, 1)));
6983 if (TREE_CODE (arg0) == LSHIFT_EXPR
6984 && integer_onep (TREE_OPERAND (arg0, 0)))
6985 return fold (build2 (LSHIFT_EXPR, type, arg1,
6986 TREE_OPERAND (arg0, 1)));
6988 if (TREE_CODE (arg1) == INTEGER_CST
6989 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6990 fold_convert (type, arg1),
6992 return fold_convert (type, tem);
6997 /* Maybe fold x * 0 to 0. The expressions aren't the same
6998 when x is NaN, since x * 0 is also NaN. Nor are they the
6999 same in modes with signed zeros, since multiplying a
7000 negative value by 0 gives -0, not +0. */
7001 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7002 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7003 && real_zerop (arg1))
7004 return omit_one_operand (type, arg1, arg0);
7005 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7006 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7007 && real_onep (arg1))
7008 return non_lvalue (fold_convert (type, arg0));
7010 /* Transform x * -1.0 into -x. */
7011 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7012 && real_minus_onep (arg1))
7013 return fold_convert (type, negate_expr (arg0));
7015 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7016 if (flag_unsafe_math_optimizations
7017 && TREE_CODE (arg0) == RDIV_EXPR
7018 && TREE_CODE (arg1) == REAL_CST
7019 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7021 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7024 return fold (build2 (RDIV_EXPR, type, tem,
7025 TREE_OPERAND (arg0, 1)));
7028 if (flag_unsafe_math_optimizations)
7030 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7031 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7033 /* Optimizations of root(...)*root(...). */
7034 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7036 tree rootfn, arg, arglist;
7037 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7038 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7040 /* Optimize sqrt(x)*sqrt(x) as x. */
7041 if (BUILTIN_SQRT_P (fcode0)
7042 && operand_equal_p (arg00, arg10, 0)
7043 && ! HONOR_SNANS (TYPE_MODE (type)))
7046 /* Optimize root(x)*root(y) as root(x*y). */
7047 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7048 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7049 arglist = build_tree_list (NULL_TREE, arg);
7050 return build_function_call_expr (rootfn, arglist);
7053 /* Optimize expN(x)*expN(y) as expN(x+y). */
7054 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7056 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7057 tree arg = build2 (PLUS_EXPR, type,
7058 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7059 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7060 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7061 return build_function_call_expr (expfn, arglist);
7064 /* Optimizations of pow(...)*pow(...). */
7065 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7066 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7067 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7069 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7070 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7072 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7073 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7076 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7077 if (operand_equal_p (arg01, arg11, 0))
7079 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7080 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7081 tree arglist = tree_cons (NULL_TREE, fold (arg),
7082 build_tree_list (NULL_TREE,
7084 return build_function_call_expr (powfn, arglist);
7087 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7088 if (operand_equal_p (arg00, arg10, 0))
7090 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7091 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7092 tree arglist = tree_cons (NULL_TREE, arg00,
7093 build_tree_list (NULL_TREE,
7095 return build_function_call_expr (powfn, arglist);
7099 /* Optimize tan(x)*cos(x) as sin(x). */
7100 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7101 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7102 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7103 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7104 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7105 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7106 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7107 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7109 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7111 if (sinfn != NULL_TREE)
7112 return build_function_call_expr (sinfn,
7113 TREE_OPERAND (arg0, 1));
7116 /* Optimize x*pow(x,c) as pow(x,c+1). */
7117 if (fcode1 == BUILT_IN_POW
7118 || fcode1 == BUILT_IN_POWF
7119 || fcode1 == BUILT_IN_POWL)
7121 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7122 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7124 if (TREE_CODE (arg11) == REAL_CST
7125 && ! TREE_CONSTANT_OVERFLOW (arg11)
7126 && operand_equal_p (arg0, arg10, 0))
7128 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7132 c = TREE_REAL_CST (arg11);
7133 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7134 arg = build_real (type, c);
7135 arglist = build_tree_list (NULL_TREE, arg);
7136 arglist = tree_cons (NULL_TREE, arg0, arglist);
7137 return build_function_call_expr (powfn, arglist);
7141 /* Optimize pow(x,c)*x as pow(x,c+1). */
7142 if (fcode0 == BUILT_IN_POW
7143 || fcode0 == BUILT_IN_POWF
7144 || fcode0 == BUILT_IN_POWL)
7146 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7147 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7149 if (TREE_CODE (arg01) == REAL_CST
7150 && ! TREE_CONSTANT_OVERFLOW (arg01)
7151 && operand_equal_p (arg1, arg00, 0))
7153 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7157 c = TREE_REAL_CST (arg01);
7158 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7159 arg = build_real (type, c);
7160 arglist = build_tree_list (NULL_TREE, arg);
7161 arglist = tree_cons (NULL_TREE, arg1, arglist);
7162 return build_function_call_expr (powfn, arglist);
7166 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7168 && operand_equal_p (arg0, arg1, 0))
7170 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7174 tree arg = build_real (type, dconst2);
7175 tree arglist = build_tree_list (NULL_TREE, arg);
7176 arglist = tree_cons (NULL_TREE, arg0, arglist);
7177 return build_function_call_expr (powfn, arglist);
7186 if (integer_all_onesp (arg1))
7187 return omit_one_operand (type, arg1, arg0);
7188 if (integer_zerop (arg1))
7189 return non_lvalue (fold_convert (type, arg0));
7190 if (operand_equal_p (arg0, arg1, 0))
7191 return non_lvalue (fold_convert (type, arg0));
7194 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7195 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7197 t1 = build_int_2 (-1, -1);
7198 TREE_TYPE (t1) = type;
7199 force_fit_type (t1, 0);
7200 return omit_one_operand (type, t1, arg1);
7204 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7205 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7207 t1 = build_int_2 (-1, -1);
7208 TREE_TYPE (t1) = type;
7209 force_fit_type (t1, 0);
7210 return omit_one_operand (type, t1, arg0);
7213 t1 = distribute_bit_expr (code, type, arg0, arg1);
7214 if (t1 != NULL_TREE)
7217 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7219 This results in more efficient code for machines without a NAND
7220 instruction. Combine will canonicalize to the first form
7221 which will allow use of NAND instructions provided by the
7222 backend if they exist. */
7223 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7224 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7226 return fold (build1 (BIT_NOT_EXPR, type,
7227 build2 (BIT_AND_EXPR, type,
7228 TREE_OPERAND (arg0, 0),
7229 TREE_OPERAND (arg1, 0))));
7232 /* See if this can be simplified into a rotate first. If that
7233 is unsuccessful continue in the association code. */
7237 if (integer_zerop (arg1))
7238 return non_lvalue (fold_convert (type, arg0));
7239 if (integer_all_onesp (arg1))
7240 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7241 if (operand_equal_p (arg0, arg1, 0))
7242 return omit_one_operand (type, integer_zero_node, arg0);
7245 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7246 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7248 t1 = build_int_2 (-1, -1);
7249 TREE_TYPE (t1) = type;
7250 force_fit_type (t1, 0);
7251 return omit_one_operand (type, t1, arg1);
7255 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7256 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7258 t1 = build_int_2 (-1, -1);
7259 TREE_TYPE (t1) = type;
7260 force_fit_type (t1, 0);
7261 return omit_one_operand (type, t1, arg0);
7264 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7265 with a constant, and the two constants have no bits in common,
7266 we should treat this as a BIT_IOR_EXPR since this may produce more
7268 if (TREE_CODE (arg0) == BIT_AND_EXPR
7269 && TREE_CODE (arg1) == BIT_AND_EXPR
7270 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7271 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7272 && integer_zerop (const_binop (BIT_AND_EXPR,
7273 TREE_OPERAND (arg0, 1),
7274 TREE_OPERAND (arg1, 1), 0)))
7276 code = BIT_IOR_EXPR;
7280 /* See if this can be simplified into a rotate first. If that
7281 is unsuccessful continue in the association code. */
7285 if (integer_all_onesp (arg1))
7286 return non_lvalue (fold_convert (type, arg0));
7287 if (integer_zerop (arg1))
7288 return omit_one_operand (type, arg1, arg0);
7289 if (operand_equal_p (arg0, arg1, 0))
7290 return non_lvalue (fold_convert (type, arg0));
7292 /* ~X & X is always zero. */
7293 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7294 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7295 return omit_one_operand (type, integer_zero_node, arg1);
7297 /* X & ~X is always zero. */
7298 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7299 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7300 return omit_one_operand (type, integer_zero_node, arg0);
7302 t1 = distribute_bit_expr (code, type, arg0, arg1);
7303 if (t1 != NULL_TREE)
7305 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7306 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7307 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7310 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7312 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7313 && (~TREE_INT_CST_LOW (arg1)
7314 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7315 return fold_convert (type, TREE_OPERAND (arg0, 0));
7318 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7320 This results in more efficient code for machines without a NOR
7321 instruction. Combine will canonicalize to the first form
7322 which will allow use of NOR instructions provided by the
7323 backend if they exist. */
7324 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7325 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7327 return fold (build1 (BIT_NOT_EXPR, type,
7328 build2 (BIT_IOR_EXPR, type,
7329 TREE_OPERAND (arg0, 0),
7330 TREE_OPERAND (arg1, 0))));
7336 /* Don't touch a floating-point divide by zero unless the mode
7337 of the constant can represent infinity. */
7338 if (TREE_CODE (arg1) == REAL_CST
7339 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7340 && real_zerop (arg1))
7343 /* (-A) / (-B) -> A / B */
7344 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7345 return fold (build2 (RDIV_EXPR, type,
7346 TREE_OPERAND (arg0, 0),
7347 negate_expr (arg1)));
7348 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7349 return fold (build2 (RDIV_EXPR, type,
7351 TREE_OPERAND (arg1, 0)));
7353 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7354 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7355 && real_onep (arg1))
7356 return non_lvalue (fold_convert (type, arg0));
7358 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7359 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7360 && real_minus_onep (arg1))
7361 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7363 /* If ARG1 is a constant, we can convert this to a multiply by the
7364 reciprocal. This does not have the same rounding properties,
7365 so only do this if -funsafe-math-optimizations. We can actually
7366 always safely do it if ARG1 is a power of two, but it's hard to
7367 tell if it is or not in a portable manner. */
7368 if (TREE_CODE (arg1) == REAL_CST)
7370 if (flag_unsafe_math_optimizations
7371 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7373 return fold (build2 (MULT_EXPR, type, arg0, tem));
7374 /* Find the reciprocal if optimizing and the result is exact. */
7378 r = TREE_REAL_CST (arg1);
7379 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7381 tem = build_real (type, r);
7382 return fold (build2 (MULT_EXPR, type, arg0, tem));
7386 /* Convert A/B/C to A/(B*C). */
7387 if (flag_unsafe_math_optimizations
7388 && TREE_CODE (arg0) == RDIV_EXPR)
7389 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7390 fold (build2 (MULT_EXPR, type,
7391 TREE_OPERAND (arg0, 1), arg1))));
7393 /* Convert A/(B/C) to (A/B)*C. */
7394 if (flag_unsafe_math_optimizations
7395 && TREE_CODE (arg1) == RDIV_EXPR)
7396 return fold (build2 (MULT_EXPR, type,
7397 fold (build2 (RDIV_EXPR, type, arg0,
7398 TREE_OPERAND (arg1, 0))),
7399 TREE_OPERAND (arg1, 1)));
7401 /* Convert C1/(X*C2) into (C1/C2)/X. */
7402 if (flag_unsafe_math_optimizations
7403 && TREE_CODE (arg1) == MULT_EXPR
7404 && TREE_CODE (arg0) == REAL_CST
7405 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7407 tree tem = const_binop (RDIV_EXPR, arg0,
7408 TREE_OPERAND (arg1, 1), 0);
7410 return fold (build2 (RDIV_EXPR, type, tem,
7411 TREE_OPERAND (arg1, 0)));
7414 if (flag_unsafe_math_optimizations)
7416 enum built_in_function fcode = builtin_mathfn_code (arg1);
7417 /* Optimize x/expN(y) into x*expN(-y). */
7418 if (BUILTIN_EXPONENT_P (fcode))
7420 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7421 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7422 tree arglist = build_tree_list (NULL_TREE,
7423 fold_convert (type, arg));
7424 arg1 = build_function_call_expr (expfn, arglist);
7425 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7428 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7429 if (fcode == BUILT_IN_POW
7430 || fcode == BUILT_IN_POWF
7431 || fcode == BUILT_IN_POWL)
7433 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7434 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7435 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7436 tree neg11 = fold_convert (type, negate_expr (arg11));
7437 tree arglist = tree_cons(NULL_TREE, arg10,
7438 build_tree_list (NULL_TREE, neg11));
7439 arg1 = build_function_call_expr (powfn, arglist);
7440 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7444 if (flag_unsafe_math_optimizations)
7446 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7447 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7449 /* Optimize sin(x)/cos(x) as tan(x). */
7450 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7451 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7452 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7453 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7454 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7456 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7458 if (tanfn != NULL_TREE)
7459 return build_function_call_expr (tanfn,
7460 TREE_OPERAND (arg0, 1));
7463 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7464 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7465 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7466 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7467 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7468 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7470 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7472 if (tanfn != NULL_TREE)
7474 tree tmp = TREE_OPERAND (arg0, 1);
7475 tmp = build_function_call_expr (tanfn, tmp);
7476 return fold (build2 (RDIV_EXPR, type,
7477 build_real (type, dconst1), tmp));
7481 /* Optimize pow(x,c)/x as pow(x,c-1). */
7482 if (fcode0 == BUILT_IN_POW
7483 || fcode0 == BUILT_IN_POWF
7484 || fcode0 == BUILT_IN_POWL)
7486 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7487 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7488 if (TREE_CODE (arg01) == REAL_CST
7489 && ! TREE_CONSTANT_OVERFLOW (arg01)
7490 && operand_equal_p (arg1, arg00, 0))
7492 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7496 c = TREE_REAL_CST (arg01);
7497 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7498 arg = build_real (type, c);
7499 arglist = build_tree_list (NULL_TREE, arg);
7500 arglist = tree_cons (NULL_TREE, arg1, arglist);
7501 return build_function_call_expr (powfn, arglist);
7507 case TRUNC_DIV_EXPR:
7508 case ROUND_DIV_EXPR:
7509 case FLOOR_DIV_EXPR:
7511 case EXACT_DIV_EXPR:
7512 if (integer_onep (arg1))
7513 return non_lvalue (fold_convert (type, arg0));
7514 if (integer_zerop (arg1))
7517 if (!TYPE_UNSIGNED (type)
7518 && TREE_CODE (arg1) == INTEGER_CST
7519 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7520 && TREE_INT_CST_HIGH (arg1) == -1)
7521 return fold_convert (type, negate_expr (arg0));
7523 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7524 operation, EXACT_DIV_EXPR.
7526 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7527 At one time others generated faster code, it's not clear if they do
7528 after the last round to changes to the DIV code in expmed.c. */
7529 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7530 && multiple_of_p (type, arg0, arg1))
7531 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7533 if (TREE_CODE (arg1) == INTEGER_CST
7534 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7536 return fold_convert (type, tem);
7541 case FLOOR_MOD_EXPR:
7542 case ROUND_MOD_EXPR:
7543 case TRUNC_MOD_EXPR:
7544 if (integer_onep (arg1))
7545 return omit_one_operand (type, integer_zero_node, arg0);
7546 if (integer_zerop (arg1))
7548 /* X % -1 is zero. */
7549 if (!TYPE_UNSIGNED (type)
7550 && TREE_CODE (arg1) == INTEGER_CST
7551 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7552 && TREE_INT_CST_HIGH (arg1) == -1)
7553 return omit_one_operand (type, integer_zero_node, arg0);
7555 if (TREE_CODE (arg1) == INTEGER_CST
7556 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7558 return fold_convert (type, tem);
7564 if (integer_all_onesp (arg0))
7565 return omit_one_operand (type, arg0, arg1);
7569 /* Optimize -1 >> x for arithmetic right shifts. */
7570 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7571 return omit_one_operand (type, arg0, arg1);
7572 /* ... fall through ... */
7576 if (integer_zerop (arg1))
7577 return non_lvalue (fold_convert (type, arg0));
7578 if (integer_zerop (arg0))
7579 return omit_one_operand (type, arg0, arg1);
7581 /* Since negative shift count is not well-defined,
7582 don't try to compute it in the compiler. */
7583 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7585 /* Rewrite an LROTATE_EXPR by a constant into an
7586 RROTATE_EXPR by a new constant. */
7587 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7589 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7590 tem = fold_convert (TREE_TYPE (arg1), tem);
7591 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7592 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7595 /* If we have a rotate of a bit operation with the rotate count and
7596 the second operand of the bit operation both constant,
7597 permute the two operations. */
7598 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7599 && (TREE_CODE (arg0) == BIT_AND_EXPR
7600 || TREE_CODE (arg0) == BIT_IOR_EXPR
7601 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7603 return fold (build2 (TREE_CODE (arg0), type,
7604 fold (build2 (code, type,
7605 TREE_OPERAND (arg0, 0), arg1)),
7606 fold (build2 (code, type,
7607 TREE_OPERAND (arg0, 1), arg1))));
7609 /* Two consecutive rotates adding up to the width of the mode can
7611 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7612 && TREE_CODE (arg0) == RROTATE_EXPR
7613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7614 && TREE_INT_CST_HIGH (arg1) == 0
7615 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7616 && ((TREE_INT_CST_LOW (arg1)
7617 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7618 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7619 return TREE_OPERAND (arg0, 0);
7624 if (operand_equal_p (arg0, arg1, 0))
7625 return omit_one_operand (type, arg0, arg1);
7626 if (INTEGRAL_TYPE_P (type)
7627 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7628 return omit_one_operand (type, arg1, arg0);
7632 if (operand_equal_p (arg0, arg1, 0))
7633 return omit_one_operand (type, arg0, arg1);
7634 if (INTEGRAL_TYPE_P (type)
7635 && TYPE_MAX_VALUE (type)
7636 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7637 return omit_one_operand (type, arg1, arg0);
7640 case TRUTH_NOT_EXPR:
7641 /* The argument to invert_truthvalue must have Boolean type. */
7642 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7643 arg0 = fold_convert (boolean_type_node, arg0);
7645 /* Note that the operand of this must be an int
7646 and its values must be 0 or 1.
7647 ("true" is a fixed value perhaps depending on the language,
7648 but we don't handle values other than 1 correctly yet.) */
7649 tem = invert_truthvalue (arg0);
7650 /* Avoid infinite recursion. */
7651 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7653 tem = fold_single_bit_test (code, arg0, arg1, type);
7658 return fold_convert (type, tem);
7660 case TRUTH_ANDIF_EXPR:
7661 /* Note that the operands of this must be ints
7662 and their values must be 0 or 1.
7663 ("true" is a fixed value perhaps depending on the language.) */
7664 /* If first arg is constant zero, return it. */
7665 if (integer_zerop (arg0))
7666 return fold_convert (type, arg0);
7667 case TRUTH_AND_EXPR:
7668 /* If either arg is constant true, drop it. */
7669 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7670 return non_lvalue (fold_convert (type, arg1));
7671 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7672 /* Preserve sequence points. */
7673 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7674 return non_lvalue (fold_convert (type, arg0));
7675 /* If second arg is constant zero, result is zero, but first arg
7676 must be evaluated. */
7677 if (integer_zerop (arg1))
7678 return omit_one_operand (type, arg1, arg0);
7679 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7680 case will be handled here. */
7681 if (integer_zerop (arg0))
7682 return omit_one_operand (type, arg0, arg1);
7684 /* !X && X is always false. */
7685 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7686 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7687 return omit_one_operand (type, integer_zero_node, arg1);
7688 /* X && !X is always false. */
7689 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7690 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7691 return omit_one_operand (type, integer_zero_node, arg0);
7694 /* We only do these simplifications if we are optimizing. */
7698 /* Check for things like (A || B) && (A || C). We can convert this
7699 to A || (B && C). Note that either operator can be any of the four
7700 truth and/or operations and the transformation will still be
7701 valid. Also note that we only care about order for the
7702 ANDIF and ORIF operators. If B contains side effects, this
7703 might change the truth-value of A. */
7704 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7705 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7706 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7707 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7708 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7709 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7711 tree a00 = TREE_OPERAND (arg0, 0);
7712 tree a01 = TREE_OPERAND (arg0, 1);
7713 tree a10 = TREE_OPERAND (arg1, 0);
7714 tree a11 = TREE_OPERAND (arg1, 1);
7715 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7716 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7717 && (code == TRUTH_AND_EXPR
7718 || code == TRUTH_OR_EXPR));
7720 if (operand_equal_p (a00, a10, 0))
7721 return fold (build2 (TREE_CODE (arg0), type, a00,
7722 fold (build2 (code, type, a01, a11))));
7723 else if (commutative && operand_equal_p (a00, a11, 0))
7724 return fold (build2 (TREE_CODE (arg0), type, a00,
7725 fold (build2 (code, type, a01, a10))));
7726 else if (commutative && operand_equal_p (a01, a10, 0))
7727 return fold (build2 (TREE_CODE (arg0), type, a01,
7728 fold (build2 (code, type, a00, a11))));
7730 /* This case if tricky because we must either have commutative
7731 operators or else A10 must not have side-effects. */
7733 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7734 && operand_equal_p (a01, a11, 0))
7735 return fold (build2 (TREE_CODE (arg0), type,
7736 fold (build2 (code, type, a00, a10)),
7740 /* See if we can build a range comparison. */
7741 if (0 != (tem = fold_range_test (t)))
7744 /* Check for the possibility of merging component references. If our
7745 lhs is another similar operation, try to merge its rhs with our
7746 rhs. Then try to merge our lhs and rhs. */
7747 if (TREE_CODE (arg0) == code
7748 && 0 != (tem = fold_truthop (code, type,
7749 TREE_OPERAND (arg0, 1), arg1)))
7750 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7752 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7757 case TRUTH_ORIF_EXPR:
7758 /* Note that the operands of this must be ints
7759 and their values must be 0 or true.
7760 ("true" is a fixed value perhaps depending on the language.) */
7761 /* If first arg is constant true, return it. */
7762 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7763 return fold_convert (type, arg0);
7765 /* If either arg is constant zero, drop it. */
7766 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7767 return non_lvalue (fold_convert (type, arg1));
7768 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7769 /* Preserve sequence points. */
7770 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7771 return non_lvalue (fold_convert (type, arg0));
7772 /* If second arg is constant true, result is true, but we must
7773 evaluate first arg. */
7774 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7775 return omit_one_operand (type, arg1, arg0);
7776 /* Likewise for first arg, but note this only occurs here for
7778 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7779 return omit_one_operand (type, arg0, arg1);
7781 /* !X || X is always true. */
7782 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7783 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7784 return omit_one_operand (type, integer_one_node, arg1);
7785 /* X || !X is always true. */
7786 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7787 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7788 return omit_one_operand (type, integer_one_node, arg0);
7792 case TRUTH_XOR_EXPR:
7793 /* If the second arg is constant zero, drop it. */
7794 if (integer_zerop (arg1))
7795 return non_lvalue (fold_convert (type, arg0));
7796 /* If the second arg is constant true, this is a logical inversion. */
7797 if (integer_onep (arg1))
7798 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7799 /* Identical arguments cancel to zero. */
7800 if (operand_equal_p (arg0, arg1, 0))
7801 return omit_one_operand (type, integer_zero_node, arg0);
7803 /* !X ^ X is always true. */
7804 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7805 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7806 return omit_one_operand (type, integer_one_node, arg1);
7808 /* X ^ !X is always true. */
7809 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7810 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7811 return omit_one_operand (type, integer_one_node, arg0);
7821 /* If one arg is a real or integer constant, put it last. */
7822 if (tree_swap_operands_p (arg0, arg1, true))
7823 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7825 /* If this is an equality comparison of the address of a non-weak
7826 object against zero, then we know the result. */
7827 if ((code == EQ_EXPR || code == NE_EXPR)
7828 && TREE_CODE (arg0) == ADDR_EXPR
7829 && DECL_P (TREE_OPERAND (arg0, 0))
7830 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7831 && integer_zerop (arg1))
7832 return constant_boolean_node (code != EQ_EXPR, type);
7834 /* If this is an equality comparison of the address of two non-weak,
7835 unaliased symbols neither of which are extern (since we do not
7836 have access to attributes for externs), then we know the result. */
7837 if ((code == EQ_EXPR || code == NE_EXPR)
7838 && TREE_CODE (arg0) == ADDR_EXPR
7839 && DECL_P (TREE_OPERAND (arg0, 0))
7840 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7841 && ! lookup_attribute ("alias",
7842 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7843 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7844 && TREE_CODE (arg1) == ADDR_EXPR
7845 && DECL_P (TREE_OPERAND (arg1, 0))
7846 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7847 && ! lookup_attribute ("alias",
7848 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7849 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7850 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7851 ? code == EQ_EXPR : code != EQ_EXPR,
7854 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7856 tree targ0 = strip_float_extensions (arg0);
7857 tree targ1 = strip_float_extensions (arg1);
7858 tree newtype = TREE_TYPE (targ0);
7860 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7861 newtype = TREE_TYPE (targ1);
7863 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7864 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7865 return fold (build2 (code, type, fold_convert (newtype, targ0),
7866 fold_convert (newtype, targ1)));
7868 /* (-a) CMP (-b) -> b CMP a */
7869 if (TREE_CODE (arg0) == NEGATE_EXPR
7870 && TREE_CODE (arg1) == NEGATE_EXPR)
7871 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7872 TREE_OPERAND (arg0, 0)));
7874 if (TREE_CODE (arg1) == REAL_CST)
7876 REAL_VALUE_TYPE cst;
7877 cst = TREE_REAL_CST (arg1);
7879 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7880 if (TREE_CODE (arg0) == NEGATE_EXPR)
7882 fold (build2 (swap_tree_comparison (code), type,
7883 TREE_OPERAND (arg0, 0),
7884 build_real (TREE_TYPE (arg1),
7885 REAL_VALUE_NEGATE (cst))));
7887 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7888 /* a CMP (-0) -> a CMP 0 */
7889 if (REAL_VALUE_MINUS_ZERO (cst))
7890 return fold (build2 (code, type, arg0,
7891 build_real (TREE_TYPE (arg1), dconst0)));
7893 /* x != NaN is always true, other ops are always false. */
7894 if (REAL_VALUE_ISNAN (cst)
7895 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7897 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7898 return omit_one_operand (type, tem, arg0);
7901 /* Fold comparisons against infinity. */
7902 if (REAL_VALUE_ISINF (cst))
7904 tem = fold_inf_compare (code, type, arg0, arg1);
7905 if (tem != NULL_TREE)
7910 /* If this is a comparison of a real constant with a PLUS_EXPR
7911 or a MINUS_EXPR of a real constant, we can convert it into a
7912 comparison with a revised real constant as long as no overflow
7913 occurs when unsafe_math_optimizations are enabled. */
7914 if (flag_unsafe_math_optimizations
7915 && TREE_CODE (arg1) == REAL_CST
7916 && (TREE_CODE (arg0) == PLUS_EXPR
7917 || TREE_CODE (arg0) == MINUS_EXPR)
7918 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7919 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7920 ? MINUS_EXPR : PLUS_EXPR,
7921 arg1, TREE_OPERAND (arg0, 1), 0))
7922 && ! TREE_CONSTANT_OVERFLOW (tem))
7923 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7925 /* Likewise, we can simplify a comparison of a real constant with
7926 a MINUS_EXPR whose first operand is also a real constant, i.e.
7927 (c1 - x) < c2 becomes x > c1-c2. */
7928 if (flag_unsafe_math_optimizations
7929 && TREE_CODE (arg1) == REAL_CST
7930 && TREE_CODE (arg0) == MINUS_EXPR
7931 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7932 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7934 && ! TREE_CONSTANT_OVERFLOW (tem))
7935 return fold (build2 (swap_tree_comparison (code), type,
7936 TREE_OPERAND (arg0, 1), tem));
7938 /* Fold comparisons against built-in math functions. */
7939 if (TREE_CODE (arg1) == REAL_CST
7940 && flag_unsafe_math_optimizations
7941 && ! flag_errno_math)
7943 enum built_in_function fcode = builtin_mathfn_code (arg0);
7945 if (fcode != END_BUILTINS)
7947 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7948 if (tem != NULL_TREE)
7954 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7955 if (TREE_CONSTANT (arg1)
7956 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7957 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7958 /* This optimization is invalid for ordered comparisons
7959 if CONST+INCR overflows or if foo+incr might overflow.
7960 This optimization is invalid for floating point due to rounding.
7961 For pointer types we assume overflow doesn't happen. */
7962 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7963 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7964 && (code == EQ_EXPR || code == NE_EXPR))))
7966 tree varop, newconst;
7968 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7970 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7971 arg1, TREE_OPERAND (arg0, 1)));
7972 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7973 TREE_OPERAND (arg0, 0),
7974 TREE_OPERAND (arg0, 1));
7978 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7979 arg1, TREE_OPERAND (arg0, 1)));
7980 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7981 TREE_OPERAND (arg0, 0),
7982 TREE_OPERAND (arg0, 1));
7986 /* If VAROP is a reference to a bitfield, we must mask
7987 the constant by the width of the field. */
7988 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7989 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7990 && host_integerp (DECL_SIZE (TREE_OPERAND
7991 (TREE_OPERAND (varop, 0), 1)), 1))
7993 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7994 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7995 tree folded_compare, shift;
7997 /* First check whether the comparison would come out
7998 always the same. If we don't do that we would
7999 change the meaning with the masking. */
8000 folded_compare = fold (build2 (code, type,
8001 TREE_OPERAND (varop, 0), arg1));
8002 if (integer_zerop (folded_compare)
8003 || integer_onep (folded_compare))
8004 return omit_one_operand (type, folded_compare, varop);
8006 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
8008 shift = fold_convert (TREE_TYPE (varop), shift);
8009 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8011 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8015 return fold (build2 (code, type, varop, newconst));
8018 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8019 This transformation affects the cases which are handled in later
8020 optimizations involving comparisons with non-negative constants. */
8021 if (TREE_CODE (arg1) == INTEGER_CST
8022 && TREE_CODE (arg0) != INTEGER_CST
8023 && tree_int_cst_sgn (arg1) > 0)
8028 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8029 return fold (build2 (GT_EXPR, type, arg0, arg1));
8032 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8033 return fold (build2 (LE_EXPR, type, arg0, arg1));
8040 /* Comparisons with the highest or lowest possible integer of
8041 the specified size will have known values.
8043 This is quite similar to fold_relational_hi_lo; however, my
8044 attempts to share the code have been nothing but trouble.
8045 I give up for now. */
8047 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8049 if (TREE_CODE (arg1) == INTEGER_CST
8050 && ! TREE_CONSTANT_OVERFLOW (arg1)
8051 && width <= HOST_BITS_PER_WIDE_INT
8052 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8053 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8055 unsigned HOST_WIDE_INT signed_max;
8056 unsigned HOST_WIDE_INT max, min;
8058 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8060 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8062 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8068 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8071 if (TREE_INT_CST_HIGH (arg1) == 0
8072 && TREE_INT_CST_LOW (arg1) == max)
8076 return omit_one_operand (type, integer_zero_node, arg0);
8079 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8082 return omit_one_operand (type, integer_one_node, arg0);
8085 return fold (build2 (NE_EXPR, type, arg0, arg1));
8087 /* The GE_EXPR and LT_EXPR cases above are not normally
8088 reached because of previous transformations. */
8093 else if (TREE_INT_CST_HIGH (arg1) == 0
8094 && TREE_INT_CST_LOW (arg1) == max - 1)
8098 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8099 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8101 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8102 return fold (build2 (NE_EXPR, type, arg0, arg1));
8106 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8107 && TREE_INT_CST_LOW (arg1) == min)
8111 return omit_one_operand (type, integer_zero_node, arg0);
8114 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8117 return omit_one_operand (type, integer_one_node, arg0);
8120 return fold (build2 (NE_EXPR, type, arg0, arg1));
8125 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8126 && TREE_INT_CST_LOW (arg1) == min + 1)
8130 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8131 return fold (build2 (NE_EXPR, type, arg0, arg1));
8133 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8134 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8139 else if (!in_gimple_form
8140 && TREE_INT_CST_HIGH (arg1) == 0
8141 && TREE_INT_CST_LOW (arg1) == signed_max
8142 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8143 /* signed_type does not work on pointer types. */
8144 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8146 /* The following case also applies to X < signed_max+1
8147 and X >= signed_max+1 because previous transformations. */
8148 if (code == LE_EXPR || code == GT_EXPR)
8151 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8152 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8154 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8155 type, fold_convert (st0, arg0),
8156 fold_convert (st1, integer_zero_node)));
8162 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8163 a MINUS_EXPR of a constant, we can convert it into a comparison with
8164 a revised constant as long as no overflow occurs. */
8165 if ((code == EQ_EXPR || code == NE_EXPR)
8166 && TREE_CODE (arg1) == INTEGER_CST
8167 && (TREE_CODE (arg0) == PLUS_EXPR
8168 || TREE_CODE (arg0) == MINUS_EXPR)
8169 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8170 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8171 ? MINUS_EXPR : PLUS_EXPR,
8172 arg1, TREE_OPERAND (arg0, 1), 0))
8173 && ! TREE_CONSTANT_OVERFLOW (tem))
8174 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8176 /* Similarly for a NEGATE_EXPR. */
8177 else if ((code == EQ_EXPR || code == NE_EXPR)
8178 && TREE_CODE (arg0) == NEGATE_EXPR
8179 && TREE_CODE (arg1) == INTEGER_CST
8180 && 0 != (tem = negate_expr (arg1))
8181 && TREE_CODE (tem) == INTEGER_CST
8182 && ! TREE_CONSTANT_OVERFLOW (tem))
8183 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8185 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8186 for !=. Don't do this for ordered comparisons due to overflow. */
8187 else if ((code == NE_EXPR || code == EQ_EXPR)
8188 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8189 return fold (build2 (code, type,
8190 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8192 /* If we are widening one operand of an integer comparison,
8193 see if the other operand is similarly being widened. Perhaps we
8194 can do the comparison in the narrower type. */
8195 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8196 && TREE_CODE (arg0) == NOP_EXPR
8197 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8198 && (code == EQ_EXPR || code == NE_EXPR
8199 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8200 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8201 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8202 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8203 || (TREE_CODE (t1) == INTEGER_CST
8204 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8205 return fold (build2 (code, type, tem,
8206 fold_convert (TREE_TYPE (tem), t1)));
8208 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8209 constant, we can simplify it. */
8210 else if (TREE_CODE (arg1) == INTEGER_CST
8211 && (TREE_CODE (arg0) == MIN_EXPR
8212 || TREE_CODE (arg0) == MAX_EXPR)
8213 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8214 return optimize_minmax_comparison (t);
8216 /* If we are comparing an ABS_EXPR with a constant, we can
8217 convert all the cases into explicit comparisons, but they may
8218 well not be faster than doing the ABS and one comparison.
8219 But ABS (X) <= C is a range comparison, which becomes a subtraction
8220 and a comparison, and is probably faster. */
8221 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8222 && TREE_CODE (arg0) == ABS_EXPR
8223 && ! TREE_SIDE_EFFECTS (arg0)
8224 && (0 != (tem = negate_expr (arg1)))
8225 && TREE_CODE (tem) == INTEGER_CST
8226 && ! TREE_CONSTANT_OVERFLOW (tem))
8227 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8228 build2 (GE_EXPR, type,
8229 TREE_OPERAND (arg0, 0), tem),
8230 build2 (LE_EXPR, type,
8231 TREE_OPERAND (arg0, 0), arg1)));
8233 /* If this is an EQ or NE comparison with zero and ARG0 is
8234 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8235 two operations, but the latter can be done in one less insn
8236 on machines that have only two-operand insns or on which a
8237 constant cannot be the first operand. */
8238 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8239 && TREE_CODE (arg0) == BIT_AND_EXPR)
8241 tree arg00 = TREE_OPERAND (arg0, 0);
8242 tree arg01 = TREE_OPERAND (arg0, 1);
8243 if (TREE_CODE (arg00) == LSHIFT_EXPR
8244 && integer_onep (TREE_OPERAND (arg00, 0)))
8246 fold (build2 (code, type,
8247 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8248 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8249 arg01, TREE_OPERAND (arg00, 1)),
8250 fold_convert (TREE_TYPE (arg0),
8253 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8254 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8256 fold (build2 (code, type,
8257 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8258 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8259 arg00, TREE_OPERAND (arg01, 1)),
8260 fold_convert (TREE_TYPE (arg0),
8265 /* If this is an NE or EQ comparison of zero against the result of a
8266 signed MOD operation whose second operand is a power of 2, make
8267 the MOD operation unsigned since it is simpler and equivalent. */
8268 if ((code == NE_EXPR || code == EQ_EXPR)
8269 && integer_zerop (arg1)
8270 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8271 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8272 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8273 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8274 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8275 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8277 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8278 tree newmod = build2 (TREE_CODE (arg0), newtype,
8279 fold_convert (newtype,
8280 TREE_OPERAND (arg0, 0)),
8281 fold_convert (newtype,
8282 TREE_OPERAND (arg0, 1)));
8284 return build2 (code, type, newmod, fold_convert (newtype, arg1));
8287 /* If this is an NE comparison of zero with an AND of one, remove the
8288 comparison since the AND will give the correct value. */
8289 if (code == NE_EXPR && integer_zerop (arg1)
8290 && TREE_CODE (arg0) == BIT_AND_EXPR
8291 && integer_onep (TREE_OPERAND (arg0, 1)))
8292 return fold_convert (type, arg0);
8294 /* If we have (A & C) == C where C is a power of 2, convert this into
8295 (A & C) != 0. Similarly for NE_EXPR. */
8296 if ((code == EQ_EXPR || code == NE_EXPR)
8297 && TREE_CODE (arg0) == BIT_AND_EXPR
8298 && integer_pow2p (TREE_OPERAND (arg0, 1))
8299 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8300 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8301 arg0, integer_zero_node));
8303 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8304 2, then fold the expression into shifts and logical operations. */
8305 tem = fold_single_bit_test (code, arg0, arg1, type);
8309 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8310 Similarly for NE_EXPR. */
8311 if ((code == EQ_EXPR || code == NE_EXPR)
8312 && TREE_CODE (arg0) == BIT_AND_EXPR
8313 && TREE_CODE (arg1) == INTEGER_CST
8314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8317 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8318 arg1, build1 (BIT_NOT_EXPR,
8319 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8320 TREE_OPERAND (arg0, 1))));
8321 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8322 if (integer_nonzerop (dandnotc))
8323 return omit_one_operand (type, rslt, arg0);
8326 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8327 Similarly for NE_EXPR. */
8328 if ((code == EQ_EXPR || code == NE_EXPR)
8329 && TREE_CODE (arg0) == BIT_IOR_EXPR
8330 && TREE_CODE (arg1) == INTEGER_CST
8331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8334 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8335 TREE_OPERAND (arg0, 1),
8336 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8337 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8338 if (integer_nonzerop (candnotd))
8339 return omit_one_operand (type, rslt, arg0);
8342 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8343 and similarly for >= into !=. */
8344 if ((code == LT_EXPR || code == GE_EXPR)
8345 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8346 && TREE_CODE (arg1) == LSHIFT_EXPR
8347 && integer_onep (TREE_OPERAND (arg1, 0)))
8348 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8349 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8350 TREE_OPERAND (arg1, 1)),
8351 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8353 else if ((code == LT_EXPR || code == GE_EXPR)
8354 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8355 && (TREE_CODE (arg1) == NOP_EXPR
8356 || TREE_CODE (arg1) == CONVERT_EXPR)
8357 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8358 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8360 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8361 fold_convert (TREE_TYPE (arg0),
8362 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8363 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8365 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8367 /* Simplify comparison of something with itself. (For IEEE
8368 floating-point, we can only do some of these simplifications.) */
8369 if (operand_equal_p (arg0, arg1, 0))
8374 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8375 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8376 return constant_boolean_node (1, type);
8381 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8382 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8383 return constant_boolean_node (1, type);
8384 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8387 /* For NE, we can only do this simplification if integer
8388 or we don't honor IEEE floating point NaNs. */
8389 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8390 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8392 /* ... fall through ... */
8395 return constant_boolean_node (0, type);
8401 /* If we are comparing an expression that just has comparisons
8402 of two integer values, arithmetic expressions of those comparisons,
8403 and constants, we can simplify it. There are only three cases
8404 to check: the two values can either be equal, the first can be
8405 greater, or the second can be greater. Fold the expression for
8406 those three values. Since each value must be 0 or 1, we have
8407 eight possibilities, each of which corresponds to the constant 0
8408 or 1 or one of the six possible comparisons.
8410 This handles common cases like (a > b) == 0 but also handles
8411 expressions like ((x > y) - (y > x)) > 0, which supposedly
8412 occur in macroized code. */
8414 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8416 tree cval1 = 0, cval2 = 0;
8419 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8420 /* Don't handle degenerate cases here; they should already
8421 have been handled anyway. */
8422 && cval1 != 0 && cval2 != 0
8423 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8424 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8425 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8426 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8427 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8428 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8429 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8431 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8432 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8434 /* We can't just pass T to eval_subst in case cval1 or cval2
8435 was the same as ARG1. */
8438 = fold (build2 (code, type,
8439 eval_subst (arg0, cval1, maxval,
8443 = fold (build2 (code, type,
8444 eval_subst (arg0, cval1, maxval,
8448 = fold (build2 (code, type,
8449 eval_subst (arg0, cval1, minval,
8453 /* All three of these results should be 0 or 1. Confirm they
8454 are. Then use those values to select the proper code
8457 if ((integer_zerop (high_result)
8458 || integer_onep (high_result))
8459 && (integer_zerop (equal_result)
8460 || integer_onep (equal_result))
8461 && (integer_zerop (low_result)
8462 || integer_onep (low_result)))
8464 /* Make a 3-bit mask with the high-order bit being the
8465 value for `>', the next for '=', and the low for '<'. */
8466 switch ((integer_onep (high_result) * 4)
8467 + (integer_onep (equal_result) * 2)
8468 + integer_onep (low_result))
8472 return omit_one_operand (type, integer_zero_node, arg0);
8493 return omit_one_operand (type, integer_one_node, arg0);
8496 tem = build2 (code, type, cval1, cval2);
8498 return save_expr (tem);
8505 /* If this is a comparison of a field, we may be able to simplify it. */
8506 if (((TREE_CODE (arg0) == COMPONENT_REF
8507 && lang_hooks.can_use_bit_fields_p ())
8508 || TREE_CODE (arg0) == BIT_FIELD_REF)
8509 && (code == EQ_EXPR || code == NE_EXPR)
8510 /* Handle the constant case even without -O
8511 to make sure the warnings are given. */
8512 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8514 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8519 /* If this is a comparison of complex values and either or both sides
8520 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8521 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8522 This may prevent needless evaluations. */
8523 if ((code == EQ_EXPR || code == NE_EXPR)
8524 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8525 && (TREE_CODE (arg0) == COMPLEX_EXPR
8526 || TREE_CODE (arg1) == COMPLEX_EXPR
8527 || TREE_CODE (arg0) == COMPLEX_CST
8528 || TREE_CODE (arg1) == COMPLEX_CST))
8530 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8531 tree real0, imag0, real1, imag1;
8533 arg0 = save_expr (arg0);
8534 arg1 = save_expr (arg1);
8535 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8536 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8537 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8538 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8540 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8543 fold (build2 (code, type, real0, real1)),
8544 fold (build2 (code, type, imag0, imag1))));
8547 /* Optimize comparisons of strlen vs zero to a compare of the
8548 first character of the string vs zero. To wit,
8549 strlen(ptr) == 0 => *ptr == 0
8550 strlen(ptr) != 0 => *ptr != 0
8551 Other cases should reduce to one of these two (or a constant)
8552 due to the return value of strlen being unsigned. */
8553 if ((code == EQ_EXPR || code == NE_EXPR)
8554 && integer_zerop (arg1)
8555 && TREE_CODE (arg0) == CALL_EXPR)
8557 tree fndecl = get_callee_fndecl (arg0);
8561 && DECL_BUILT_IN (fndecl)
8562 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8563 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8564 && (arglist = TREE_OPERAND (arg0, 1))
8565 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8566 && ! TREE_CHAIN (arglist))
8567 return fold (build2 (code, type,
8568 build1 (INDIRECT_REF, char_type_node,
8569 TREE_VALUE(arglist)),
8570 integer_zero_node));
8573 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8574 into a single range test. */
8575 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8576 && TREE_CODE (arg1) == INTEGER_CST
8577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8578 && !integer_zerop (TREE_OPERAND (arg0, 1))
8579 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8580 && !TREE_OVERFLOW (arg1))
8582 t1 = fold_div_compare (code, type, arg0, arg1);
8583 if (t1 != NULL_TREE)
8587 /* Both ARG0 and ARG1 are known to be constants at this point. */
8588 t1 = fold_relational_const (code, type, arg0, arg1);
8589 return (t1 == NULL_TREE ? t : t1);
8591 case UNORDERED_EXPR:
8599 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8601 t1 = fold_relational_const (code, type, arg0, arg1);
8602 if (t1 != NULL_TREE)
8606 /* If the first operand is NaN, the result is constant. */
8607 if (TREE_CODE (arg0) == REAL_CST
8608 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8609 && (code != LTGT_EXPR || ! flag_trapping_math))
8611 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8614 return omit_one_operand (type, t1, arg1);
8617 /* If the second operand is NaN, the result is constant. */
8618 if (TREE_CODE (arg1) == REAL_CST
8619 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8620 && (code != LTGT_EXPR || ! flag_trapping_math))
8622 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8625 return omit_one_operand (type, t1, arg0);
8628 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8630 tree targ0 = strip_float_extensions (arg0);
8631 tree targ1 = strip_float_extensions (arg1);
8632 tree newtype = TREE_TYPE (targ0);
8634 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8635 newtype = TREE_TYPE (targ1);
8637 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8638 return fold (build2 (code, type, fold_convert (newtype, targ0),
8639 fold_convert (newtype, targ1)));
8645 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8646 so all simple results must be passed through pedantic_non_lvalue. */
8647 if (TREE_CODE (arg0) == INTEGER_CST)
8649 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8650 /* Only optimize constant conditions when the selected branch
8651 has the same type as the COND_EXPR. This avoids optimizing
8652 away "c ? x : throw", where the throw has a void type. */
8653 if (! VOID_TYPE_P (TREE_TYPE (tem))
8654 || VOID_TYPE_P (type))
8655 return pedantic_non_lvalue (tem);
8658 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8659 return pedantic_omit_one_operand (type, arg1, arg0);
8661 /* If we have A op B ? A : C, we may be able to convert this to a
8662 simpler expression, depending on the operation and the values
8663 of B and C. Signed zeros prevent all of these transformations,
8664 for reasons given above each one.
8666 Also try swapping the arguments and inverting the conditional. */
8667 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8668 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8669 arg1, TREE_OPERAND (arg0, 1))
8670 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8672 tem = fold_cond_expr_with_comparison (type, arg0,
8673 TREE_OPERAND (t, 1),
8674 TREE_OPERAND (t, 2));
8679 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8680 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8681 TREE_OPERAND (t, 2),
8682 TREE_OPERAND (arg0, 1))
8683 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8685 tem = invert_truthvalue (arg0);
8686 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8688 tem = fold_cond_expr_with_comparison (type, tem,
8689 TREE_OPERAND (t, 2),
8690 TREE_OPERAND (t, 1));
8696 /* If the second operand is simpler than the third, swap them
8697 since that produces better jump optimization results. */
8698 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8699 TREE_OPERAND (t, 2), false))
8701 /* See if this can be inverted. If it can't, possibly because
8702 it was a floating-point inequality comparison, don't do
8704 tem = invert_truthvalue (arg0);
8706 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8707 return fold (build3 (code, type, tem,
8708 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8711 /* Convert A ? 1 : 0 to simply A. */
8712 if (integer_onep (TREE_OPERAND (t, 1))
8713 && integer_zerop (TREE_OPERAND (t, 2))
8714 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8715 call to fold will try to move the conversion inside
8716 a COND, which will recurse. In that case, the COND_EXPR
8717 is probably the best choice, so leave it alone. */
8718 && type == TREE_TYPE (arg0))
8719 return pedantic_non_lvalue (arg0);
8721 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8722 over COND_EXPR in cases such as floating point comparisons. */
8723 if (integer_zerop (TREE_OPERAND (t, 1))
8724 && integer_onep (TREE_OPERAND (t, 2))
8725 && truth_value_p (TREE_CODE (arg0)))
8726 return pedantic_non_lvalue (fold_convert (type,
8727 invert_truthvalue (arg0)));
8729 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8730 if (TREE_CODE (arg0) == LT_EXPR
8731 && integer_zerop (TREE_OPERAND (arg0, 1))
8732 && integer_zerop (TREE_OPERAND (t, 2))
8733 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8734 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8735 TREE_TYPE (tem), tem, arg1)));
8737 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8738 already handled above. */
8739 if (TREE_CODE (arg0) == BIT_AND_EXPR
8740 && integer_onep (TREE_OPERAND (arg0, 1))
8741 && integer_zerop (TREE_OPERAND (t, 2))
8742 && integer_pow2p (arg1))
8744 tree tem = TREE_OPERAND (arg0, 0);
8746 if (TREE_CODE (tem) == RSHIFT_EXPR
8747 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8748 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8749 return fold (build2 (BIT_AND_EXPR, type,
8750 TREE_OPERAND (tem, 0), arg1));
8753 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8754 is probably obsolete because the first operand should be a
8755 truth value (that's why we have the two cases above), but let's
8756 leave it in until we can confirm this for all front-ends. */
8757 if (integer_zerop (TREE_OPERAND (t, 2))
8758 && TREE_CODE (arg0) == NE_EXPR
8759 && integer_zerop (TREE_OPERAND (arg0, 1))
8760 && integer_pow2p (arg1)
8761 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8762 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8763 arg1, OEP_ONLY_CONST))
8764 return pedantic_non_lvalue (fold_convert (type,
8765 TREE_OPERAND (arg0, 0)));
8767 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8768 if (integer_zerop (TREE_OPERAND (t, 2))
8769 && truth_value_p (TREE_CODE (arg0))
8770 && truth_value_p (TREE_CODE (arg1)))
8771 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8773 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8774 if (integer_onep (TREE_OPERAND (t, 2))
8775 && truth_value_p (TREE_CODE (arg0))
8776 && truth_value_p (TREE_CODE (arg1)))
8778 /* Only perform transformation if ARG0 is easily inverted. */
8779 tem = invert_truthvalue (arg0);
8780 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8781 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8784 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8785 if (integer_zerop (arg1)
8786 && truth_value_p (TREE_CODE (arg0))
8787 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8789 /* Only perform transformation if ARG0 is easily inverted. */
8790 tem = invert_truthvalue (arg0);
8791 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8792 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8793 TREE_OPERAND (t, 2)));
8796 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8797 if (integer_onep (arg1)
8798 && truth_value_p (TREE_CODE (arg0))
8799 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8800 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8801 TREE_OPERAND (t, 2)));
8806 /* When pedantic, a compound expression can be neither an lvalue
8807 nor an integer constant expression. */
8808 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8810 /* Don't let (0, 0) be null pointer constant. */
8811 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8812 : fold_convert (type, arg1);
8813 return pedantic_non_lvalue (tem);
8817 return build_complex (type, arg0, arg1);
8821 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8823 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8824 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8825 TREE_OPERAND (arg0, 1));
8826 else if (TREE_CODE (arg0) == COMPLEX_CST)
8827 return TREE_REALPART (arg0);
8828 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8829 return fold (build2 (TREE_CODE (arg0), type,
8830 fold (build1 (REALPART_EXPR, type,
8831 TREE_OPERAND (arg0, 0))),
8832 fold (build1 (REALPART_EXPR, type,
8833 TREE_OPERAND (arg0, 1)))));
8837 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8838 return fold_convert (type, integer_zero_node);
8839 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8840 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8841 TREE_OPERAND (arg0, 0));
8842 else if (TREE_CODE (arg0) == COMPLEX_CST)
8843 return TREE_IMAGPART (arg0);
8844 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8845 return fold (build2 (TREE_CODE (arg0), type,
8846 fold (build1 (IMAGPART_EXPR, type,
8847 TREE_OPERAND (arg0, 0))),
8848 fold (build1 (IMAGPART_EXPR, type,
8849 TREE_OPERAND (arg0, 1)))));
8852 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8854 case CLEANUP_POINT_EXPR:
8855 if (! has_cleanups (arg0))
8856 return TREE_OPERAND (t, 0);
8859 enum tree_code code0 = TREE_CODE (arg0);
8860 int kind0 = TREE_CODE_CLASS (code0);
8861 tree arg00 = TREE_OPERAND (arg0, 0);
8864 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8865 return fold (build1 (code0, type,
8866 fold (build1 (CLEANUP_POINT_EXPR,
8867 TREE_TYPE (arg00), arg00))));
8869 if (kind0 == '<' || kind0 == '2'
8870 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8871 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8872 || code0 == TRUTH_XOR_EXPR)
8874 arg01 = TREE_OPERAND (arg0, 1);
8876 if (TREE_CONSTANT (arg00)
8877 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8878 && ! has_cleanups (arg00)))
8879 return fold (build2 (code0, type, arg00,
8880 fold (build1 (CLEANUP_POINT_EXPR,
8881 TREE_TYPE (arg01), arg01))));
8883 if (TREE_CONSTANT (arg01))
8884 return fold (build2 (code0, type,
8885 fold (build1 (CLEANUP_POINT_EXPR,
8886 TREE_TYPE (arg00), arg00)),
8894 /* Check for a built-in function. */
8895 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8896 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8898 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8900 tree tmp = fold_builtin (t);
8908 } /* switch (code) */
8911 #ifdef ENABLE_FOLD_CHECKING
8914 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8915 static void fold_check_failed (tree, tree);
8916 void print_fold_checksum (tree);
8918 /* When --enable-checking=fold, compute a digest of expr before
8919 and after actual fold call to see if fold did not accidentally
8920 change original expr. */
/* NOTE(review): sampled fragment -- the function signature and the
   declarations of ctx, ht and ret lie outside the visible lines.  */
8927 unsigned char checksum_before[16], checksum_after[16];
8930 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
/* MD5-checksum EXPR before folding; HT de-duplicates shared subtrees.  */
8931 md5_init_ctx (&ctx);
8932 fold_checksum_tree (expr, &ctx, ht);
8933 md5_finish_ctx (&ctx, checksum_before);
/* Do the actual folding.  */
8936 ret = fold_1 (expr);
/* Re-checksum the ORIGINAL expr; fold_1 must not have mutated it.  */
8938 md5_init_ctx (&ctx);
8939 fold_checksum_tree (expr, &ctx, ht);
8940 md5_finish_ctx (&ctx, checksum_after);
/* Any difference means fold_1 destructively changed its input.  */
8943 if (memcmp (checksum_before, checksum_after, 16))
8944 fold_check_failed (expr, ret);
/* Debugging aid: print the MD5 checksum of EXPR to stderr as 32 hex
   digits followed by a newline.  */
8950 print_fold_checksum (tree expr)
8953 unsigned char checksum[16], cnt;
8956 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8957 md5_init_ctx (&ctx);
8958 fold_checksum_tree (expr, &ctx, ht);
8959 md5_finish_ctx (&ctx, checksum)
8961 for (cnt = 0; cnt < 16; ++cnt)
8962 fprintf (stderr, "%02x", checksum[cnt]);
8963 putc ('\n', stderr);
/* Report that fold destructively modified its input tree.  Both
   arguments are unused; this exists only to give the ICE a name.  */
8967 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8969 internal_error ("fold check: original tree changed by fold");
/* Fold the bytes of EXPR and everything it transitively references into
   the MD5 context CTX.  HT records already-visited nodes so shared
   subtrees are hashed only once.  Fields that fold is *allowed* to
   modify (see the three memcpy cases below) are masked out by hashing a
   scrubbed copy in BUF instead of the node itself.  */
8973 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8976 enum tree_code code;
8977 char buf[sizeof (struct tree_decl)];
/* Compile-time sanity check: BUF must be large enough to hold a copy of
   any node variant scrubbed below.  */
8980 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8981 > sizeof (struct tree_decl)
8982 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8986 slot = htab_find_slot (ht, expr, INSERT);
8990 code = TREE_CODE (expr);
8991 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8993 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8994 memcpy (buf, expr, tree_size (expr));
8996 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8998 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9000 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9001 memcpy (buf, expr, tree_size (expr));
9003 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9005 else if (TREE_CODE_CLASS (code) == 't'
9006 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9008 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9009 memcpy (buf, expr, tree_size (expr));
9011 TYPE_POINTER_TO (expr) = NULL;
9012 TYPE_REFERENCE_TO (expr) = NULL;
/* Hash the node's raw bytes, then recurse into common references.  */
9014 md5_process_bytes (expr, tree_size (expr), ctx);
9015 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9016 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9017 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Now recurse into the code-class-specific payload.  */
9018 len = TREE_CODE_LENGTH (code);
9019 switch (TREE_CODE_CLASS (code))
9025 md5_process_bytes (TREE_STRING_POINTER (expr),
9026 TREE_STRING_LENGTH (expr), ctx);
9029 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9030 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9033 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9043 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9044 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9047 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9048 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* A few expression codes have operand counts differing from
   TREE_CODE_LENGTH; override LEN for them.  */
9057 case SAVE_EXPR: len = 2; break;
9058 case GOTO_SUBROUTINE_EXPR: len = 0; break;
9059 case RTL_EXPR: len = 0; break;
9060 case WITH_CLEANUP_EXPR: len = 2; break;
9069 for (i = 0; i < len; ++i)
9070 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declarations: hash every tree-valued field of the decl.  */
9073 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9074 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9075 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9076 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9077 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9078 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9079 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9080 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9081 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9082 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9083 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Types: likewise for the type node's tree-valued fields.  */
9086 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9087 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9088 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9089 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9090 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9091 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9092 if (INTEGRAL_TYPE_P (expr)
9093 || SCALAR_FLOAT_TYPE_P (expr))
9095 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9096 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9098 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9099 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9100 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9109 /* Perform constant folding and related simplification of initializer
9110 expression EXPR. This behaves identically to "fold" but ignores
9111 potential run-time traps and exceptions that fold must preserve. */
9114 fold_initializer (tree expr)
/* Save the trap-related flags, clear them so fold may simplify freely
   (initializers are evaluated at translation time), then restore.  */
9116 int saved_signaling_nans = flag_signaling_nans;
9117 int saved_trapping_math = flag_trapping_math;
9118 int saved_trapv = flag_trapv;
9121 flag_signaling_nans = 0;
9122 flag_trapping_math = 0;
9125 result = fold (expr);
9127 flag_signaling_nans = saved_signaling_nans;
9128 flag_trapping_math = saved_trapping_math;
9129 flag_trapv = saved_trapv;
9134 /* Determine if first argument is a multiple of second argument. Return 0 if
9135 it is not, or we cannot easily determined it to be.
9137 An example of the sort of thing we care about (at this point; this routine
9138 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9139 fold cases do now) is discovering that
9141 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9147 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9149 This code also handles discovering that
9151 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9153 is a multiple of 8 so we don't have to worry about dealing with a
9156 Note that we *look* inside a SAVE_EXPR only to determine how it was
9157 calculated; it is not safe for fold to do much of anything else with the
9158 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9159 at run time. For example, the latter example above *cannot* be implemented
9160 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9161 evaluation time of the original SAVE_EXPR is not necessarily the same at
9162 the time the new expression is evaluated. The only optimization of this
9163 sort that would be valid is changing
9165 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9169 SAVE_EXPR (I) * SAVE_EXPR (J)
9171 (where the same SAVE_EXPR (J) is used in the original and the
9172 transformed version). */
9175 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
9177 if (operand_equal_p (top, bottom, 0))
9180 if (TREE_CODE (type) != INTEGER_TYPE)
9183 switch (TREE_CODE (top))
/* A product is a multiple if either factor is.  */
9186 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9187 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A sum/difference is a multiple only if both operands are.  */
9191 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9192 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* X << C: rewrite as X * (1 << C) when the shift doesn't overflow.  */
9195 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9199 op1 = TREE_OPERAND (top, 1);
9200 /* const_binop may not detect overflow correctly,
9201 so check for it explicitly here. */
9202 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9203 > TREE_INT_CST_LOW (op1)
9204 && TREE_INT_CST_HIGH (op1) == 0
9205 && 0 != (t1 = fold_convert (type,
9206 const_binop (LSHIFT_EXPR,
9209 && ! TREE_OVERFLOW (t1))
9210 return multiple_of_p (type, t1, bottom);
9215 /* Can't handle conversions from non-integral or wider integral type. */
9216 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9217 || (TYPE_PRECISION (type)
9218 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9221 /* .. fall through ... */
9224 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Base case: both constants -- just compute TOP % BOTTOM, but punt on
   negative values in unsigned types where the math is ambiguous.  */
9227 if (TREE_CODE (bottom) != INTEGER_CST
9228 || (TYPE_UNSIGNED (type)
9229 && (tree_int_cst_sgn (top) < 0
9230 || tree_int_cst_sgn (bottom) < 0)))
9232 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9240 /* Return true if `t' is known to be non-negative.  A false return means
"don't know", not "known negative" -- the analysis is conservative.  */
9243 tree_expr_nonnegative_p (tree t)
9245 switch (TREE_CODE (t))
9251 return tree_int_cst_sgn (t) >= 0;
9254 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* PLUS: for floats, nonneg + nonneg stays nonneg (no wraparound).  */
9257 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9258 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9259 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9261 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9262 both unsigned and at least 2 bits shorter than the result. */
9263 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9264 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9265 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9267 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9268 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9269 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9270 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9272 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9273 TYPE_PRECISION (inner2)) + 1;
9274 return prec < TYPE_PRECISION (TREE_TYPE (t));
9280 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9282 /* x * x for floating point x is always non-negative. */
9283 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9285 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9286 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9289 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9290 both unsigned and their total bits is shorter than the result. */
9291 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9292 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9293 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9295 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9296 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9297 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9298 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9299 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9300 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonneg / nonneg is nonneg.  */
9304 case TRUNC_DIV_EXPR:
9306 case FLOOR_DIV_EXPR:
9307 case ROUND_DIV_EXPR:
9308 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9309 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulo: result takes the sign of the dividend only.  */
9311 case TRUNC_MOD_EXPR:
9313 case FLOOR_MOD_EXPR:
9314 case ROUND_MOD_EXPR:
9315 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9318 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9319 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* MAX: nonnegative if either arm is; MIN: only if both are.  */
9322 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9323 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9326 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9327 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: reason about the inner and outer types.  */
9331 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9332 tree outer_type = TREE_TYPE (t);
9334 if (TREE_CODE (outer_type) == REAL_TYPE)
9336 if (TREE_CODE (inner_type) == REAL_TYPE)
9337 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9338 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9340 if (TYPE_UNSIGNED (inner_type))
9342 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9345 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9347 if (TREE_CODE (inner_type) == REAL_TYPE)
9348 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9349 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9350 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9351 && TYPE_UNSIGNED (inner_type);
/* COND_EXPR: both arms must be nonnegative.  */
9357 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9358 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9360 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9362 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9363 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9365 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9366 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9368 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9370 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
/* Wrappers that don't change the value: look through them.  */
9372 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9373 case NON_LVALUE_EXPR:
9374 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9376 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9378 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
/* TARGET_EXPR: analyze what gets stored into the slot.  */
9382 tree temp = TARGET_EXPR_SLOT (t);
9383 t = TARGET_EXPR_INITIAL (t);
9385 /* If the initializer is non-void, then it's a normal expression
9386 that will be assigned to the slot. */
9387 if (!VOID_TYPE_P (t))
9388 return tree_expr_nonnegative_p (t);
9390 /* Otherwise, the initializer sets the slot in some way. One common
9391 way is an assignment statement at the end of the initializer. */
9394 if (TREE_CODE (t) == BIND_EXPR)
9395 t = expr_last (BIND_EXPR_BODY (t));
9396 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9397 || TREE_CODE (t) == TRY_CATCH_EXPR)
9398 t = expr_last (TREE_OPERAND (t, 0));
9399 else if (TREE_CODE (t) == STATEMENT_LIST)
9404 if (TREE_CODE (t) == MODIFY_EXPR
9405 && TREE_OPERAND (t, 0) == temp)
9406 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: recognize builtins with known ranges.  */
9413 tree fndecl = get_callee_fndecl (t);
9414 tree arglist = TREE_OPERAND (t, 1);
9416 && DECL_BUILT_IN (fndecl)
9417 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9418 switch (DECL_FUNCTION_CODE (fndecl))
9420 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9421 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9422 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9423 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* These builtins always return a nonnegative value.  */
9425 CASE_BUILTIN_F (BUILT_IN_ACOS)
9426 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9427 CASE_BUILTIN_F (BUILT_IN_CABS)
9428 CASE_BUILTIN_F (BUILT_IN_COSH)
9429 CASE_BUILTIN_F (BUILT_IN_ERFC)
9430 CASE_BUILTIN_F (BUILT_IN_EXP)
9431 CASE_BUILTIN_F (BUILT_IN_EXP10)
9432 CASE_BUILTIN_F (BUILT_IN_EXP2)
9433 CASE_BUILTIN_F (BUILT_IN_FABS)
9434 CASE_BUILTIN_F (BUILT_IN_FDIM)
9435 CASE_BUILTIN_F (BUILT_IN_FREXP)
9436 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9437 CASE_BUILTIN_F (BUILT_IN_POW10)
9438 CASE_BUILTIN_I (BUILT_IN_FFS)
9439 CASE_BUILTIN_I (BUILT_IN_PARITY)
9440 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9444 CASE_BUILTIN_F (BUILT_IN_SQRT)
9445 /* sqrt(-0.0) is -0.0. */
9446 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9448 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
/* These propagate the sign of their (first) argument.  */
9450 CASE_BUILTIN_F (BUILT_IN_ASINH)
9451 CASE_BUILTIN_F (BUILT_IN_ATAN)
9452 CASE_BUILTIN_F (BUILT_IN_ATANH)
9453 CASE_BUILTIN_F (BUILT_IN_CBRT)
9454 CASE_BUILTIN_F (BUILT_IN_CEIL)
9455 CASE_BUILTIN_F (BUILT_IN_ERF)
9456 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9457 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9458 CASE_BUILTIN_F (BUILT_IN_FMOD)
9459 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9460 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9461 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9462 CASE_BUILTIN_F (BUILT_IN_LRINT)
9463 CASE_BUILTIN_F (BUILT_IN_LROUND)
9464 CASE_BUILTIN_F (BUILT_IN_MODF)
9465 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9466 CASE_BUILTIN_F (BUILT_IN_POW)
9467 CASE_BUILTIN_F (BUILT_IN_RINT)
9468 CASE_BUILTIN_F (BUILT_IN_ROUND)
9469 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9470 CASE_BUILTIN_F (BUILT_IN_SINH)
9471 CASE_BUILTIN_F (BUILT_IN_TANH)
9472 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9473 /* True if the 1st argument is nonnegative. */
9474 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9476 CASE_BUILTIN_F (BUILT_IN_FMAX)
9477 /* True if the 1st OR 2nd arguments are nonnegative. */
9478 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9479 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9481 CASE_BUILTIN_F (BUILT_IN_FMIN)
9482 /* True if the 1st AND 2nd arguments are nonnegative. */
9483 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9484 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9486 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9487 /* True if the 2nd argument is nonnegative. */
9488 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9492 #undef CASE_BUILTIN_F
9493 #undef CASE_BUILTIN_I
9497 /* ... fall through ... */
9500 if (truth_value_p (TREE_CODE (t)))
9501 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9505 /* We don't know sign of `t', so be conservative and return false. */
9509 /* Return true when T is an address and is known to be nonzero.
9510 For floating point we further ensure that T is not denormal.
9511 Similar logic is present in nonzero_address in rtlanal.h
A false return means "don't know", not "known zero".  */
9514 tree_expr_nonzero_p (tree t)
9516 tree type = TREE_TYPE (t);
9518 /* Doing something useful for floating point would need more work. */
9519 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9522 switch (TREE_CODE (t))
/* Negation of a nonzero value is nonzero -- but only when signed
   overflow is undefined (not unsigned, not -fwrapv).  */
9525 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9526 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9529 return !integer_zerop (t);
/* PLUS: with undefined overflow, nonneg + nonneg cannot wrap to zero
   unless both are zero.  */
9532 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9534 /* With the presence of negative values it is hard
9535 to say something. */
9536 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9537 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9539 /* One of operands must be positive and the other non-negative. */
9540 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9541 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MULT: with undefined overflow, nonzero * nonzero is nonzero.  */
9546 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9548 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9549 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions keep nonzero-ness only when no bits are dropped.  */
9555 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9556 tree outer_type = TREE_TYPE (t);
9558 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9559 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9564 /* Weak declarations may link to NULL. */
9565 if (DECL_P (TREE_OPERAND (t, 0)))
9566 return !DECL_WEAK (TREE_OPERAND (t, 0));
9567 /* Constants and all other cases are never weak. */
/* COND_EXPR: both arms must be nonzero.  */
9571 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9572 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9575 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9576 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9579 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9581 /* When both operands are nonzero, then MAX must be too. */
9582 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9585 /* MAX where operand 0 is positive is positive. */
9586 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9588 /* MAX where operand 1 is positive is positive. */
9589 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9590 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
/* COMPOUND_EXPR: value comes from the second operand.  */
9597 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9600 case NON_LVALUE_EXPR:
9601 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* BIT_IOR: nonzero if either operand is.  */
9604 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9605 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9613 /* Return true if `r' is known to be non-negative.
9614 Only handles constants at the moment. */
9617 rtl_expr_nonnegative_p (rtx r)
9619 switch (GET_CODE (r))
9622 return INTVAL (r) >= 0;
/* VOIDmode CONST_DOUBLE holds an integer; check its high word.  */
9625 if (GET_MODE (r) == VOIDmode)
9626 return CONST_DOUBLE_HIGH (r) >= 0;
/* A constant vector is nonnegative iff every element is.  */
9634 units = CONST_VECTOR_NUNITS (r);
9636 for (i = 0; i < units; ++i)
9638 elt = CONST_VECTOR_ELT (r, i);
9639 if (!rtl_expr_nonnegative_p (elt))
9648 /* These are always nonnegative. */
9657 /* See if we are applying CODE, a relational to the highest or lowest
9658 possible integer of TYPE. If so, then the result is a compile
Updates *CODE_P/*OP1_P in place for the "off by one from the extreme"
cases, or returns a constant-folded result for the exact-extreme cases;
returns NULL-equivalent otherwise (tail not visible in this fragment).  */
9662 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9667 enum tree_code code = *code_p;
9668 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only handle integer constants that fit in one host wide int.  */
9670 if (TREE_CODE (op1) == INTEGER_CST
9671 && ! TREE_CONSTANT_OVERFLOW (op1)
9672 && width <= HOST_BITS_PER_WIDE_INT
9673 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9674 || POINTER_TYPE_P (TREE_TYPE (op1))))
9676 unsigned HOST_WIDE_INT signed_max;
9677 unsigned HOST_WIDE_INT max, min;
/* Compute the extreme representable values for OP1's width.  */
9679 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9681 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9683 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9689 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 == MAX: X > MAX is always false, X <= MAX always true, etc.  */
9692 if (TREE_INT_CST_HIGH (op1) == 0
9693 && TREE_INT_CST_LOW (op1) == max)
9697 return omit_one_operand (type, integer_zero_node, op0);
9703 return omit_one_operand (type, integer_one_node, op0);
9709 /* The GE_EXPR and LT_EXPR cases above are not normally
9710 reached because of previous transformations. */
/* OP1 == MAX-1: nudge to the extreme and tighten the comparison.  */
9715 else if (TREE_INT_CST_HIGH (op1) == 0
9716 && TREE_INT_CST_LOW (op1) == max - 1)
9721 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9725 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* OP1 == MIN: X < MIN is always false, X >= MIN always true, etc.  */
9730 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9731 && TREE_INT_CST_LOW (op1) == min)
9735 return omit_one_operand (type, integer_zero_node, op0);
9742 return omit_one_operand (type, integer_one_node, op0);
/* OP1 == MIN+1: nudge to the extreme and tighten the comparison.  */
9751 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9752 && TREE_INT_CST_LOW (op1) == min + 1)
9757 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9761 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned X <= signed_max / X > signed_max is really a sign test:
   rewrite as (signed) X >= 0 / (signed) X < 0.  */
9767 else if (TREE_INT_CST_HIGH (op1) == 0
9768 && TREE_INT_CST_LOW (op1) == signed_max
9769 && TYPE_UNSIGNED (TREE_TYPE (op1))
9770 /* signed_type does not work on pointer types. */
9771 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9773 /* The following case also applies to X < signed_max+1
9774 and X >= signed_max+1 because previous transformations. */
9775 if (code == LE_EXPR || code == GT_EXPR)
9777 tree st0, st1, exp, retval;
9778 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9779 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9781 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9783 fold_convert (st0, op0),
9784 fold_convert (st1, integer_zero_node));
9787 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9789 TREE_OPERAND (exp, 0),
9790 TREE_OPERAND (exp, 1));
9792 /* If we are in gimple form, then returning EXP would create
9793 non-gimple expressions. Clearing it is safe and insures
9794 we do not allow a non-gimple expression to escape. */
9798 return (retval ? retval : exp);
9807 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9808 attempt to fold the expression to a constant without modifying TYPE,
9811 If the expression could be simplified to a constant, then return
9812 the constant. If the expression would not be simplified to a
9813 constant, then return NULL_TREE.
9815 Note this is primarily designed to be called after gimplification
9816 of the tree structures and when at least one operand is a constant.
9817 As a result of those simplifying assumptions this routine is far
9818 simpler than the generic fold routine. */
9821 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9829 /* If this is a commutative operation, and ARG0 is a constant, move it
9830 to ARG1 to reduce the number of tests below. */
9831 if (commutative_tree_code (code)
9832 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9839 /* If either operand is a complex type, extract its real component. */
9840 if (TREE_CODE (op0) == COMPLEX_CST)
9841 subop0 = TREE_REALPART (op0);
9845 if (TREE_CODE (op1) == COMPLEX_CST)
9846 subop1 = TREE_REALPART (op1);
9850 /* Note if either argument is not a real or integer constant.
9851 With a few exceptions, simplification is limited to cases
9852 where both arguments are constants. */
9853 if ((TREE_CODE (subop0) != INTEGER_CST
9854 && TREE_CODE (subop0) != REAL_CST)
9855 || (TREE_CODE (subop1) != INTEGER_CST
9856 && TREE_CODE (subop1) != REAL_CST))
9862 /* (plus (address) (const_int)) is a constant. */
9863 if (TREE_CODE (op0) == PLUS_EXPR
9864 && TREE_CODE (op1) == INTEGER_CST
9865 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9866 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9867 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9869 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9871 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9872 const_binop (PLUS_EXPR, op1,
9873 TREE_OPERAND (op0, 1), 0));
9881 /* Both arguments are constants. Simplify. */
9882 tem = const_binop (code, op0, op1, 0);
9883 if (tem != NULL_TREE)
9885 /* The return value should always have the same type as
9886 the original expression. */
9887 if (TREE_TYPE (tem) != type)
9888 tem = fold_convert (type, tem);
9895 /* Fold &x - &x. This can happen from &x.foo - &x.
9896 This is unsafe for certain floats even in non-IEEE formats.
9897 In IEEE, it is unsafe because it does wrong for NaNs.
9898 Also note that operand_equal_p is always false if an
9899 operand is volatile. */
9900 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9901 return fold_convert (type, integer_zero_node);
9907 /* Special case multiplication or bitwise AND where one argument
9909 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9910 return omit_one_operand (type, op1, op0);
9912 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9913 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9914 && real_zerop (op1))
9915 return omit_one_operand (type, op1, op0);
9920 /* Special case when we know the result will be all ones. */
9921 if (integer_all_onesp (op1))
9922 return omit_one_operand (type, op1, op0);
9926 case TRUNC_DIV_EXPR:
9927 case ROUND_DIV_EXPR:
9928 case FLOOR_DIV_EXPR:
9930 case EXACT_DIV_EXPR:
9931 case TRUNC_MOD_EXPR:
9932 case ROUND_MOD_EXPR:
9933 case FLOOR_MOD_EXPR:
9936 /* Division by zero is undefined. */
9937 if (integer_zerop (op1))
9940 if (TREE_CODE (op1) == REAL_CST
9941 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9942 && real_zerop (op1))
/* MIN against the type minimum / MAX against the type maximum is
   always the constant; OP0 need not be evaluated for value.  */
9948 if (INTEGRAL_TYPE_P (type)
9949 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9950 return omit_one_operand (type, op1, op0);
9955 if (INTEGRAL_TYPE_P (type)
9956 && TYPE_MAX_VALUE (type)
9957 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9958 return omit_one_operand (type, op1, op0);
9963 /* Optimize -1 >> x for arithmetic right shifts. */
9964 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9965 return omit_one_operand (type, op0, op1);
9966 /* ... fall through ... */
9969 if (integer_zerop (op0))
9970 return omit_one_operand (type, op0, op1);
9972 /* Since negative shift count is not well-defined, don't
9973 try to compute it in the compiler. */
9974 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9981 /* -1 rotated either direction by any amount is still -1. */
9982 if (integer_all_onesp (op0))
9983 return omit_one_operand (type, op0, op1);
9985 /* 0 rotated either direction by any amount is still zero. */
9986 if (integer_zerop (op0))
9987 return omit_one_operand (type, op0, op1);
9993 return build_complex (type, op0, op1);
10002 /* If one arg is a real or integer constant, put it last. */
10003 if ((TREE_CODE (op0) == INTEGER_CST
10004 && TREE_CODE (op1) != INTEGER_CST)
10005 || (TREE_CODE (op0) == REAL_CST
10006 && TREE_CODE (op1) != REAL_CST))
10013 code = swap_tree_comparison (code);
10016 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10017 This transformation affects the cases which are handled in later
10018 optimizations involving comparisons with non-negative constants. */
10019 if (TREE_CODE (op1) == INTEGER_CST
10020 && TREE_CODE (op0) != INTEGER_CST
10021 && tree_int_cst_sgn (op1) > 0)
10027 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10032 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Check comparisons against the type's extreme values.  */
10040 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10044 /* Fall through. */
10047 case UNORDERED_EXPR:
10057 return fold_relational_const (code, type, op0, op1);
10060 /* This could probably be handled. */
10063 case TRUTH_AND_EXPR:
10064 /* If second arg is constant zero, result is zero, but first arg
10065 must be evaluated. */
10066 if (integer_zerop (op1))
10067 return omit_one_operand (type, op1, op0);
10068 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10069 case will be handled here. */
10070 if (integer_zerop (op0))
10071 return omit_one_operand (type, op0, op1);
10072 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10073 return constant_boolean_node (true, type);
10076 case TRUTH_OR_EXPR:
10077 /* If second arg is constant true, result is true, but we must
10078 evaluate first arg. */
10079 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10080 return omit_one_operand (type, op1, op0);
10081 /* Likewise for first arg, but note this only occurs here for
10083 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10084 return omit_one_operand (type, op0, op1);
10085 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10086 return constant_boolean_node (false, type);
10089 case TRUTH_XOR_EXPR:
10090 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10092 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10093 return constant_boolean_node (x, type);
10102 /* Given the components of a unary expression CODE, TYPE and OP0,
10103 attempt to fold the expression to a constant without modifying
10106 If the expression could be simplified to a constant, then return
10107 the constant. If the expression would not be simplified to a
10108 constant, then return NULL_TREE.
10110 Note this is primarily designed to be called after gimplification
10111 of the tree structures and when op0 is a constant. As a result
10112 of those simplifying assumptions this routine is far simpler than
10113 the generic fold routine. */
10116 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
/* NOTE(review): the return-type line and the remainder of the parameter
   list are elided from this excerpt; presumably the signature is
   (enum tree_code code, tree type, tree op0) returning tree -- confirm
   against the full file.  */
10119 /* Make sure we have a suitable constant argument. */
10120 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
/* For conversions, a COMPLEX_CST operand is judged by its real part:
   only INTEGER_CST/REAL_CST components are foldable below.  */
10124 if (TREE_CODE (op0) == COMPLEX_CST)
10125 subop = TREE_REALPART (op0);
10129 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Dispatch on CODE; each case folds one unary operation on a constant.  */
10138 case FIX_TRUNC_EXPR:
10139 case FIX_FLOOR_EXPR:
10140 case FIX_CEIL_EXPR:
/* Numeric conversions (including the FIX_* float->int variants) are
   delegated to fold_convert_const.  */
10141 return fold_convert_const (code, type, op0);
/* Negation of an integer or real constant.  */
10144 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10145 return fold_negate_const (op0, type);
/* Absolute value of an integer or real constant.  */
10150 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10151 return fold_abs_const (op0, type);
/* Bitwise complement is folded only for integer constants.  */
10156 if (TREE_CODE (op0) == INTEGER_CST)
10157 return fold_not_const (op0, type);
10161 case REALPART_EXPR:
/* The real/imaginary parts of a COMPLEX_CST are stored directly in the
   node, so folding is a field access.  */
10162 if (TREE_CODE (op0) == COMPLEX_CST)
10163 return TREE_REALPART (op0);
10167 case IMAGPART_EXPR:
10168 if (TREE_CODE (op0) == COMPLEX_CST)
10169 return TREE_IMAGPART (op0);
/* Complex conjugate: same real part, negated imaginary part.  */
10174 if (TREE_CODE (op0) == COMPLEX_CST
10175 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10176 return build_complex (type, TREE_REALPART (op0),
10177 negate_expr (TREE_IMAGPART (op0)))
10185 /* If EXP represents referencing an element in a constant string
10186 (either via pointer arithmetic or array indexing), return the
10187 tree representing the value accessed, otherwise return NULL. */
10190 fold_read_from_constant_string (tree exp)
/* NOTE(review): the return-type line and several body lines (declarations
   of STRING/INDEX, braces) are elided from this excerpt.  */
10192 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10194 tree exp1 = TREE_OPERAND (exp, 0);
/* For *p style access, recover the string constant and the byte offset
   from the pointer expression.  */
10198 if (TREE_CODE (exp) == INDIRECT_REF)
10199 string = string_constant (exp1, &index);
/* Otherwise (ARRAY_REF): normalize the index against the array's
   declared lower bound.  */
10202 tree low_bound = array_ref_low_bound (exp);
10203 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10205 /* Optimize the special-case of a zero lower bound.
10207 We convert the low_bound to sizetype to avoid some problems
10208 with constant folding. (E.g. suppose the lower bound is 1,
10209 and its mode is QI. Without the conversion, (ARRAY
10210 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10211 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10212 if (! integer_zerop (low_bound))
10213 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Fold only when everything is constant and in range: a STRING_CST,
   a constant in-bounds index, and single-byte integer elements whose
   type matches EXP's.  */
10219 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10220 && TREE_CODE (string) == STRING_CST
10221 && TREE_CODE (index) == INTEGER_CST
10222 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10223 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10225 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
/* Read the byte straight out of the string literal's storage.  */
10226 return fold_convert (TREE_TYPE (exp),
10227 build_int_2 ((TREE_STRING_POINTER (string)
10228 [TREE_INT_CST_LOW (index)]), 0));
10233 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10234 an integer constant or real constant.
10236 TYPE is the type of the result. */
10239 fold_negate_const (tree arg0, tree type)
10241 tree t = NULL_TREE;
10243 if (TREE_CODE (arg0) == INTEGER_CST)
10245 unsigned HOST_WIDE_INT low;
10246 HOST_WIDE_INT high;
/* Negate the double-word integer value; neg_double reports whether the
   negation overflowed (e.g. negating the most negative value).  */
10247 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10248 TREE_INT_CST_HIGH (arg0),
10250 t = build_int_2 (low, high);
10251 TREE_TYPE (t) = type;
/* Propagate overflow from the operand, and record new overflow only for
   signed types (unsigned negation wraps without overflow).  */
10253 = (TREE_OVERFLOW (arg0)
10254 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10255 TREE_CONSTANT_OVERFLOW (t)
10256 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
/* Real constants are negated exactly via the REAL_VALUE machinery.  */
10258 else if (TREE_CODE (arg0) == REAL_CST)
10259 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10260 #ifdef ENABLE_CHECKING
10268 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10269 an integer constant or real constant.
10271 TYPE is the type of the result. */
10274 fold_abs_const (tree arg0, tree type)
10276 tree t = NULL_TREE;
10278 if (TREE_CODE (arg0) == INTEGER_CST)
10280 /* If the value is unsigned, then the absolute value is
10281 the same as the ordinary value. */
10282 if (TYPE_UNSIGNED (type))
10284 /* Similarly, if the value is non-negative. */
10285 else if (INT_CST_LT (integer_minus_one_node, arg0))
10287 /* If the value is negative, then the absolute value is
/* ... its negation; reuse the double-word negation used by
   fold_negate_const.  (Some lines elided in this excerpt.)  */
10291 unsigned HOST_WIDE_INT low;
10292 HOST_WIDE_INT high;
10293 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10294 TREE_INT_CST_HIGH (arg0),
10296 t = build_int_2 (low, high);
10297 TREE_TYPE (t) = type;
/* Propagate operand overflow and any overflow from the negation
   (abs of the most negative signed value overflows).  */
10299 = (TREE_OVERFLOW (arg0)
10300 | force_fit_type (t, overflow));
10301 TREE_CONSTANT_OVERFLOW (t)
10302 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
/* For reals: flip the sign only when the value is negative.  */
10306 else if (TREE_CODE (arg0) == REAL_CST)
10308 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10309 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10313 #ifdef ENABLE_CHECKING
10321 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10322 constant. TYPE is the type of the result. */
10325 fold_not_const (tree arg0, tree type)
10327 tree t = NULL_TREE;
10329 if (TREE_CODE (arg0) == INTEGER_CST)
/* Bitwise complement of both halves of the double-word value.  */
10331 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10332 ~ TREE_INT_CST_HIGH (arg0));
10333 TREE_TYPE (t) = type;
/* Truncate/sign-extend the result to TYPE's precision; complement
   itself cannot introduce new overflow, so just copy the flags.  */
10334 force_fit_type (t, 0);
10335 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10336 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10338 #ifdef ENABLE_CHECKING
10346 /* Given CODE, a relational operator, the target type, TYPE and two
10347 constant operands OP0 and OP1, return the result of the
10348 relational operation. If the result is not a compile time
10349 constant, then return NULL_TREE. */
10352 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10354 int result, invert;
10356 /* From here on, the only cases we handle are when the result is
10357 known to be a constant. */
10359 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10361 /* Handle the cases where either operand is a NaN. */
10362 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10363 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
/* (Switch on CODE for the NaN cases; several lines elided in this
   excerpt.)  */
10373 case UNORDERED_EXPR:
/* NOTE(review): when -ftrapping-math is on, folding away a comparison
   with a NaN would lose a potential trap, so folding is suppressed
   here -- the elided lines presumably bail out.  */
10387 if (flag_trapping_math)
10396 return constant_boolean_node (result, type);
10399 /* From here on we're sure there are no NaNs. */
10403 return constant_boolean_node (true, type);
10405 case UNORDERED_EXPR:
10406 return constant_boolean_node (false, type);
10432 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10434 To compute GT, swap the arguments and do LT.
10435 To compute GE, do LT and invert the result.
10436 To compute LE, swap the arguments, do LT and invert the result.
10437 To compute NE, do EQ and invert the result.
10439 Therefore, the code below must handle only EQ and LT. */
10441 if (code == LE_EXPR || code == GT_EXPR)
10446 code = swap_tree_comparison (code);
10449 /* Note that it is safe to invert for real values here because we
10450 have already handled the one case that it matters. */
10453 if (code == NE_EXPR || code == GE_EXPR)
10456 code = invert_tree_comparison (code, false);
10459 /* Compute a result for LT or EQ if args permit;
10460 Otherwise return T. */
10461 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10463 if (code == EQ_EXPR)
10464 result = tree_int_cst_equal (op0, op1);
/* Integer LT must respect the operands' signedness.  */
10465 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10466 result = INT_CST_LT_UNSIGNED (op0, op1);
10468 result = INT_CST_LT (op0, op1);
/* x == 0 folds to false when x is provably nonzero (and evaluating
   x has no side effects to preserve).  */
10471 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10472 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10475 /* Two real constants can be compared explicitly. */
10476 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10478 if (code == EQ_EXPR)
10479 result = REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10480 TREE_REAL_CST (op1));
10482 result = REAL_VALUES_LESS (TREE_REAL_CST (op0),
10483 TREE_REAL_CST (op1));
/* Apply the NE/GE inversion recorded above (INVERT; the assignment
   lines are elided in this excerpt).  */
10490 return constant_boolean_node (result, type);
10493 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10494 avoid confusing the gimplify process. */
10497 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10499 if (TREE_CODE (t) == INDIRECT_REF)
/* &*p simplifies to p, with a NOP to PTRTYPE if the pointer types
   differ.  */
10501 t = TREE_OPERAND (t, 0);
10502 if (TREE_TYPE (t) != ptrtype)
10503 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise, strip component refs down to the underlying object and
   mark it addressable before taking its address.  */
10509 while (handled_component_p (base)
10510 || TREE_CODE (base) == REALPART_EXPR
10511 || TREE_CODE (base) == IMAGPART_EXPR)
10512 base = TREE_OPERAND (base, 0);
10514 TREE_ADDRESSABLE (base) = 1;
10516 t = build1 (ADDR_EXPR, ptrtype, t);
/* Convenience wrapper: take the address of T using the default pointer
   type derived from T's own type.  */
10523 build_fold_addr_expr (tree t)
10525 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10528 /* Builds an expression for an indirection through T, simplifying some
10532 build_fold_indirect_ref (tree t)
10534 tree type = TREE_TYPE (TREE_TYPE (t));
/* *&x simplifies to x (or to x[0] when x is an array and we want an
   element), avoiding a pointless ADDR_EXPR/INDIRECT_REF pair.  */
10539 if (TREE_CODE (sub) == ADDR_EXPR)
10541 tree op = TREE_OPERAND (sub, 0);
10542 tree optype = TREE_TYPE (op);
/* *&x => x when the types agree per the language hook.  */
10544 if (lang_hooks.types_compatible_p (type, optype))
10546 /* *(foo *)&fooarray => fooarray[0] */
10547 else if (TREE_CODE (optype) == ARRAY_TYPE
10548 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10549 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10552 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10553 subtype = TREE_TYPE (sub);
10554 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10555 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10557 sub = build_fold_indirect_ref (sub);
10558 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
/* No simplification applies: build a plain INDIRECT_REF.  */
10561 return build1 (INDIRECT_REF, type, t);
10564 #include "gt-fold-const.h"