1 /* Support for fully folding sub-trees of an expression for C compiler.
2 Copyright (C) 1992-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
33 /* If DISABLE is true, stop issuing warnings. This is used when
34 parsing code that we know will not be executed. This function may
35 be called multiple times, and works as a stack. */
38 c_disable_warnings (bool disable)
42 ++c_inhibit_evaluation_warnings;
43 fold_defer_overflow_warnings ();
47 /* If ENABLE is true, reenable issuing warnings. */
50 c_enable_warnings (bool enable)
54 --c_inhibit_evaluation_warnings;
55 fold_undefer_and_ignore_overflow_warnings ();
59 /* Try to fold ARRAY_REF ary[index] if possible and not handled by
60 normal fold, return NULL_TREE otherwise. */
63 c_fold_array_ref (tree type, tree ary, tree index)
65 if (TREE_CODE (ary) != STRING_CST
66 || TREE_CODE (index) != INTEGER_CST
67 || TREE_OVERFLOW (index)
68 || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
69 || !tree_fits_uhwi_p (index))
72 tree elem_type = TREE_TYPE (TREE_TYPE (ary));
73 unsigned elem_nchars = (TYPE_PRECISION (elem_type)
74 / TYPE_PRECISION (char_type_node));
75 unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
76 tree nelts = array_type_nelts (TREE_TYPE (ary));
77 bool dummy1 = true, dummy2 = true;
78 nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
79 unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
80 if (!tree_int_cst_le (index, nelts)
82 || i + elem_nchars > len)
86 return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);
88 const unsigned char *ptr
89 = ((const unsigned char *)TREE_STRING_POINTER (ary) + i * elem_nchars);
90 return native_interpret_expr (type, ptr, elem_nchars);
93 /* Fully fold EXPR, an expression that was not folded (beyond integer
94 constant expressions and null pointer constants) when being built
95 up. If IN_INIT, this is in a static initializer and certain
96 changes are made to the folding done. Clear *MAYBE_CONST if
97 MAYBE_CONST is not NULL and EXPR is definitely not a constant
98 expression because it contains an evaluated operator (in C99) or an
99 operator outside of sizeof returning an integer constant (in C90)
100 not permitted in constant expressions, or because it contains an
101 evaluated arithmetic overflow. (*MAYBE_CONST should typically be
102 set to true by callers before calling this function.) Return the
103 folded expression. Function arguments have already been folded
104 before calling this function, as have the contents of SAVE_EXPR,
105 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
106 C_MAYBE_CONST_EXPR. LVAL is true if it should be treated as an
110 c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
113 tree eptype = NULL_TREE;
115 bool maybe_const_itself = true;
116 location_t loc = EXPR_LOCATION (expr);
119 maybe_const = &dummy;
120 if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
122 eptype = TREE_TYPE (expr);
123 expr = TREE_OPERAND (expr, 0);
125 ret = c_fully_fold_internal (expr, in_init, maybe_const,
126 &maybe_const_itself, false, lval);
128 ret = fold_convert_loc (loc, eptype, ret);
129 *maybe_const &= maybe_const_itself;
/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.
   LVAL indicates folding of lvalue, where we can't replace it with
   a constant value.  */

c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
                       bool *maybe_const_itself, bool for_int_const, bool lval)
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  /* Per-operand constancy flags for codes that do not necessarily
     evaluate every operand (&&, ||, ?:); merged into the
     out-parameters only for the operands that would be evaluated.  */
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool op0_lval = false;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
      /* Except for variables which we can optimize to its initializer.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
          ret = decl_constant_value (expr);
          /* Avoid unwanted tree sharing between the initializer and current
             function's body where the tree can be modified e.g. by the
             gimplifier.  */
          if (ret != expr && TREE_STATIC (expr))
            ret = unshare_expr (ret);

  /* Remember the original source range so it can be reinstated on the
     folded replacement at the end of the function.  */
  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
      *maybe_const_operands = false;

  if (code == C_MAYBE_CONST_EXPR)
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
        *maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
          *maybe_const_itself = false;
          /* Re-fold the inner expression with FOR_INT_CONST set so a
             non-constant result inhibits folding of the whole.  */
          inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
                                         maybe_const_itself, true, lval);
      ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      *maybe_const_operands = false;
      *maybe_const_operands = false;

  /* Fold individual tree codes as appropriate.  */
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
         C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      /* COMPONENT_REF: only the object operand is folded; the field
         and offset operands are kept as-is.  */
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      /* Carry the reference's qualifiers over to the rebuilt node.  */
      TREE_READONLY (ret) = TREE_READONLY (expr);
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);

      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      /* ARRAY_REF: fold the array (as lvalue if requested) and the
         index (never an lvalue).  */
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
          ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
      if (op0 != orig_op0 || op1 != orig_op1)
        ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      TREE_READONLY (ret) = TREE_READONLY (expr);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POINTER_PLUS_EXPR:
      /* Binary operations evaluating both arguments (increment and
         decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const,
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
         expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
        op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      /* When folding an expression with integer constant operands,
         both operands must have folded to INTEGER_CSTs.  */
      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
                            || TREE_CODE (op1) != INTEGER_CST))

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);

      /* Warn about overflow introduced by folding, but only if
         neither operand was already marked overflowed.  */
      if (TREE_OVERFLOW_P (ret)
          && !TREE_OVERFLOW_P (op0)
          && !TREE_OVERFLOW_P (op1))
        overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      /* Warn for shifts whose operands only became constant through
         this folding (the front end warns separately for literally
         constant operands); skipped while warnings are inhibited.  */
      if (code == LSHIFT_EXPR
          && TREE_CODE (orig_op0) != INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
          && TREE_CODE (op0) == INTEGER_CST
          && c_inhibit_evaluation_warnings == 0
          && tree_int_cst_sgn (op0) < 0)
        warning_at (loc, OPT_Wshift_negative_value,
                    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
          && c_inhibit_evaluation_warnings == 0)
          if (tree_int_cst_sgn (op1) < 0)
            warning_at (loc, OPT_Wshift_count_negative,
                        ? G_("left shift count is negative")
                        : G_("right shift count is negative")));
          else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
                    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
                   && compare_tree_int (op1,
                                        TYPE_PRECISION (TREE_TYPE (orig_op0)))
            warning_at (loc, OPT_Wshift_count_overflow,
                        ? G_("left shift count >= width of type")
                        : G_("right shift count >= width of type")));
          else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
                   && compare_tree_int (op1,
                                        TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
            warning_at (loc, OPT_Wshift_count_overflow,
                        ? G_("left shift count >= width of vector element")
                        : G_("right shift count >= width of vector element"));
      if (code == LSHIFT_EXPR
          /* If either OP0 has been folded to INTEGER_CST...  */
          && ((TREE_CODE (orig_op0) != INTEGER_CST
               && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
               && TREE_CODE (op0) == INTEGER_CST)
              /* ...or if OP1 has been folded to INTEGER_CST...  */
              || (TREE_CODE (orig_op1) != INTEGER_CST
                  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
                  && TREE_CODE (op1) == INTEGER_CST))
          && c_inhibit_evaluation_warnings == 0)
        /* ...then maybe we can detect an overflow.  */
        maybe_warn_shift_overflow (loc, op0, op1);
      /* Warn for division/modulo by a zero divisor that only became
         constant through this folding.  */
      if ((code == TRUNC_DIV_EXPR
           || code == CEIL_DIV_EXPR
           || code == FLOOR_DIV_EXPR
           || code == EXACT_DIV_EXPR
           || code == TRUNC_MOD_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
              || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
        warn_for_div_by_zero (loc, op1);

    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const,
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
         not prepared to deal with them if they occur in initializers.  */
          && (op1 = get_base_address (op0)) != NULL_TREE
          && INDIRECT_REF_P (op1)
          && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
        ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
          ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
          : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);

      /* A rebuilt INDIRECT_REF keeps the original's qualifiers.  */
      if (code == INDIRECT_REF
          && INDIRECT_REF_P (ret))
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);

      /* Don't warn about explicit conversions.  We will already
         have warned about suspect implicit conversions.  */
      if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
        overflow_warning (EXPR_LOCATION (expr), ret, op0);

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
         arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      /* The second operand is unused (never evaluated) when the first
         already decides the result: false for &&, true for ||.  */
      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
                          ? truthvalue_false_node
                          : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

          && (TREE_CODE (op0) != INTEGER_CST
              /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
              || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);

      /* The first operand's constancy always propagates; the second
         operand's only matters when it would be evaluated.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
          && (code == TRUTH_ANDIF_EXPR
              ? op0 == truthvalue_false_node
              : op0 == truthvalue_true_node)))
        *maybe_const_operands &= op1_const;
          && (code == TRUTH_ANDIF_EXPR
              ? op0 == truthvalue_false_node
              : op0 == truthvalue_true_node)))
        *maybe_const_itself &= op1_const_self;

      /* COND_EXPR: fold condition and both arms, silencing warnings
         for whichever arm a constant condition makes dead.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const, false);

      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

          && (TREE_CODE (op0) != INTEGER_CST
              /* Only the evaluated operand must be an INTEGER_CST.  */
              || (op0 == truthvalue_true_node
                  ? TREE_CODE (op1) != INTEGER_CST
                  : TREE_CODE (op2) != INTEGER_CST)))

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);

      /* Merge constancy: the condition always counts, each arm only
         when a constant condition does not make it dead.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
          && op0 == truthvalue_false_node))
        *maybe_const_operands &= op1_const;
          && op0 == truthvalue_false_node))
        *maybe_const_itself &= op1_const_self;
          && op0 == truthvalue_true_node))
        *maybe_const_operands &= op2_const;
          && op0 == truthvalue_true_node))
        *maybe_const_itself &= op2_const_self;

      /* Ternary operations evaluating all three operands.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
         encountered must remove the EXCESS_PRECISION_EXPR around
         inner operands and possibly put one around the whole
         expression or possibly convert to the semantic type (which
         c_fully_fold does); we cannot tell at this stage which is
         appropriate in any particular case.  */

      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
          op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                       maybe_const_itself, for_int_const,
          /* Cache the folded operand and mark the SAVE_EXPR so a
             second visit does not fold it again.  */
          TREE_OPERAND (expr, 0) = op0;
          SAVE_EXPR_FOLDED_P (expr) = true;
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))

      /* Various codes may appear through folding built-in functions
         and their arguments.  */

  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
      /* Wrap nodes that cannot carry a location so the no-warning
         flag has somewhere to live.  */
      if (!CAN_HAVE_LOCATION_P (ret))
        ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
  /* Reinstate the original location and source range on the folded
     replacement.  */
  protected_set_expr_location (ret, loc);
  if (IS_EXPR_CODE_CLASS (kind))
    set_source_range (ret, old_range.m_start, old_range.m_finish);