1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "basic-block.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
33 #include "c-family/c-ubsan.h"
35 #include "cp-cilkplus.h"
37 /* Forward declarations. */
39 static tree cp_genericize_r (tree *, int *, void *);
40 static tree cp_fold_r (tree *, int *, void *);
41 static void cp_genericize_tree (tree*);
42 static tree cp_fold (tree);
44 /* Local declarations. */
46 enum bc_t { bc_break = 0, bc_continue = 1 };
48 /* Stack of labels which are targets for "break" or "continue",
49 linked through TREE_CHAIN. */
50 static tree bc_label[2];
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.  Just creates a label with location LOCATION and pushes
   it into the current context.  */
59 begin_bc_block (enum bc_t bc, location_t location)
61 tree label = create_artificial_label (location);
62 DECL_CHAIN (label) = bc_label[bc];
65 LABEL_DECL_BREAK (label) = true;
67 LABEL_DECL_CONTINUE (label) = true;
71 /* Finish a scope which can be exited by a break or continue statement.
72 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
73 an expression for the contents of the scope.
75 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
76 BLOCK. Otherwise, just forget the label. */
79 finish_bc_block (tree *block, enum bc_t bc, tree label)
81 gcc_assert (label == bc_label[bc]);
83 if (TREE_USED (label))
84 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
87 bc_label[bc] = DECL_CHAIN (label);
88 DECL_CHAIN (label) = NULL_TREE;
91 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
92 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
93 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
94 of gimplify_cilk_spawn. */
97 cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
102 cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
103 if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
104 for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
105 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
106 is_gimple_reg, fb_rvalue);
110 /* Get the LABEL_EXPR to represent a break or continue statement
111 in the current block scope. BC indicates which. */
114 get_bc_label (enum bc_t bc)
116 tree label = bc_label[bc];
118 /* Mark the label used for finish_bc_block. */
119 TREE_USED (label) = 1;
123 /* Genericize a TRY_BLOCK. */
126 genericize_try_block (tree *stmt_p)
128 tree body = TRY_STMTS (*stmt_p);
129 tree cleanup = TRY_HANDLERS (*stmt_p);
131 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
134 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
137 genericize_catch_block (tree *stmt_p)
139 tree type = HANDLER_TYPE (*stmt_p);
140 tree body = HANDLER_BODY (*stmt_p);
142 /* FIXME should the caught type go in TREE_TYPE? */
143 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
/* A terser interface for building a representation of an exception
   specification.  */
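/* Illustrative sketch (not from the original sources): given a dynamic
   exception specification such as

     void f () throw (int);    // ALLOWED would describe {int}

   the tree built below has the shape

     TRY_CATCH_EXPR <BODY,
                     EH_FILTER_EXPR <ALLOWED, FAILURE>>

   where FAILURE is the expression the caller wants run when a type
   outside ALLOWED escapes.  */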
150 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
154 /* FIXME should the allowed types go in TREE_TYPE? */
155 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
156 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
158 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
159 append_to_statement_list (body, &TREE_OPERAND (t, 0));
164 /* Genericize an EH_SPEC_BLOCK by converting it to a
165 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
168 genericize_eh_spec_block (tree *stmt_p)
170 tree body = EH_SPEC_STMTS (*stmt_p);
171 tree allowed = EH_SPEC_RAISES (*stmt_p);
172 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
174 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
175 TREE_NO_WARNING (*stmt_p) = true;
176 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
179 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
182 genericize_if_stmt (tree *stmt_p)
184 tree stmt, cond, then_, else_;
185 location_t locus = EXPR_LOCATION (*stmt_p);
188 cond = IF_COND (stmt);
189 then_ = THEN_CLAUSE (stmt);
190 else_ = ELSE_CLAUSE (stmt);
193 then_ = build_empty_stmt (locus);
195 else_ = build_empty_stmt (locus);
197 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
199 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
202 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
203 if (!EXPR_HAS_LOCATION (stmt))
204 protected_set_expr_location (stmt, locus);
208 /* Build a generic representation of one of the C loop forms. COND is the
209 loop condition or NULL_TREE. BODY is the (possibly compound) statement
210 controlled by the loop. INCR is the increment expression of a for-loop,
211 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
212 evaluated before the loop body as in while and for loops, or after the
213 loop body as in do-while loops. */
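/* As a rough illustration (hypothetical source, not from this file),

     while (cond)
       body;

   is lowered to something like

     LOOP_EXPR
       if (cond) ; else goto break_label;   // the exit built below
       body;
       continue_label:;
     break_label:;

   COND_IS_FIRST places that exit before the body; for a do-while loop
   the exit follows the body instead.  */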
216 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
217 tree incr, bool cond_is_first, int *walk_subtrees,
222 tree stmt_list = NULL;
224 blab = begin_bc_block (bc_break, start_locus);
225 clab = begin_bc_block (bc_continue, start_locus);
227 protected_set_expr_location (incr, start_locus);
229 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
230 cp_walk_tree (&body, cp_genericize_r, data, NULL);
231 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
234 if (cond && TREE_CODE (cond) != INTEGER_CST)
236 /* If COND is constant, don't bother building an exit. If it's false,
237 we won't build a loop. If it's true, any exits are in the body. */
238 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
239 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
240 get_bc_label (bc_break));
241 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
242 build_empty_stmt (cloc), exit);
245 if (exit && cond_is_first)
246 append_to_statement_list (exit, &stmt_list);
247 append_to_statement_list (body, &stmt_list);
248 finish_bc_block (&stmt_list, bc_continue, clab);
249 append_to_statement_list (incr, &stmt_list);
250 if (exit && !cond_is_first)
251 append_to_statement_list (exit, &stmt_list);
254 stmt_list = build_empty_stmt (start_locus);
257 if (cond && integer_zerop (cond))
260 loop = fold_build3_loc (start_locus, COND_EXPR,
261 void_type_node, cond, stmt_list,
262 build_empty_stmt (start_locus));
267 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
270 append_to_statement_list (loop, &stmt_list);
271 finish_bc_block (&stmt_list, bc_break, blab);
273 stmt_list = build_empty_stmt (start_locus);
278 /* Genericize a FOR_STMT node *STMT_P. */
281 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
286 tree init = FOR_INIT_STMT (stmt);
290 cp_walk_tree (&init, cp_genericize_r, data, NULL);
291 append_to_statement_list (init, &expr);
294 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
295 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
296 append_to_statement_list (loop, &expr);
297 if (expr == NULL_TREE)
302 /* Genericize a WHILE_STMT node *STMT_P. */
305 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
308 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
309 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
312 /* Genericize a DO_STMT node *STMT_P. */
315 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
318 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
319 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
322 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
325 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
328 tree break_block, body, cond, type;
329 location_t stmt_locus = EXPR_LOCATION (stmt);
331 break_block = begin_bc_block (bc_break, stmt_locus);
333 body = SWITCH_STMT_BODY (stmt);
335 body = build_empty_stmt (stmt_locus);
336 cond = SWITCH_STMT_COND (stmt);
337 type = SWITCH_STMT_TYPE (stmt);
339 cp_walk_tree (&body, cp_genericize_r, data, NULL);
340 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
341 cp_walk_tree (&type, cp_genericize_r, data, NULL);
344 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
345 finish_bc_block (stmt_p, bc_break, break_block);
348 /* Genericize a CONTINUE_STMT node *STMT_P. */
351 genericize_continue_stmt (tree *stmt_p)
353 tree stmt_list = NULL;
354 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
355 tree label = get_bc_label (bc_continue);
356 location_t location = EXPR_LOCATION (*stmt_p);
357 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
358 append_to_statement_list_force (pred, &stmt_list);
359 append_to_statement_list (jump, &stmt_list);
363 /* Genericize a BREAK_STMT node *STMT_P. */
366 genericize_break_stmt (tree *stmt_p)
368 tree label = get_bc_label (bc_break);
369 location_t location = EXPR_LOCATION (*stmt_p);
370 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
/* Genericize an OMP_FOR node *STMT_P.  */
376 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
379 location_t locus = EXPR_LOCATION (stmt);
380 tree clab = begin_bc_block (bc_continue, locus);
382 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
383 if (TREE_CODE (stmt) != OMP_TASKLOOP)
384 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
385 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
386 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
387 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
388 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
391 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
394 /* Hook into the middle of gimplifying an OMP_FOR node. */
396 static enum gimplify_status
397 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
399 tree for_stmt = *expr_p;
400 gimple_seq seq = NULL;
402 /* Protect ourselves from recursion. */
403 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
405 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
407 gimplify_and_add (for_stmt, &seq);
408 gimple_seq_add_seq (pre_p, seq);
410 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
415 /* Gimplify an EXPR_STMT node. */
418 gimplify_expr_stmt (tree *stmt_p)
420 tree stmt = EXPR_STMT_EXPR (*stmt_p);
422 if (stmt == error_mark_node)
425 /* Gimplification of a statement expression will nullify the
426 statement if all its side effects are moved to *PRE_P and *POST_P.
428 In this case we will not want to emit the gimplified statement.
However, we may still want to emit a warning, so we do that before
gimplification.  */
431 if (stmt && warn_unused_value)
433 if (!TREE_SIDE_EFFECTS (stmt))
435 if (!IS_EMPTY_STMT (stmt)
436 && !VOID_TYPE_P (TREE_TYPE (stmt))
437 && !TREE_NO_WARNING (stmt))
438 warning (OPT_Wunused_value, "statement with no effect");
441 warn_if_unused_value (stmt, input_location);
444 if (stmt == NULL_TREE)
445 stmt = alloc_stmt_list ();
450 /* Gimplify initialization from an AGGR_INIT_EXPR. */
453 cp_gimplify_init_expr (tree *expr_p)
455 tree from = TREE_OPERAND (*expr_p, 1);
456 tree to = TREE_OPERAND (*expr_p, 0);
459 /* What about code that pulls out the temp and uses it elsewhere? I
460 think that such code never uses the TARGET_EXPR as an initializer. If
461 I'm wrong, we'll abort because the temp won't have any RTL. In that
462 case, I guess we'll need to replace references somehow. */
463 if (TREE_CODE (from) == TARGET_EXPR)
464 from = TARGET_EXPR_INITIAL (from);
466 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
467 inside the TARGET_EXPR. */
470 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
472 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
473 replace the slot operand with our target.
475 Should we add a target parm to gimplify_expr instead? No, as in this
476 case we want to replace the INIT_EXPR. */
477 if (TREE_CODE (sub) == AGGR_INIT_EXPR
478 || TREE_CODE (sub) == VEC_INIT_EXPR)
480 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
481 AGGR_INIT_EXPR_SLOT (sub) = to;
483 VEC_INIT_EXPR_SLOT (sub) = to;
/* The initialization is now a side-effect, so the container can
   become void.  */
489 TREE_TYPE (from) = void_type_node;
492 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
493 /* Handle aggregate NSDMI. */
494 replace_placeholders (sub, to);
499 t = TREE_OPERAND (t, 1);
504 /* Gimplify a MUST_NOT_THROW_EXPR. */
506 static enum gimplify_status
507 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
510 tree temp = voidify_wrapper_expr (stmt, NULL);
511 tree body = TREE_OPERAND (stmt, 0);
512 gimple_seq try_ = NULL;
513 gimple_seq catch_ = NULL;
516 gimplify_and_add (body, &try_);
517 mnt = gimple_build_eh_must_not_throw (terminate_node);
518 gimple_seq_add_stmt_without_update (&catch_, mnt);
519 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
521 gimple_seq_add_stmt_without_update (pre_p, mnt);
532 /* Return TRUE if an operand (OP) of a given TYPE being copied is
533 really just an empty class copy.
535 Check that the operand has a simple form so that TARGET_EXPRs and
536 non-empty CONSTRUCTORs get reduced properly, and we leave the
537 return slot optimization alone because it isn't a copy. */
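/* For example (illustrative only):

     struct empty { };      // no data to copy
     empty a, b;
     a = b;                 // the copy carries no information

   When this predicate is true, the MODIFY_EXPR handling in
   cp_gimplify_expr reduces the copy to evaluating the operands for
   their side effects.  */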
540 simple_empty_class_p (tree type, tree op)
543 ((TREE_CODE (op) == COMPOUND_EXPR
544 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
545 || is_gimple_lvalue (op)
546 || INDIRECT_REF_P (op)
547 || (TREE_CODE (op) == CONSTRUCTOR
548 && CONSTRUCTOR_NELTS (op) == 0
549 && !TREE_CLOBBER_P (op))
550 || (TREE_CODE (op) == CALL_EXPR
551 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
552 && is_really_empty_class (type);
555 /* Returns true if evaluating E as an lvalue has side-effects;
556 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
557 have side-effects until there is a read or write through it. */
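/* Hypothetical examples of the distinction:

     volatile int v;        // "v" alone: no effect until read/written
     a[f ()] = 0;           // ARRAY_REF index with side effects: true
     *p = 0;                // effects only if computing p has them

   Only side effects in the address computation count here, not the
   volatility of the object eventually accessed.  */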
560 lvalue_has_side_effects (tree e)
562 if (!TREE_SIDE_EFFECTS (e))
564 while (handled_component_p (e))
566 if (TREE_CODE (e) == ARRAY_REF
567 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
569 e = TREE_OPERAND (e, 0);
572 /* Just naming a variable has no side-effects. */
574 else if (INDIRECT_REF_P (e))
575 /* Similarly, indirection has no side-effects. */
576 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
578 /* For anything else, trust TREE_SIDE_EFFECTS. */
579 return TREE_SIDE_EFFECTS (e);
582 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
585 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
587 int saved_stmts_are_full_exprs_p = 0;
588 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
589 enum tree_code code = TREE_CODE (*expr_p);
590 enum gimplify_status ret;
592 if (STATEMENT_CODE_P (code))
594 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
595 current_stmt_tree ()->stmts_are_full_exprs_p
596 = STMT_IS_FULL_EXPR_P (*expr_p);
602 simplify_aggr_init_expr (expr_p);
608 location_t loc = input_location;
609 tree init = VEC_INIT_EXPR_INIT (*expr_p);
610 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
611 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
612 input_location = EXPR_LOCATION (*expr_p);
613 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
614 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
616 tf_warning_or_error);
618 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
619 cp_genericize_tree (expr_p);
621 input_location = loc;
626 /* FIXME communicate throw type to back end, probably by moving
627 THROW_EXPR into ../tree.def. */
628 *expr_p = TREE_OPERAND (*expr_p, 0);
632 case MUST_NOT_THROW_EXPR:
633 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
636 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
637 LHS of an assignment might also be involved in the RHS, as in bug
640 if (fn_contains_cilk_spawn_p (cfun))
642 if (cilk_cp_detect_spawn_and_unwrap (expr_p))
644 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
646 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
648 if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
652 cp_gimplify_init_expr (expr_p);
653 if (TREE_CODE (*expr_p) != INIT_EXPR)
655 /* Otherwise fall through. */
659 if (fn_contains_cilk_spawn_p (cfun)
660 && cilk_cp_detect_spawn_and_unwrap (expr_p)
663 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
664 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
666 /* If the back end isn't clever enough to know that the lhs and rhs
667 types are the same, add an explicit conversion. */
668 tree op0 = TREE_OPERAND (*expr_p, 0);
669 tree op1 = TREE_OPERAND (*expr_p, 1);
671 if (!error_operand_p (op0)
672 && !error_operand_p (op1)
673 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
674 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
675 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
676 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
677 TREE_TYPE (op0), op1);
679 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
681 /* Remove any copies of empty classes. Also drop volatile
682 variables on the RHS to avoid infinite recursion from
683 gimplify_expr trying to load the value. */
684 if (TREE_SIDE_EFFECTS (op1))
686 if (TREE_THIS_VOLATILE (op1)
687 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
688 op1 = build_fold_addr_expr (op1);
690 gimplify_and_add (op1, pre_p);
692 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
693 is_gimple_lvalue, fb_lvalue);
694 *expr_p = TREE_OPERAND (*expr_p, 0);
696 /* P0145 says that the RHS is sequenced before the LHS.
697 gimplify_modify_expr gimplifies the RHS before the LHS, but that
698 isn't quite strong enough in two cases:
700 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
701 mean it's evaluated after the LHS.
703 2) the value calculation of the RHS is also sequenced before the
704 LHS, so for scalar assignment we need to preevaluate if the
705 RHS could be affected by LHS side-effects even if it has no
706 side-effects of its own. We don't need this for classes because
707 class assignment takes its RHS by reference. */
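/* A hypothetical illustration of case 2 above:

     int x = 0;
     int a[2];
     int g () { ++x; return 0; }
     // In "a[g ()] = x;" P0145 requires the value of x to be read
     // before g () runs, so x is preevaluated into a temporary.  */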
708 else if (flag_strong_eval_order > 1
709 && TREE_CODE (*expr_p) == MODIFY_EXPR
710 && lvalue_has_side_effects (op0)
711 && (TREE_CODE (op1) == CALL_EXPR
712 || (SCALAR_TYPE_P (TREE_TYPE (op1))
713 && !TREE_CONSTANT (op1))))
714 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
719 case EMPTY_CLASS_EXPR:
720 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
721 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
726 *expr_p = BASELINK_FUNCTIONS (*expr_p);
731 genericize_try_block (expr_p);
736 genericize_catch_block (expr_p);
741 genericize_eh_spec_block (expr_p);
760 ret = cp_gimplify_omp_for (expr_p, pre_p);
764 gimplify_expr_stmt (expr_p);
768 case UNARY_PLUS_EXPR:
770 tree arg = TREE_OPERAND (*expr_p, 0);
771 tree type = TREE_TYPE (*expr_p);
772 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
778 case CILK_SPAWN_STMT:
779 gcc_assert(fn_contains_cilk_spawn_p (cfun)
780 && cilk_cp_detect_spawn_and_unwrap (expr_p));
784 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
785 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
790 if (fn_contains_cilk_spawn_p (cfun)
791 && cilk_cp_detect_spawn_and_unwrap (expr_p)
794 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
795 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
798 if (!CALL_EXPR_FN (*expr_p))
799 /* Internal function call. */;
800 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
802 /* This is a call to a (compound) assignment operator that used
803 the operator syntax; gimplify the RHS first. */
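/* E.g. (illustrative): for "a += b" spelled with operator syntax,
   the call becomes roughly T::operator+= (a, b) with
   CALL_EXPR_REVERSE_ARGS set; P0145 requires b (argument 1 here)
   to be evaluated before a.  */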
804 gcc_assert (call_expr_nargs (*expr_p) == 2);
805 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
806 enum gimplify_status t
807 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
811 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
813 /* Leave the last argument for gimplify_call_expr, to avoid problems
814 with __builtin_va_arg_pack(). */
815 int nargs = call_expr_nargs (*expr_p) - 1;
816 for (int i = 0; i < nargs; ++i)
818 enum gimplify_status t
819 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
824 else if (flag_strong_eval_order
825 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
827 /* If flag_strong_eval_order, evaluate the object argument first. */
828 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
829 if (POINTER_TYPE_P (fntype))
830 fntype = TREE_TYPE (fntype);
831 if (TREE_CODE (fntype) == METHOD_TYPE)
833 enum gimplify_status t
834 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
842 if (TREE_OPERAND (*expr_p, 0)
843 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
844 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
846 expr_p = &TREE_OPERAND (*expr_p, 0);
847 code = TREE_CODE (*expr_p);
848 /* Avoid going through the INIT_EXPR case, which can
849 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
850 goto modify_expr_case;
855 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
859 /* Restore saved state. */
860 if (STATEMENT_CODE_P (code))
861 current_stmt_tree ()->stmts_are_full_exprs_p
862 = saved_stmts_are_full_exprs_p;
868 is_invisiref_parm (const_tree t)
870 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
871 && DECL_BY_REFERENCE (t));
/* Return true if the UIDs in both int tree maps are equal.  */
877 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
879 return (a->uid == b->uid);
882 /* Hash a UID in a cxx_int_tree_map. */
885 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
890 /* A stable comparison routine for use with splay trees and DECLs. */
893 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
898 return DECL_UID (a) - DECL_UID (b);
901 /* OpenMP context during genericization. */
903 struct cp_genericize_omp_taskreg
907 struct cp_genericize_omp_taskreg *outer;
908 splay_tree variables;
911 /* Return true if genericization should try to determine if
912 DECL is firstprivate or shared within task regions. */
915 omp_var_to_track (tree decl)
917 tree type = TREE_TYPE (decl);
918 if (is_invisiref_parm (decl))
919 type = TREE_TYPE (type);
920 while (TREE_CODE (type) == ARRAY_TYPE)
921 type = TREE_TYPE (type);
922 if (type == error_mark_node || !CLASS_TYPE_P (type))
924 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
926 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
931 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
934 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
936 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
937 (splay_tree_key) decl);
940 int flags = OMP_CLAUSE_DEFAULT_SHARED;
942 omp_cxx_notice_variable (omp_ctx->outer, decl);
943 if (!omp_ctx->default_shared)
945 struct cp_genericize_omp_taskreg *octx;
947 for (octx = omp_ctx->outer; octx; octx = octx->outer)
949 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
950 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
952 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
955 if (octx->is_parallel)
959 && (TREE_CODE (decl) == PARM_DECL
960 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
961 && DECL_CONTEXT (decl) == current_function_decl)))
962 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
963 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
965 /* DECL is implicitly determined firstprivate in
966 the current task construct. Ensure copy ctor and
967 dtor are instantiated, because during gimplification
it will already be too late.  */
969 tree type = TREE_TYPE (decl);
970 if (is_invisiref_parm (decl))
971 type = TREE_TYPE (type);
972 while (TREE_CODE (type) == ARRAY_TYPE)
973 type = TREE_TYPE (type);
974 get_copy_ctor (type, tf_none);
975 get_dtor (type, tf_none);
978 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
982 /* Genericization context. */
984 struct cp_genericize_data
986 hash_set<tree> *p_set;
987 vec<tree> bind_expr_stack;
988 struct cp_genericize_omp_taskreg *omp_ctx;
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OpenMP cases is something to move into
   the middle end.  For now we have most foldings only on GENERIC
   in fold-const, so we need to perform this before the transformation
   to GIMPLE form.  */
1001 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
1004 enum tree_code code;
1006 *stmt_p = stmt = cp_fold (*stmt_p);
1008 if (((hash_set<tree> *) data)->add (stmt))
/* Don't walk subtrees of stmts we've already walked once; otherwise
   we can have exponential complexity with e.g. lots of nested
   SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
   return the same tree, whose subtrees were already walked the first
   time cp_fold_r was called on it.  */
1019 code = TREE_CODE (stmt);
1020 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
1021 || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
1022 || code == OACC_LOOP)
1027 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1028 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1029 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1030 x = OMP_FOR_COND (stmt);
1031 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1033 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1034 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1036 else if (x && TREE_CODE (x) == TREE_VEC)
1038 n = TREE_VEC_LENGTH (x);
1039 for (i = 0; i < n; i++)
1041 tree o = TREE_VEC_ELT (x, i);
1042 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1043 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1046 x = OMP_FOR_INCR (stmt);
1047 if (x && TREE_CODE (x) == TREE_VEC)
1049 n = TREE_VEC_LENGTH (x);
1050 for (i = 0; i < n; i++)
1052 tree o = TREE_VEC_ELT (x, i);
1053 if (o && TREE_CODE (o) == MODIFY_EXPR)
1054 o = TREE_OPERAND (o, 1);
1055 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1056 || TREE_CODE (o) == POINTER_PLUS_EXPR))
1058 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1059 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1063 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1070 /* Fold ALL the trees! FIXME we should be able to remove this, but
1071 apparently that still causes optimization regressions. */
1074 cp_fold_function (tree fndecl)
1076 hash_set<tree> pset;
1077 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */
1084 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1086 tree stmt = *stmt_p;
1087 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1088 hash_set<tree> *p_set = wtd->p_set;
1090 /* If in an OpenMP context, note var uses. */
1091 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1093 || TREE_CODE (stmt) == PARM_DECL
1094 || TREE_CODE (stmt) == RESULT_DECL)
1095 && omp_var_to_track (stmt))
1096 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1098 /* Don't dereference parms in a thunk, pass the references through. */
1099 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1100 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1106 /* Otherwise, do dereference invisible reference parms. */
1107 if (is_invisiref_parm (stmt))
1109 *stmt_p = convert_from_reference (stmt);
1114 /* Map block scope extern declarations to visible declarations with the
1115 same name and type in outer scopes if any. */
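/* For instance (hypothetical):

     int n;                          // namespace-scope definition
     void f () { extern int n; n = 1; }

   the block-scope "extern int n" is redirected to the outer n via
   this map, which is keyed by DECL_UID.  */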
1116 if (cp_function_chain->extern_decl_map
1117 && VAR_OR_FUNCTION_DECL_P (stmt)
1118 && DECL_EXTERNAL (stmt))
1120 struct cxx_int_tree_map *h, in;
1121 in.uid = DECL_UID (stmt);
1122 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1131 /* Other than invisiref parms, don't walk the same tree twice. */
1132 if (p_set->contains (stmt))
1138 if (TREE_CODE (stmt) == ADDR_EXPR
1139 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1141 /* If in an OpenMP context, note var uses. */
1142 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1143 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1144 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1145 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1148 else if (TREE_CODE (stmt) == RETURN_EXPR
1149 && TREE_OPERAND (stmt, 0)
1150 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1151 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1153 else if (TREE_CODE (stmt) == OMP_CLAUSE)
1154 switch (OMP_CLAUSE_CODE (stmt))
1156 case OMP_CLAUSE_LASTPRIVATE:
1157 /* Don't dereference an invisiref in OpenMP clauses. */
1158 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1161 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1162 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1163 cp_genericize_r, data, NULL);
1166 case OMP_CLAUSE_PRIVATE:
1167 /* Don't dereference an invisiref in OpenMP clauses. */
1168 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1170 else if (wtd->omp_ctx != NULL)
/* The private clause doesn't cause any references to the
   var in outer contexts, so avoid calling
   omp_cxx_notice_variable for it.  */
1175 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1176 wtd->omp_ctx = NULL;
1177 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1183 case OMP_CLAUSE_SHARED:
1184 case OMP_CLAUSE_FIRSTPRIVATE:
1185 case OMP_CLAUSE_COPYIN:
1186 case OMP_CLAUSE_COPYPRIVATE:
1187 /* Don't dereference an invisiref in OpenMP clauses. */
1188 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1191 case OMP_CLAUSE_REDUCTION:
/* Don't dereference an invisiref in a reduction clause's
   OMP_CLAUSE_DECL either; OMP_CLAUSE_REDUCTION_{INIT,MERGE}
   still need to be genericized.  */
1195 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1198 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1199 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1200 cp_genericize_r, data, NULL);
1201 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1202 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1203 cp_genericize_r, data, NULL);
1209 else if (IS_TYPE_OR_DECL_P (stmt))
1212 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1213 to lower this construct before scanning it, so we need to lower these
1214 before doing anything else. */
1215 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1216 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1217 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1220 CLEANUP_BODY (stmt),
1221 CLEANUP_EXPR (stmt));
1223 else if (TREE_CODE (stmt) == IF_STMT)
1225 genericize_if_stmt (stmt_p);
1226 /* *stmt_p has changed, tail recurse to handle it again. */
1227 return cp_genericize_r (stmt_p, walk_subtrees, data);
1230 /* COND_EXPR might have incompatible types in branches if one or both
1231 arms are bitfields. Fix it up now. */
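/* E.g. (illustrative): given "struct S { int bf : 3; } s;", the arms
   of "cond ? s.bf : 0" can end up with the lowered bitfield type on
   one side and int on the other, so a conversion is inserted.  */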
1232 else if (TREE_CODE (stmt) == COND_EXPR)
1235 = (TREE_OPERAND (stmt, 1)
1236 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1239 = (TREE_OPERAND (stmt, 2)
1240 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1243 && !useless_type_conversion_p (TREE_TYPE (stmt),
1244 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1246 TREE_OPERAND (stmt, 1)
1247 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1248 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1252 && !useless_type_conversion_p (TREE_TYPE (stmt),
1253 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1255 TREE_OPERAND (stmt, 2)
1256 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1257 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1262 else if (TREE_CODE (stmt) == BIND_EXPR)
1264 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1267 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1269 && !DECL_EXTERNAL (decl)
1270 && omp_var_to_track (decl))
1273 = splay_tree_lookup (wtd->omp_ctx->variables,
1274 (splay_tree_key) decl);
1276 splay_tree_insert (wtd->omp_ctx->variables,
1277 (splay_tree_key) decl,
1279 ? OMP_CLAUSE_DEFAULT_SHARED
1280 : OMP_CLAUSE_DEFAULT_PRIVATE);
1284 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1286 /* The point here is to not sanitize static initializers. */
1287 bool no_sanitize_p = wtd->no_sanitize_p;
1288 wtd->no_sanitize_p = true;
1289 for (tree decl = BIND_EXPR_VARS (stmt);
1291 decl = DECL_CHAIN (decl))
1293 && TREE_STATIC (decl)
1294 && DECL_INITIAL (decl))
1295 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1296 wtd->no_sanitize_p = no_sanitize_p;
1298 wtd->bind_expr_stack.safe_push (stmt);
1299 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1300 cp_genericize_r, data, NULL);
1301 wtd->bind_expr_stack.pop ();
1304 else if (TREE_CODE (stmt) == USING_STMT)
1306 tree block = NULL_TREE;
/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
   BLOCK, and append an IMPORTED_DECL to its
   BLOCK_VARS chain.  */
1311 if (wtd->bind_expr_stack.exists ())
1314 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1315 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1320 tree using_directive;
1321 gcc_assert (TREE_OPERAND (stmt, 0));
1323 using_directive = make_node (IMPORTED_DECL);
1324 TREE_TYPE (using_directive) = void_type_node;
1326 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1327 = TREE_OPERAND (stmt, 0);
1328 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1329 BLOCK_VARS (block) = using_directive;
1331 /* The USING_STMT won't appear in GENERIC. */
1332 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1336 else if (TREE_CODE (stmt) == DECL_EXPR
1337 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1339 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1340 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1343 else if (TREE_CODE (stmt) == DECL_EXPR)
1345 tree d = DECL_EXPR_DECL (stmt);
1346 if (TREE_CODE (d) == VAR_DECL)
1347 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1349 else if (TREE_CODE (stmt) == OMP_PARALLEL
1350 || TREE_CODE (stmt) == OMP_TASK
1351 || TREE_CODE (stmt) == OMP_TASKLOOP)
1353 struct cp_genericize_omp_taskreg omp_ctx;
1358 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1359 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1360 omp_ctx.default_shared = omp_ctx.is_parallel;
1361 omp_ctx.outer = wtd->omp_ctx;
1362 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1363 wtd->omp_ctx = &omp_ctx;
1364 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1365 switch (OMP_CLAUSE_CODE (c))
1367 case OMP_CLAUSE_SHARED:
1368 case OMP_CLAUSE_PRIVATE:
1369 case OMP_CLAUSE_FIRSTPRIVATE:
1370 case OMP_CLAUSE_LASTPRIVATE:
1371 decl = OMP_CLAUSE_DECL (c);
1372 if (decl == error_mark_node || !omp_var_to_track (decl))
1374 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1377 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1378 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1379 ? OMP_CLAUSE_DEFAULT_SHARED
1380 : OMP_CLAUSE_DEFAULT_PRIVATE);
1381 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1383 omp_cxx_notice_variable (omp_ctx.outer, decl);
1385 case OMP_CLAUSE_DEFAULT:
1386 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1387 omp_ctx.default_shared = true;
1391 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1392 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1394 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1395 wtd->omp_ctx = omp_ctx.outer;
1396 splay_tree_delete (omp_ctx.variables);
1398 else if (TREE_CODE (stmt) == TRY_BLOCK)
1401 tree try_block = wtd->try_block;
1402 wtd->try_block = stmt;
1403 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1404 wtd->try_block = try_block;
1405 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1407 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1409 /* MUST_NOT_THROW_COND might be something else with TM. */
1410 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1413 tree try_block = wtd->try_block;
1414 wtd->try_block = stmt;
1415 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1416 wtd->try_block = try_block;
1419 else if (TREE_CODE (stmt) == THROW_EXPR)
1421 location_t loc = location_of (stmt);
1422 if (TREE_NO_WARNING (stmt))
1424 else if (wtd->try_block)
1426 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1427 && warning_at (loc, OPT_Wterminate,
1428 "throw will always call terminate()")
1429 && cxx_dialect >= cxx11
1430 && DECL_DESTRUCTOR_P (current_function_decl))
1431 inform (loc, "in C++11 destructors default to noexcept");
1435 if (warn_cxx11_compat && cxx_dialect < cxx11
1436 && DECL_DESTRUCTOR_P (current_function_decl)
1437 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1439 && (get_defaulted_eh_spec (current_function_decl)
1440 == empty_except_spec))
1441 warning_at (loc, OPT_Wc__11_compat,
1442 "in C++11 this throw will terminate because "
1443 "destructors default to noexcept");
1446 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1447 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1448 else if (TREE_CODE (stmt) == FOR_STMT)
1449 genericize_for_stmt (stmt_p, walk_subtrees, data);
1450 else if (TREE_CODE (stmt) == WHILE_STMT)
1451 genericize_while_stmt (stmt_p, walk_subtrees, data);
1452 else if (TREE_CODE (stmt) == DO_STMT)
1453 genericize_do_stmt (stmt_p, walk_subtrees, data);
1454 else if (TREE_CODE (stmt) == SWITCH_STMT)
1455 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1456 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1457 genericize_continue_stmt (stmt_p);
1458 else if (TREE_CODE (stmt) == BREAK_STMT)
1459 genericize_break_stmt (stmt_p);
1460 else if (TREE_CODE (stmt) == OMP_FOR
1461 || TREE_CODE (stmt) == OMP_SIMD
1462 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1463 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1464 else if (TREE_CODE (stmt) == PTRMEM_CST)
1466 /* By the time we get here we're handing off to the back end, so we don't
1467 need or want to preserve PTRMEM_CST anymore. */
1468 *stmt_p = cplus_expand_constant (stmt);
1471 else if ((flag_sanitize
1472 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1473 && !wtd->no_sanitize_p)
1475 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1476 && TREE_CODE (stmt) == NOP_EXPR
1477 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1478 ubsan_maybe_instrument_reference (stmt);
1479 else if (TREE_CODE (stmt) == CALL_EXPR)
1481 tree fn = CALL_EXPR_FN (stmt);
1483 && !error_operand_p (fn)
1484 && POINTER_TYPE_P (TREE_TYPE (fn))
1485 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1488 = TREE_CODE (fn) == ADDR_EXPR
1489 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1490 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1491 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1492 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1493 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1494 cp_ubsan_maybe_instrument_member_call (stmt);
1499 p_set->add (*stmt_p);
1504 /* Lower C++ front end trees to GENERIC in T_P. */
1507 cp_genericize_tree (tree* t_p)
1509 struct cp_genericize_data wtd;
1511 wtd.p_set = new hash_set<tree>;
1512 wtd.bind_expr_stack.create (0);
1514 wtd.try_block = NULL_TREE;
1515 wtd.no_sanitize_p = false;
1516 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1518 wtd.bind_expr_stack.release ();
1519 if (flag_sanitize & SANITIZE_VPTR)
1520 cp_ubsan_instrument_member_accesses (t_p);
/* If a non-void function doesn't obviously end with a return
   statement, add ubsan instrumentation code to verify at
   runtime that it does return.  */
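/* Sketch of the effect (hypothetical source):

     int f (int i) { if (i) return 1; }   // may fall off the end

   With -fsanitize=return, a call built by ubsan_instrument_return is
   appended after the last statement so that reaching the end of this
   value-returning function is reported at runtime.  */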
1528 cp_ubsan_maybe_instrument_return (tree fndecl)
1530 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1531 || DECL_CONSTRUCTOR_P (fndecl)
1532 || DECL_DESTRUCTOR_P (fndecl)
1533 || !targetm.warn_func_return (fndecl))
1536 tree t = DECL_SAVED_TREE (fndecl);
1539 switch (TREE_CODE (t))
1542 t = BIND_EXPR_BODY (t);
1544 case TRY_FINALLY_EXPR:
1545 t = TREE_OPERAND (t, 0);
1547 case STATEMENT_LIST:
1549 tree_stmt_iterator i = tsi_last (t);
1566 t = DECL_SAVED_TREE (fndecl);
1567 if (TREE_CODE (t) == BIND_EXPR
1568 && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
1570 tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
1571 t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1572 tsi_link_after (&i, t, TSI_NEW_STMT);
1577 cp_genericize (tree fndecl)
1581 /* Fix up the types of parms passed by invisible reference. */
1582 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1583 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1585 /* If a function's arguments are copied to create a thunk,
1586 then DECL_BY_REFERENCE will be set -- but the type of the
argument will be a pointer type, so we will never get here.  */
1589 gcc_assert (!DECL_BY_REFERENCE (t));
1590 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1591 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1592 DECL_BY_REFERENCE (t) = 1;
1593 TREE_ADDRESSABLE (t) = 0;
1597 /* Do the same for the return value. */
1598 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1600 t = DECL_RESULT (fndecl);
1601 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1602 DECL_BY_REFERENCE (t) = 1;
1603 TREE_ADDRESSABLE (t) = 0;
1607 /* Adjust DECL_VALUE_EXPR of the original var. */
1608 tree outer = outer_curly_brace_block (current_function_decl);
1612 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1613 if (DECL_NAME (t) == DECL_NAME (var)
1614 && DECL_HAS_VALUE_EXPR_P (var)
1615 && DECL_VALUE_EXPR (var) == t)
1617 tree val = convert_from_reference (t);
1618 SET_DECL_VALUE_EXPR (var, val);
1624 /* If we're a clone, the body is already GIMPLE. */
1625 if (DECL_CLONED_FUNCTION_P (fndecl))
1628 /* Allow cp_genericize calls to be nested. */
1629 tree save_bc_label[2];
1630 save_bc_label[bc_break] = bc_label[bc_break];
1631 save_bc_label[bc_continue] = bc_label[bc_continue];
1632 bc_label[bc_break] = NULL_TREE;
1633 bc_label[bc_continue] = NULL_TREE;
1635 /* Expand all the array notations here. */
1637 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1638 DECL_SAVED_TREE (fndecl) =
1639 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1641 /* We do want to see every occurrence of the parms, so we can't just use
1642 walk_tree's hash functionality. */
1643 cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
1645 if (flag_sanitize & SANITIZE_RETURN
1646 && do_ubsan_in_current_function ())
1647 cp_ubsan_maybe_instrument_return (fndecl);
1649 /* Do everything else. */
1650 c_genericize (fndecl);
1652 gcc_assert (bc_label[bc_break] == NULL);
1653 gcc_assert (bc_label[bc_continue] == NULL);
1654 bc_label[bc_break] = save_bc_label[bc_break];
1655 bc_label[bc_continue] = save_bc_label[bc_continue];
1658 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1659 NULL if there is in fact nothing to do. ARG2 may be null if FN
1660 actually only takes one argument. */
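/* For array operands the code built below behaves roughly like this
   illustrative pseudo-C:

     p1 = &arg1[0];  if (arg2) p2 = &arg2[0];
     end1 = p1 + sizeof (arg1);
     lab: fn (p1, p2, <converted default args>);
          p1 += sizeof (element);  if (arg2) p2 += sizeof (element);
          if (p1 != end1) goto lab;

   i.e. FN is applied to each element of the flattened arrays.  */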
1663 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1665 tree defparm, parm, t;
1673 nargs = list_length (DECL_ARGUMENTS (fn));
1674 argarray = XALLOCAVEC (tree, nargs);
1676 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1678 defparm = TREE_CHAIN (defparm);
1680 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1682 tree inner_type = TREE_TYPE (arg1);
1683 tree start1, end1, p1;
1684 tree start2 = NULL, p2 = NULL;
1685 tree ret = NULL, lab;
1691 inner_type = TREE_TYPE (inner_type);
1692 start1 = build4 (ARRAY_REF, inner_type, start1,
1693 size_zero_node, NULL, NULL);
1695 start2 = build4 (ARRAY_REF, inner_type, start2,
1696 size_zero_node, NULL, NULL);
1698 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1699 start1 = build_fold_addr_expr_loc (input_location, start1);
1701 start2 = build_fold_addr_expr_loc (input_location, start2);
1703 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1704 end1 = fold_build_pointer_plus (start1, end1);
1706 p1 = create_tmp_var (TREE_TYPE (start1));
1707 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1708 append_to_statement_list (t, &ret);
1712 p2 = create_tmp_var (TREE_TYPE (start2));
1713 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1714 append_to_statement_list (t, &ret);
1717 lab = create_artificial_label (input_location);
1718 t = build1 (LABEL_EXPR, void_type_node, lab);
1719 append_to_statement_list (t, &ret);
1724 /* Handle default arguments. */
1725 for (parm = defparm; parm && parm != void_list_node;
1726 parm = TREE_CHAIN (parm), i++)
1727 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1728 TREE_PURPOSE (parm), fn, i,
1729 tf_warning_or_error);
1730 t = build_call_a (fn, i, argarray);
1731 t = fold_convert (void_type_node, t);
1732 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1733 append_to_statement_list (t, &ret);
1735 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1736 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1737 append_to_statement_list (t, &ret);
1741 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1742 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1743 append_to_statement_list (t, &ret);
1746 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1747 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1748 append_to_statement_list (t, &ret);
1754 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1756 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1757 /* Handle default arguments. */
1758 for (parm = defparm; parm && parm != void_list_node;
1759 parm = TREE_CHAIN (parm), i++)
1760 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1761 TREE_PURPOSE (parm),
1762 fn, i, tf_warning_or_error);
1763 t = build_call_a (fn, i, argarray);
1764 t = fold_convert (void_type_node, t);
1765 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1769 /* Return code to initialize DECL with its default constructor, or
1770 NULL if there's nothing to do. */
1773 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1775 tree info = CP_OMP_CLAUSE_INFO (clause);
1779 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1784 /* Return code to initialize DST with a copy constructor from SRC. */
1787 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1789 tree info = CP_OMP_CLAUSE_INFO (clause);
1793 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1795 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1800 /* Similarly, except use an assignment operator instead. */
1803 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1805 tree info = CP_OMP_CLAUSE_INFO (clause);
1809 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1811 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1816 /* Return code to destroy DECL. */
1819 cxx_omp_clause_dtor (tree clause, tree decl)
1821 tree info = CP_OMP_CLAUSE_INFO (clause);
1825 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1830 /* True if OpenMP should privatize what this DECL points to rather
1831 than the DECL itself. */
1834 cxx_omp_privatize_by_reference (const_tree decl)
1836 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1837 || is_invisiref_parm (decl));
/* Return true if DECL is a const-qualified var having no mutable member.  */
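/* Illustrative cases (hypothetical):

     const int c = 0;               // true: no mutable members
     struct S { mutable int m; };
     const S s;                     // false: m can still change

   An object with a mutable member is not really immutable, so it
   cannot be treated as predetermined shared.  */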
1842 cxx_omp_const_qual_no_mutable (tree decl)
1844 tree type = TREE_TYPE (decl);
1845 if (TREE_CODE (type) == REFERENCE_TYPE)
1847 if (!is_invisiref_parm (decl))
1849 type = TREE_TYPE (type);
1851 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
/* NVR doesn't preserve const qualification of the
   variable's type.  */
1855 tree outer = outer_curly_brace_block (current_function_decl);
1859 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1860 if (DECL_NAME (decl) == DECL_NAME (var)
1861 && (TYPE_MAIN_VARIANT (type)
1862 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1864 if (TYPE_READONLY (TREE_TYPE (var)))
1865 type = TREE_TYPE (var);
1871 if (type == error_mark_node)
1874 /* Variables with const-qualified type having no mutable member
1875 are predetermined shared. */
1876 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1882 /* True if OpenMP sharing attribute of DECL is predetermined. */
1884 enum omp_clause_default_kind
1885 cxx_omp_predetermined_sharing (tree decl)
1887 /* Static data members are predetermined shared. */
1888 if (TREE_STATIC (decl))
1890 tree ctx = CP_DECL_CONTEXT (decl);
1891 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1892 return OMP_CLAUSE_DEFAULT_SHARED;
/* Const-qualified vars having no mutable member are predetermined
   shared.  */
1897 if (cxx_omp_const_qual_no_mutable (decl))
1898 return OMP_CLAUSE_DEFAULT_SHARED;
1900 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1903 /* Finalize an implicitly determined clause. */
1906 cxx_omp_finish_clause (tree c, gimple_seq *)
1908 tree decl, inner_type;
1909 bool make_shared = false;
1911 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1914 decl = OMP_CLAUSE_DECL (c);
1915 decl = require_complete_type (decl);
1916 inner_type = TREE_TYPE (decl);
1917 if (decl == error_mark_node)
1919 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1920 inner_type = TREE_TYPE (inner_type);
1922 /* We're interested in the base element, not arrays. */
1923 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1924 inner_type = TREE_TYPE (inner_type);
1926 /* Check for special function availability by building a call to one.
1927 Save the results, because later we won't be in the right context
1928 for making these queries. */
1930 && CLASS_TYPE_P (inner_type)
1931 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1935 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1938 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
disregarded in an OpenMP construct, because it is going to be
1940 remapped during OpenMP lowering. SHARED is true if DECL
1941 is going to be shared, false if it is going to be privatized. */
1944 cxx_omp_disregard_value_expr (tree decl, bool shared)
1948 && DECL_HAS_VALUE_EXPR_P (decl)
1949 && DECL_ARTIFICIAL (decl)
1950 && DECL_LANG_SPECIFIC (decl)
1951 && DECL_OMP_PRIVATIZED_MEMBER (decl);
1954 /* Perform folding on expression X. */
1957 cp_fully_fold (tree x)
1959 if (processing_template_decl)
1961 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
1962 have to call both. */
1963 x = maybe_constant_value (x);
1967 /* Fold expression X which is used as an rvalue if RVAL is true. */
1970 cp_fold_maybe_rvalue (tree x, bool rval)
1975 if (rval && DECL_P (x))
1977 tree v = decl_constant_value (x);
1978 if (v != x && v != error_mark_node)
1989 /* Fold expression X which is used as an rvalue. */
1992 cp_fold_rvalue (tree x)
1994 return cp_fold_maybe_rvalue (x, true);
1997 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
1998 and certain changes are made to the folding done. Or should be (FIXME). We
1999 never touch maybe_const, as it is only used for the C front-end
2000 C_MAYBE_CONST_EXPR. */
2003 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
/* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
   its constant value.  */
2007 return cp_fold_rvalue (x);
2010 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2012 /* Dispose of the whole FOLD_CACHE. */
2015 clear_fold_cache (void)
2017 if (fold_cache != NULL)
2018 fold_cache->empty ();
2021 /* This function tries to fold an expression X.
2022 To avoid combinatorial explosion, folding results are kept in fold_cache.
2023 If we are processing a template or X is invalid, we don't fold at all.
2024 For performance reasons we don't cache expressions representing a
2025 declaration or constant.
The function returns X or its folded variant.  */
2031 tree op0, op1, op2, op3;
2032 tree org_x = x, r = NULL_TREE;
2033 enum tree_code code;
2035 bool rval_ops = true;
2037 if (!x || x == error_mark_node)
2040 if (processing_template_decl
2041 || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
2044 /* Don't bother to cache DECLs or constants. */
2045 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2048 if (fold_cache == NULL)
2049 fold_cache = hash_map<tree, tree>::create_ggc (101);
2051 if (tree *cached = fold_cache->get (x))
2054 code = TREE_CODE (x);
2058 x = fold_sizeof_expr (x);
2061 case VIEW_CONVERT_EXPR:
2065 case NON_LVALUE_EXPR:
2067 if (VOID_TYPE_P (TREE_TYPE (x)))
2070 loc = EXPR_LOCATION (x);
2071 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2073 if (code == CONVERT_EXPR
2074 && SCALAR_TYPE_P (TREE_TYPE (x))
2075 && op0 != void_node)
2076 /* During parsing we used convert_to_*_nofold; re-convert now using the
2077 folding variants, since fold() doesn't do those transformations. */
2078 x = fold (convert (TREE_TYPE (x), op0));
2079 else if (op0 != TREE_OPERAND (x, 0))
2081 if (op0 == error_mark_node)
2082 x = error_mark_node;
2084 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2089 /* Conversion of an out-of-range value has implementation-defined
2090 behavior; the language considers it different from arithmetic
2091 overflow, which is undefined. */
2092 if (TREE_CODE (op0) == INTEGER_CST
2093 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2094 TREE_OVERFLOW (x) = false;
2099 /* We don't need the decltype(auto) obfuscation anymore. */
2100 if (REF_PARENTHESIZED_P (x))
2102 tree p = maybe_undo_parenthesized_ref (x);
2112 case FIX_TRUNC_EXPR:
2117 case TRUTH_NOT_EXPR:
2118 case FIXED_CONVERT_EXPR:
2121 loc = EXPR_LOCATION (x);
2122 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2124 if (op0 != TREE_OPERAND (x, 0))
2126 if (op0 == error_mark_node)
2127 x = error_mark_node;
2130 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2131 if (code == INDIRECT_REF
2132 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2134 TREE_READONLY (x) = TREE_READONLY (org_x);
2135 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2136 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2143 gcc_assert (TREE_CODE (x) != COND_EXPR
2144 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2147 case UNARY_PLUS_EXPR:
2148 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2149 if (op0 == error_mark_node)
2150 x = error_mark_node;
2152 x = fold_convert (TREE_TYPE (x), op0);
2155 case POSTDECREMENT_EXPR:
2156 case POSTINCREMENT_EXPR:
2158 case PREDECREMENT_EXPR:
2159 case PREINCREMENT_EXPR:
2163 case POINTER_PLUS_EXPR:
2167 case TRUNC_DIV_EXPR:
2169 case FLOOR_DIV_EXPR:
2170 case ROUND_DIV_EXPR:
2171 case TRUNC_MOD_EXPR:
2173 case ROUND_MOD_EXPR:
2175 case EXACT_DIV_EXPR:
2185 case TRUTH_AND_EXPR:
2186 case TRUTH_ANDIF_EXPR:
2188 case TRUTH_ORIF_EXPR:
2189 case TRUTH_XOR_EXPR:
2190 case LT_EXPR: case LE_EXPR:
2191 case GT_EXPR: case GE_EXPR:
2192 case EQ_EXPR: case NE_EXPR:
2193 case UNORDERED_EXPR: case ORDERED_EXPR:
2194 case UNLT_EXPR: case UNLE_EXPR:
2195 case UNGT_EXPR: case UNGE_EXPR:
2196 case UNEQ_EXPR: case LTGT_EXPR:
2197 case RANGE_EXPR: case COMPLEX_EXPR:
2199 loc = EXPR_LOCATION (x);
2200 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2201 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2203 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2205 if (op0 == error_mark_node || op1 == error_mark_node)
2206 x = error_mark_node;
2208 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2213 if (TREE_NO_WARNING (org_x)
2214 && warn_nonnull_compare
2215 && COMPARISON_CLASS_P (org_x))
2217 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2219 else if (COMPARISON_CLASS_P (x))
2220 TREE_NO_WARNING (x) = 1;
/* Otherwise give up on optimizing these; let the GIMPLE folders
   optimize them later on.  */
2223 else if (op0 != TREE_OPERAND (org_x, 0)
2224 || op1 != TREE_OPERAND (org_x, 1))
2226 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2227 TREE_NO_WARNING (x) = 1;
2237 /* Don't bother folding a void condition, since it can't produce a
2238 constant value. Also, some statement-level uses of COND_EXPR leave
2239 one of the branches NULL, so folding would crash. */
2240 if (VOID_TYPE_P (TREE_TYPE (x)))
2243 loc = EXPR_LOCATION (x);
2244 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2245 op1 = cp_fold (TREE_OPERAND (x, 1));
2246 op2 = cp_fold (TREE_OPERAND (x, 2));
2248 if (op0 != TREE_OPERAND (x, 0)
2249 || op1 != TREE_OPERAND (x, 1)
2250 || op2 != TREE_OPERAND (x, 2))
2252 if (op0 == error_mark_node
2253 || op1 == error_mark_node
2254 || op2 == error_mark_node)
2255 x = error_mark_node;
2257 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2262 /* A COND_EXPR might have incompatible types in branches if one or both
2263 arms are bitfields. If folding exposed such a branch, fix it up. */
2264 if (TREE_CODE (x) != code)
2265 if (tree type = is_bitfield_expr_with_lowered_type (x))
2266 x = fold_convert (type, x);
2272 int i, m, sv = optimize, nw = sv, changed = 0;
2273 tree callee = get_callee_fndecl (x);
2275 /* Some built-in function calls will be evaluated at compile-time in
2276 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2277 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
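/* Hypothetical illustration:

     constexpr int f (int i)
     { return __builtin_constant_p (i) ? 0 : 1; }

   At -O0, fold_builtin_1 would fold __builtin_constant_p (i) to 0
   right away; temporarily raising optimize keeps the answer open
   until constexpr evaluation can decide it.  */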
2278 if (callee && DECL_BUILT_IN (callee) && !optimize
2279 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2280 && current_function_decl
2281 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2286 m = call_expr_nargs (x);
2287 for (i = 0; i < m; i++)
2289 r = cp_fold (CALL_EXPR_ARG (x, i));
2290 if (r != CALL_EXPR_ARG (x, i))
2292 if (r == error_mark_node)
2294 x = error_mark_node;
2299 CALL_EXPR_ARG (x, i) = r;
2301 if (x == error_mark_node)
2308 if (TREE_CODE (r) != CALL_EXPR)
2316 /* Invoke maybe_constant_value for functions declared
2317 constexpr and not called with AGGR_INIT_EXPRs.
2319 Do constexpr expansion of expressions where the call itself is not
2320 constant, but the call followed by an INDIRECT_REF is. */
2321 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2323 r = maybe_constant_value (x);
2326 if (TREE_CODE (r) != CALL_EXPR)
2341 bool changed = false;
2342 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2343 vec<constructor_elt, va_gc> *nelts = NULL;
2344 vec_safe_reserve (nelts, vec_safe_length (elts));
2345 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2347 tree op = cp_fold (p->value);
2348 constructor_elt e = { p->index, op };
2349 nelts->quick_push (e);
2352 if (op == error_mark_node)
2354 x = error_mark_node;
2362 x = build_constructor (TREE_TYPE (x), nelts);
2369 bool changed = false;
2370 vec<tree, va_gc> *vec = make_tree_vector ();
2371 int i, n = TREE_VEC_LENGTH (x);
2372 vec_safe_reserve (vec, n);
2374 for (i = 0; i < n; i++)
2376 tree op = cp_fold (TREE_VEC_ELT (x, i));
2377 vec->quick_push (op);
2378 if (op != TREE_VEC_ELT (x, i))
2385 for (i = 0; i < n; i++)
2386 TREE_VEC_ELT (r, i) = (*vec)[i];
2390 release_tree_vector (vec);
2396 case ARRAY_RANGE_REF:
2398 loc = EXPR_LOCATION (x);
2399 op0 = cp_fold (TREE_OPERAND (x, 0));
2400 op1 = cp_fold (TREE_OPERAND (x, 1));
2401 op2 = cp_fold (TREE_OPERAND (x, 2));
2402 op3 = cp_fold (TREE_OPERAND (x, 3));
2404 if (op0 != TREE_OPERAND (x, 0)
2405 || op1 != TREE_OPERAND (x, 1)
2406 || op2 != TREE_OPERAND (x, 2)
2407 || op3 != TREE_OPERAND (x, 3))
2409 if (op0 == error_mark_node
2410 || op1 == error_mark_node
2411 || op2 == error_mark_node
2412 || op3 == error_mark_node)
2413 x = error_mark_node;
2416 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2417 TREE_READONLY (x) = TREE_READONLY (org_x);
2418 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2419 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2430 fold_cache->put (org_x, x);
/* Prevent us from trying to fold an already folded result again.  */
2433 fold_cache->put (x, x);
2438 #include "gt-cp-cp-gimplify.h"