1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "basic-block.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
33 #include "c-family/c-ubsan.h"
35 #include "cp-cilkplus.h"
37 /* Forward declarations. */
39 static tree cp_genericize_r (tree *, int *, void *);
40 static tree cp_fold_r (tree *, int *, void *);
41 static void cp_genericize_tree (tree*);
42 static tree cp_fold (tree);
44 /* Local declarations. */
46 enum bc_t { bc_break = 0, bc_continue = 1 };
48 /* Stack of labels which are targets for "break" or "continue",
49 linked through TREE_CHAIN. */
50 static tree bc_label[2];
52 /* Begin a scope which can be exited by a break or continue statement. BC
55 Just creates a label with location LOCATION and pushes it into the current
/* NOTE(review): this extract elides interleaved original lines (return
   type, braces, and apparently a bc == bc_break / bc_continue test
   guarding the two LABEL_DECL_* marks) -- confirm against full source.  */
59 begin_bc_block (enum bc_t bc, location_t location)
/* Create the target label and push it on the bc_label[bc] stack,
   linked through DECL_CHAIN (see comment at bc_label above).  */
61 tree label = create_artificial_label (location);
62 DECL_CHAIN (label) = bc_label[bc];
65 LABEL_DECL_BREAK (label) = true;
67 LABEL_DECL_CONTINUE (label) = true;
71 /* Finish a scope which can be exited by a break or continue statement.
72 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
73 an expression for the contents of the scope.
75 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
76 BLOCK. Otherwise, just forget the label. */
79 finish_bc_block (tree *block, enum bc_t bc, tree label)
/* LABEL must be the innermost (top-of-stack) label of its kind.  */
81 gcc_assert (label == bc_label[bc]);
/* Emit the LABEL_EXPR into *BLOCK only if some break/continue actually
   jumped to it (get_bc_label sets TREE_USED).  */
83 if (TREE_USED (label))
84 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
/* Pop the label off the stack and detach it.  */
87 bc_label[bc] = DECL_CHAIN (label);
88 DECL_CHAIN (label) = NULL_TREE;
91 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
92 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
93 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
94 of gimplify_cilk_spawn. */
/* NOTE(review): extract elides the return type, braces, and the
   declaration of `ii' from the original lines.  */
97 cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
/* Delegate the common cases to the C-family helper ...  */
102 cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
/* ... and additionally gimplify each AGGR_INIT_EXPR argument into a
   GIMPLE register rvalue.  */
103 if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
104 for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
105 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
106 is_gimple_reg, fb_rvalue);
110 /* Get the LABEL_EXPR to represent a break or continue statement
111 in the current block scope. BC indicates which. */
114 get_bc_label (enum bc_t bc)
/* The innermost break/continue target is the top of the bc_label stack.
   NOTE(review): the `return label;' line is elided in this extract.  */
116 tree label = bc_label[bc];
118 /* Mark the label used for finish_bc_block. */
119 TREE_USED (label) = 1;
123 /* Genericize a TRY_BLOCK. */
126 genericize_try_block (tree *stmt_p)
/* Rewrite the C++ TRY_BLOCK in place as a GENERIC TRY_CATCH_EXPR
   with the same statements and handlers.  */
128 tree body = TRY_STMTS (*stmt_p);
129 tree cleanup = TRY_HANDLERS (*stmt_p);
131 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
134 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
137 genericize_catch_block (tree *stmt_p)
/* Rewrite the C++ HANDLER in place as a GENERIC CATCH_EXPR whose
   first operand is the caught type and second is the handler body.  */
139 tree type = HANDLER_TYPE (*stmt_p);
140 tree body = HANDLER_BODY (*stmt_p);
142 /* FIXME should the caught type go in TREE_TYPE? */
143 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
146 /* A terser interface for building a representation of an exception
/* Build TRY_CATCH_EXPR (BODY, EH_FILTER_EXPR (ALLOWED, FAILURE)):
   run BODY; if it throws a type not in ALLOWED, execute FAILURE.
   NOTE(review): declaration of `t' and the final return are elided
   in this extract.  */
150 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
154 /* FIXME should the allowed types go in TREE_TYPE? */
155 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
156 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
158 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
159 append_to_statement_list (body, &TREE_OPERAND (t, 0));
164 /* Genericize an EH_SPEC_BLOCK by converting it to a
165 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
168 genericize_eh_spec_block (tree *stmt_p)
170 tree body = EH_SPEC_STMTS (*stmt_p);
171 tree allowed = EH_SPEC_RAISES (*stmt_p);
/* On a violated exception specification, call unexpected() with the
   current exception pointer.  */
172 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
174 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
/* Suppress warnings on the synthesized wrapper and its filter.  */
175 TREE_NO_WARNING (*stmt_p) = true;
176 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
179 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
182 genericize_if_stmt (tree *stmt_p)
/* NOTE(review): lines elided in this extract include `stmt = *stmt_p;',
   the null tests guarding the build_empty_stmt fallbacks, the bodies of
   the constant-condition shortcuts, and the final `*stmt_p = stmt;'.  */
184 tree stmt, cond, then_, else_;
185 location_t locus = EXPR_LOCATION (*stmt_p);
188 cond = IF_COND (stmt);
189 then_ = THEN_CLAUSE (stmt);
190 else_ = ELSE_CLAUSE (stmt);
/* Missing arms become empty statements so COND_EXPR always has three
   operands.  */
193 then_ = build_empty_stmt (locus);
195 else_ = build_empty_stmt (locus);
/* For a constant condition with a side-effect-free dead arm, presumably
   the kept arm replaces the whole statement -- elided here, confirm.  */
197 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
199 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
202 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
203 if (!EXPR_HAS_LOCATION (stmt))
204 protected_set_expr_location (stmt, locus);
208 /* Build a generic representation of one of the C loop forms. COND is the
209 loop condition or NULL_TREE. BODY is the (possibly compound) statement
210 controlled by the loop. INCR is the increment expression of a for-loop,
211 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
212 evaluated before the loop body as in while and for loops, or after the
213 loop body as in do-while loops. */
/* NOTE(review): this extract elides declarations (blab, clab, exit,
   loop, the final data parameter), braces, and several guard lines.  */
216 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
217 tree incr, bool cond_is_first, int *walk_subtrees,
222 tree stmt_list = NULL;
/* Open break/continue scopes for the duration of the loop body.  */
224 blab = begin_bc_block (bc_break, start_locus);
225 clab = begin_bc_block (bc_continue, start_locus);
227 protected_set_expr_location (incr, start_locus);
/* Lower the pieces recursively before assembling the loop.  */
229 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
230 cp_walk_tree (&body, cp_genericize_r, data, NULL);
231 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
234 if (cond && TREE_CODE (cond) != INTEGER_CST)
236 /* If COND is constant, don't bother building an exit. If it's false,
237 we won't build a loop. If it's true, any exits are in the body. */
238 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
/* exit: if (cond) ; else goto break_label;  */
239 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
240 get_bc_label (bc_break));
241 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
242 build_empty_stmt (cloc), exit);
/* Assemble: [exit-test if while/for] body; continue:; incr;
   [exit-test if do-while].  */
245 if (exit && cond_is_first)
246 append_to_statement_list (exit, &stmt_list);
247 append_to_statement_list (body, &stmt_list);
248 finish_bc_block (&stmt_list, bc_continue, clab);
249 append_to_statement_list (incr, &stmt_list);
250 if (exit && !cond_is_first)
251 append_to_statement_list (exit, &stmt_list);
254 stmt_list = build_empty_stmt (start_locus);
/* Constant-false condition: presumably the body is dropped and only a
   guarded COND_EXPR remains -- guard lines elided here, confirm.  */
257 if (cond && integer_zerop (cond))
260 loop = fold_build3_loc (start_locus, COND_EXPR,
261 void_type_node, cond, stmt_list,
262 build_empty_stmt (start_locus));
/* Otherwise wrap in a LOOP_EXPR, borrowing the body's location for an
   unconditional (or do-while) loop so diagnostics point somewhere real.  */
268 location_t loc = start_locus;
269 if (!cond || integer_nonzerop (cond))
270 loc = EXPR_LOCATION (expr_first (body));
271 if (loc == UNKNOWN_LOCATION)
273 loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
/* Close the break scope around the whole loop.  */
277 append_to_statement_list (loop, &stmt_list);
278 finish_bc_block (&stmt_list, bc_break, blab);
280 stmt_list = build_empty_stmt (start_locus);
285 /* Genericize a FOR_STMT node *STMT_P. */
288 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* NOTE(review): declarations of `stmt', `expr' and `loop', and the
   final store to *stmt_p, are elided in this extract.  */
293 tree init = FOR_INIT_STMT (stmt);
/* Lower the init clause and emit it before the loop proper.  */
297 cp_walk_tree (&init, cp_genericize_r, data, NULL);
298 append_to_statement_list (init, &expr);
/* cond_is_first == 1: test before the body, as for all for-loops.  */
301 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
302 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
303 append_to_statement_list (loop, &expr);
304 if (expr == NULL_TREE)
309 /* Genericize a WHILE_STMT node *STMT_P. */
312 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* While-loops have no increment; the condition is tested first.  */
315 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
316 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
319 /* Genericize a DO_STMT node *STMT_P. */
322 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* Do-while: cond_is_first == 0, so the exit test follows the body.  */
325 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
326 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
329 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
332 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* NOTE(review): `tree stmt = *stmt_p;' and the guard before the empty
   body fallback are elided in this extract.  */
335 tree break_block, body, cond, type;
336 location_t stmt_locus = EXPR_LOCATION (stmt);
/* break inside the switch body targets this label.  */
338 break_block = begin_bc_block (bc_break, stmt_locus);
340 body = SWITCH_STMT_BODY (stmt);
342 body = build_empty_stmt (stmt_locus);
343 cond = SWITCH_STMT_COND (stmt);
344 type = SWITCH_STMT_TYPE (stmt);
/* Lower the pieces before rebuilding as a SWITCH_EXPR.  */
346 cp_walk_tree (&body, cp_genericize_r, data, NULL);
347 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
348 cp_walk_tree (&type, cp_genericize_r, data, NULL);
351 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
352 finish_bc_block (stmt_p, bc_break, break_block);
355 /* Genericize a CONTINUE_STMT node *STMT_P. */
358 genericize_continue_stmt (tree *stmt_p)
/* Lower to: PREDICT_EXPR (continue not taken); goto continue_label;
   NOTE(review): the final `*stmt_p = stmt_list;' is elided here.  */
360 tree stmt_list = NULL;
361 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
362 tree label = get_bc_label (bc_continue);
363 location_t location = EXPR_LOCATION (*stmt_p);
364 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
/* _force: keep the predict hint even though it has no side effects.  */
365 append_to_statement_list_force (pred, &stmt_list);
366 append_to_statement_list (jump, &stmt_list);
370 /* Genericize a BREAK_STMT node *STMT_P. */
373 genericize_break_stmt (tree *stmt_p)
/* Lower to a plain goto targeting the innermost break label.  */
375 tree label = get_bc_label (bc_break);
376 location_t location = EXPR_LOCATION (*stmt_p);
377 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
380 /* Genericize a OMP_FOR node *STMT_P. */
383 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* NOTE(review): `tree stmt = *stmt_p;' and trailing lines are elided
   in this extract.  */
386 location_t locus = EXPR_LOCATION (stmt);
/* Only a continue scope is opened here; `break' is not lowered by this
   function.  */
387 tree clab = begin_bc_block (bc_continue, locus);
389 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
/* Taskloop clauses are skipped here -- presumably handled elsewhere;
   confirm against the full source.  */
390 if (TREE_CODE (stmt) != OMP_TASKLOOP)
391 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
392 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
393 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
394 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
395 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
/* The continue label lands at the end of the loop body.  */
398 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
401 /* Hook into the middle of gimplifying an OMP_FOR node. */
403 static enum gimplify_status
404 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
/* NOTE(review): the early-return body under the recursion guard and the
   final return value are elided in this extract.  */
406 tree for_stmt = *expr_p;
407 gimple_seq seq = NULL;
409 /* Protect ourselves from recursion. */
410 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
412 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
/* Gimplify the whole OMP_FOR into SEQ, then splice it before us.  */
414 gimplify_and_add (for_stmt, &seq);
415 gimple_seq_add_seq (pre_p, seq);
417 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
422 /* Gimplify an EXPR_STMT node. */
425 gimplify_expr_stmt (tree *stmt_p)
/* NOTE(review): several guard/assignment lines (e.g. the error_mark
   handling body and the final `*stmt_p = stmt;') are elided here.  */
427 tree stmt = EXPR_STMT_EXPR (*stmt_p);
429 if (stmt == error_mark_node)
432 /* Gimplification of a statement expression will nullify the
433 statement if all its side effects are moved to *PRE_P and *POST_P.
435 In this case we will not want to emit the gimplified statement.
436 However, we may still want to emit a warning, so we do that before
438 if (stmt && warn_unused_value)
440 if (!TREE_SIDE_EFFECTS (stmt))
442 if (!IS_EMPTY_STMT (stmt)
443 && !VOID_TYPE_P (TREE_TYPE (stmt))
444 && !TREE_NO_WARNING (stmt))
445 warning (OPT_Wunused_value, "statement with no effect")
/* Expressions with side effects still get the finer-grained check.  */;
448 warn_if_unused_value (stmt, input_location);
/* A nullified statement becomes an empty statement list.  */
451 if (stmt == NULL_TREE)
452 stmt = alloc_stmt_list ();
457 /* Gimplify initialization from an AGGR_INIT_EXPR. */
460 cp_gimplify_init_expr (tree *expr_p)
/* NOTE(review): declarations (e.g. `t'), braces, and loop structure
   around the COMPOUND_EXPR walk are elided in this extract.  */
462 tree from = TREE_OPERAND (*expr_p, 1);
463 tree to = TREE_OPERAND (*expr_p, 0);
466 /* What about code that pulls out the temp and uses it elsewhere? I
467 think that such code never uses the TARGET_EXPR as an initializer. If
468 I'm wrong, we'll abort because the temp won't have any RTL. In that
469 case, I guess we'll need to replace references somehow. */
470 if (TREE_CODE (from) == TARGET_EXPR)
471 from = TARGET_EXPR_INITIAL (from);
473 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
474 inside the TARGET_EXPR. */
477 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
479 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
480 replace the slot operand with our target.
482 Should we add a target parm to gimplify_expr instead? No, as in this
483 case we want to replace the INIT_EXPR. */
484 if (TREE_CODE (sub) == AGGR_INIT_EXPR
485 || TREE_CODE (sub) == VEC_INIT_EXPR)
487 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
488 AGGR_INIT_EXPR_SLOT (sub) = to;
490 VEC_INIT_EXPR_SLOT (sub) = to;
493 /* The initialization is now a side-effect, so the container can
496 TREE_TYPE (from) = void_type_node;
/* C++14 aggregate NSDMIs may still contain PLACEHOLDER_EXPRs; resolve
   them against the now-known object.  */
499 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
500 /* Handle aggregate NSDMI. */
501 replace_placeholders (sub, to);
/* Advance to the next COMPOUND_EXPR operand.  */
506 t = TREE_OPERAND (t, 1);
511 /* Gimplify a MUST_NOT_THROW_EXPR. */
513 static enum gimplify_status
514 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
/* Lower MUST_NOT_THROW_EXPR to GIMPLE_TRY / GIMPLE_EH_MUST_NOT_THROW:
   any exception escaping BODY calls terminate().
   NOTE(review): `tree stmt = *expr_p;', the gimple decl for `mnt', and
   the temp/return handling are elided in this extract.  */
517 tree temp = voidify_wrapper_expr (stmt, NULL);
518 tree body = TREE_OPERAND (stmt, 0);
519 gimple_seq try_ = NULL;
520 gimple_seq catch_ = NULL;
523 gimplify_and_add (body, &try_);
524 mnt = gimple_build_eh_must_not_throw (terminate_node);
525 gimple_seq_add_stmt_without_update (&catch_, mnt);
526 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
528 gimple_seq_add_stmt_without_update (pre_p, mnt);
539 /* Return TRUE if an operand (OP) of a given TYPE being copied is
540 really just an empty class copy.
542 Check that the operand has a simple form so that TARGET_EXPRs and
543 non-empty CONSTRUCTORs get reduced properly, and we leave the
544 return slot optimization alone because it isn't a copy. */
547 simple_empty_class_p (tree type, tree op)
/* NOTE(review): the `return' keyword line introducing this expression
   is elided in this extract.  */
550 ((TREE_CODE (op) == COMPOUND_EXPR
551 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
552 || is_gimple_lvalue (op)
553 || INDIRECT_REF_P (op)
/* An empty, non-clobber CONSTRUCTOR is a trivial empty-class value.  */
554 || (TREE_CODE (op) == CONSTRUCTOR
555 && CONSTRUCTOR_NELTS (op) == 0
556 && !TREE_CLOBBER_P (op))
/* Calls are fine unless they use the return slot (not a copy).  */
557 || (TREE_CODE (op) == CALL_EXPR
558 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
559 && is_really_empty_class (type);
562 /* Returns true if evaluating E as an lvalue has side-effects;
563 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
564 have side-effects until there is a read or write through it. */
567 lvalue_has_side_effects (tree e)
/* NOTE(review): `return false;' / `return true;' bodies under several
   of these conditions are elided in this extract.  */
569 if (!TREE_SIDE_EFFECTS (e))
/* Walk down the component-reference chain; only ARRAY_REF indices can
   contribute lvalue side effects along the way.  */
571 while (handled_component_p (e))
573 if (TREE_CODE (e) == ARRAY_REF
574 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
576 e = TREE_OPERAND (e, 0);
579 /* Just naming a variable has no side-effects. */
581 else if (INDIRECT_REF_P (e))
582 /* Similarly, indirection has no side-effects. */
583 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
585 /* For anything else, trust TREE_SIDE_EFFECTS. */
586 return TREE_SIDE_EFFECTS (e);
589 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
592 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
/* C++-specific hook called from the language-independent gimplifier.
   NOTE(review): this extract elides the `switch (code)' skeleton, most
   `case' labels, `break's, braces, and several return statements; the
   case boundaries noted below are inferred and should be confirmed
   against the full source.  */
594 int saved_stmts_are_full_exprs_p = 0;
595 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
596 enum tree_code code = TREE_CODE (*expr_p);
597 enum gimplify_status ret;
/* Statement nodes carry their own full-expression flag; save and set.  */
599 if (STATEMENT_CODE_P (code))
601 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
602 current_stmt_tree ()->stmts_are_full_exprs_p
603 = STMT_IS_FULL_EXPR_P (*expr_p);
/* (AGGR_INIT_EXPR case, presumably)  */
609 simplify_aggr_init_expr (expr_p);
/* (VEC_INIT_EXPR case): expand into an element-by-element loop via
   build_vec_init, then fold and genericize the result.  */
615 location_t loc = input_location;
616 tree init = VEC_INIT_EXPR_INIT (*expr_p);
617 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
618 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
619 input_location = EXPR_LOCATION (*expr_p);
620 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
621 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
623 tf_warning_or_error);
625 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
626 cp_genericize_tree (expr_p);
628 input_location = loc;
633 /* FIXME communicate throw type to back end, probably by moving
634 THROW_EXPR into ../tree.def. */
635 *expr_p = TREE_OPERAND (*expr_p, 0);
639 case MUST_NOT_THROW_EXPR:
640 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
643 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
644 LHS of an assignment might also be involved in the RHS, as in bug
/* (INIT_EXPR case, presumably): Cilk spawn handling first.  */
647 if (fn_contains_cilk_spawn_p (cfun))
649 if (cilk_cp_detect_spawn_and_unwrap (expr_p))
651 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
653 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
655 if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
659 cp_gimplify_init_expr (expr_p);
660 if (TREE_CODE (*expr_p) != INIT_EXPR)
/* (MODIFY_EXPR case, presumably).  */
666 if (fn_contains_cilk_spawn_p (cfun)
667 && cilk_cp_detect_spawn_and_unwrap (expr_p)
670 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
671 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
673 /* If the back end isn't clever enough to know that the lhs and rhs
674 types are the same, add an explicit conversion. */
675 tree op0 = TREE_OPERAND (*expr_p, 0);
676 tree op1 = TREE_OPERAND (*expr_p, 1);
678 if (!error_operand_p (op0)
679 && !error_operand_p (op1)
680 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
681 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1))
682 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
683 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
684 TREE_TYPE (op0), op1);
686 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
688 /* Remove any copies of empty classes. Also drop volatile
689 variables on the RHS to avoid infinite recursion from
690 gimplify_expr trying to load the value. */
691 if (TREE_SIDE_EFFECTS (op1))
693 if (TREE_THIS_VOLATILE (op1)
694 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
695 op1 = build_fold_addr_expr (op1);
697 gimplify_and_add (op1, pre_p);
/* The empty-class assignment collapses to just evaluating the LHS.  */
699 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
700 is_gimple_lvalue, fb_lvalue);
701 *expr_p = TREE_OPERAND (*expr_p, 0);
703 /* P0145 says that the RHS is sequenced before the LHS.
704 gimplify_modify_expr gimplifies the RHS before the LHS, but that
705 isn't quite strong enough in two cases:
707 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
708 mean it's evaluated after the LHS.
710 2) the value calculation of the RHS is also sequenced before the
711 LHS, so for scalar assignment we need to preevaluate if the
712 RHS could be affected by LHS side-effects even if it has no
713 side-effects of its own. We don't need this for classes because
714 class assignment takes its RHS by reference. */
715 else if (flag_strong_eval_order > 1
716 && TREE_CODE (*expr_p) == MODIFY_EXPR
717 && lvalue_has_side_effects (op0)
718 && (TREE_CODE (op1) == CALL_EXPR
719 || (SCALAR_TYPE_P (TREE_TYPE (op1))
720 && !TREE_CONSTANT (op1))))
721 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
726 case EMPTY_CLASS_EXPR:
727 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
728 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
/* (BASELINK case, presumably): strip to the underlying functions.  */
733 *expr_p = BASELINK_FUNCTIONS (*expr_p);
738 genericize_try_block (expr_p);
743 genericize_catch_block (expr_p);
748 genericize_eh_spec_block (expr_p);
767 ret = cp_gimplify_omp_for (expr_p, pre_p);
771 gimplify_expr_stmt (expr_p);
775 case UNARY_PLUS_EXPR:
/* Unary plus is a no-op apart from a possible conversion.  */
777 tree arg = TREE_OPERAND (*expr_p, 0);
778 tree type = TREE_TYPE (*expr_p);
779 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
785 case CILK_SPAWN_STMT:
786 gcc_assert(fn_contains_cilk_spawn_p (cfun)
787 && cilk_cp_detect_spawn_and_unwrap (expr_p));
791 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
792 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
/* (CALL_EXPR case, presumably).  */
797 if (fn_contains_cilk_spawn_p (cfun)
798 && cilk_cp_detect_spawn_and_unwrap (expr_p)
801 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
802 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
805 if (!CALL_EXPR_FN (*expr_p))
806 /* Internal function call. */;
807 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
809 /* This is a call to a (compound) assignment operator that used
810 the operator syntax; gimplify the RHS first. */
811 gcc_assert (call_expr_nargs (*expr_p) == 2);
812 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
813 enum gimplify_status t
814 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
818 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
820 /* Leave the last argument for gimplify_call_expr, to avoid problems
821 with __builtin_va_arg_pack(). */
822 int nargs = call_expr_nargs (*expr_p) - 1;
823 for (int i = 0; i < nargs; ++i)
825 enum gimplify_status t
826 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
831 else if (flag_strong_eval_order
832 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
834 /* If flag_strong_eval_order, evaluate the object argument first. */
835 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
836 if (POINTER_TYPE_P (fntype))
837 fntype = TREE_TYPE (fntype);
838 if (TREE_CODE (fntype) == METHOD_TYPE)
840 enum gimplify_status t
841 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
/* (RETURN_EXPR case, presumably): peel off the inner assignment and
   retry, avoiding the INIT_EXPR path.  */
849 if (TREE_OPERAND (*expr_p, 0)
850 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
851 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
853 expr_p = &TREE_OPERAND (*expr_p, 0);
854 code = TREE_CODE (*expr_p);
855 /* Avoid going through the INIT_EXPR case, which can
856 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
857 goto modify_expr_case;
/* Default: hand off to the C-family gimplifier.  */
862 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
866 /* Restore saved state. */
867 if (STATEMENT_CODE_P (code))
868 current_stmt_tree ()->stmts_are_full_exprs_p
869 = saved_stmts_are_full_exprs_p;
875 is_invisiref_parm (const_tree t)
/* True for a PARM_DECL or RESULT_DECL passed invisibly by reference.  */
877 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
878 && DECL_BY_REFERENCE (t));
881 /* Return true if the uid in both int tree maps are equal. */
884 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
/* Two map entries compare equal iff their DECL_UIDs match.  */
886 return (a->uid == b->uid);
889 /* Hash a UID in a cxx_int_tree_map. */
892 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
897 /* A stable comparison routine for use with splay trees and DECLs. */
900 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
/* NOTE(review): the casts defining `a'/`b' from the keys are elided in
   this extract; ordering is by DECL_UID for stability.  */
905 return DECL_UID (a) - DECL_UID (b);
908 /* OpenMP context during genericization. */
910 struct cp_genericize_omp_taskreg
/* NOTE(review): fields elided in this extract; `is_parallel' and
   `default_shared' are read elsewhere in this file.  */
/* Enclosing task/parallel region, NULL at outermost level.  */
914 struct cp_genericize_omp_taskreg *outer;
/* Map from DECL (splay_tree_key) to its OMP_CLAUSE_DEFAULT_* sharing.  */
915 splay_tree variables;
918 /* Return true if genericization should try to determine if
919 DECL is firstprivate or shared within task regions. */
922 omp_var_to_track (tree decl)
/* NOTE(review): the `return false;' bodies under each disqualifying
   condition and the final `return true;' are elided in this extract.  */
924 tree type = TREE_TYPE (decl);
/* Invisible-reference parms are tracked by their referenced type.  */
925 if (is_invisiref_parm (decl))
926 type = TREE_TYPE (type);
/* Strip array dimensions down to the element type.  */
927 while (TREE_CODE (type) == ARRAY_TYPE)
928 type = TREE_TYPE (type);
/* Only class-typed, non-thread-local vars with unspecified
   predetermined sharing are interesting.  */
929 if (type == error_mark_node || !CLASS_TYPE_P (type))
931 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
933 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
938 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
941 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
/* NOTE(review): braces, `break's, and the guard around the main body
   (presumably `if (n == NULL)') are elided in this extract.  */
943 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
944 (splay_tree_key) decl);
/* First sighting in this region: default to shared ...  */
947 int flags = OMP_CLAUSE_DEFAULT_SHARED;
/* ... propagate the use outward first ...  */
949 omp_cxx_notice_variable (omp_ctx->outer, decl);
950 if (!omp_ctx->default_shared)
952 struct cp_genericize_omp_taskreg *octx;
/* Scan outer regions: a non-shared outer determination (stopping at a
   parallel) means this task sees the var firstprivate.  */
954 for (octx = omp_ctx->outer; octx; octx = octx->outer)
956 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
957 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
959 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
962 if (octx->is_parallel)
/* Function-local automatics and parms also default to firstprivate.  */
966 && (TREE_CODE (decl) == PARM_DECL
967 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
968 && DECL_CONTEXT (decl) == current_function_decl))
969 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
970 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
972 /* DECL is implicitly determined firstprivate in
973 the current task construct. Ensure copy ctor and
974 dtor are instantiated, because during gimplification
975 it will be already too late. */
976 tree type = TREE_TYPE (decl);
977 if (is_invisiref_parm (decl))
978 type = TREE_TYPE (type);
979 while (TREE_CODE (type) == ARRAY_TYPE)
980 type = TREE_TYPE (type);
981 get_copy_ctor (type, tf_none);
982 get_dtor (type, tf_none);
/* Record the determination for this region.  */
985 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
989 /* Genericization context. */
991 struct cp_genericize_data
/* Trees already walked by cp_genericize_r (avoid rewalking).  */
993 hash_set<tree> *p_set;
/* Enclosing BIND_EXPRs, innermost last; used to find a BLOCK for
   USING_STMT lowering.  */
994 vec<tree> bind_expr_stack;
/* Innermost OpenMP task/parallel region, or NULL.
   NOTE(review): a `no_sanitize_p' field is referenced elsewhere in
   this file but its declaration is elided from this extract.  */
995 struct cp_genericize_omp_taskreg *omp_ctx;
1000 /* Perform any pre-gimplification folding of C++ front end trees to
1002 Note: The folding of none-omp cases is something to move into
1003 the middle-end. As for now we have most foldings only on GENERIC
1004 in fold-const, we need to perform this before transformation to
1008 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
/* walk_tree callback: fold each tree via cp_fold, pruning revisits.
   NOTE(review): declarations of `stmt', `x', `i', `n', braces, and the
   `*walk_subtrees = 0' / return lines are elided in this extract.  */
1011 enum tree_code code;
1013 *stmt_p = stmt = cp_fold (*stmt_p);
1015 if (((hash_set<tree> *) data)->add (stmt))
1017 /* Don't walk subtrees of stmts we've already walked once, otherwise
1018 we can have exponential complexity with e.g. lots of nested
1019 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
1020 always the same tree, which the first time cp_fold_r has been
1021 called on it had the subtrees walked. */
1026 code = TREE_CODE (stmt);
/* OMP/Cilk/OACC loop forms need their operands folded piecewise so the
   iteration structure stays recognizable.  */
1027 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
1028 || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
1029 || code == OACC_LOOP)
1034 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1035 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1036 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
/* The condition may be a single comparison or a TREE_VEC of them
   (collapsed loops); fold only the comparison operands, not the
   comparison itself.  */
1037 x = OMP_FOR_COND (stmt);
1038 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1040 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1041 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1043 else if (x && TREE_CODE (x) == TREE_VEC)
1045 n = TREE_VEC_LENGTH (x);
1046 for (i = 0; i < n; i++)
1048 tree o = TREE_VEC_ELT (x, i);
1049 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1050 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
/* Similarly for the increment vector: fold only the operands of the
   PLUS/MINUS/POINTER_PLUS inside each MODIFY_EXPR.  */
1053 x = OMP_FOR_INCR (stmt);
1054 if (x && TREE_CODE (x) == TREE_VEC)
1056 n = TREE_VEC_LENGTH (x);
1057 for (i = 0; i < n; i++)
1059 tree o = TREE_VEC_ELT (x, i);
1060 if (o && TREE_CODE (o) == MODIFY_EXPR)
1061 o = TREE_OPERAND (o, 1);
1062 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1063 || TREE_CODE (o) == POINTER_PLUS_EXPR))
1065 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1066 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1070 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1077 /* Fold ALL the trees! FIXME we should be able to remove this, but
1078 apparently that still causes optimization regressions. */
1081 cp_fold_function (tree fndecl)
/* Fold FNDECL's entire saved body with cp_fold_r, using a fresh
   visited-set so each subtree is folded exactly once.  */
1083 hash_set<tree> pset;
1084 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
1087 /* Perform any pre-gimplification lowering of C++ front end trees to
1091 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1093 tree stmt = *stmt_p;
1094 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1095 hash_set<tree> *p_set = wtd->p_set;
1097 /* If in an OpenMP context, note var uses. */
1098 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1100 || TREE_CODE (stmt) == PARM_DECL
1101 || TREE_CODE (stmt) == RESULT_DECL)
1102 && omp_var_to_track (stmt))
1103 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1105 /* Don't dereference parms in a thunk, pass the references through. */
1106 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1107 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1113 /* Otherwise, do dereference invisible reference parms. */
1114 if (is_invisiref_parm (stmt))
1116 *stmt_p = convert_from_reference (stmt);
1121 /* Map block scope extern declarations to visible declarations with the
1122 same name and type in outer scopes if any. */
1123 if (cp_function_chain->extern_decl_map
1124 && VAR_OR_FUNCTION_DECL_P (stmt)
1125 && DECL_EXTERNAL (stmt))
1127 struct cxx_int_tree_map *h, in;
1128 in.uid = DECL_UID (stmt);
1129 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1138 /* Other than invisiref parms, don't walk the same tree twice. */
1139 if (p_set->contains (stmt))
1145 if (TREE_CODE (stmt) == ADDR_EXPR
1146 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1148 /* If in an OpenMP context, note var uses. */
1149 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1150 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1151 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1152 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1155 else if (TREE_CODE (stmt) == RETURN_EXPR
1156 && TREE_OPERAND (stmt, 0)
1157 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1158 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1160 else if (TREE_CODE (stmt) == OMP_CLAUSE)
1161 switch (OMP_CLAUSE_CODE (stmt))
1163 case OMP_CLAUSE_LASTPRIVATE:
1164 /* Don't dereference an invisiref in OpenMP clauses. */
1165 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1168 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1169 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1170 cp_genericize_r, data, NULL);
1173 case OMP_CLAUSE_PRIVATE:
1174 /* Don't dereference an invisiref in OpenMP clauses. */
1175 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1177 else if (wtd->omp_ctx != NULL)
1179 /* Private clause doesn't cause any references to the
1180 var in outer contexts, avoid calling
1181 omp_cxx_notice_variable for it. */
1182 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1183 wtd->omp_ctx = NULL;
1184 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1190 case OMP_CLAUSE_SHARED:
1191 case OMP_CLAUSE_FIRSTPRIVATE:
1192 case OMP_CLAUSE_COPYIN:
1193 case OMP_CLAUSE_COPYPRIVATE:
1194 /* Don't dereference an invisiref in OpenMP clauses. */
1195 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1198 case OMP_CLAUSE_REDUCTION:
1199 /* Don't dereference an invisiref in reduction clause's
1200 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1201 still needs to be genericized. */
1202 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1205 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1206 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1207 cp_genericize_r, data, NULL);
1208 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1209 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1210 cp_genericize_r, data, NULL);
1216 else if (IS_TYPE_OR_DECL_P (stmt))
1219 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1220 to lower this construct before scanning it, so we need to lower these
1221 before doing anything else. */
1222 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1223 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1224 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1227 CLEANUP_BODY (stmt),
1228 CLEANUP_EXPR (stmt));
1230 else if (TREE_CODE (stmt) == IF_STMT)
1232 genericize_if_stmt (stmt_p);
1233 /* *stmt_p has changed, tail recurse to handle it again. */
1234 return cp_genericize_r (stmt_p, walk_subtrees, data);
1237 /* COND_EXPR might have incompatible types in branches if one or both
1238 arms are bitfields. Fix it up now. */
1239 else if (TREE_CODE (stmt) == COND_EXPR)
1242 = (TREE_OPERAND (stmt, 1)
1243 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1246 = (TREE_OPERAND (stmt, 2)
1247 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1250 && !useless_type_conversion_p (TREE_TYPE (stmt),
1251 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1253 TREE_OPERAND (stmt, 1)
1254 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1255 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1259 && !useless_type_conversion_p (TREE_TYPE (stmt),
1260 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1262 TREE_OPERAND (stmt, 2)
1263 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1264 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1269 else if (TREE_CODE (stmt) == BIND_EXPR)
1271 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1274 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1276 && !DECL_EXTERNAL (decl)
1277 && omp_var_to_track (decl))
1280 = splay_tree_lookup (wtd->omp_ctx->variables,
1281 (splay_tree_key) decl);
1283 splay_tree_insert (wtd->omp_ctx->variables,
1284 (splay_tree_key) decl,
1286 ? OMP_CLAUSE_DEFAULT_SHARED
1287 : OMP_CLAUSE_DEFAULT_PRIVATE);
1291 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1293 /* The point here is to not sanitize static initializers. */
1294 bool no_sanitize_p = wtd->no_sanitize_p;
1295 wtd->no_sanitize_p = true;
1296 for (tree decl = BIND_EXPR_VARS (stmt);
1298 decl = DECL_CHAIN (decl))
1300 && TREE_STATIC (decl)
1301 && DECL_INITIAL (decl))
1302 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1303 wtd->no_sanitize_p = no_sanitize_p;
1305 wtd->bind_expr_stack.safe_push (stmt);
1306 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1307 cp_genericize_r, data, NULL);
1308 wtd->bind_expr_stack.pop ();
1311 else if (TREE_CODE (stmt) == USING_STMT)
1313 tree block = NULL_TREE;
1315 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1316 BLOCK, and append an IMPORTED_DECL to its
1317 BLOCK_VARS chained list. */
1318 if (wtd->bind_expr_stack.exists ())
1321 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1322 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1327 tree using_directive;
1328 gcc_assert (TREE_OPERAND (stmt, 0));
1330 using_directive = make_node (IMPORTED_DECL);
1331 TREE_TYPE (using_directive) = void_type_node;
1333 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1334 = TREE_OPERAND (stmt, 0);
1335 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1336 BLOCK_VARS (block) = using_directive;
1338 /* The USING_STMT won't appear in GENERIC. */
1339 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1343 else if (TREE_CODE (stmt) == DECL_EXPR
1344 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1346 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1347 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1350 else if (TREE_CODE (stmt) == DECL_EXPR)
1352 tree d = DECL_EXPR_DECL (stmt);
1353 if (TREE_CODE (d) == VAR_DECL)
1354 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1356 else if (TREE_CODE (stmt) == OMP_PARALLEL
1357 || TREE_CODE (stmt) == OMP_TASK
1358 || TREE_CODE (stmt) == OMP_TASKLOOP)
1360 struct cp_genericize_omp_taskreg omp_ctx;
1365 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1366 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1367 omp_ctx.default_shared = omp_ctx.is_parallel;
1368 omp_ctx.outer = wtd->omp_ctx;
1369 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1370 wtd->omp_ctx = &omp_ctx;
1371 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1372 switch (OMP_CLAUSE_CODE (c))
1374 case OMP_CLAUSE_SHARED:
1375 case OMP_CLAUSE_PRIVATE:
1376 case OMP_CLAUSE_FIRSTPRIVATE:
1377 case OMP_CLAUSE_LASTPRIVATE:
1378 decl = OMP_CLAUSE_DECL (c);
1379 if (decl == error_mark_node || !omp_var_to_track (decl))
1381 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1384 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1385 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1386 ? OMP_CLAUSE_DEFAULT_SHARED
1387 : OMP_CLAUSE_DEFAULT_PRIVATE);
1388 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1390 omp_cxx_notice_variable (omp_ctx.outer, decl);
1392 case OMP_CLAUSE_DEFAULT:
1393 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1394 omp_ctx.default_shared = true;
1398 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1399 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1401 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1402 wtd->omp_ctx = omp_ctx.outer;
1403 splay_tree_delete (omp_ctx.variables);
1405 else if (TREE_CODE (stmt) == TRY_BLOCK)
1408 tree try_block = wtd->try_block;
1409 wtd->try_block = stmt;
1410 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1411 wtd->try_block = try_block;
1412 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1414 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1416 /* MUST_NOT_THROW_COND might be something else with TM. */
1417 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1420 tree try_block = wtd->try_block;
1421 wtd->try_block = stmt;
1422 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1423 wtd->try_block = try_block;
1426 else if (TREE_CODE (stmt) == THROW_EXPR)
1428 location_t loc = location_of (stmt);
1429 if (TREE_NO_WARNING (stmt))
1431 else if (wtd->try_block)
1433 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1434 && warning_at (loc, OPT_Wterminate,
1435 "throw will always call terminate()")
1436 && cxx_dialect >= cxx11
1437 && DECL_DESTRUCTOR_P (current_function_decl))
1438 inform (loc, "in C++11 destructors default to noexcept");
1442 if (warn_cxx11_compat && cxx_dialect < cxx11
1443 && DECL_DESTRUCTOR_P (current_function_decl)
1444 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1446 && (get_defaulted_eh_spec (current_function_decl)
1447 == empty_except_spec))
1448 warning_at (loc, OPT_Wc__11_compat,
1449 "in C++11 this throw will terminate because "
1450 "destructors default to noexcept");
1453 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1454 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1455 else if (TREE_CODE (stmt) == FOR_STMT)
1456 genericize_for_stmt (stmt_p, walk_subtrees, data);
1457 else if (TREE_CODE (stmt) == WHILE_STMT)
1458 genericize_while_stmt (stmt_p, walk_subtrees, data);
1459 else if (TREE_CODE (stmt) == DO_STMT)
1460 genericize_do_stmt (stmt_p, walk_subtrees, data);
1461 else if (TREE_CODE (stmt) == SWITCH_STMT)
1462 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1463 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1464 genericize_continue_stmt (stmt_p);
1465 else if (TREE_CODE (stmt) == BREAK_STMT)
1466 genericize_break_stmt (stmt_p);
1467 else if (TREE_CODE (stmt) == OMP_FOR
1468 || TREE_CODE (stmt) == OMP_SIMD
1469 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1470 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1471 else if (TREE_CODE (stmt) == PTRMEM_CST)
1473 /* By the time we get here we're handing off to the back end, so we don't
1474 need or want to preserve PTRMEM_CST anymore. */
1475 *stmt_p = cplus_expand_constant (stmt);
1478 else if ((flag_sanitize
1479 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1480 && !wtd->no_sanitize_p)
1482 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1483 && TREE_CODE (stmt) == NOP_EXPR
1484 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1485 ubsan_maybe_instrument_reference (stmt);
1486 else if (TREE_CODE (stmt) == CALL_EXPR)
1488 tree fn = CALL_EXPR_FN (stmt);
1490 && !error_operand_p (fn)
1491 && POINTER_TYPE_P (TREE_TYPE (fn))
1492 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1495 = TREE_CODE (fn) == ADDR_EXPR
1496 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1497 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1498 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1499 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1500 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1501 cp_ubsan_maybe_instrument_member_call (stmt);
1506 p_set->add (*stmt_p);
1511 /* Lower C++ front end trees to GENERIC in T_P.  */
1514 cp_genericize_tree (tree* t_p)
/* Walk-tree state shared with cp_genericize_r: a pointer set of nodes
   already processed, a stack of enclosing BIND_EXPRs (pushed/popped as
   the walker enters/leaves them), the innermost try block, and a flag
   suppressing sanitizer instrumentation (e.g. in static initializers).
   NOTE(review): lines are elided in this extract; the numeric prefixes
   are original file line numbers and are not contiguous.  */
1516 struct cp_genericize_data wtd;
1518 wtd.p_set = new hash_set<tree>;
1519 wtd.bind_expr_stack.create (0);
1521 wtd.try_block = NULL_TREE;
1522 wtd.no_sanitize_p = false;
/* Genericize the whole tree rooted at *T_P.  */
1523 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1525 wtd.bind_expr_stack.release ();
/* With -fsanitize=vptr, add instrumentation for member accesses.  */
1526 if (flag_sanitize & SANITIZE_VPTR)
1527 cp_ubsan_instrument_member_accesses (t_p);
1530 /* If a function that should end with a return in non-void
1531 function doesn't obviously end with return, add ubsan
1532 instrumentation code to verify it at runtime.  */
1535 cp_ubsan_maybe_instrument_return (tree fndecl)
/* Nothing to check for void returns, ctors/dtors, or when the target
   says not to warn about missing returns for this function.  */
1537 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1538 || DECL_CONSTRUCTOR_P (fndecl)
1539 || DECL_DESTRUCTOR_P (fndecl)
1540 || !targetm.warn_func_return (fndecl))
/* Drill down through wrappers (BIND_EXPR bodies, TRY_FINALLY cleanup
   regions, statement lists) looking at the last statement of the body.
   NOTE(review): several lines of this loop/switch are elided in this
   extract, so the full descent logic is not visible here.  */
1543 tree t = DECL_SAVED_TREE (fndecl);
1546 switch (TREE_CODE (t))
1549 t = BIND_EXPR_BODY (t);
1551 case TRY_FINALLY_EXPR:
1552 t = TREE_OPERAND (t, 0);
1554 case STATEMENT_LIST:
1556 tree_stmt_iterator i = tsi_last (t);
/* The body does not obviously end in a return: append a runtime check
   (ubsan_instrument_return) after the last statement of the outermost
   BIND_EXPR's statement list.  */
1573 t = DECL_SAVED_TREE (fndecl);
1574 if (TREE_CODE (t) == BIND_EXPR
1575 && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
1577 tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
1578 t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1579 tsi_link_after (&i, t, TSI_NEW_STMT);
/* Genericize the body of FNDECL: rewrite invisible-reference parms and
   return value, expand Cilk array notations, lower all C++-specific
   trees via cp_genericize_tree, optionally add -fsanitize=return
   instrumentation, and finish with the shared c_genericize.  The
   break/continue label state is saved and restored so calls can nest.  */
1584 cp_genericize (tree fndecl)
1588 /* Fix up the types of parms passed by invisible reference.  */
1589 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1590 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1592 /* If a function's arguments are copied to create a thunk,
1593 then DECL_BY_REFERENCE will be set -- but the type of the
1594 argument will be a pointer type, so we will never get
1596 gcc_assert (!DECL_BY_REFERENCE (t));
1597 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
/* Switch the parm to its pass-by-reference (pointer) type and mark it
   DECL_BY_REFERENCE so later phases dereference it.  */
1598 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1599 DECL_BY_REFERENCE (t) = 1;
1600 TREE_ADDRESSABLE (t) = 0;
1604 /* Do the same for the return value.  */
1605 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1607 t = DECL_RESULT (fndecl);
1608 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1609 DECL_BY_REFERENCE (t) = 1;
1610 TREE_ADDRESSABLE (t) = 0;
1614 /* Adjust DECL_VALUE_EXPR of the original var.  */
1615 tree outer = outer_curly_brace_block (current_function_decl);
/* A named-return-value variable aliases the RESULT_DECL via its
   DECL_VALUE_EXPR; re-point it through the new reference type.  */
1619 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1620 if (DECL_NAME (t) == DECL_NAME (var)
1621 && DECL_HAS_VALUE_EXPR_P (var)
1622 && DECL_VALUE_EXPR (var) == t)
1624 tree val = convert_from_reference (t);
1625 SET_DECL_VALUE_EXPR (var, val);
1631 /* If we're a clone, the body is already GIMPLE.  */
1632 if (DECL_CLONED_FUNCTION_P (fndecl))
1635 /* Allow cp_genericize calls to be nested.  */
1636 tree save_bc_label[2];
1637 save_bc_label[bc_break] = bc_label[bc_break];
1638 save_bc_label[bc_continue] = bc_label[bc_continue];
1639 bc_label[bc_break] = NULL_TREE;
1640 bc_label[bc_continue] = NULL_TREE;
1642 /* Expand all the array notations here.  */
1644 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1645 DECL_SAVED_TREE (fndecl) =
1646 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1648 /* We do want to see every occurrence of the parms, so we can't just use
1649 walk_tree's hash functionality.  */
1650 cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
/* With -fsanitize=return, verify at runtime that value-returning paths
   actually return.  */
1652 if (flag_sanitize & SANITIZE_RETURN
1653 && do_ubsan_in_current_function ())
1654 cp_ubsan_maybe_instrument_return (fndecl);
1656 /* Do everything else.  */
1657 c_genericize (fndecl);
/* All break/continue scopes must have been closed by now.  */
1659 gcc_assert (bc_label[bc_break] == NULL);
1660 gcc_assert (bc_label[bc_continue] == NULL);
1661 bc_label[bc_break] = save_bc_label[bc_break];
1662 bc_label[bc_continue] = save_bc_label[bc_continue];
1665 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1666 NULL if there is in fact nothing to do.  ARG2 may be null if FN
1667 actually only takes one argument.  */
1670 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1672 tree defparm, parm, t;
1680 nargs = list_length (DECL_ARGUMENTS (fn));
1681 argarray = XALLOCAVEC (tree, nargs);
/* Skip the implicit `this' parameter; DEFPARM walks the remaining
   parameter types so trailing default arguments can be supplied.  */
1683 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1685 defparm = TREE_CHAIN (defparm);
/* Array case: build an explicit element-wise loop that applies FN to
   each element of ARG1 (and, in lockstep, ARG2 when present).  */
1687 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1689 tree inner_type = TREE_TYPE (arg1);
1690 tree start1, end1, p1;
1691 tree start2 = NULL, p2 = NULL;
1692 tree ret = NULL, lab;
/* Peel off nested array dimensions down to the element type, forming
   ARRAY_REFs to the first element of each level.  */
1698 inner_type = TREE_TYPE (inner_type);
1699 start1 = build4 (ARRAY_REF, inner_type, start1,
1700 size_zero_node, NULL, NULL);
1702 start2 = build4 (ARRAY_REF, inner_type, start2,
1703 size_zero_node, NULL, NULL);
1705 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1706 start1 = build_fold_addr_expr_loc (input_location, start1);
1708 start2 = build_fold_addr_expr_loc (input_location, start2);
/* END1 = one past the last element of ARG1 (byte size of the whole
   array added to the start pointer).  */
1710 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1711 end1 = fold_build_pointer_plus (start1, end1);
/* P1/P2 are the loop induction pointers.  */
1713 p1 = create_tmp_var (TREE_TYPE (start1));
1714 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1715 append_to_statement_list (t, &ret);
1719 p2 = create_tmp_var (TREE_TYPE (start2));
1720 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1721 append_to_statement_list (t, &ret);
/* Loop head label; the back-edge jump is emitted below.  */
1724 lab = create_artificial_label (input_location);
1725 t = build1 (LABEL_EXPR, void_type_node, lab);
1726 append_to_statement_list (t, &ret);
1731 /* Handle default arguments.  */
1732 for (parm = defparm; parm && parm != void_list_node;
1733 parm = TREE_CHAIN (parm), i++)
1734 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1735 TREE_PURPOSE (parm), fn, i,
1736 tf_warning_or_error);
/* Call FN on the current element(s), discarding any result.  */
1737 t = build_call_a (fn, i, argarray);
1738 t = fold_convert (void_type_node, t);
1739 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1740 append_to_statement_list (t, &ret);
/* Advance P1 (and P2) by one element.  */
1742 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1743 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1744 append_to_statement_list (t, &ret);
1748 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1749 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1750 append_to_statement_list (t, &ret);
/* Back edge: loop while P1 != END1.  */
1753 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1754 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1755 append_to_statement_list (t, &ret);
/* Scalar case: a single call with the addresses of ARG1/ARG2 plus any
   trailing default arguments.  */
1761 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1763 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1764 /* Handle default arguments.  */
1765 for (parm = defparm; parm && parm != void_list_node;
1766 parm = TREE_CHAIN (parm), i++)
1767 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1768 TREE_PURPOSE (parm),
1769 fn, i, tf_warning_or_error);
1770 t = build_call_a (fn, i, argarray);
1771 t = fold_convert (void_type_node, t);
1772 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1776 /* Return code to initialize DECL with its default constructor, or
1777 NULL if there's nothing to do.  */
1780 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1782 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 0 of the clause-info TREE_VEC holds the constructor to apply
   (passed with a single argument, hence ARG2 == NULL).  */
1786 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1791 /* Return code to initialize DST with a copy constructor from SRC.  */
1794 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1796 tree info = CP_OMP_CLAUSE_INFO (clause);
/* With clause info, apply the recorded constructor (slot 0) to
   DST/SRC; otherwise fall back to a plain bitwise assignment.  */
1800 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1802 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1807 /* Similarly, except use an assignment operator instead.  */
1810 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1812 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 2 of the clause-info TREE_VEC holds the assignment operator;
   without clause info, fall back to a plain MODIFY_EXPR.  */
1816 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1818 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1823 /* Return code to destroy DECL.  */
1826 cxx_omp_clause_dtor (tree clause, tree decl)
1828 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 1 of the clause-info TREE_VEC holds the destructor.  */
1832 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1837 /* True if OpenMP should privatize what this DECL points to rather
1838 than the DECL itself.  */
1841 cxx_omp_privatize_by_reference (const_tree decl)
/* Reference-typed variables and invisible-reference parms are accessed
   through an implicit indirection, so privatize the pointee.  */
1843 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1844 || is_invisiref_parm (decl));
1847 /* Return true if DECL is const qualified var having no mutable member.  */
1849 cxx_omp_const_qual_no_mutable (tree decl)
1851 tree type = TREE_TYPE (decl);
/* For references, look through to the referenced type — but only for
   invisible-reference parms, where the indirection is an artifact.  */
1852 if (TREE_CODE (type) == REFERENCE_TYPE)
1854 if (!is_invisiref_parm (decl))
1856 type = TREE_TYPE (type);
1858 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1860 /* NVR doesn't preserve const qualification of the
/* Named return value optimization: recover the const qualifier from
   the original user variable in the outermost block, matched by name
   and main type variant.  */
1862 tree outer = outer_curly_brace_block (current_function_decl);
1866 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1867 if (DECL_NAME (decl) == DECL_NAME (var)
1868 && (TYPE_MAIN_VARIANT (type)
1869 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1871 if (TYPE_READONLY (TREE_TYPE (var)))
1872 type = TREE_TYPE (var);
1878 if (type == error_mark_node)
1881 /* Variables with const-qualified type having no mutable member
1882 are predetermined shared.  */
1883 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1889 /* True if OpenMP sharing attribute of DECL is predetermined.  */
1891 enum omp_clause_default_kind
1892 cxx_omp_predetermined_sharing (tree decl)
1894 /* Static data members are predetermined shared.  */
1895 if (TREE_STATIC (decl))
/* Only statics whose context is a class type qualify here.  */
1897 tree ctx = CP_DECL_CONTEXT (decl);
1898 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1899 return OMP_CLAUSE_DEFAULT_SHARED;
1902 /* Const qualified vars having no mutable member are predetermined
1904 if (cxx_omp_const_qual_no_mutable (decl))
1905 return OMP_CLAUSE_DEFAULT_SHARED;
/* Everything else: sharing is not predetermined by the language.  */
1907 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1910 /* Finalize an implicitly determined clause.  */
1913 cxx_omp_finish_clause (tree c, gimple_seq *)
1915 tree decl, inner_type;
1916 bool make_shared = false;
/* Only implicit firstprivate clauses need fixing up here.  */
1918 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1921 decl = OMP_CLAUSE_DECL (c);
1922 decl = require_complete_type (decl);
1923 inner_type = TREE_TYPE (decl);
1924 if (decl == error_mark_node)
1926 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1927 inner_type = TREE_TYPE (inner_type);
1929 /* We're interested in the base element, not arrays.  */
1930 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1931 inner_type = TREE_TYPE (inner_type);
1933 /* Check for special function availability by building a call to one.
1934 Save the results, because later we won't be in the right context
1935 for making these queries.  */
1937 && CLASS_TYPE_P (inner_type)
1938 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
/* If we could not build the needed copy-ctor/dtor info, demote the
   clause to shared rather than firstprivate.  */
1942 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1945 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1946 disregarded in OpenMP construct, because it is going to be
1947 remapped during OpenMP lowering.  SHARED is true if DECL
1948 is going to be shared, false if it is going to be privatized.  */
1951 cxx_omp_disregard_value_expr (tree decl, bool shared)
/* Artificial privatized-member proxies carry a DECL_VALUE_EXPR that
   OpenMP lowering will remap, so ignore it here.  */
1955 && DECL_HAS_VALUE_EXPR_P (decl)
1956 && DECL_ARTIFICIAL (decl)
1957 && DECL_LANG_SPECIFIC (decl)
1958 && DECL_OMP_PRIVATIZED_MEMBER (decl);
1961 /* Perform folding on expression X.  */
1964 cp_fully_fold (tree x)
/* Inside a template, trees are not in a foldable form; do nothing.  */
1966 if (processing_template_decl)
1968 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
1969 have to call both.  */
/* In C++11 and later, try constexpr evaluation first.  */
1970 if (cxx_dialect >= cxx11)
1971 x = maybe_constant_value (x);
1975 /* Fold expression X which is used as an rvalue if RVAL is true.  */
1978 cp_fold_maybe_rvalue (tree x, bool rval)
/* For rvalue uses of a DECL, substitute its constant value when one is
   known (e.g. a const variable's initializer).  NOTE(review): the loop
   structure around this body is elided in this extract.  */
1983 if (rval && DECL_P (x))
1985 tree v = decl_constant_value (x);
1986 if (v != x && v != error_mark_node)
1997 /* Fold expression X which is used as an rvalue.  */
2000 cp_fold_rvalue (tree x)
/* Convenience wrapper: fold X with rvalue semantics.  */
2002 return cp_fold_maybe_rvalue (x, true);
2005 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2006 and certain changes are made to the folding done.  Or should be (FIXME).  We
2007 never touch maybe_const, as it is only used for the C front-end
2008 C_MAYBE_CONST_EXPR.  */
2011 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
2013 /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
/* Delegate to the C++ rvalue folder; the extra C-front-end parameters
   are intentionally unused here.  */
2015 return cp_fold_rvalue (x);
/* Cache of cp_fold results, keyed by the unfolded tree.  GTY((deletable))
   allows the GC to drop the whole map between collections.  */
2018 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2020 /* Dispose of the whole FOLD_CACHE.  */
2023 clear_fold_cache (void)
/* The map is created lazily in cp_fold, so it may still be NULL.  */
2025 if (fold_cache != NULL)
2026 fold_cache->empty ();
2029 /* This function tries to fold an expression X.
2030 To avoid combinatorial explosion, folding results are kept in fold_cache.
2031 If we are processing a template or X is invalid, we don't fold at all.
2032 For performance reasons we don't cache expressions representing a
2033 declaration or constant.
2034 Function returns X or its folded variant.
 
   NOTE(review): many lines of this function (including its signature)
   are elided in this extract; the numeric prefixes are original file
   line numbers and are not contiguous.  Code below is kept verbatim.  */
2039 tree op0, op1, op2, op3;
2040 tree org_x = x, r = NULL_TREE;
2041 enum tree_code code;
2043 bool rval_ops = true;
2045 if (!x || x == error_mark_node)
/* No folding inside templates or on typeless/erroneous expressions.  */
2048 if (processing_template_decl
2049 || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
2052 /* Don't bother to cache DECLs or constants.  */
2053 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2056 if (fold_cache == NULL)
2057 fold_cache = hash_map<tree, tree>::create_ggc (101);
/* Return a previously computed folding of X, if any.  */
2059 if (tree *cached = fold_cache->get (x))
2062 code = TREE_CODE (x);
2066 x = fold_sizeof_expr (x);
/* Unary conversions and related wrappers.  */
2069 case VIEW_CONVERT_EXPR:
2074 case NON_LVALUE_EXPR:
2076 if (VOID_TYPE_P (TREE_TYPE (x)))
2079 loc = EXPR_LOCATION (x);
2080 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2082 if (code == CONVERT_EXPR
2083 && SCALAR_TYPE_P (TREE_TYPE (x))
2084 && op0 != void_node)
2085 /* During parsing we used convert_to_*_nofold; re-convert now using the
2086 folding variants, since fold() doesn't do those transformations.  */
2087 x = fold (convert (TREE_TYPE (x), op0));
2088 else if (op0 != TREE_OPERAND (x, 0))
2090 if (op0 == error_mark_node)
2091 x = error_mark_node;
2093 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2098 /* Conversion of an out-of-range value has implementation-defined
2099 behavior; the language considers it different from arithmetic
2100 overflow, which is undefined.  */
2101 if (TREE_CODE (op0) == INTEGER_CST
2102 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2103 TREE_OVERFLOW (x) = false;
2108 /* We don't need the decltype(auto) obfuscation anymore.  */
2109 if (REF_PARENTHESIZED_P (x))
2111 tree p = maybe_undo_parenthesized_ref (x);
/* Other unary operators: fold the operand, rebuild if it changed.  */
2122 case FIX_TRUNC_EXPR:
2127 case TRUTH_NOT_EXPR:
2128 case FIXED_CONVERT_EXPR:
2131 loc = EXPR_LOCATION (x);
2132 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2134 if (op0 != TREE_OPERAND (x, 0))
2136 if (op0 == error_mark_node)
2137 x = error_mark_node;
2140 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
/* Preserve qualifiers/side-effect flags on a rebuilt dereference.  */
2141 if (code == INDIRECT_REF
2142 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2144 TREE_READONLY (x) = TREE_READONLY (org_x);
2145 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2146 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2153 gcc_assert (TREE_CODE (x) != COND_EXPR
2154 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2157 case UNARY_PLUS_EXPR:
2158 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2159 if (op0 == error_mark_node)
2160 x = error_mark_node;
2162 x = fold_convert (TREE_TYPE (x), op0);
/* Binary operators (including compound assignments and comparisons).  */
2165 case POSTDECREMENT_EXPR:
2166 case POSTINCREMENT_EXPR:
2168 case PREDECREMENT_EXPR:
2169 case PREINCREMENT_EXPR:
2174 case POINTER_PLUS_EXPR:
2178 case TRUNC_DIV_EXPR:
2180 case FLOOR_DIV_EXPR:
2181 case ROUND_DIV_EXPR:
2182 case TRUNC_MOD_EXPR:
2184 case ROUND_MOD_EXPR:
2186 case EXACT_DIV_EXPR:
2196 case TRUTH_AND_EXPR:
2197 case TRUTH_ANDIF_EXPR:
2199 case TRUTH_ORIF_EXPR:
2200 case TRUTH_XOR_EXPR:
2201 case LT_EXPR: case LE_EXPR:
2202 case GT_EXPR: case GE_EXPR:
2203 case EQ_EXPR: case NE_EXPR:
2204 case UNORDERED_EXPR: case ORDERED_EXPR:
2205 case UNLT_EXPR: case UNLE_EXPR:
2206 case UNGT_EXPR: case UNGE_EXPR:
2207 case UNEQ_EXPR: case LTGT_EXPR:
2208 case RANGE_EXPR: case COMPLEX_EXPR:
2210 loc = EXPR_LOCATION (x);
2211 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2212 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2214 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2216 if (op0 == error_mark_node || op1 == error_mark_node)
2217 x = error_mark_node;
2219 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
/* Keep -Wnonnull-compare suppression attached to the rebuilt
   comparison so the warning is not re-emitted later.  */
2224 if (TREE_NO_WARNING (org_x)
2225 && warn_nonnull_compare
2226 && COMPARISON_CLASS_P (org_x))
2228 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2230 else if (COMPARISON_CLASS_P (x))
2231 TREE_NO_WARNING (x) = 1;
2232 /* Otherwise give up on optimizing these, let GIMPLE folders
2233 optimize those later on.  */
2234 else if (op0 != TREE_OPERAND (org_x, 0)
2235 || op1 != TREE_OPERAND (org_x, 1))
2237 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2238 TREE_NO_WARNING (x) = 1;
2248 /* Don't bother folding a void condition, since it can't produce a
2249 constant value.  Also, some statement-level uses of COND_EXPR leave
2250 one of the branches NULL, so folding would crash.  */
2251 if (VOID_TYPE_P (TREE_TYPE (x)))
2254 loc = EXPR_LOCATION (x);
2255 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2256 op1 = cp_fold (TREE_OPERAND (x, 1));
2257 op2 = cp_fold (TREE_OPERAND (x, 2));
2259 if (op0 != TREE_OPERAND (x, 0)
2260 || op1 != TREE_OPERAND (x, 1)
2261 || op2 != TREE_OPERAND (x, 2))
2263 if (op0 == error_mark_node
2264 || op1 == error_mark_node
2265 || op2 == error_mark_node)
2266 x = error_mark_node;
2268 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2273 /* A COND_EXPR might have incompatible types in branches if one or both
2274 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2275 if (TREE_CODE (x) != code)
2276 if (tree type = is_bitfield_expr_with_lowered_type (x))
2277 x = fold_convert (type, x);
/* CALL_EXPR: fold each argument, then try constexpr evaluation.  */
2283 int i, m, sv = optimize, nw = sv, changed = 0;
2284 tree callee = get_callee_fndecl (x);
2286 /* Some built-in function calls will be evaluated at compile-time in
2287 fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2288 a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2289 if (callee && DECL_BUILT_IN (callee) && !optimize
2290 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2291 && current_function_decl
2292 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2297 m = call_expr_nargs (x);
2298 for (i = 0; i < m; i++)
2300 r = cp_fold (CALL_EXPR_ARG (x, i));
2301 if (r != CALL_EXPR_ARG (x, i))
2303 if (r == error_mark_node)
2305 x = error_mark_node;
2310 CALL_EXPR_ARG (x, i) = r;
2312 if (x == error_mark_node)
2319 if (TREE_CODE (r) != CALL_EXPR)
2327 /* Invoke maybe_constant_value for functions declared
2328 constexpr and not called with AGGR_INIT_EXPRs.
2330 Do constexpr expansion of expressions where the call itself is not
2331 constant, but the call followed by an INDIRECT_REF is.  */
2332 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2334 r = maybe_constant_value (x);
2337 if (TREE_CODE (r) != CALL_EXPR)
/* CONSTRUCTOR: fold each element value into a fresh element vector.  */
2352 bool changed = false;
2353 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2354 vec<constructor_elt, va_gc> *nelts = NULL;
2355 vec_safe_reserve (nelts, vec_safe_length (elts));
2356 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2358 tree op = cp_fold (p->value);
2359 constructor_elt e = { p->index, op };
2360 nelts->quick_push (e);
2363 if (op == error_mark_node)
2365 x = error_mark_node;
2373 x = build_constructor (TREE_TYPE (x), nelts);
/* TREE_VEC: fold each element; rebuild only if something changed.  */
2380 bool changed = false;
2381 vec<tree, va_gc> *vec = make_tree_vector ();
2382 int i, n = TREE_VEC_LENGTH (x);
2383 vec_safe_reserve (vec, n);
2385 for (i = 0; i < n; i++)
2387 tree op = cp_fold (TREE_VEC_ELT (x, i));
2388 vec->quick_push (op);
2389 if (op != TREE_VEC_ELT (x, i))
2396 for (i = 0; i < n; i++)
2397 TREE_VEC_ELT (r, i) = (*vec)[i];
2401 release_tree_vector (vec);
/* Quaternary array references: fold all four operands.  */
2407 case ARRAY_RANGE_REF:
2409 loc = EXPR_LOCATION (x);
2410 op0 = cp_fold (TREE_OPERAND (x, 0));
2411 op1 = cp_fold (TREE_OPERAND (x, 1));
2412 op2 = cp_fold (TREE_OPERAND (x, 2));
2413 op3 = cp_fold (TREE_OPERAND (x, 3));
2415 if (op0 != TREE_OPERAND (x, 0)
2416 || op1 != TREE_OPERAND (x, 1)
2417 || op2 != TREE_OPERAND (x, 2)
2418 || op3 != TREE_OPERAND (x, 3))
2420 if (op0 == error_mark_node
2421 || op1 == error_mark_node
2422 || op2 == error_mark_node
2423 || op3 == error_mark_node)
2424 x = error_mark_node;
2427 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2428 TREE_READONLY (x) = TREE_READONLY (org_x);
2429 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2430 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
/* Record the result for both the original and folded trees so a second
   cp_fold on either is a cache hit.  */
2441 fold_cache->put (org_x, x);
2442 /* Prevent that we try to fold an already folded result again.  */
2444 fold_cache->put (x, x);
2449 #include "gt-cp-cp-gimplify.h"