1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "stor-layout.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
36 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-expr.h"
47 #include "splay-tree.h"
49 #include "c-family/c-ubsan.h"
52 /* Forward declarations. */
54 static tree cp_genericize_r (tree *, int *, void *);
55 static void cp_genericize_tree (tree*);
57 /* Local declarations. */
/* NOTE(review): this excerpt has stray line numbers baked into each line and
   gaps between lines — the paste should be cleaned against the original file.  */
/* Index values for the bc_label stack below: bc_break selects the target of
   a "break", bc_continue the target of a "continue".  */
59 enum bc_t { bc_break = 0, bc_continue = 1 };
61 /* Stack of labels which are targets for "break" or "continue",
62 linked through TREE_CHAIN. */
63 static tree bc_label[2];
65 /* Begin a scope which can be exited by a break or continue statement. BC
68 Just creates a label with location LOCATION and pushes it into the current
72 begin_bc_block (enum bc_t bc, location_t location)
/* Create a fresh artificial label and push it on the bc_label stack for
   this kind of scope; the stack is linked through DECL_CHAIN.  */
74 tree label = create_artificial_label (location);
75 DECL_CHAIN (label) = bc_label[bc];
80 /* Finish a scope which can be exited by a break or continue statement.
81 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
82 an expression for the contents of the scope.
84 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
85 BLOCK. Otherwise, just forget the label. */
88 finish_bc_block (tree *block, enum bc_t bc, tree label)
/* The label being finished must be the innermost one of its kind.  */
90 gcc_assert (label == bc_label[bc]);
/* TREE_USED is set by get_bc_label when some break/continue targeted it;
   only then does the label need to be emitted.  */
92 if (TREE_USED (label))
93 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
/* Pop the label off the bc_label stack and detach it from the chain.  */
96 bc_label[bc] = DECL_CHAIN (label);
97 DECL_CHAIN (label) = NULL_TREE;
100 /* Get the LABEL_EXPR to represent a break or continue statement
101 in the current block scope. BC indicates which. */
104 get_bc_label (enum bc_t bc)
/* The innermost enclosing scope's label is the top of the stack.  */
106 tree label = bc_label[bc];
108 /* Mark the label used for finish_bc_block. */
109 TREE_USED (label) = 1;
113 /* Genericize a TRY_BLOCK. */
116 genericize_try_block (tree *stmt_p)
118 tree body = TRY_STMTS (*stmt_p);
119 tree cleanup = TRY_HANDLERS (*stmt_p);
/* Replace the C++-specific TRY_BLOCK with the GENERIC TRY_CATCH_EXPR.  */
121 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
124 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
127 genericize_catch_block (tree *stmt_p)
129 tree type = HANDLER_TYPE (*stmt_p);
130 tree body = HANDLER_BODY (*stmt_p);
132 /* FIXME should the caught type go in TREE_TYPE? */
/* CATCH_EXPR operand 0 is the caught type, operand 1 the handler body.  */
133 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
136 /* A terser interface for building a representation of an exception
140 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
144 /* FIXME should the allowed types go in TREE_TYPE? */
/* Build an EH_FILTER_EXPR: ALLOWED lists the permitted exception types,
   FAILURE is executed when a raised exception is not in that list.  */
145 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
146 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
/* Wrap BODY in a TRY_CATCH_EXPR whose handler is the filter.  */
148 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
149 append_to_statement_list (body, &TREE_OPERAND (t, 0));
154 /* Genericize an EH_SPEC_BLOCK by converting it to a
155 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
158 genericize_eh_spec_block (tree *stmt_p)
160 tree body = EH_SPEC_STMTS (*stmt_p);
161 tree allowed = EH_SPEC_RAISES (*stmt_p);
/* On violation of the exception specification the filter's failure path
   calls the unexpected handler with the current exception pointer.  */
162 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
164 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
/* Suppress warnings on the synthesized wrapper and its filter operand.  */
165 TREE_NO_WARNING (*stmt_p) = true;
166 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
169 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
172 genericize_if_stmt (tree *stmt_p)
174 tree stmt, cond, then_, else_;
175 location_t locus = EXPR_LOCATION (*stmt_p);
178 cond = IF_COND (stmt);
179 then_ = THEN_CLAUSE (stmt);
180 else_ = ELSE_CLAUSE (stmt);
/* Missing arms become empty statements carrying the if's location.  */
183 then_ = build_empty_stmt (locus);
185 else_ = build_empty_stmt (locus);
/* Fold a constant condition when the dead arm has no side effects.
   NOTE(review): the branch bodies for these cases are missing from this
   excerpt — confirm against the original file.  */
187 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
189 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
192 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
193 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
194 SET_EXPR_LOCATION (stmt, locus);
198 /* Build a generic representation of one of the C loop forms. COND is the
199 loop condition or NULL_TREE. BODY is the (possibly compound) statement
200 controlled by the loop. INCR is the increment expression of a for-loop,
201 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
202 evaluated before the loop body as in while and for loops, or after the
203 loop body as in do-while loops. */
206 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
207 tree incr, bool cond_is_first, int *walk_subtrees,
212 tree stmt_list = NULL;
/* Open break/continue scopes so break/continue inside BODY resolve to
   labels associated with this loop.  */
214 blab = begin_bc_block (bc_break, start_locus);
215 clab = begin_bc_block (bc_continue, start_locus);
217 if (incr && EXPR_P (incr))
218 SET_EXPR_LOCATION (incr, start_locus);
/* Genericize the pieces ourselves; the caller sets *walk_subtrees = 0.  */
220 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
221 cp_walk_tree (&body, cp_genericize_r, data, NULL);
222 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
225 if (cond && TREE_CODE (cond) != INTEGER_CST)
227 /* If COND is constant, don't bother building an exit. If it's false,
228 we won't build a loop. If it's true, any exits are in the body. */
229 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
/* The exit test is "if (!cond) goto break_label;" built as a COND_EXPR
   whose else-arm jumps out of the loop.  */
230 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
231 get_bc_label (bc_break));
232 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
233 build_empty_stmt (cloc), exit);
/* Lay out the loop body: [exit] body continue-label incr [exit],
   with the exit first for while/for and last for do-while.  */
236 if (exit && cond_is_first)
237 append_to_statement_list (exit, &stmt_list);
238 append_to_statement_list (body, &stmt_list);
239 finish_bc_block (&stmt_list, bc_continue, clab);
240 append_to_statement_list (incr, &stmt_list);
241 if (exit && !cond_is_first)
242 append_to_statement_list (exit, &stmt_list);
245 stmt_list = build_empty_stmt (start_locus);
/* A constant-false condition: the loop never iterates (except a do-while
   body runs once).  NOTE(review): surrounding branch lines are missing
   from this excerpt.  */
248 if (cond && integer_zerop (cond))
251 loop = fold_build3_loc (start_locus, COND_EXPR,
252 void_type_node, cond, stmt_list,
253 build_empty_stmt (start_locus));
/* General case: an infinite LOOP_EXPR with explicit exit GOTOs inside.  */
258 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
261 append_to_statement_list (loop, &stmt_list);
/* Emit the break label (if used) after the loop.  */
262 finish_bc_block (&stmt_list, bc_break, blab);
264 stmt_list = build_empty_stmt (start_locus);
269 /* Genericize a FOR_STMT node *STMT_P. */
272 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
277 tree init = FOR_INIT_STMT (stmt);
/* Genericize the init statement separately and emit it before the loop.  */
281 cp_walk_tree (&init, cp_genericize_r, data, NULL);
282 append_to_statement_list (init, &expr);
/* cond_is_first == 1: a for-loop tests its condition before the body.  */
285 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
286 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
287 append_to_statement_list (loop, &expr);
288 if (expr == NULL_TREE)
293 /* Genericize a WHILE_STMT node *STMT_P. */
296 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* cond_is_first == 1: a while-loop tests its condition before the body;
   there is no increment expression.  */
299 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
300 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
303 /* Genericize a DO_STMT node *STMT_P. */
306 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
/* cond_is_first == 0: a do-while tests its condition after the body.  */
309 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
310 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
316 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
319 tree break_block, body, cond, type;
320 location_t stmt_locus = EXPR_LOCATION (stmt);
/* "break" inside the switch jumps to a label emitted after the SWITCH_EXPR.  */
322 break_block = begin_bc_block (bc_break, stmt_locus);
324 body = SWITCH_STMT_BODY (stmt);
326 body = build_empty_stmt (stmt_locus);
327 cond = SWITCH_STMT_COND (stmt);
328 type = SWITCH_STMT_TYPE (stmt);
/* Genericize the pieces ourselves since the statement is replaced below.  */
330 cp_walk_tree (&body, cp_genericize_r, data, NULL);
331 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
332 cp_walk_tree (&type, cp_genericize_r, data, NULL);
335 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
/* Append the break label (if used) after the SWITCH_EXPR.  */
336 finish_bc_block (stmt_p, bc_break, break_block);
339 /* Genericize a CONTINUE_STMT node *STMT_P. */
342 genericize_continue_stmt (tree *stmt_p)
344 tree stmt_list = NULL;
/* Hint the branch predictor that the continue path is not taken.  */
345 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
346 tree label = get_bc_label (bc_continue);
347 location_t location = EXPR_LOCATION (*stmt_p);
/* Lower "continue" to a goto targeting the innermost continue label.  */
348 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
349 append_to_statement_list (pred, &stmt_list);
350 append_to_statement_list (jump, &stmt_list);
354 /* Genericize a BREAK_STMT node *STMT_P. */
357 genericize_break_stmt (tree *stmt_p)
/* Lower "break" to a goto targeting the innermost break label.  */
359 tree label = get_bc_label (bc_break);
360 location_t location = EXPR_LOCATION (*stmt_p);
361 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
364 /* Genericize a OMP_FOR node *STMT_P. */
367 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
370 location_t locus = EXPR_LOCATION (stmt);
/* Only a continue scope is opened here — no break scope for the OMP loop
   itself.  */
371 tree clab = begin_bc_block (bc_continue, locus);
/* Genericize every operand of the OMP_FOR explicitly; the statement node
   itself is kept, unlike the plain C++ loop forms.  */
373 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
374 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
375 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
377 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
378 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
/* Emit the continue label (if used) at the end of the loop body.  */
381 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
384 /* Hook into the middle of gimplifying an OMP_FOR node. */
386 static enum gimplify_status
387 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
389 tree for_stmt = *expr_p;
390 gimple_seq seq = NULL;
392 /* Protect ourselves from recursion. */
393 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
/* Set the flag around the nested gimplification so a recursive visit of
   the same node bails out above.  */
395 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
/* Gimplify the statement into SEQ, then splice SEQ onto PRE_P.  */
397 gimplify_and_add (for_stmt, &seq);
398 gimple_seq_add_seq (pre_p, seq);
400 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
405 /* Gimplify an EXPR_STMT node. */
408 gimplify_expr_stmt (tree *stmt_p)
410 tree stmt = EXPR_STMT_EXPR (*stmt_p);
412 if (stmt == error_mark_node)
415 /* Gimplification of a statement expression will nullify the
416 statement if all its side effects are moved to *PRE_P and *POST_P.
418 In this case we will not want to emit the gimplified statement.
419 However, we may still want to emit a warning, so we do that before
421 if (stmt && warn_unused_value)
423 if (!TREE_SIDE_EFFECTS (stmt))
/* A non-empty, non-void expression with no side effects is useless.  */
425 if (!IS_EMPTY_STMT (stmt)
426 && !VOID_TYPE_P (TREE_TYPE (stmt))
427 && !TREE_NO_WARNING (stmt))
428 warning (OPT_Wunused_value, "statement with no effect");
/* The statement has side effects; warn about an unused computed value.  */
431 warn_if_unused_value (stmt, input_location);
/* A null statement becomes an empty statement list.  */
434 if (stmt == NULL_TREE)
435 stmt = alloc_stmt_list ();
440 /* Gimplify initialization from an AGGR_INIT_EXPR. */
443 cp_gimplify_init_expr (tree *expr_p)
/* *EXPR_P is an INIT_EXPR: operand 0 is the target, operand 1 the source.  */
445 tree from = TREE_OPERAND (*expr_p, 1);
446 tree to = TREE_OPERAND (*expr_p, 0);
449 /* What about code that pulls out the temp and uses it elsewhere? I
450 think that such code never uses the TARGET_EXPR as an initializer. If
451 I'm wrong, we'll abort because the temp won't have any RTL. In that
452 case, I guess we'll need to replace references somehow. */
453 if (TREE_CODE (from) == TARGET_EXPR)
454 from = TARGET_EXPR_INITIAL (from);
456 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
457 inside the TARGET_EXPR. */
460 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
462 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
463 replace the slot operand with our target.
465 Should we add a target parm to gimplify_expr instead? No, as in this
466 case we want to replace the INIT_EXPR. */
467 if (TREE_CODE (sub) == AGGR_INIT_EXPR
468 || TREE_CODE (sub) == VEC_INIT_EXPR)
470 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
471 AGGR_INIT_EXPR_SLOT (sub) = to;
473 VEC_INIT_EXPR_SLOT (sub) = to;
476 /* The initialization is now a side-effect, so the container can
479 TREE_TYPE (from) = void_type_node;
/* C++14 aggregate NSDMI: resolve PLACEHOLDER_EXPRs against the target.  */
482 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
483 /* Handle aggregate NSDMI. */
484 replace_placeholders (sub, to);
/* Advance past the COMPOUND_EXPR to continue the loop over FROM.  */
489 t = TREE_OPERAND (t, 1);
494 /* Gimplify a MUST_NOT_THROW_EXPR. */
496 static enum gimplify_status
497 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
500 tree temp = voidify_wrapper_expr (stmt, NULL);
501 tree body = TREE_OPERAND (stmt, 0);
502 gimple_seq try_ = NULL;
503 gimple_seq catch_ = NULL;
/* Wrap BODY in a GIMPLE_TRY whose handler is an eh-must-not-throw region
   calling std::terminate if an exception escapes.  */
506 gimplify_and_add (body, &try_);
507 mnt = gimple_build_eh_must_not_throw (terminate_node);
508 gimple_seq_add_stmt_without_update (&catch_, mnt);
509 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
511 gimple_seq_add_stmt_without_update (pre_p, mnt);
522 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
/* NOTE(review): in this excerpt the switch head, most case labels, braces
   and the default arm are missing — the bodies below belong to a
   switch (code) over C++-specific tree codes; compare against the
   original file before editing.  */
525 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
527 int saved_stmts_are_full_exprs_p = 0;
528 enum tree_code code = TREE_CODE (*expr_p);
529 enum gimplify_status ret;
/* Save/override the full-expression flag for statement codes; restored at
   the bottom of the function.  */
531 if (STATEMENT_CODE_P (code))
533 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
534 current_stmt_tree ()->stmts_are_full_exprs_p
535 = STMT_IS_FULL_EXPR_P (*expr_p);
541 *expr_p = cplus_expand_constant (*expr_p);
546 simplify_aggr_init_expr (expr_p);
/* VEC_INIT_EXPR: expand to a runtime array-initialization loop.  */
552 location_t loc = input_location;
553 tree init = VEC_INIT_EXPR_INIT (*expr_p);
554 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
555 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
556 input_location = EXPR_LOCATION (*expr_p);
557 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
558 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
560 tf_warning_or_error);
/* The freshly built initialization code is front-end trees; genericize it
   before handing it back to the gimplifier.  */
561 cp_genericize_tree (expr_p);
563 input_location = loc;
568 /* FIXME communicate throw type to back end, probably by moving
569 THROW_EXPR into ../tree.def. */
570 *expr_p = TREE_OPERAND (*expr_p, 0);
574 case MUST_NOT_THROW_EXPR:
575 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
578 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
579 LHS of an assignment might also be involved in the RHS, as in bug
582 if (fn_contains_cilk_spawn_p (cfun)
583 && cilk_detect_spawn_and_unwrap (expr_p)
585 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
586 cp_gimplify_init_expr (expr_p);
587 if (TREE_CODE (*expr_p) != INIT_EXPR)
589 /* Otherwise fall through. */
592 if (fn_contains_cilk_spawn_p (cfun)
593 && cilk_detect_spawn_and_unwrap (expr_p)
595 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
597 /* If the back end isn't clever enough to know that the lhs and rhs
598 types are the same, add an explicit conversion. */
599 tree op0 = TREE_OPERAND (*expr_p, 0);
600 tree op1 = TREE_OPERAND (*expr_p, 1);
602 if (!error_operand_p (op0)
603 && !error_operand_p (op1)
604 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
605 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
606 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
607 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
608 TREE_TYPE (op0), op1);
610 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
611 || (TREE_CODE (op1) == CONSTRUCTOR
612 && CONSTRUCTOR_NELTS (op1) == 0
613 && !TREE_CLOBBER_P (op1))
614 || (TREE_CODE (op1) == CALL_EXPR
615 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
616 && is_really_empty_class (TREE_TYPE (op0)))
618 /* Remove any copies of empty classes. We check that the RHS
619 has a simple form so that TARGET_EXPRs and non-empty
620 CONSTRUCTORs get reduced properly, and we leave the return
621 slot optimization alone because it isn't a copy (FIXME so it
622 shouldn't be represented as one).
624 Also drop volatile variables on the RHS to avoid infinite
625 recursion from gimplify_expr trying to load the value. */
626 if (!TREE_SIDE_EFFECTS (op1))
628 else if (TREE_THIS_VOLATILE (op1)
629 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
630 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
631 build_fold_addr_expr (op1), op0)
633 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
640 case EMPTY_CLASS_EXPR:
641 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
642 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
647 *expr_p = BASELINK_FUNCTIONS (*expr_p);
652 genericize_try_block (expr_p);
657 genericize_catch_block (expr_p);
662 genericize_eh_spec_block (expr_p);
680 ret = cp_gimplify_omp_for (expr_p, pre_p);
684 gimplify_expr_stmt (expr_p);
688 case UNARY_PLUS_EXPR:
690 tree arg = TREE_OPERAND (*expr_p, 0);
691 tree type = TREE_TYPE (*expr_p);
/* Unary plus is a no-op except for a possible type conversion.  */
692 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
698 case CILK_SPAWN_STMT:
700 (fn_contains_cilk_spawn_p (cfun)
701 && cilk_detect_spawn_and_unwrap (expr_p));
703 /* If errors are seen, then just process it as a CALL_EXPR. */
705 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
708 if (fn_contains_cilk_spawn_p (cfun)
709 && cilk_detect_spawn_and_unwrap (expr_p)
711 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
713 /* DR 1030 says that we need to evaluate the elements of an
714 initializer-list in forward order even when it's used as arguments to
715 a constructor. So if the target wants to evaluate them in reverse
716 order and there's more than one argument other than 'this', gimplify
719 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
720 && call_expr_nargs (*expr_p) > 2)
722 int nargs = call_expr_nargs (*expr_p);
723 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
/* Argument 0 is 'this'; pre-gimplify the rest left-to-right.  */
724 for (int i = 1; i < nargs; ++i)
726 enum gimplify_status t
727 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
/* Default: defer to the shared C/C++ gimplifier.  */
735 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
739 /* Restore saved state. */
740 if (STATEMENT_CODE_P (code))
741 current_stmt_tree ()->stmts_are_full_exprs_p
742 = saved_stmts_are_full_exprs_p;
/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */
748 is_invisiref_parm (const_tree t)
750 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
751 && DECL_BY_REFERENCE (t));
754 /* Return true if the uid in both int tree maps are equal. */
757 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
/* Entries compare equal iff they refer to the same DECL_UID.  */
759 return (a->uid == b->uid);
762 /* Hash a UID in a cxx_int_tree_map. */
/* NOTE(review): the body returning the hash value is missing from this
   excerpt.  */
765 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
770 /* A stable comparison routine for use with splay trees and DECLs. */
773 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
/* DECL_UIDs are stable across runs, unlike raw pointer values.  */
778 return DECL_UID (a) - DECL_UID (b);
781 /* OpenMP context during genericization. */
783 struct cp_genericize_omp_taskreg
/* Enclosing task/parallel region's context, or NULL at the outermost.  */
787 struct cp_genericize_omp_taskreg *outer;
/* Map from DECL (keyed by DECL_UID via splay_tree_compare_decl_uid) to
   its OMP_CLAUSE_DEFAULT_* sharing classification.  */
788 splay_tree variables;
791 /* Return true if genericization should try to determine if
792 DECL is firstprivate or shared within task regions. */
795 omp_var_to_track (tree decl)
797 tree type = TREE_TYPE (decl);
/* Invisible-reference parms are really the referenced object.  */
798 if (is_invisiref_parm (decl))
799 type = TREE_TYPE (type);
/* Strip array dimensions down to the element type.  */
800 while (TREE_CODE (type) == ARRAY_TYPE)
801 type = TREE_TYPE (type);
/* Only class types need tracking (for copy ctor/dtor instantiation).  */
802 if (type == error_mark_node || !CLASS_TYPE_P (type))
804 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
/* A predetermined sharing means no implicit classification is needed.  */
806 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
811 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
814 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
816 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
817 (splay_tree_key) decl);
820 int flags = OMP_CLAUSE_DEFAULT_SHARED;
/* Propagate the use to the enclosing region first.  */
822 omp_cxx_notice_variable (omp_ctx->outer, decl);
823 if (!omp_ctx->default_shared)
825 struct cp_genericize_omp_taskreg *octx;
/* Walk outward looking for an explicit classification of DECL.  */
827 for (octx = omp_ctx->outer; octx; octx = octx->outer)
829 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
830 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
832 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
/* A parallel region makes everything beyond it default-shared.  */
835 if (octx->is_parallel)
/* Parms and function-local automatics default to firstprivate in a
   task region.  */
839 && (TREE_CODE (decl) == PARM_DECL
840 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
841 && DECL_CONTEXT (decl) == current_function_decl)))
842 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
843 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
845 /* DECL is implicitly determined firstprivate in
846 the current task construct. Ensure copy ctor and
847 dtor are instantiated, because during gimplification
848 it will be already too late. */
849 tree type = TREE_TYPE (decl);
850 if (is_invisiref_parm (decl))
851 type = TREE_TYPE (type);
852 while (TREE_CODE (type) == ARRAY_TYPE)
853 type = TREE_TYPE (type);
854 get_copy_ctor (type, tf_none);
855 get_dtor (type, tf_none);
/* Record the classification so the lookup above succeeds next time.  */
858 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
862 /* Genericization context. */
864 struct cp_genericize_data
/* Set of trees already visited, to avoid genericizing twice.  */
866 hash_set<tree> *p_set;
/* Stack of enclosing BIND_EXPRs, innermost last.  */
867 vec<tree> bind_expr_stack;
/* Innermost OpenMP task/parallel region context, or NULL.  */
868 struct cp_genericize_omp_taskreg *omp_ctx;
871 /* Perform any pre-gimplification lowering of C++ front end trees to
/* walk_tree callback dispatching on TREE_CODE (*STMT_P); lowers each
   C++-specific construct to its GENERIC equivalent.  NOTE(review): this
   excerpt is missing many lines (braces, returns, some conditions) —
   compare against the original file before editing.  */
875 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
878 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
879 hash_set<tree> *p_set = wtd->p_set;
881 /* If in an OpenMP context, note var uses. */
882 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
884 || TREE_CODE (stmt) == PARM_DECL
885 || TREE_CODE (stmt) == RESULT_DECL)
886 && omp_var_to_track (stmt))
887 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
889 if (is_invisiref_parm (stmt)
890 /* Don't dereference parms in a thunk, pass the references through. */
891 && !(DECL_THUNK_P (current_function_decl)
892 && TREE_CODE (stmt) == PARM_DECL))
894 *stmt_p = convert_from_reference (stmt);
899 /* Map block scope extern declarations to visible declarations with the
900 same name and type in outer scopes if any. */
901 if (cp_function_chain->extern_decl_map
902 && VAR_OR_FUNCTION_DECL_P (stmt)
903 && DECL_EXTERNAL (stmt))
905 struct cxx_int_tree_map *h, in;
906 in.uid = DECL_UID (stmt);
907 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
916 /* Other than invisiref parms, don't walk the same tree twice. */
917 if (p_set->contains (stmt))
/* Taking the address of an invisiref parm yields the reference itself,
   just converted to the pointer type.  */
923 if (TREE_CODE (stmt) == ADDR_EXPR
924 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
926 /* If in an OpenMP context, note var uses. */
927 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
928 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
929 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
930 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
933 else if (TREE_CODE (stmt) == RETURN_EXPR
934 && TREE_OPERAND (stmt, 0)
935 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
936 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
938 else if (TREE_CODE (stmt) == OMP_CLAUSE)
939 switch (OMP_CLAUSE_CODE (stmt))
941 case OMP_CLAUSE_LASTPRIVATE:
942 /* Don't dereference an invisiref in OpenMP clauses. */
943 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
946 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
947 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
948 cp_genericize_r, data, NULL);
951 case OMP_CLAUSE_PRIVATE:
952 /* Don't dereference an invisiref in OpenMP clauses. */
953 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
955 else if (wtd->omp_ctx != NULL)
957 /* Private clause doesn't cause any references to the
958 var in outer contexts, avoid calling
959 omp_cxx_notice_variable for it. */
960 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
962 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
968 case OMP_CLAUSE_SHARED:
969 case OMP_CLAUSE_FIRSTPRIVATE:
970 case OMP_CLAUSE_COPYIN:
971 case OMP_CLAUSE_COPYPRIVATE:
972 /* Don't dereference an invisiref in OpenMP clauses. */
973 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
976 case OMP_CLAUSE_REDUCTION:
977 /* Don't dereference an invisiref in reduction clause's
978 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
979 still needs to be genericized. */
980 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
983 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
984 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
985 cp_genericize_r, data, NULL);
986 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
987 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
988 cp_genericize_r, data, NULL);
994 else if (IS_TYPE_OR_DECL_P (stmt))
997 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
998 to lower this construct before scanning it, so we need to lower these
999 before doing anything else. */
1000 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1001 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1002 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1005 CLEANUP_BODY (stmt),
1006 CLEANUP_EXPR (stmt));
1008 else if (TREE_CODE (stmt) == IF_STMT)
1010 genericize_if_stmt (stmt_p);
1011 /* *stmt_p has changed, tail recurse to handle it again. */
1012 return cp_genericize_r (stmt_p, walk_subtrees, data);
1015 /* COND_EXPR might have incompatible types in branches if one or both
1016 arms are bitfields. Fix it up now. */
1017 else if (TREE_CODE (stmt) == COND_EXPR)
1020 = (TREE_OPERAND (stmt, 1)
1021 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1024 = (TREE_OPERAND (stmt, 2)
1025 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1028 && !useless_type_conversion_p (TREE_TYPE (stmt),
1029 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1031 TREE_OPERAND (stmt, 1)
1032 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1033 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1037 && !useless_type_conversion_p (TREE_TYPE (stmt),
1038 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1040 TREE_OPERAND (stmt, 2)
1041 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1042 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1047 else if (TREE_CODE (stmt) == BIND_EXPR)
/* In an OpenMP context, classify each variable declared in the bind.  */
1049 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1052 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1054 && !DECL_EXTERNAL (decl)
1055 && omp_var_to_track (decl))
1058 = splay_tree_lookup (wtd->omp_ctx->variables,
1059 (splay_tree_key) decl);
1061 splay_tree_insert (wtd->omp_ctx->variables,
1062 (splay_tree_key) decl,
1064 ? OMP_CLAUSE_DEFAULT_SHARED
1065 : OMP_CLAUSE_DEFAULT_PRIVATE);
/* Keep the bind stack current while walking the body.  */
1068 wtd->bind_expr_stack.safe_push (stmt);
1069 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1070 cp_genericize_r, data, NULL);
1071 wtd->bind_expr_stack.pop ();
1074 else if (TREE_CODE (stmt) == USING_STMT)
1076 tree block = NULL_TREE;
1078 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1079 BLOCK, and append an IMPORTED_DECL to its
1080 BLOCK_VARS chained list. */
1081 if (wtd->bind_expr_stack.exists ())
1084 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1085 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1090 tree using_directive;
1091 gcc_assert (TREE_OPERAND (stmt, 0));
1093 using_directive = make_node (IMPORTED_DECL);
1094 TREE_TYPE (using_directive) = void_type_node;
1096 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1097 = TREE_OPERAND (stmt, 0);
1098 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1099 BLOCK_VARS (block) = using_directive;
1101 /* The USING_STMT won't appear in GENERIC. */
1102 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1106 else if (TREE_CODE (stmt) == DECL_EXPR
1107 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1109 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1110 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1113 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
/* Push a fresh OpenMP region context, seed it from the clauses, walk
   the body under it, then pop and free it.  */
1115 struct cp_genericize_omp_taskreg omp_ctx;
1120 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1121 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1122 omp_ctx.default_shared = omp_ctx.is_parallel;
1123 omp_ctx.outer = wtd->omp_ctx;
1124 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1125 wtd->omp_ctx = &omp_ctx;
1126 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1127 switch (OMP_CLAUSE_CODE (c))
1129 case OMP_CLAUSE_SHARED:
1130 case OMP_CLAUSE_PRIVATE:
1131 case OMP_CLAUSE_FIRSTPRIVATE:
1132 case OMP_CLAUSE_LASTPRIVATE:
1133 decl = OMP_CLAUSE_DECL (c);
1134 if (decl == error_mark_node || !omp_var_to_track (decl))
1136 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1139 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1140 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1141 ? OMP_CLAUSE_DEFAULT_SHARED
1142 : OMP_CLAUSE_DEFAULT_PRIVATE);
1143 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1145 omp_cxx_notice_variable (omp_ctx.outer, decl);
1147 case OMP_CLAUSE_DEFAULT:
1148 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1149 omp_ctx.default_shared = true;
1153 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1154 wtd->omp_ctx = omp_ctx.outer;
1155 splay_tree_delete (omp_ctx.variables);
1157 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1158 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
/* Loop/switch/jump statements are lowered by their helpers above.  */
1159 else if (TREE_CODE (stmt) == FOR_STMT)
1160 genericize_for_stmt (stmt_p, walk_subtrees, data);
1161 else if (TREE_CODE (stmt) == WHILE_STMT)
1162 genericize_while_stmt (stmt_p, walk_subtrees, data);
1163 else if (TREE_CODE (stmt) == DO_STMT)
1164 genericize_do_stmt (stmt_p, walk_subtrees, data);
1165 else if (TREE_CODE (stmt) == SWITCH_STMT)
1166 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1167 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1168 genericize_continue_stmt (stmt_p);
1169 else if (TREE_CODE (stmt) == BREAK_STMT)
1170 genericize_break_stmt (stmt_p);
1171 else if (TREE_CODE (stmt) == OMP_FOR
1172 || TREE_CODE (stmt) == OMP_SIMD
1173 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1174 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1175 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
/* Resolve deferred sizeof now that instantiation is complete.  */
1177 if (SIZEOF_EXPR_TYPE_P (stmt))
1179 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1180 SIZEOF_EXPR, false);
1181 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1182 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1183 SIZEOF_EXPR, false);
1185 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1186 SIZEOF_EXPR, false);
1187 if (*stmt_p == error_mark_node)
1188 *stmt_p = size_one_node;
/* UBSan instrumentation for reference binding and member calls.  */
1191 else if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1193 if (TREE_CODE (stmt) == NOP_EXPR
1194 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1195 ubsan_maybe_instrument_reference (stmt);
1196 else if (TREE_CODE (stmt) == CALL_EXPR)
1198 tree fn = CALL_EXPR_FN (stmt);
1200 && !error_operand_p (fn)
1201 && POINTER_TYPE_P (TREE_TYPE (fn))
1202 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1205 = TREE_CODE (fn) == ADDR_EXPR
1206 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1207 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1208 ubsan_maybe_instrument_member_call (stmt, is_ctor);
/* Remember the (possibly replaced) node so it is not walked again.  */
1213 p_set->add (*stmt_p);
1218 /* Lower C++ front end trees to GENERIC in T_P. */
1221 cp_genericize_tree (tree* t_p)
1223 struct cp_genericize_data wtd;
/* Fresh walk state: empty visited set and empty BIND_EXPR stack.  */
1225 wtd.p_set = new hash_set<tree>;
1226 wtd.bind_expr_stack.create (0);
1228 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1230 wtd.bind_expr_stack.release ();
1233 /* If a function that should end with a return in non-void
1234 function doesn't obviously end with return, add ubsan
1235 instrumentation code to verify it at runtime. */
1238 cp_ubsan_maybe_instrument_return (tree fndecl)
/* Nothing to check for void returns, constructors/destructors, or when
   the target suppresses missing-return warnings for this function.  */
1240 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1241 || DECL_CONSTRUCTOR_P (fndecl)
1242 || DECL_DESTRUCTOR_P (fndecl)
1243 || !targetm.warn_func_return (fndecl))
/* Walk to the last statement of the function body, looking through
   binds, try/finally wrappers, and statement lists.  */
1246 tree t = DECL_SAVED_TREE (fndecl);
1249 switch (TREE_CODE (t))
1252 t = BIND_EXPR_BODY (t);
1254 case TRY_FINALLY_EXPR:
1255 t = TREE_OPERAND (t, 0);
1257 case STATEMENT_LIST:
1259 tree_stmt_iterator i = tsi_last (t);
/* No obvious trailing return: append the ubsan missing-return check
   at the end of the outermost statement list.  */
1276 t = DECL_SAVED_TREE (fndecl);
1277 if (TREE_CODE (t) == BIND_EXPR
1278 && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
1280 tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
1281 t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1282 tsi_link_after (&i, t, TSI_NEW_STMT);
1287 cp_genericize (tree fndecl)
1291 /* Fix up the types of parms passed by invisible reference. */
1292 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1293 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1295 /* If a function's arguments are copied to create a thunk,
1296 then DECL_BY_REFERENCE will be set -- but the type of the
1297 argument will be a pointer type, so we will never get
1299 gcc_assert (!DECL_BY_REFERENCE (t));
1300 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1301 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1302 DECL_BY_REFERENCE (t) = 1;
1303 TREE_ADDRESSABLE (t) = 0;
1307 /* Do the same for the return value. */
1308 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1310 t = DECL_RESULT (fndecl);
1311 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1312 DECL_BY_REFERENCE (t) = 1;
1313 TREE_ADDRESSABLE (t) = 0;
1317 /* Adjust DECL_VALUE_EXPR of the original var. */
1318 tree outer = outer_curly_brace_block (current_function_decl);
1322 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1323 if (DECL_NAME (t) == DECL_NAME (var)
1324 && DECL_HAS_VALUE_EXPR_P (var)
1325 && DECL_VALUE_EXPR (var) == t)
1327 tree val = convert_from_reference (t);
1328 SET_DECL_VALUE_EXPR (var, val);
1334 /* If we're a clone, the body is already GIMPLE. */
1335 if (DECL_CLONED_FUNCTION_P (fndecl))
1338 /* Expand all the array notations here. */
1340 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1341 DECL_SAVED_TREE (fndecl) =
1342 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1344 /* We do want to see every occurrence of the parms, so we can't just use
1345 walk_tree's hash functionality. */
1346 cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
1348 if (flag_sanitize & SANITIZE_RETURN
1349 && current_function_decl != NULL_TREE
1350 && !lookup_attribute ("no_sanitize_undefined",
1351 DECL_ATTRIBUTES (current_function_decl)))
1352 cp_ubsan_maybe_instrument_return (fndecl);
1354 /* Do everything else. */
1355 c_genericize (fndecl);
1357 gcc_assert (bc_label[bc_break] == NULL);
1358 gcc_assert (bc_label[bc_continue] == NULL);
1361 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1362 NULL if there is in fact nothing to do. ARG2 may be null if FN
1363 actually only takes one argument. */
1366 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1368 tree defparm, parm, t;
1376 nargs = list_length (DECL_ARGUMENTS (fn));
1377 argarray = XALLOCAVEC (tree, nargs);
1379 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1381 defparm = TREE_CHAIN (defparm);
1383 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1385 tree inner_type = TREE_TYPE (arg1);
1386 tree start1, end1, p1;
1387 tree start2 = NULL, p2 = NULL;
1388 tree ret = NULL, lab;
1394 inner_type = TREE_TYPE (inner_type);
1395 start1 = build4 (ARRAY_REF, inner_type, start1,
1396 size_zero_node, NULL, NULL);
1398 start2 = build4 (ARRAY_REF, inner_type, start2,
1399 size_zero_node, NULL, NULL);
1401 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1402 start1 = build_fold_addr_expr_loc (input_location, start1);
1404 start2 = build_fold_addr_expr_loc (input_location, start2);
1406 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1407 end1 = fold_build_pointer_plus (start1, end1);
1409 p1 = create_tmp_var (TREE_TYPE (start1), NULL);
1410 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1411 append_to_statement_list (t, &ret);
1415 p2 = create_tmp_var (TREE_TYPE (start2), NULL);
1416 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1417 append_to_statement_list (t, &ret);
1420 lab = create_artificial_label (input_location);
1421 t = build1 (LABEL_EXPR, void_type_node, lab);
1422 append_to_statement_list (t, &ret);
1427 /* Handle default arguments. */
1428 for (parm = defparm; parm && parm != void_list_node;
1429 parm = TREE_CHAIN (parm), i++)
1430 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1431 TREE_PURPOSE (parm), fn, i,
1432 tf_warning_or_error);
1433 t = build_call_a (fn, i, argarray);
1434 t = fold_convert (void_type_node, t);
1435 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1436 append_to_statement_list (t, &ret);
1438 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1439 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1440 append_to_statement_list (t, &ret);
1444 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1445 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1446 append_to_statement_list (t, &ret);
1449 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1450 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1451 append_to_statement_list (t, &ret);
1457 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1459 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1460 /* Handle default arguments. */
1461 for (parm = defparm; parm && parm != void_list_node;
1462 parm = TREE_CHAIN (parm), i++)
1463 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1464 TREE_PURPOSE (parm),
1465 fn, i, tf_warning_or_error);
1466 t = build_call_a (fn, i, argarray);
1467 t = fold_convert (void_type_node, t);
1468 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1472 /* Return code to initialize DECL with its default constructor, or
1473 NULL if there's nothing to do. */
1476 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1478 tree info = CP_OMP_CLAUSE_INFO (clause);
1482 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1487 /* Return code to initialize DST with a copy constructor from SRC. */
1490 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1492 tree info = CP_OMP_CLAUSE_INFO (clause);
1496 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1498 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1503 /* Similarly, except use an assignment operator instead. */
1506 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1508 tree info = CP_OMP_CLAUSE_INFO (clause);
1512 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1514 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1519 /* Return code to destroy DECL. */
1522 cxx_omp_clause_dtor (tree clause, tree decl)
1524 tree info = CP_OMP_CLAUSE_INFO (clause);
1528 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1533 /* True if OpenMP should privatize what this DECL points to rather
1534 than the DECL itself. */
1537 cxx_omp_privatize_by_reference (const_tree decl)
1539 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1540 || is_invisiref_parm (decl));
1543 /* Return true if DECL is const qualified var having no mutable member. */
1545 cxx_omp_const_qual_no_mutable (tree decl)
1547 tree type = TREE_TYPE (decl);
1548 if (TREE_CODE (type) == REFERENCE_TYPE)
1550 if (!is_invisiref_parm (decl))
1552 type = TREE_TYPE (type);
1554 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1556 /* NVR doesn't preserve const qualification of the
1558 tree outer = outer_curly_brace_block (current_function_decl);
1562 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1563 if (DECL_NAME (decl) == DECL_NAME (var)
1564 && (TYPE_MAIN_VARIANT (type)
1565 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1567 if (TYPE_READONLY (TREE_TYPE (var)))
1568 type = TREE_TYPE (var);
1574 if (type == error_mark_node)
1577 /* Variables with const-qualified type having no mutable member
1578 are predetermined shared. */
1579 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1585 /* True if OpenMP sharing attribute of DECL is predetermined. */
1587 enum omp_clause_default_kind
1588 cxx_omp_predetermined_sharing (tree decl)
1590 /* Static data members are predetermined shared. */
1591 if (TREE_STATIC (decl))
1593 tree ctx = CP_DECL_CONTEXT (decl);
1594 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1595 return OMP_CLAUSE_DEFAULT_SHARED;
1598 /* Const qualified vars having no mutable member are predetermined
1600 if (cxx_omp_const_qual_no_mutable (decl))
1601 return OMP_CLAUSE_DEFAULT_SHARED;
1603 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1606 /* Finalize an implicitly determined clause. */
1609 cxx_omp_finish_clause (tree c, gimple_seq *)
1611 tree decl, inner_type;
1612 bool make_shared = false;
1614 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1617 decl = OMP_CLAUSE_DECL (c);
1618 decl = require_complete_type (decl);
1619 inner_type = TREE_TYPE (decl);
1620 if (decl == error_mark_node)
1622 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1624 if (is_invisiref_parm (decl))
1625 inner_type = TREE_TYPE (inner_type);
1628 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1634 /* We're interested in the base element, not arrays. */
1635 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1636 inner_type = TREE_TYPE (inner_type);
1638 /* Check for special function availability by building a call to one.
1639 Save the results, because later we won't be in the right context
1640 for making these queries. */
1642 && CLASS_TYPE_P (inner_type)
1643 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1647 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;