1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "c-family/c-common.h"
29 #include "tree-iterator.h"
32 #include "pointer-set.h"
34 #include "splay-tree.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static void cp_genericize_tree (tree*);

/* Local declarations.  */

/* Index into bc_label below: selects the pending "break" target or the
   pending "continue" target.  */
enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  One stack per kind of jump; the top of
   each stack is the innermost enclosing target.  */
static tree bc_label[2];
49 /* Begin a scope which can be exited by a break or continue statement. BC
52 Just creates a label with location LOCATION and pushes it into the current
56 begin_bc_block (enum bc_t bc, location_t location)
58 tree label = create_artificial_label (location);
59 DECL_CHAIN (label) = bc_label[bc];
64 /* Finish a scope which can be exited by a break or continue statement.
65 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
66 an expression for the contents of the scope.
68 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
69 BLOCK. Otherwise, just forget the label. */
72 finish_bc_block (tree *block, enum bc_t bc, tree label)
74 gcc_assert (label == bc_label[bc]);
76 if (TREE_USED (label))
77 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
80 bc_label[bc] = DECL_CHAIN (label);
81 DECL_CHAIN (label) = NULL_TREE;
84 /* Get the LABEL_EXPR to represent a break or continue statement
85 in the current block scope. BC indicates which. */
88 get_bc_label (enum bc_t bc)
90 tree label = bc_label[bc];
92 /* Mark the label used for finish_bc_block. */
93 TREE_USED (label) = 1;
97 /* Genericize a TRY_BLOCK. */
100 genericize_try_block (tree *stmt_p)
102 tree body = TRY_STMTS (*stmt_p);
103 tree cleanup = TRY_HANDLERS (*stmt_p);
105 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
108 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
111 genericize_catch_block (tree *stmt_p)
113 tree type = HANDLER_TYPE (*stmt_p);
114 tree body = HANDLER_BODY (*stmt_p);
116 /* FIXME should the caught type go in TREE_TYPE? */
117 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
120 /* A terser interface for building a representation of an exception
124 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
128 /* FIXME should the allowed types go in TREE_TYPE? */
129 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
130 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
132 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
133 append_to_statement_list (body, &TREE_OPERAND (t, 0));
138 /* Genericize an EH_SPEC_BLOCK by converting it to a
139 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
142 genericize_eh_spec_block (tree *stmt_p)
144 tree body = EH_SPEC_STMTS (*stmt_p);
145 tree allowed = EH_SPEC_RAISES (*stmt_p);
146 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
148 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
149 TREE_NO_WARNING (*stmt_p) = true;
150 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
153 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
156 genericize_if_stmt (tree *stmt_p)
158 tree stmt, cond, then_, else_;
159 location_t locus = EXPR_LOCATION (*stmt_p);
162 cond = IF_COND (stmt);
163 then_ = THEN_CLAUSE (stmt);
164 else_ = ELSE_CLAUSE (stmt);
167 then_ = build_empty_stmt (locus);
169 else_ = build_empty_stmt (locus);
171 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
173 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
176 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
177 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
178 SET_EXPR_LOCATION (stmt, locus);
182 /* Build a generic representation of one of the C loop forms. COND is the
183 loop condition or NULL_TREE. BODY is the (possibly compound) statement
184 controlled by the loop. INCR is the increment expression of a for-loop,
185 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
186 evaluated before the loop body as in while and for loops, or after the
187 loop body as in do-while loops. */
190 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
191 tree incr, bool cond_is_first, int *walk_subtrees,
195 tree entry = NULL, exit = NULL, t;
196 tree stmt_list = NULL;
198 blab = begin_bc_block (bc_break, start_locus);
199 clab = begin_bc_block (bc_continue, start_locus);
201 if (incr && EXPR_P (incr))
202 SET_EXPR_LOCATION (incr, start_locus);
204 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
205 cp_walk_tree (&body, cp_genericize_r, data, NULL);
206 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
209 /* If condition is zero don't generate a loop construct. */
210 if (cond && integer_zerop (cond))
214 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
215 get_bc_label (bc_break));
216 append_to_statement_list (t, &stmt_list);
221 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
222 tree top = build1 (LABEL_EXPR, void_type_node,
223 create_artificial_label (start_locus));
225 /* If we have an exit condition, then we build an IF with gotos either
226 out of the loop, or to the top of it. If there's no exit condition,
227 then we just build a jump back to the top. */
228 exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));
230 if (cond && !integer_nonzerop (cond))
232 /* Canonicalize the loop condition to the end. This means
233 generating a branch to the loop condition. Reuse the
234 continue label, if possible. */
239 entry = build1 (LABEL_EXPR, void_type_node,
240 create_artificial_label (start_locus));
241 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
242 LABEL_EXPR_LABEL (entry));
245 t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
246 get_bc_label (bc_continue));
247 append_to_statement_list (t, &stmt_list);
250 t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
251 exit = fold_build3_loc (start_locus,
252 COND_EXPR, void_type_node, cond, exit, t);
255 append_to_statement_list (top, &stmt_list);
258 append_to_statement_list (body, &stmt_list);
259 finish_bc_block (&stmt_list, bc_continue, clab);
260 append_to_statement_list (incr, &stmt_list);
261 append_to_statement_list (entry, &stmt_list);
262 append_to_statement_list (exit, &stmt_list);
263 finish_bc_block (&stmt_list, bc_break, blab);
265 if (stmt_list == NULL_TREE)
266 stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);
271 /* Genericize a FOR_STMT node *STMT_P. */
274 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
279 tree init = FOR_INIT_STMT (stmt);
283 cp_walk_tree (&init, cp_genericize_r, data, NULL);
284 append_to_statement_list (init, &expr);
287 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
288 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
289 append_to_statement_list (loop, &expr);
293 /* Genericize a WHILE_STMT node *STMT_P. */
296 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
299 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
300 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
303 /* Genericize a DO_STMT node *STMT_P. */
306 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
309 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
310 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
316 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
319 tree break_block, body, cond, type;
320 location_t stmt_locus = EXPR_LOCATION (stmt);
322 break_block = begin_bc_block (bc_break, stmt_locus);
324 body = SWITCH_STMT_BODY (stmt);
326 body = build_empty_stmt (stmt_locus);
327 cond = SWITCH_STMT_COND (stmt);
328 type = SWITCH_STMT_TYPE (stmt);
330 cp_walk_tree (&body, cp_genericize_r, data, NULL);
331 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
332 cp_walk_tree (&type, cp_genericize_r, data, NULL);
335 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
336 finish_bc_block (stmt_p, bc_break, break_block);
339 /* Genericize a CONTINUE_STMT node *STMT_P. */
342 genericize_continue_stmt (tree *stmt_p)
344 tree stmt_list = NULL;
345 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
346 tree label = get_bc_label (bc_continue);
347 location_t location = EXPR_LOCATION (*stmt_p);
348 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
349 append_to_statement_list (pred, &stmt_list);
350 append_to_statement_list (jump, &stmt_list);
354 /* Genericize a BREAK_STMT node *STMT_P. */
357 genericize_break_stmt (tree *stmt_p)
359 tree label = get_bc_label (bc_break);
360 location_t location = EXPR_LOCATION (*stmt_p);
361 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
364 /* Genericize a OMP_FOR node *STMT_P. */
367 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
370 location_t locus = EXPR_LOCATION (stmt);
371 tree clab = begin_bc_block (bc_continue, locus);
373 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
374 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
375 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
377 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
378 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
381 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
384 /* Hook into the middle of gimplifying an OMP_FOR node. */
386 static enum gimplify_status
387 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
389 tree for_stmt = *expr_p;
390 gimple_seq seq = NULL;
392 /* Protect ourselves from recursion. */
393 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
395 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
397 gimplify_and_add (for_stmt, &seq);
398 gimple_seq_add_seq (pre_p, seq);
400 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
405 /* Gimplify an EXPR_STMT node. */
408 gimplify_expr_stmt (tree *stmt_p)
410 tree stmt = EXPR_STMT_EXPR (*stmt_p);
412 if (stmt == error_mark_node)
415 /* Gimplification of a statement expression will nullify the
416 statement if all its side effects are moved to *PRE_P and *POST_P.
418 In this case we will not want to emit the gimplified statement.
419 However, we may still want to emit a warning, so we do that before
421 if (stmt && warn_unused_value)
423 if (!TREE_SIDE_EFFECTS (stmt))
425 if (!IS_EMPTY_STMT (stmt)
426 && !VOID_TYPE_P (TREE_TYPE (stmt))
427 && !TREE_NO_WARNING (stmt))
428 warning (OPT_Wunused_value, "statement with no effect");
431 warn_if_unused_value (stmt, input_location);
434 if (stmt == NULL_TREE)
435 stmt = alloc_stmt_list ();
440 /* Gimplify initialization from an AGGR_INIT_EXPR. */
443 cp_gimplify_init_expr (tree *expr_p)
445 tree from = TREE_OPERAND (*expr_p, 1);
446 tree to = TREE_OPERAND (*expr_p, 0);
449 /* What about code that pulls out the temp and uses it elsewhere? I
450 think that such code never uses the TARGET_EXPR as an initializer. If
451 I'm wrong, we'll abort because the temp won't have any RTL. In that
452 case, I guess we'll need to replace references somehow. */
453 if (TREE_CODE (from) == TARGET_EXPR)
454 from = TARGET_EXPR_INITIAL (from);
456 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
457 inside the TARGET_EXPR. */
460 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
462 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
463 replace the slot operand with our target.
465 Should we add a target parm to gimplify_expr instead? No, as in this
466 case we want to replace the INIT_EXPR. */
467 if (TREE_CODE (sub) == AGGR_INIT_EXPR
468 || TREE_CODE (sub) == VEC_INIT_EXPR)
470 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
471 AGGR_INIT_EXPR_SLOT (sub) = to;
473 VEC_INIT_EXPR_SLOT (sub) = to;
476 /* The initialization is now a side-effect, so the container can
479 TREE_TYPE (from) = void_type_node;
485 t = TREE_OPERAND (t, 1);
490 /* Gimplify a MUST_NOT_THROW_EXPR. */
492 static enum gimplify_status
493 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
496 tree temp = voidify_wrapper_expr (stmt, NULL);
497 tree body = TREE_OPERAND (stmt, 0);
498 gimple_seq try_ = NULL;
499 gimple_seq catch_ = NULL;
502 gimplify_and_add (body, &try_);
503 mnt = gimple_build_eh_must_not_throw (terminate_node);
504 gimple_seq_add_stmt_without_update (&catch_, mnt);
505 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
507 gimple_seq_add_stmt_without_update (pre_p, mnt);
/* Do C++-specific gimplification.  Args are as for gimplify_expr.

   NOTE(review): this listing was extracted with many lines elided --
   the return type, braces, most `case' labels, `break's and several
   assignments to `ret' are missing between the surviving lines below.
   Only comments were added here; reconcile against the full source
   before editing.  */
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* Statement-tree nodes carry their own full-expression flag; save the
     current setting and install the node's own for the duration.  */
  if (STATEMENT_CODE_P (code))
    saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
    current_stmt_tree ()->stmts_are_full_exprs_p
      = STMT_IS_FULL_EXPR_P (*expr_p);

  /* NOTE(review): switch (code) and its early case labels are elided
     here.  The surviving case bodies follow.  */
  *expr_p = cplus_expand_constant (*expr_p);

  simplify_aggr_init_expr (expr_p);

  /* VEC_INIT_EXPR: lower array initialization via build_vec_init and
     genericize the result.  */
  location_t loc = input_location;
  tree init = VEC_INIT_EXPR_INIT (*expr_p);
  int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  gcc_assert (EXPR_HAS_LOCATION (*expr_p));
  input_location = EXPR_LOCATION (*expr_p);
  *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
			    init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
			    tf_warning_or_error);
  cp_genericize_tree (expr_p);
  input_location = loc;

  /* THROW_EXPR: just pass the operand through.
     FIXME communicate throw type to back end, probably by moving
     THROW_EXPR into ../tree.def.  */
  *expr_p = TREE_OPERAND (*expr_p, 0);

  case MUST_NOT_THROW_EXPR:
    ret = gimplify_must_not_throw_expr (expr_p, pre_p);

  /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
     LHS of an assignment might also be involved in the RHS, as in bug
     25979 (elided).  INIT_EXPR handling follows.  */
  cp_gimplify_init_expr (expr_p);
  if (TREE_CODE (*expr_p) != INIT_EXPR)
  /* Otherwise fall through.  */

  /* If the back end isn't clever enough to know that the lhs and rhs
     types are the same, add an explicit conversion.  */
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  if (!error_operand_p (op0)
      && !error_operand_p (op1)
      && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
	  || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
      && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
    TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					TREE_TYPE (op0), op1);

  else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
	    || (TREE_CODE (op1) == CONSTRUCTOR
		&& CONSTRUCTOR_NELTS (op1) == 0
		&& !TREE_CLOBBER_P (op1))
	    || (TREE_CODE (op1) == CALL_EXPR
		&& !CALL_EXPR_RETURN_SLOT_OPT (op1)))
	   && is_really_empty_class (TREE_TYPE (op0)))
    /* Remove any copies of empty classes.  We check that the RHS
       has a simple form so that TARGET_EXPRs and non-empty
       CONSTRUCTORs get reduced properly, and we leave the return
       slot optimization alone because it isn't a copy (FIXME so it
       shouldn't be represented as one).

       Also drop volatile variables on the RHS to avoid infinite
       recursion from gimplify_expr trying to load the value.  */
    if (!TREE_SIDE_EFFECTS (op1)
	|| (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
    else if (TREE_CODE (op1) == MEM_REF
	     && TREE_THIS_VOLATILE (op1))
      /* Similarly for volatile MEM_REFs on the RHS.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
	*expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			  TREE_OPERAND (op1, 0), op0);
      *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),

  case EMPTY_CLASS_EXPR:
    /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
    *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);

  /* BASELINK: strip down to the underlying functions.  */
  *expr_p = BASELINK_FUNCTIONS (*expr_p);

  /* The following single-call cases dispatch to the genericize_*
     helpers defined above (case labels elided).  */
  genericize_try_block (expr_p);

  genericize_catch_block (expr_p);

  genericize_eh_spec_block (expr_p);

  ret = cp_gimplify_omp_for (expr_p, pre_p);

  gimplify_expr_stmt (expr_p);

  case UNARY_PLUS_EXPR:
    tree arg = TREE_OPERAND (*expr_p, 0);
    tree type = TREE_TYPE (*expr_p);
    *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)

  /* Default: defer to the C-family gimplifier.  */
  ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;
704 is_invisiref_parm (const_tree t)
706 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
707 && DECL_BY_REFERENCE (t));
710 /* Return true if the uid in both int tree maps are equal. */
713 cxx_int_tree_map_eq (const void *va, const void *vb)
715 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
716 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
717 return (a->uid == b->uid);
720 /* Hash a UID in a cxx_int_tree_map. */
723 cxx_int_tree_map_hash (const void *item)
725 return ((const struct cxx_int_tree_map *)item)->uid;
728 /* A stable comparison routine for use with splay trees and DECLs. */
731 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
736 return DECL_UID (a) - DECL_UID (b);
739 /* OpenMP context during genericization. */
741 struct cp_genericize_omp_taskreg
745 struct cp_genericize_omp_taskreg *outer;
746 splay_tree variables;
749 /* Return true if genericization should try to determine if
750 DECL is firstprivate or shared within task regions. */
753 omp_var_to_track (tree decl)
755 tree type = TREE_TYPE (decl);
756 if (is_invisiref_parm (decl))
757 type = TREE_TYPE (type);
758 while (TREE_CODE (type) == ARRAY_TYPE)
759 type = TREE_TYPE (type);
760 if (type == error_mark_node || !CLASS_TYPE_P (type))
762 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
764 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
769 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
772 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
774 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
775 (splay_tree_key) decl);
778 int flags = OMP_CLAUSE_DEFAULT_SHARED;
780 omp_cxx_notice_variable (omp_ctx->outer, decl);
781 if (!omp_ctx->default_shared)
783 struct cp_genericize_omp_taskreg *octx;
785 for (octx = omp_ctx->outer; octx; octx = octx->outer)
787 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
788 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
790 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
793 if (octx->is_parallel)
797 && (TREE_CODE (decl) == PARM_DECL
798 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
799 && DECL_CONTEXT (decl) == current_function_decl)))
800 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
801 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
803 /* DECL is implicitly determined firstprivate in
804 the current task construct. Ensure copy ctor and
805 dtor are instantiated, because during gimplification
806 it will be already too late. */
807 tree type = TREE_TYPE (decl);
808 if (is_invisiref_parm (decl))
809 type = TREE_TYPE (type);
810 while (TREE_CODE (type) == ARRAY_TYPE)
811 type = TREE_TYPE (type);
812 get_copy_ctor (type, tf_none);
813 get_dtor (type, tf_none);
816 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
820 /* Genericization context. */
822 struct cp_genericize_data
824 struct pointer_set_t *p_set;
825 vec<tree> bind_expr_stack;
826 struct cp_genericize_omp_taskreg *omp_ctx;
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  walk_tree callback; DATA is a struct cp_genericize_data.

   NOTE(review): this listing was extracted with many lines elided --
   `static tree', the `tree stmt = *stmt_p;' prologue, most braces,
   `break's, `return NULL;'s and several `*walk_subtrees = 0;' lines
   are missing between the surviving lines below.  Only comments were
   added; reconcile against the full source before editing.  */
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (TREE_CODE (stmt) == VAR_DECL
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    *stmt_p = convert_from_reference (stmt);

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    struct cxx_int_tree_map *h, in;
    in.uid = DECL_UID (stmt);
    h = (struct cxx_int_tree_map *)
	htab_find_with_hash (cp_function_chain->extern_decl_map,

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* If in an OpenMP context, note var uses.  */
    if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	&& omp_var_to_track (TREE_OPERAND (stmt, 0)))
      omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
    /* &invisiref-parm is just the reference value itself.  */
    *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	    cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			  cp_genericize_r, data, NULL);
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	else if (wtd->omp_ctx != NULL)
	  /* Private clause doesn't cause any references to the
	     var in outer contexts, avoid calling
	     omp_cxx_notice_variable for it.  */
	  struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	  cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
  else if (IS_TYPE_OR_DECL_P (stmt))

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    genericize_if_stmt (stmt_p);
    /* *stmt_p has changed, tail recurse to handle it again.  */
    return cp_genericize_r (stmt_p, walk_subtrees, data);

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
      = (TREE_OPERAND (stmt, 1)
	 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
      = (TREE_OPERAND (stmt, 2)
	 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	&& !useless_type_conversion_p (TREE_TYPE (stmt),
				       TREE_TYPE (TREE_OPERAND (stmt, 1))))
      TREE_OPERAND (stmt, 1)
	= fold_convert (type_left, TREE_OPERAND (stmt, 1));
      gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
	&& !useless_type_conversion_p (TREE_TYPE (stmt),
				       TREE_TYPE (TREE_OPERAND (stmt, 2))))
      TREE_OPERAND (stmt, 2)
	= fold_convert (type_right, TREE_OPERAND (stmt, 2));
      gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),

  else if (TREE_CODE (stmt) == BIND_EXPR)
    /* Record every tracked class-type local in the enclosing OpenMP
       region's variable map.  */
    if (__builtin_expect (wtd->omp_ctx != NULL, 0))
      for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	if (TREE_CODE (decl) == VAR_DECL
	    && !DECL_EXTERNAL (decl)
	    && omp_var_to_track (decl))
	    = splay_tree_lookup (wtd->omp_ctx->variables,
				 (splay_tree_key) decl);
	    splay_tree_insert (wtd->omp_ctx->variables,
			       (splay_tree_key) decl,
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
    wtd->bind_expr_stack.safe_push (stmt);
    cp_walk_tree (&BIND_EXPR_BODY (stmt),
		  cp_genericize_r, data, NULL);
    wtd->bind_expr_stack.pop ();

  else if (TREE_CODE (stmt) == USING_STMT)
    tree block = NULL_TREE;

    /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
       BLOCK, and append an IMPORTED_DECL to its
       BLOCK_VARS chained list.  */
    if (wtd->bind_expr_stack.exists ())
      for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
      tree using_directive;
      gcc_assert (TREE_OPERAND (stmt, 0));
      using_directive = make_node (IMPORTED_DECL);
      TREE_TYPE (using_directive) = void_type_node;
      IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	= TREE_OPERAND (stmt, 0);
      DECL_CHAIN (using_directive) = BLOCK_VARS (block);
      BLOCK_VARS (block) = using_directive;
    /* The USING_STMT won't appear in GENERIC.  */
    *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
    *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);

  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    struct cp_genericize_omp_taskreg omp_ctx;
    /* Walk clauses in the OUTER context, then push the new region.  */
    cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
    omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
    omp_ctx.default_shared = omp_ctx.is_parallel;
    omp_ctx.outer = wtd->omp_ctx;
    omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
    wtd->omp_ctx = &omp_ctx;
    /* Pre-seed the map from explicit data-sharing clauses.  */
    for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      switch (OMP_CLAUSE_CODE (c))
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || !omp_var_to_track (decl))
	  n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	  splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			     OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			     ? OMP_CLAUSE_DEFAULT_SHARED
			     : OMP_CLAUSE_DEFAULT_PRIVATE);
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
	    omp_cxx_notice_variable (omp_ctx.outer, decl);
	case OMP_CLAUSE_DEFAULT:
	  if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	    omp_ctx.default_shared = true;
    cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
    wtd->omp_ctx = omp_ctx.outer;
    splay_tree_delete (omp_ctx.variables);
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    /* Resolve a deferred sizeof/alignof now that the operand's type is
       complete.  */
    if (SIZEOF_EXPR_TYPE_P (stmt))
      = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
				    SIZEOF_EXPR, false);
    else if (TYPE_P (TREE_OPERAND (stmt, 0)))
      *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
					    SIZEOF_EXPR, false);
      *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
					    SIZEOF_EXPR, false);
    if (*stmt_p == error_mark_node)
      *stmt_p = size_one_node;

  pointer_set_insert (p_set, *stmt_p);
1143 /* Lower C++ front end trees to GENERIC in T_P. */
1146 cp_genericize_tree (tree* t_p)
1148 struct cp_genericize_data wtd;
1150 wtd.p_set = pointer_set_create ();
1151 wtd.bind_expr_stack.create (0);
1153 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1154 pointer_set_destroy (wtd.p_set);
1155 wtd.bind_expr_stack.release ();
1159 cp_genericize (tree fndecl)
1163 /* Fix up the types of parms passed by invisible reference. */
1164 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1165 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1167 /* If a function's arguments are copied to create a thunk,
1168 then DECL_BY_REFERENCE will be set -- but the type of the
1169 argument will be a pointer type, so we will never get
1171 gcc_assert (!DECL_BY_REFERENCE (t));
1172 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1173 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1174 DECL_BY_REFERENCE (t) = 1;
1175 TREE_ADDRESSABLE (t) = 0;
1179 /* Do the same for the return value. */
1180 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1182 t = DECL_RESULT (fndecl);
1183 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1184 DECL_BY_REFERENCE (t) = 1;
1185 TREE_ADDRESSABLE (t) = 0;
1189 /* Adjust DECL_VALUE_EXPR of the original var. */
1190 tree outer = outer_curly_brace_block (current_function_decl);
1194 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1195 if (DECL_NAME (t) == DECL_NAME (var)
1196 && DECL_HAS_VALUE_EXPR_P (var)
1197 && DECL_VALUE_EXPR (var) == t)
1199 tree val = convert_from_reference (t);
1200 SET_DECL_VALUE_EXPR (var, val);
1206 /* If we're a clone, the body is already GIMPLE. */
1207 if (DECL_CLONED_FUNCTION_P (fndecl))
1210 /* We do want to see every occurrence of the parms, so we can't just use
1211 walk_tree's hash functionality. */
1212 cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
1214 /* Do everything else. */
1215 c_genericize (fndecl);
1217 gcc_assert (bc_label[bc_break] == NULL);
1218 gcc_assert (bc_label[bc_continue] == NULL);
1221 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1222 NULL if there is in fact nothing to do. ARG2 may be null if FN
1223 actually only takes one argument. */
1226 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1228 tree defparm, parm, t;
1236 nargs = list_length (DECL_ARGUMENTS (fn));
1237 argarray = XALLOCAVEC (tree, nargs);
1239 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1241 defparm = TREE_CHAIN (defparm);
1243 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1245 tree inner_type = TREE_TYPE (arg1);
1246 tree start1, end1, p1;
1247 tree start2 = NULL, p2 = NULL;
1248 tree ret = NULL, lab;
1254 inner_type = TREE_TYPE (inner_type);
1255 start1 = build4 (ARRAY_REF, inner_type, start1,
1256 size_zero_node, NULL, NULL);
1258 start2 = build4 (ARRAY_REF, inner_type, start2,
1259 size_zero_node, NULL, NULL);
1261 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1262 start1 = build_fold_addr_expr_loc (input_location, start1);
1264 start2 = build_fold_addr_expr_loc (input_location, start2);
1266 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1267 end1 = fold_build_pointer_plus (start1, end1);
1269 p1 = create_tmp_var (TREE_TYPE (start1), NULL);
1270 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1271 append_to_statement_list (t, &ret);
1275 p2 = create_tmp_var (TREE_TYPE (start2), NULL);
1276 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1277 append_to_statement_list (t, &ret);
1280 lab = create_artificial_label (input_location);
1281 t = build1 (LABEL_EXPR, void_type_node, lab);
1282 append_to_statement_list (t, &ret);
1287 /* Handle default arguments. */
1288 for (parm = defparm; parm && parm != void_list_node;
1289 parm = TREE_CHAIN (parm), i++)
1290 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1291 TREE_PURPOSE (parm), fn, i,
1292 tf_warning_or_error);
1293 t = build_call_a (fn, i, argarray);
1294 t = fold_convert (void_type_node, t);
1295 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1296 append_to_statement_list (t, &ret);
1298 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1299 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1300 append_to_statement_list (t, &ret);
1304 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1305 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1306 append_to_statement_list (t, &ret);
1309 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1310 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1311 append_to_statement_list (t, &ret);
1317 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1319 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1320 /* Handle default arguments. */
1321 for (parm = defparm; parm && parm != void_list_node;
1322 parm = TREE_CHAIN (parm), i++)
1323 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1324 TREE_PURPOSE (parm),
1325 fn, i, tf_warning_or_error);
1326 t = build_call_a (fn, i, argarray);
1327 t = fold_convert (void_type_node, t);
1328 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1332 /* Return code to initialize DECL with its default constructor, or
1333 NULL if there's nothing to do. */
1336 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1338 tree info = CP_OMP_CLAUSE_INFO (clause);
1342 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1347 /* Return code to initialize DST with a copy constructor from SRC. */
1350 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1352 tree info = CP_OMP_CLAUSE_INFO (clause);
1356 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1358 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1363 /* Similarly, except use an assignment operator instead. */
1366 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1368 tree info = CP_OMP_CLAUSE_INFO (clause);
1372 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1374 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1379 /* Return code to destroy DECL. */
1382 cxx_omp_clause_dtor (tree clause, tree decl)
1384 tree info = CP_OMP_CLAUSE_INFO (clause);
1388 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1393 /* True if OpenMP should privatize what this DECL points to rather
1394 than the DECL itself. */
1397 cxx_omp_privatize_by_reference (const_tree decl)
1399 return is_invisiref_parm (decl);
1402 /* Return true if DECL is const qualified var having no mutable member. */
1404 cxx_omp_const_qual_no_mutable (tree decl)
1406 tree type = TREE_TYPE (decl);
1407 if (TREE_CODE (type) == REFERENCE_TYPE)
1409 if (!is_invisiref_parm (decl))
1411 type = TREE_TYPE (type);
1413 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1415 /* NVR doesn't preserve const qualification of the
1417 tree outer = outer_curly_brace_block (current_function_decl);
1421 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1422 if (DECL_NAME (decl) == DECL_NAME (var)
1423 && (TYPE_MAIN_VARIANT (type)
1424 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1426 if (TYPE_READONLY (TREE_TYPE (var)))
1427 type = TREE_TYPE (var);
1433 if (type == error_mark_node)
1436 /* Variables with const-qualified type having no mutable member
1437 are predetermined shared. */
1438 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1444 /* True if OpenMP sharing attribute of DECL is predetermined. */
1446 enum omp_clause_default_kind
1447 cxx_omp_predetermined_sharing (tree decl)
1449 /* Static data members are predetermined shared. */
1450 if (TREE_STATIC (decl))
1452 tree ctx = CP_DECL_CONTEXT (decl);
1453 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1454 return OMP_CLAUSE_DEFAULT_SHARED;
1457 /* Const qualified vars having no mutable member are predetermined
1459 if (cxx_omp_const_qual_no_mutable (decl))
1460 return OMP_CLAUSE_DEFAULT_SHARED;
1462 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1465 /* Finalize an implicitly determined clause. */
1468 cxx_omp_finish_clause (tree c)
1470 tree decl, inner_type;
1471 bool make_shared = false;
1473 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1476 decl = OMP_CLAUSE_DECL (c);
1477 decl = require_complete_type (decl);
1478 inner_type = TREE_TYPE (decl);
1479 if (decl == error_mark_node)
1481 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1483 if (is_invisiref_parm (decl))
1484 inner_type = TREE_TYPE (inner_type);
1487 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1493 /* We're interested in the base element, not arrays. */
1494 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1495 inner_type = TREE_TYPE (inner_type);
1497 /* Check for special function availability by building a call to one.
1498 Save the results, because later we won't be in the right context
1499 for making these queries. */
1501 && CLASS_TYPE_P (inner_type)
1502 && cxx_omp_create_clause_info (c, inner_type, false, true, false))
1506 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;