1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
38 #include "fold-const.h"
43 #include "gimple-fold.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
55 #include "gimple-low.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59 #include "gimple-walk.h"
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 /* Hash set of poisoned variables in a bind expr. */
66 static hash_set<tree> *asan_poisoned_variables = NULL;
68 enum gimplify_omp_var_data
74 GOVD_FIRSTPRIVATE = 16,
75 GOVD_LASTPRIVATE = 32,
79 GOVD_DEBUG_PRIVATE = 512,
80 GOVD_PRIVATE_OUTER_REF = 1024,
84 /* Flag for GOVD_MAP: don't copy back. */
85 GOVD_MAP_TO_ONLY = 8192,
87 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
88 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
90 GOVD_MAP_0LEN_ARRAY = 32768,
92 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
93 GOVD_MAP_ALWAYS_TO = 65536,
95 /* Flag for shared vars that are or might be stored to in the region. */
96 GOVD_WRITTEN = 131072,
98 /* Flag for GOVD_MAP, if it is a forced mapping. */
99 GOVD_MAP_FORCE = 262144,
101 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
102 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
109 ORT_WORKSHARE = 0x00,
113 ORT_COMBINED_PARALLEL = 0x03,
116 ORT_UNTIED_TASK = 0x05,
119 ORT_COMBINED_TEAMS = 0x09,
122 ORT_TARGET_DATA = 0x10,
124 /* Data region with offloading. */
126 ORT_COMBINED_TARGET = 0x21,
128 /* OpenACC variants. */
129 ORT_ACC = 0x40, /* A generic OpenACC region. */
130 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
131 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
132 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
133 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
135 /* Dummy OpenMP region, used to disable expansion of
136 DECL_VALUE_EXPRs in taskloop pre body. */
140 /* Gimplify hashtable helper. */
142 struct gimplify_hasher : free_ptr_hash <elt_t>
144 static inline hashval_t hash (const elt_t *);
145 static inline bool equal (const elt_t *, const elt_t *);
150 struct gimplify_ctx *prev_context;
152 vec<gbind *> bind_expr_stack;
154 gimple_seq conditional_cleanups;
158 vec<tree> case_labels;
159 hash_set<tree> *live_switch_vars;
160 /* The formal temporary table. Should this be persistent? */
161 hash_table<gimplify_hasher> *temp_htab;
164 unsigned into_ssa : 1;
165 unsigned allow_rhs_cond_expr : 1;
166 unsigned in_cleanup_point_expr : 1;
167 unsigned keep_stack : 1;
168 unsigned save_stack : 1;
169 unsigned in_switch_expr : 1;
172 struct gimplify_omp_ctx
174 struct gimplify_omp_ctx *outer_context;
175 splay_tree variables;
176 hash_set<tree> *privatized_types;
177 /* Iteration variables in an OMP_FOR. */
178 vec<tree> loop_iter_var;
180 enum omp_clause_default_kind default_kind;
181 enum omp_region_type region_type;
184 bool target_map_scalars_firstprivate;
185 bool target_map_pointers_as_0len_arrays;
186 bool target_firstprivatize_array_bases;
189 static struct gimplify_ctx *gimplify_ctxp;
190 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
192 /* Forward declaration. */
193 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
194 static hash_map<tree, tree> *oacc_declare_returns;
196 /* Shorter alias name for the above function for use in gimplify.c
200 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
202 gimple_seq_add_stmt_without_update (seq_p, gs);
205 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
206 NULL, a new sequence is allocated. This function is
207 similar to gimple_seq_add_seq, but does not scan the operands.
208 During gimplification, we need to manipulate statement sequences
209 before the def/use vectors have been constructed. */
212 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
214 gimple_stmt_iterator si;
219 si = gsi_last (*dst_p);
220 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
224 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
225 and popping gimplify contexts. */
227 static struct gimplify_ctx *ctx_pool = NULL;
229 /* Return a gimplify context struct from the pool. */
231 static inline struct gimplify_ctx *
234 struct gimplify_ctx * c = ctx_pool;
237 ctx_pool = c->prev_context;
239 c = XNEW (struct gimplify_ctx);
241 memset (c, '\0', sizeof (*c));
245 /* Put gimplify context C back into the pool. */
248 ctx_free (struct gimplify_ctx *c)
250 c->prev_context = ctx_pool;
254 /* Free allocated ctx stack memory. */
257 free_gimplify_stack (void)
259 struct gimplify_ctx *c;
261 while ((c = ctx_pool))
263 ctx_pool = c->prev_context;
269 /* Set up a context for the gimplifier. */
272 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
274 struct gimplify_ctx *c = ctx_alloc ();
276 c->prev_context = gimplify_ctxp;
278 gimplify_ctxp->into_ssa = in_ssa;
279 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
282 /* Tear down a context for the gimplifier. If BODY is non-null, then
283 put the temporaries into the outer BIND_EXPR. Otherwise, put them
286 BODY is not a sequence, but the first tuple in a sequence. */
289 pop_gimplify_context (gimple *body)
291 struct gimplify_ctx *c = gimplify_ctxp;
294 && (!c->bind_expr_stack.exists ()
295 || c->bind_expr_stack.is_empty ()));
296 c->bind_expr_stack.release ();
297 gimplify_ctxp = c->prev_context;
300 declare_vars (c->temps, body, false);
302 record_vars (c->temps);
309 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
312 gimple_push_bind_expr (gbind *bind_stmt)
314 gimplify_ctxp->bind_expr_stack.reserve (8);
315 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
318 /* Pop the first element off the stack of bindings. */
321 gimple_pop_bind_expr (void)
323 gimplify_ctxp->bind_expr_stack.pop ();
326 /* Return the first element of the stack of bindings. */
329 gimple_current_bind_expr (void)
331 return gimplify_ctxp->bind_expr_stack.last ();
334 /* Return the stack of bindings created during gimplification. */
337 gimple_bind_expr_stack (void)
339 return gimplify_ctxp->bind_expr_stack;
342 /* Return true iff there is a COND_EXPR between us and the innermost
343 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
346 gimple_conditional_context (void)
348 return gimplify_ctxp->conditions > 0;
351 /* Note that we've entered a COND_EXPR. */
354 gimple_push_condition (void)
356 #ifdef ENABLE_GIMPLE_CHECKING
357 if (gimplify_ctxp->conditions == 0)
358 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
360 ++(gimplify_ctxp->conditions);
363 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
364 now, add any conditional cleanups we've seen to the prequeue. */
367 gimple_pop_condition (gimple_seq *pre_p)
369 int conds = --(gimplify_ctxp->conditions);
371 gcc_assert (conds >= 0);
374 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
375 gimplify_ctxp->conditional_cleanups = NULL;
379 /* A stable comparison routine for use with splay trees and DECLs. */
382 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
387 return DECL_UID (a) - DECL_UID (b);
390 /* Create a new omp construct that deals with variable remapping. */
392 static struct gimplify_omp_ctx *
393 new_omp_context (enum omp_region_type region_type)
395 struct gimplify_omp_ctx *c;
397 c = XCNEW (struct gimplify_omp_ctx);
398 c->outer_context = gimplify_omp_ctxp;
399 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
400 c->privatized_types = new hash_set<tree>;
401 c->location = input_location;
402 c->region_type = region_type;
403 if ((region_type & ORT_TASK) == 0)
404 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
406 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
411 /* Destroy an omp construct that deals with variable remapping. */
414 delete_omp_context (struct gimplify_omp_ctx *c)
416 splay_tree_delete (c->variables);
417 delete c->privatized_types;
418 c->loop_iter_var.release ();
422 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
423 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
425 /* Both gimplify the statement T and append it to *SEQ_P. This function
426 behaves exactly as gimplify_stmt, but you don't have to pass T as a
430 gimplify_and_add (tree t, gimple_seq *seq_p)
432 gimplify_stmt (&t, seq_p);
435 /* Gimplify statement T into sequence *SEQ_P, and return the first
436 tuple in the sequence of generated tuples for this statement.
437 Return NULL if gimplifying T produced no tuples. */
440 gimplify_and_return_first (tree t, gimple_seq *seq_p)
442 gimple_stmt_iterator last = gsi_last (*seq_p);
444 gimplify_and_add (t, seq_p);
446 if (!gsi_end_p (last))
449 return gsi_stmt (last);
452 return gimple_seq_first_stmt (*seq_p);
455 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
456 LHS, or for a call argument. */
459 is_gimple_mem_rhs (tree t)
461 /* If we're dealing with a renamable type, either source or dest must be
462 a renamed variable. */
463 if (is_gimple_reg_type (TREE_TYPE (t)))
464 return is_gimple_val (t);
466 return is_gimple_val (t) || is_gimple_lvalue (t);
469 /* Return true if T is a CALL_EXPR or an expression that can be
470 assigned to a temporary. Note that this predicate should only be
471 used during gimplification. See the rationale for this in
472 gimplify_modify_expr. */
475 is_gimple_reg_rhs_or_call (tree t)
477 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
478 || TREE_CODE (t) == CALL_EXPR);
481 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
482 this predicate should only be used during gimplification. See the
483 rationale for this in gimplify_modify_expr. */
486 is_gimple_mem_rhs_or_call (tree t)
488 /* If we're dealing with a renamable type, either source or dest must be
489 a renamed variable. */
490 if (is_gimple_reg_type (TREE_TYPE (t)))
491 return is_gimple_val (t);
493 return (is_gimple_val (t) || is_gimple_lvalue (t)
494 || TREE_CODE (t) == CALL_EXPR);
497 /* Create a temporary with a name derived from VAL. Subroutine of
498 lookup_tmp_var; nobody else should call this function. */
501 create_tmp_from_val (tree val)
503 /* Drop all qualifiers and address-space information from the value type. */
504 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
505 tree var = create_tmp_var (type, get_name (val));
506 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
507 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
508 DECL_GIMPLE_REG_P (var) = 1;
512 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
513 an existing expression temporary. */
516 lookup_tmp_var (tree val, bool is_formal)
520 /* If not optimizing, never really reuse a temporary. local-alloc
521 won't allocate any variable that is used in more than one basic
522 block, which means it will go into memory, causing much extra
523 work in reload and final and poorer code generation, outweighing
524 the extra memory allocation here. */
525 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
526 ret = create_tmp_from_val (val);
533 if (!gimplify_ctxp->temp_htab)
534 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
535 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
538 elt_p = XNEW (elt_t);
540 elt_p->temp = ret = create_tmp_from_val (val);
553 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
556 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
561 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
562 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
563 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
566 if (gimplify_ctxp->into_ssa
567 && is_gimple_reg_type (TREE_TYPE (val)))
568 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
570 t = lookup_tmp_var (val, is_formal);
572 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
574 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
576 /* gimplify_modify_expr might want to reduce this further. */
577 gimplify_and_add (mod, pre_p);
583 /* Return a formal temporary variable initialized with VAL. PRE_P is as
584 in gimplify_expr. Only use this function if:
586 1) The value of the unfactored expression represented by VAL will not
587 change between the initialization and use of the temporary, and
588 2) The temporary will not be otherwise modified.
590 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
591 and #2 means it is inappropriate for && temps.
593 For other cases, use get_initialized_tmp_var instead. */
596 get_formal_tmp_var (tree val, gimple_seq *pre_p)
598 return internal_get_tmp_var (val, pre_p, NULL, true);
601 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
602 are as in gimplify_expr. */
605 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
607 return internal_get_tmp_var (val, pre_p, post_p, false);
610 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
611 generate debug info for them; otherwise don't. */
614 declare_vars (tree vars, gimple *gs, bool debug_info)
621 gbind *scope = as_a <gbind *> (gs);
623 temps = nreverse (last);
625 block = gimple_bind_block (scope);
626 gcc_assert (!block || TREE_CODE (block) == BLOCK);
627 if (!block || !debug_info)
629 DECL_CHAIN (last) = gimple_bind_vars (scope);
630 gimple_bind_set_vars (scope, temps);
634 /* We need to attach the nodes both to the BIND_EXPR and to its
635 associated BLOCK for debugging purposes. The key point here
636 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
637 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
638 if (BLOCK_VARS (block))
639 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
642 gimple_bind_set_vars (scope,
643 chainon (gimple_bind_vars (scope), temps));
644 BLOCK_VARS (block) = temps;
650 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
651 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
652 no such upper bound can be obtained. */
655 force_constant_size (tree var)
657 /* The only attempt we make is by querying the maximum size of objects
658 of the variable's type. */
660 HOST_WIDE_INT max_size;
662 gcc_assert (TREE_CODE (var) == VAR_DECL);
664 max_size = max_int_size_in_bytes (TREE_TYPE (var));
666 gcc_assert (max_size >= 0);
669 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
671 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
674 /* Push the temporary variable TMP into the current binding. */
677 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
679 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
681 /* Later processing assumes that the object size is constant, which might
682 not be true at this point. Force the use of a constant upper bound in
684 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
685 force_constant_size (tmp);
687 DECL_CONTEXT (tmp) = fn->decl;
688 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
690 record_vars_into (tmp, fn->decl);
693 /* Push the temporary variable TMP into the current binding. */
696 gimple_add_tmp_var (tree tmp)
698 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
700 /* Later processing assumes that the object size is constant, which might
701 not be true at this point. Force the use of a constant upper bound in
703 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
704 force_constant_size (tmp);
706 DECL_CONTEXT (tmp) = current_function_decl;
707 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
711 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
712 gimplify_ctxp->temps = tmp;
714 /* Mark temporaries local within the nearest enclosing parallel. */
715 if (gimplify_omp_ctxp)
717 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
719 && (ctx->region_type == ORT_WORKSHARE
720 || ctx->region_type == ORT_SIMD
721 || ctx->region_type == ORT_ACC))
722 ctx = ctx->outer_context;
724 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
733 /* This case is for nested functions. We need to expose the locals
735 body_seq = gimple_body (current_function_decl);
736 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
742 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
743 nodes that are referenced more than once in GENERIC functions. This is
744 necessary because gimplification (translation into GIMPLE) is performed
745 by modifying tree nodes in-place, so gimplication of a shared node in a
746 first context could generate an invalid GIMPLE form in a second context.
748 This is achieved with a simple mark/copy/unmark algorithm that walks the
749 GENERIC representation top-down, marks nodes with TREE_VISITED the first
750 time it encounters them, duplicates them if they already have TREE_VISITED
751 set, and finally removes the TREE_VISITED marks it has set.
753 The algorithm works only at the function level, i.e. it generates a GENERIC
754 representation of a function with no nodes shared within the function when
755 passed a GENERIC function (except for nodes that are allowed to be shared).
757 At the global level, it is also necessary to unshare tree nodes that are
758 referenced in more than one function, for the same aforementioned reason.
759 This requires some cooperation from the front-end. There are 2 strategies:
761 1. Manual unsharing. The front-end needs to call unshare_expr on every
762 expression that might end up being shared across functions.
764 2. Deep unsharing. This is an extension of regular unsharing. Instead
765 of calling unshare_expr on expressions that might be shared across
766 functions, the front-end pre-marks them with TREE_VISITED. This will
767 ensure that they are unshared on the first reference within functions
768 when the regular unsharing algorithm runs. The counterpart is that
769 this algorithm must look deeper than for manual unsharing, which is
770 specified by LANG_HOOKS_DEEP_UNSHARING.
772 If there are only few specific cases of node sharing across functions, it is
773 probably easier for a front-end to unshare the expressions manually. On the
774 contrary, if the expressions generated at the global level are as widespread
775 as expressions generated within functions, deep unsharing is very likely the
778 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
779 These nodes model computations that must be done once. If we were to
780 unshare something like SAVE_EXPR(i++), the gimplification process would
781 create wrong code. However, if DATA is non-null, it must hold a pointer
782 set that is used to unshare the subtrees of these nodes. */
785 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
788 enum tree_code code = TREE_CODE (t);
790 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
791 copy their subtrees if we can make sure to do it only once. */
792 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
794 if (data && !((hash_set<tree> *)data)->add (t))
800 /* Stop at types, decls, constants like copy_tree_r. */
801 else if (TREE_CODE_CLASS (code) == tcc_type
802 || TREE_CODE_CLASS (code) == tcc_declaration
803 || TREE_CODE_CLASS (code) == tcc_constant
804 /* We can't do anything sensible with a BLOCK used as an
805 expression, but we also can't just die when we see it
806 because of non-expression uses. So we avert our eyes
807 and cross our fingers. Silly Java. */
811 /* Cope with the statement expression extension. */
812 else if (code == STATEMENT_LIST)
815 /* Leave the bulk of the work to copy_tree_r itself. */
817 copy_tree_r (tp, walk_subtrees, NULL);
822 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
823 If *TP has been visited already, then *TP is deeply copied by calling
824 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
827 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
830 enum tree_code code = TREE_CODE (t);
832 /* Skip types, decls, and constants. But we do want to look at their
833 types and the bounds of types. Mark them as visited so we properly
834 unmark their subtrees on the unmark pass. If we've already seen them,
835 don't look down further. */
836 if (TREE_CODE_CLASS (code) == tcc_type
837 || TREE_CODE_CLASS (code) == tcc_declaration
838 || TREE_CODE_CLASS (code) == tcc_constant)
840 if (TREE_VISITED (t))
843 TREE_VISITED (t) = 1;
846 /* If this node has been visited already, unshare it and don't look
848 else if (TREE_VISITED (t))
850 walk_tree (tp, mostly_copy_tree_r, data, NULL);
854 /* Otherwise, mark the node as visited and keep looking. */
856 TREE_VISITED (t) = 1;
861 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
862 copy_if_shared_r callback unmodified. */
865 copy_if_shared (tree *tp, void *data)
867 walk_tree (tp, copy_if_shared_r, data, NULL);
870 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
871 any nested functions. */
874 unshare_body (tree fndecl)
876 struct cgraph_node *cgn = cgraph_node::get (fndecl);
877 /* If the language requires deep unsharing, we need a pointer set to make
878 sure we don't repeatedly unshare subtrees of unshareable nodes. */
879 hash_set<tree> *visited
880 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
882 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
883 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
884 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
889 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
890 unshare_body (cgn->decl);
893 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
894 Subtrees are walked until the first unvisited node is encountered. */
897 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
901 /* If this node has been visited, unmark it and keep looking. */
902 if (TREE_VISITED (t))
903 TREE_VISITED (t) = 0;
905 /* Otherwise, don't look any deeper. */
912 /* Unmark the visited trees rooted at *TP. */
915 unmark_visited (tree *tp)
917 walk_tree (tp, unmark_visited_r, NULL, NULL);
920 /* Likewise, but mark all trees as not visited. */
923 unvisit_body (tree fndecl)
925 struct cgraph_node *cgn = cgraph_node::get (fndecl);
927 unmark_visited (&DECL_SAVED_TREE (fndecl));
928 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
929 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
932 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
933 unvisit_body (cgn->decl);
936 /* Unconditionally make an unshared copy of EXPR. This is used when using
937 stored expressions which span multiple functions, such as BINFO_VTABLE,
938 as the normal unsharing process can't tell that they're shared. */
941 unshare_expr (tree expr)
943 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
947 /* Worker for unshare_expr_without_location. */
950 prune_expr_location (tree *tp, int *walk_subtrees, void *)
953 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
959 /* Similar to unshare_expr but also prune all expression locations
963 unshare_expr_without_location (tree expr)
965 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
967 walk_tree (&expr, prune_expr_location, NULL, NULL);
971 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
972 contain statements and have a value. Assign its value to a temporary
973 and give it void_type_node. Return the temporary, or NULL_TREE if
974 WRAPPER was already void. */
977 voidify_wrapper_expr (tree wrapper, tree temp)
979 tree type = TREE_TYPE (wrapper);
980 if (type && !VOID_TYPE_P (type))
984 /* Set p to point to the body of the wrapper. Loop until we find
985 something that isn't a wrapper. */
986 for (p = &wrapper; p && *p; )
988 switch (TREE_CODE (*p))
991 TREE_SIDE_EFFECTS (*p) = 1;
992 TREE_TYPE (*p) = void_type_node;
993 /* For a BIND_EXPR, the body is operand 1. */
994 p = &BIND_EXPR_BODY (*p);
997 case CLEANUP_POINT_EXPR:
998 case TRY_FINALLY_EXPR:
1000 TREE_SIDE_EFFECTS (*p) = 1;
1001 TREE_TYPE (*p) = void_type_node;
1002 p = &TREE_OPERAND (*p, 0);
1005 case STATEMENT_LIST:
1007 tree_stmt_iterator i = tsi_last (*p);
1008 if (TREE_CODE(*tsi_stmt_ptr (i)) == STATEMENT_LIST_END)
1013 TREE_SIDE_EFFECTS (*p) = 1;
1014 TREE_TYPE (*p) = void_type_node;
1015 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1020 /* Advance to the last statement. Set all container types to
1022 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1024 TREE_SIDE_EFFECTS (*p) = 1;
1025 TREE_TYPE (*p) = void_type_node;
1029 case TRANSACTION_EXPR:
1030 TREE_SIDE_EFFECTS (*p) = 1;
1031 TREE_TYPE (*p) = void_type_node;
1032 p = &TRANSACTION_EXPR_BODY (*p);
1036 /* Assume that any tree upon which voidify_wrapper_expr is
1037 directly called is a wrapper, and that its body is op0. */
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TREE_OPERAND (*p, 0);
1050 if (p == NULL || IS_EMPTY_STMT (*p))
1054 /* The wrapper is on the RHS of an assignment that we're pushing
1056 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1057 || TREE_CODE (temp) == MODIFY_EXPR);
1058 TREE_OPERAND (temp, 1) = *p;
1063 temp = create_tmp_var (type, "retval");
1064 *p = build2 (INIT_EXPR, type, temp, *p);
1073 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1074 a temporary through which they communicate. */
1077 build_stack_save_restore (gcall **save, gcall **restore)
1081 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1082 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1083 gimple_call_set_lhs (*save, tmp_var);
1086 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1090 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1093 build_asan_poison_call_expr (tree decl)
1095 /* Do not poison variables that have size equal to zero. */
1096 tree unit_size = DECL_SIZE_UNIT (decl);
1097 if (zerop (unit_size))
1100 tree base = build_fold_addr_expr (decl);
1102 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1104 build_int_cst (integer_type_node,
1109 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1110 on POISON flag, shadow memory of a DECL variable. The call will be
1111 put on location identified by IT iterator, where BEFORE flag drives
1112 position where the stmt will be put. */
1115 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1118 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1119 if (gimplify_omp_ctxp)
1122 tree unit_size = DECL_SIZE_UNIT (decl);
1123 tree base = build_fold_addr_expr (decl);
1125 /* Do not poison variables that have size equal to zero. */
1126 if (zerop (unit_size))
1129 /* It's necessary to have all stack variables aligned to ASAN granularity
1131 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1132 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1134 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1137 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1138 build_int_cst (integer_type_node, flags),
1142 gsi_insert_before (it, g, GSI_NEW_STMT);
1144 gsi_insert_after (it, g, GSI_NEW_STMT);
1147 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1148 either poisons or unpoisons a DECL. Created statement is appended
1149 to SEQ_P gimple sequence. */
1152 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1154 gimple_stmt_iterator it = gsi_last (*seq_p);
1155 bool before = false;
1160 asan_poison_variable (decl, poison, &it, before);
1163 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1166 sort_by_decl_uid (const void *a, const void *b)
1168 const tree *t1 = (const tree *)a;
1169 const tree *t2 = (const tree *)b;
1171 int uid1 = DECL_UID (*t1);
1172 int uid2 = DECL_UID (*t2);
1176 else if (uid1 > uid2)
1182 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1183 depending on POISON flag. Created statement is appended
1184 to SEQ_P gimple sequence. */
1187 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1189 unsigned c = variables->elements ();
1193 auto_vec<tree> sorted_variables (c);
1195 for (hash_set<tree>::iterator it = variables->begin ();
1196 it != variables->end (); ++it)
1197 sorted_variables.safe_push (*it);
1199 sorted_variables.qsort (sort_by_decl_uid);
1201 for (unsigned i = 0; i < sorted_variables.length (); i++)
1202 asan_poison_variable (sorted_variables[i], poison, seq_p);
1205 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1207 static enum gimplify_status
1208 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1210 tree bind_expr = *expr_p;
1211 bool old_keep_stack = gimplify_ctxp->keep_stack;
1212 bool old_save_stack = gimplify_ctxp->save_stack;
1215 gimple_seq body, cleanup;
1217 location_t start_locus = 0, end_locus = 0;
1218 tree ret_clauses = NULL;
1220 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1222 /* Mark variables seen in this bind expr. */
1223 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1225 if (TREE_CODE (t) == VAR_DECL)
1227 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1229 /* Mark variable as local. */
1230 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1231 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1232 || splay_tree_lookup (ctx->variables,
1233 (splay_tree_key) t) == NULL))
1235 if (ctx->region_type == ORT_SIMD
1236 && TREE_ADDRESSABLE (t)
1237 && !TREE_STATIC (t))
1238 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1240 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1243 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1245 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1246 cfun->has_local_explicit_reg_vars = true;
1249 /* Preliminarily mark non-addressed complex variables as eligible
1250 for promotion to gimple registers. We'll transform their uses
1252 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1253 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1254 && !TREE_THIS_VOLATILE (t)
1255 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1256 && !needs_to_live_in_memory (t))
1257 DECL_GIMPLE_REG_P (t) = 1;
1260 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1261 BIND_EXPR_BLOCK (bind_expr));
1262 gimple_push_bind_expr (bind_stmt);
1264 gimplify_ctxp->keep_stack = false;
1265 gimplify_ctxp->save_stack = false;
1267 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1269 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1270 gimple_bind_set_body (bind_stmt, body);
1272 /* Source location wise, the cleanup code (stack_restore and clobbers)
1273 belongs to the end of the block, so propagate what we have. The
1274 stack_save operation belongs to the beginning of block, which we can
1275 infer from the bind_expr directly if the block has no explicit
1277 if (BIND_EXPR_BLOCK (bind_expr))
1279 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1280 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1282 if (start_locus == 0)
1283 start_locus = EXPR_LOCATION (bind_expr);
1288 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1289 the stack space allocated to the VLAs. */
1290 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1292 gcall *stack_restore;
1294 /* Save stack on entry and restore it on exit. Add a try_finally
1295 block to achieve this. */
1296 build_stack_save_restore (&stack_save, &stack_restore);
1298 gimple_set_location (stack_save, start_locus);
1299 gimple_set_location (stack_restore, end_locus);
1301 gimplify_seq_add_stmt (&cleanup, stack_restore);
1304 /* Add clobbers for all variables that go out of scope. */
1305 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1307 if (TREE_CODE (t) == VAR_DECL
1308 && !is_global_var (t)
1309 && DECL_CONTEXT (t) == current_function_decl
1310 && !DECL_HARD_REGISTER (t)
1311 && !TREE_THIS_VOLATILE (t)
1312 && !DECL_HAS_VALUE_EXPR_P (t)
1313 /* Only care for variables that have to be in memory. Others
1314 will be rewritten into SSA names, hence moved to the top-level. */
1315 && !is_gimple_reg (t)
1316 && flag_stack_reuse != SR_NONE)
1318 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1319 gimple *clobber_stmt;
1320 TREE_THIS_VOLATILE (clobber) = 1;
1321 clobber_stmt = gimple_build_assign (t, clobber);
1322 gimple_set_location (clobber_stmt, end_locus);
1323 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1325 if (flag_openacc && oacc_declare_returns != NULL)
1327 tree *c = oacc_declare_returns->get (t);
1331 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1335 oacc_declare_returns->remove (t);
1337 if (oacc_declare_returns->elements () == 0)
1339 delete oacc_declare_returns;
1340 oacc_declare_returns = NULL;
1346 if (asan_poisoned_variables != NULL
1347 && asan_poisoned_variables->contains (t))
1349 asan_poisoned_variables->remove (t);
1350 asan_poison_variable (t, true, &cleanup);
1353 if (gimplify_ctxp->live_switch_vars != NULL
1354 && gimplify_ctxp->live_switch_vars->contains (t))
1355 gimplify_ctxp->live_switch_vars->remove (t);
1361 gimple_stmt_iterator si = gsi_start (cleanup);
1363 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1365 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1371 gimple_seq new_body;
1374 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1375 GIMPLE_TRY_FINALLY);
1378 gimplify_seq_add_stmt (&new_body, stack_save);
1379 gimplify_seq_add_stmt (&new_body, gs);
1380 gimple_bind_set_body (bind_stmt, new_body);
1383 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1384 if (!gimplify_ctxp->keep_stack)
1385 gimplify_ctxp->keep_stack = old_keep_stack;
1386 gimplify_ctxp->save_stack = old_save_stack;
1388 gimple_pop_bind_expr ();
1390 gimplify_seq_add_stmt (pre_p, bind_stmt);
1398 *expr_p = NULL_TREE;
1402 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1403 GIMPLE value, it is assigned to a new temporary and the statement is
1404 re-written to return the temporary.
1406 PRE_P points to the sequence where side effects that must happen before
1407 STMT should be stored. */
1409 static enum gimplify_status
1410 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1413 tree ret_expr = TREE_OPERAND (stmt, 0);
1414 tree result_decl, result;
1416 if (ret_expr == error_mark_node)
1419 /* Implicit _Cilk_sync must be inserted right before any return statement
1420 if there is a _Cilk_spawn in the function. If the user has provided a
1421 _Cilk_sync, the optimizer should remove this duplicate one. */
1422 if (fn_contains_cilk_spawn_p (cfun))
1424 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1425 gimplify_and_add (impl_sync, pre_p);
1429 || TREE_CODE (ret_expr) == RESULT_DECL
1430 || ret_expr == error_mark_node)
1432 greturn *ret = gimple_build_return (ret_expr);
1433 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1434 gimplify_seq_add_stmt (pre_p, ret);
1438 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1439 result_decl = NULL_TREE;
1442 result_decl = TREE_OPERAND (ret_expr, 0);
1444 /* See through a return by reference. */
1445 if (TREE_CODE (result_decl) == INDIRECT_REF)
1446 result_decl = TREE_OPERAND (result_decl, 0);
1448 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1449 || TREE_CODE (ret_expr) == INIT_EXPR)
1450 && TREE_CODE (result_decl) == RESULT_DECL);
1453 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1454 Recall that aggregate_value_p is FALSE for any aggregate type that is
1455 returned in registers. If we're returning values in registers, then
1456 we don't want to extend the lifetime of the RESULT_DECL, particularly
1457 across another call. In addition, for those aggregates for which
1458 hard_function_value generates a PARALLEL, we'll die during normal
1459 expansion of structure assignments; there's special code in expand_return
1460 to handle this case that does not exist in expand_expr. */
1463 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1465 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1467 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1468 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1469 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1470 should be effectively allocated by the caller, i.e. all calls to
1471 this function must be subject to the Return Slot Optimization. */
1472 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1473 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1475 result = result_decl;
1477 else if (gimplify_ctxp->return_temp)
1478 result = gimplify_ctxp->return_temp;
1481 result = create_tmp_reg (TREE_TYPE (result_decl));
1483 /* ??? With complex control flow (usually involving abnormal edges),
1484 we can wind up warning about an uninitialized value for this. Due
1485 to how this variable is constructed and initialized, this is never
1486 true. Give up and never warn. */
1487 TREE_NO_WARNING (result) = 1;
1489 gimplify_ctxp->return_temp = result;
1492 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1493 Then gimplify the whole thing. */
1494 if (result != result_decl)
1495 TREE_OPERAND (ret_expr, 0) = result;
1497 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1499 ret = gimple_build_return (result);
1500 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1501 gimplify_seq_add_stmt (pre_p, ret);
1506 /* Gimplify a variable-length array DECL. */
1509 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1511 /* This is a variable-sized decl. Simplify its size and mark it
1512 for deferred expansion. */
1513 tree t, addr, ptr_type;
1515 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1516 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1518 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1519 if (DECL_HAS_VALUE_EXPR_P (decl))
1522 /* All occurrences of this decl in final gimplified code will be
1523 replaced by indirection. Setting DECL_VALUE_EXPR does two
1524 things: First, it lets the rest of the gimplifier know what
1525 replacement to use. Second, it lets the debug info know
1526 where to find the value. */
1527 ptr_type = build_pointer_type (TREE_TYPE (decl));
1528 addr = create_tmp_var (ptr_type, get_name (decl));
1529 DECL_IGNORED_P (addr) = 0;
1530 t = build_fold_indirect_ref (addr);
1531 TREE_THIS_NOTRAP (t) = 1;
1532 SET_DECL_VALUE_EXPR (decl, t);
1533 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1535 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1536 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1537 size_int (DECL_ALIGN (decl)));
1538 /* The call has been built for a variable-sized object. */
1539 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1540 t = fold_convert (ptr_type, t);
1541 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1543 gimplify_and_add (t, seq_p);
1546 /* A helper function to be called via walk_tree. Mark all labels under *TP
1547 as being forced. To be called for DECL_INITIAL of static variables. */
1550 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1554 if (TREE_CODE (*tp) == LABEL_DECL)
1556 FORCED_LABEL (*tp) = 1;
1557 cfun->has_forced_label_in_static = 1;
1563 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1564 and initialization explicit. */
1566 static enum gimplify_status
1567 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1569 tree stmt = *stmt_p;
1570 tree decl = DECL_EXPR_DECL (stmt);
1572 *stmt_p = NULL_TREE;
1574 if (TREE_TYPE (decl) == error_mark_node)
1577 if ((TREE_CODE (decl) == TYPE_DECL
1578 || TREE_CODE (decl) == VAR_DECL)
1579 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1581 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1582 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1583 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1586 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1587 in case its size expressions contain problematic nodes like CALL_EXPR. */
1588 if (TREE_CODE (decl) == TYPE_DECL
1589 && DECL_ORIGINAL_TYPE (decl)
1590 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1592 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1593 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1594 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1597 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1599 tree init = DECL_INITIAL (decl);
1600 bool is_vla = false;
1602 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1603 || (!TREE_STATIC (decl)
1604 && flag_stack_check == GENERIC_STACK_CHECK
1605 && compare_tree_int (DECL_SIZE_UNIT (decl),
1606 STACK_CHECK_MAX_VAR_SIZE) > 0))
1608 gimplify_vla_decl (decl, seq_p);
1612 if (asan_sanitize_use_after_scope ()
1613 && !asan_no_sanitize_address_p ()
1615 && TREE_ADDRESSABLE (decl)
1616 && !TREE_STATIC (decl)
1617 && !DECL_HAS_VALUE_EXPR_P (decl)
1618 && dbg_cnt (asan_use_after_scope))
1620 asan_poisoned_variables->add (decl);
1621 asan_poison_variable (decl, false, seq_p);
1622 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1623 gimplify_ctxp->live_switch_vars->add (decl);
1626 /* Some front ends do not explicitly declare all anonymous
1627 artificial variables. We compensate here by declaring the
1628 variables, though it would be better if the front ends would
1629 explicitly declare them. */
1630 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1631 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1632 gimple_add_tmp_var (decl);
1634 if (init && init != error_mark_node)
1636 if (!TREE_STATIC (decl))
1638 DECL_INITIAL (decl) = NULL_TREE;
1639 init = build2 (INIT_EXPR, void_type_node, decl, init);
1640 gimplify_and_add (init, seq_p);
1644 /* We must still examine initializers for static variables
1645 as they may contain a label address. */
1646 walk_tree (&init, force_labels_r, NULL, NULL);
1653 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1654 and replacing the LOOP_EXPR with goto, but if the loop contains an
1655 EXIT_EXPR, we need to append a label for it to jump to. */
1657 static enum gimplify_status
1658 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1660 tree saved_label = gimplify_ctxp->exit_label;
1661 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1663 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1665 gimplify_ctxp->exit_label = NULL_TREE;
1667 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1669 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1671 if (gimplify_ctxp->exit_label)
1672 gimplify_seq_add_stmt (pre_p,
1673 gimple_build_label (gimplify_ctxp->exit_label));
1675 gimplify_ctxp->exit_label = saved_label;
1681 /* Gimplify a statement list onto a sequence. These may be created either
1682 by an enlightened front-end, or by shortcut_cond_expr. */
1684 static enum gimplify_status
1685 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1687 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1689 tree_stmt_iterator i = tsi_start (*expr_p);
1691 while (!tsi_end_p (i))
1693 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1706 /* Callback for walk_gimple_seq. */
1709 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1710 struct walk_stmt_info *wi)
1712 gimple *stmt = gsi_stmt (*gsi_p);
1714 *handled_ops_p = true;
1715 switch (gimple_code (stmt))
1718 /* A compiler-generated cleanup or a user-written try block.
1719 If it's empty, don't dive into it--that would result in
1720 worse location info. */
1721 if (gimple_try_eval (stmt) == NULL)
1724 return integer_zero_node;
1729 case GIMPLE_EH_FILTER:
1730 case GIMPLE_TRANSACTION:
1731 /* Walk the sub-statements. */
1732 *handled_ops_p = false;
1735 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1737 *handled_ops_p = false;
1742 /* Save the first "real" statement (not a decl/lexical scope/...). */
1744 return integer_zero_node;
1749 /* Possibly warn about unreachable statements between switch's controlling
1750 expression and the first case. SEQ is the body of a switch expression. */
1753 maybe_warn_switch_unreachable (gimple_seq seq)
1755 if (!warn_switch_unreachable
1756 /* This warning doesn't play well with Fortran when optimizations
1758 || lang_GNU_Fortran ()
1762 struct walk_stmt_info wi;
1763 memset (&wi, 0, sizeof (wi));
1764 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1765 gimple *stmt = (gimple *) wi.info;
1767 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1769 if (gimple_code (stmt) == GIMPLE_GOTO
1770 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1771 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1772 /* Don't warn for compiler-generated gotos. These occur
1773 in Duff's devices, for example. */;
1775 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1776 "statement will never be executed");
1781 /* A label entry that pairs label and a location. */
1788 /* Find LABEL in vector of label entries VEC. */
1790 static struct label_entry *
1791 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1794 struct label_entry *l;
1796 FOR_EACH_VEC_ELT (*vec, i, l)
1797 if (l->label == label)
1802 /* Return true if LABEL, a LABEL_DECL, represents a case label
1803 in a vector of labels CASES. */
1806 case_label_p (const vec<tree> *cases, tree label)
1811 FOR_EACH_VEC_ELT (*cases, i, l)
1812 if (CASE_LABEL (l) == label)
1817 /* Find the last statement in a scope STMT. */
1820 last_stmt_in_scope (gimple *stmt)
1825 switch (gimple_code (stmt))
1829 gbind *bind = as_a <gbind *> (stmt);
1830 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1831 return last_stmt_in_scope (stmt);
1836 gtry *try_stmt = as_a <gtry *> (stmt);
1837 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1838 gimple *last_eval = last_stmt_in_scope (stmt);
1839 if (gimple_stmt_may_fallthru (last_eval)
1840 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1842 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1843 return last_stmt_in_scope (stmt);
1854 /* Collect interesting labels in LABELS and return the statement preceding
1855 another case label, or a user-defined label. */
1858 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1859 auto_vec <struct label_entry> *labels)
1861 gimple *prev = NULL;
1865 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1866 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1868 /* Nested scope. Only look at the last statement of
1869 the innermost scope. */
1870 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1871 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1875 /* It might be a label without a location. Use the
1876 location of the scope then. */
1877 if (!gimple_has_location (prev))
1878 gimple_set_location (prev, bind_loc);
1884 /* Ifs are tricky. */
1885 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1887 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1888 tree false_lab = gimple_cond_false_label (cond_stmt);
1889 location_t if_loc = gimple_location (cond_stmt);
1892 if (i > 1) goto <D.2259>; else goto D;
1893 we can't do much with the else-branch. */
1894 if (!DECL_ARTIFICIAL (false_lab))
1897 /* Go on until the false label, then one step back. */
1898 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1900 gimple *stmt = gsi_stmt (*gsi_p);
1901 if (gimple_code (stmt) == GIMPLE_LABEL
1902 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1906 /* Not found? Oops. */
1907 if (gsi_end_p (*gsi_p))
1910 struct label_entry l = { false_lab, if_loc };
1911 labels->safe_push (l);
1913 /* Go to the last statement of the then branch. */
1916 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1922 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1923 && !gimple_has_location (gsi_stmt (*gsi_p)))
1925 /* Look at the statement before, it might be
1926 attribute fallthrough, in which case don't warn. */
1928 bool fallthru_before_dest
1929 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1931 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1932 if (!fallthru_before_dest)
1934 struct label_entry l = { goto_dest, if_loc };
1935 labels->safe_push (l);
1938 /* And move back. */
1942 /* Remember the last statement. Skip labels that are of no interest
1944 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1946 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1947 if (find_label_entry (labels, label))
1948 prev = gsi_stmt (*gsi_p);
1950 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1953 prev = gsi_stmt (*gsi_p);
1956 while (!gsi_end_p (*gsi_p)
1957 /* Stop if we find a case or a user-defined label. */
1958 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
1959 || !gimple_has_location (gsi_stmt (*gsi_p))));
1964 /* Return true if the switch fallthough warning should occur. LABEL is
1965 the label statement that we're falling through to. */
1968 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
1970 gimple_stmt_iterator gsi = *gsi_p;
1972 /* Don't warn for a non-case label followed by a statement:
1977 as these are likely intentional. */
1978 if (!case_label_p (&gimplify_ctxp->case_labels, label))
1981 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
1985 /* Don't warn for terminated branches, i.e. when the subsequent case labels
1986 immediately breaks. */
1989 /* Skip all immediately following labels. */
1990 while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1993 /* { ... something; default:; } */
1995 /* { ... something; default: break; } or
1996 { ... something; default: goto L; } */
1997 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
1998 /* { ... something; default: return; } */
1999 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2005 /* Callback for walk_gimple_seq. */
2008 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2009 struct walk_stmt_info *)
2011 gimple *stmt = gsi_stmt (*gsi_p);
2013 *handled_ops_p = true;
2014 switch (gimple_code (stmt))
2019 case GIMPLE_EH_FILTER:
2020 case GIMPLE_TRANSACTION:
2021 /* Walk the sub-statements. */
2022 *handled_ops_p = false;
2025 /* Find a sequence of form:
2032 and possibly warn. */
2035 /* Found a label. Skip all immediately following labels. */
2036 while (!gsi_end_p (*gsi_p)
2037 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2040 /* There might be no more statements. */
2041 if (gsi_end_p (*gsi_p))
2042 return integer_zero_node;
2044 /* Vector of labels that fall through. */
2045 auto_vec <struct label_entry> labels;
2046 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2048 /* There might be no more statements. */
2049 if (gsi_end_p (*gsi_p))
2050 return integer_zero_node;
2052 gimple *next = gsi_stmt (*gsi_p);
2054 /* If what follows is a label, then we may have a fallthrough. */
2055 if (gimple_code (next) == GIMPLE_LABEL
2056 && gimple_has_location (next)
2057 && (label = gimple_label_label (as_a <glabel *> (next)))
2058 && !FALLTHROUGH_LABEL_P (label)
2061 struct label_entry *l;
2062 bool warned_p = false;
2063 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2065 else if (gimple_code (prev) == GIMPLE_LABEL
2066 && (label = gimple_label_label (as_a <glabel *> (prev)))
2067 && (l = find_label_entry (&labels, label)))
2068 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_disabled,
2069 "this statement may fall through");
2070 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2071 /* Try to be clever and don't warn when the statement
2072 can't actually fall through. */
2073 && gimple_stmt_may_fallthru (prev)
2074 && gimple_has_location (prev))
2075 warned_p = warning_at (gimple_location (prev),
2076 OPT_Wimplicit_fallthrough_disabled,
2077 "this statement may fall through");
2079 inform (gimple_location (next), "here");
2081 /* Mark this label as processed so as to prevent multiple
2082 warnings in nested switches. */
2083 FALLTHROUGH_LABEL_P (label) = true;
2085 /* So that next warn_implicit_fallthrough_r will start looking for
2086 a new sequence starting with this label. */
2097 /* Warn when a switch case falls through. */
2100 maybe_warn_implicit_fallthrough (gimple_seq seq)
2102 if (!warn_implicit_fallthrough)
2105 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2108 || lang_GNU_OBJC ()))
2111 struct walk_stmt_info wi;
2112 memset (&wi, 0, sizeof (wi));
2113 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2116 /* Callback for walk_gimple_seq. */
2119 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2120 struct walk_stmt_info *)
2122 gimple *stmt = gsi_stmt (*gsi_p);
2124 *handled_ops_p = true;
2125 switch (gimple_code (stmt))
2130 case GIMPLE_EH_FILTER:
2131 case GIMPLE_TRANSACTION:
2132 /* Walk the sub-statements. */
2133 *handled_ops_p = false;
2136 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2138 gsi_remove (gsi_p, true);
2139 if (gsi_end_p (*gsi_p))
2140 return integer_zero_node;
2143 location_t loc = gimple_location (stmt);
2145 gimple_stmt_iterator gsi2 = *gsi_p;
2146 stmt = gsi_stmt (gsi2);
2147 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2149 /* Go on until the artificial label. */
2150 tree goto_dest = gimple_goto_dest (stmt);
2151 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2153 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2154 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2159 /* Not found? Stop. */
2160 if (gsi_end_p (gsi2))
2163 /* Look one past it. */
2167 /* We're looking for a case label or default label here. */
2168 while (!gsi_end_p (gsi2))
2170 stmt = gsi_stmt (gsi2);
2171 if (gimple_code (stmt) == GIMPLE_LABEL)
2173 tree label = gimple_label_label (as_a <glabel *> (stmt));
2174 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2181 /* Something other than a label. That's not expected. */
2186 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2187 "a case label or default label");
2196 /* Expand all FALLTHROUGH () calls in SEQ. */
2199 expand_FALLTHROUGH (gimple_seq *seq_p)
2201 struct walk_stmt_info wi;
2202 memset (&wi, 0, sizeof (wi));
2203 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2207 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2210 static enum gimplify_status
2211 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2213 tree switch_expr = *expr_p;
2214 gimple_seq switch_body_seq = NULL;
2215 enum gimplify_status ret;
2216 tree index_type = TREE_TYPE (switch_expr);
2217 if (index_type == NULL_TREE)
2218 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2220 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2222 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2225 if (SWITCH_BODY (switch_expr))
2228 vec<tree> saved_labels;
2229 hash_set<tree> *saved_live_switch_vars = NULL;
2230 tree default_case = NULL_TREE;
2231 gswitch *switch_stmt;
2233 /* If someone can be bothered to fill in the labels, they can
2234 be bothered to null out the body too. */
2235 gcc_assert (!SWITCH_LABELS (switch_expr));
2237 /* Save old labels, get new ones from body, then restore the old
2238 labels. Save all the things from the switch body to append after. */
2239 saved_labels = gimplify_ctxp->case_labels;
2240 gimplify_ctxp->case_labels.create (8);
2242 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2243 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2244 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2245 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2246 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2248 gimplify_ctxp->live_switch_vars = NULL;
2250 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2251 gimplify_ctxp->in_switch_expr = true;
2253 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2255 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2256 maybe_warn_switch_unreachable (switch_body_seq);
2257 maybe_warn_implicit_fallthrough (switch_body_seq);
2258 /* Only do this for the outermost GIMPLE_SWITCH. */
2259 if (!gimplify_ctxp->in_switch_expr)
2260 expand_FALLTHROUGH (&switch_body_seq);
2262 labels = gimplify_ctxp->case_labels;
2263 gimplify_ctxp->case_labels = saved_labels;
2265 if (gimplify_ctxp->live_switch_vars)
2267 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2268 delete gimplify_ctxp->live_switch_vars;
2270 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2272 preprocess_case_label_vec_for_gimple (labels, index_type,
2277 glabel *new_default;
2280 = build_case_label (NULL_TREE, NULL_TREE,
2281 create_artificial_label (UNKNOWN_LOCATION));
2282 new_default = gimple_build_label (CASE_LABEL (default_case));
2283 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2286 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2287 default_case, labels);
2288 gimplify_seq_add_stmt (pre_p, switch_stmt);
2289 gimplify_seq_add_seq (pre_p, switch_body_seq);
2293 gcc_assert (SWITCH_LABELS (switch_expr));
2298 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2300 static enum gimplify_status
2301 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2303 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2304 == current_function_decl);
2306 glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p));
2307 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2308 gimplify_seq_add_stmt (pre_p, label_stmt);
2313 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2315 static enum gimplify_status
2316 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2318 struct gimplify_ctx *ctxp;
2321 /* Invalid programs can play Duff's Device type games with, for example,
2322 #pragma omp parallel. At least in the C front end, we don't
2323 detect such invalid branches until after gimplification, in the
2324 diagnose_omp_blocks pass. */
2325 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2326 if (ctxp->case_labels.exists ())
2329 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2330 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2331 ctxp->case_labels.safe_push (*expr_p);
2332 gimplify_seq_add_stmt (pre_p, label_stmt);
2337 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2341 build_and_jump (tree *label_p)
2343 if (label_p == NULL)
2344 /* If there's nowhere to jump, just fall through. */
2347 if (*label_p == NULL_TREE)
2349 tree label = create_artificial_label (UNKNOWN_LOCATION);
2353 return build1 (GOTO_EXPR, void_type_node, *label_p);
2356 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2357 This also involves building a label to jump to and communicating it to
2358 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2360 static enum gimplify_status
2361 gimplify_exit_expr (tree *expr_p)
2363 tree cond = TREE_OPERAND (*expr_p, 0);
2366 expr = build_and_jump (&gimplify_ctxp->exit_label);
2367 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2373 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2374 different from its canonical type, wrap the whole thing inside a
2375 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2378 The canonical type of a COMPONENT_REF is the type of the field being
2379 referenced--unless the field is a bit-field which can be read directly
2380 in a smaller mode, in which case the canonical type is the
2381 sign-appropriate type corresponding to that mode. */
2384 canonicalize_component_ref (tree *expr_p)
2386 tree expr = *expr_p;
2389 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2391 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2392 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2394 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2396 /* One could argue that all the stuff below is not necessary for
2397 the non-bitfield case and declare it a FE error if type
2398 adjustment would be needed. */
2399 if (TREE_TYPE (expr) != type)
2401 #ifdef ENABLE_TYPES_CHECKING
2402 tree old_type = TREE_TYPE (expr);
2406 /* We need to preserve qualifiers and propagate them from
2408 type_quals = TYPE_QUALS (type)
2409 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2410 if (TYPE_QUALS (type) != type_quals)
2411 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2413 /* Set the type of the COMPONENT_REF to the underlying type. */
2414 TREE_TYPE (expr) = type;
2416 #ifdef ENABLE_TYPES_CHECKING
2417 /* It is now a FE error, if the conversion from the canonical
2418 type to the original expression type is not useless. */
2419 gcc_assert (useless_type_conversion_p (old_type, type));
2424 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2425 to foo, embed that change in the ADDR_EXPR by converting
2430 where L is the lower bound. For simplicity, only do this for constant
2432 The constraint is that the type of &array[L] is trivially convertible
2436 canonicalize_addr_expr (tree *expr_p)
2438 tree expr = *expr_p;
2439 tree addr_expr = TREE_OPERAND (expr, 0);
2440 tree datype, ddatype, pddatype;
2442 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2443 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2444 || TREE_CODE (addr_expr) != ADDR_EXPR)
2447 /* The addr_expr type should be a pointer to an array. */
2448 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2449 if (TREE_CODE (datype) != ARRAY_TYPE)
2452 /* The pointer to element type shall be trivially convertible to
2453 the expression pointer type. */
2454 ddatype = TREE_TYPE (datype);
2455 pddatype = build_pointer_type (ddatype);
2456 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2460 /* The lower bound and element sizes must be constant. */
2461 if (!TYPE_SIZE_UNIT (ddatype)
2462 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2463 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2464 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2467 /* All checks succeeded. Build a new node to merge the cast. */
2468 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2469 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2470 NULL_TREE, NULL_TREE);
2471 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2473 /* We can have stripped a required restrict qualifier above. */
2474 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2475 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2478 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2479 underneath as appropriate. */
2481 static enum gimplify_status
2482 gimplify_conversion (tree *expr_p)
2484 location_t loc = EXPR_LOCATION (*expr_p);
2485 gcc_assert (CONVERT_EXPR_P (*expr_p));
2487 /* Then strip away all but the outermost conversion. */
2488 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2490 /* And remove the outermost conversion if it's useless. */
2491 if (tree_ssa_useless_type_conversion (*expr_p))
2492 *expr_p = TREE_OPERAND (*expr_p, 0);
2494 /* If we still have a conversion at the toplevel,
2495 then canonicalize some constructs. */
2496 if (CONVERT_EXPR_P (*expr_p))
2498 tree sub = TREE_OPERAND (*expr_p, 0);
2500 /* If a NOP conversion is changing the type of a COMPONENT_REF
2501 expression, then canonicalize its type now in order to expose more
2502 redundant conversions. */
2503 if (TREE_CODE (sub) == COMPONENT_REF)
2504 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2506 /* If a NOP conversion is changing a pointer to array of foo
2507 to a pointer to foo, embed that change in the ADDR_EXPR. */
2508 else if (TREE_CODE (sub) == ADDR_EXPR)
2509 canonicalize_addr_expr (expr_p);
2512 /* If we have a conversion to a non-register type force the
2513 use of a VIEW_CONVERT_EXPR instead. */
2514 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2515 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2516 TREE_OPERAND (*expr_p, 0));
2518 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2519 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2520 TREE_SET_CODE (*expr_p, NOP_EXPR);
2525 /* Nonlocal VLAs seen in the current function. */
2526 static hash_set<tree> *nonlocal_vlas;
2528 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2529 static tree nonlocal_vla_vars;
2531 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2532 DECL_VALUE_EXPR, and it's worth re-examining things. */
2534 static enum gimplify_status
2535 gimplify_var_or_parm_decl (tree *expr_p)
2537 tree decl = *expr_p;
2539 /* ??? If this is a local variable, and it has not been seen in any
2540 outer BIND_EXPR, then it's probably the result of a duplicate
2541 declaration, for which we've already issued an error. It would
2542 be really nice if the front end wouldn't leak these at all.
2543 Currently the only known culprit is C++ destructors, as seen
2544 in g++.old-deja/g++.jason/binding.C. */
2545 if (TREE_CODE (decl) == VAR_DECL
2546 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2547 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2548 && decl_function_context (decl) == current_function_decl)
2550 gcc_assert (seen_error ());
2554 /* When within an OMP context, notice uses of variables. */
2555 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2558 /* If the decl is an alias for another expression, substitute it now. */
2559 if (DECL_HAS_VALUE_EXPR_P (decl))
2561 tree value_expr = DECL_VALUE_EXPR (decl);
2563 /* For referenced nonlocal VLAs add a decl for debugging purposes
2564 to the current function. */
2565 if (TREE_CODE (decl) == VAR_DECL
2566 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2567 && nonlocal_vlas != NULL
2568 && TREE_CODE (value_expr) == INDIRECT_REF
2569 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2570 && decl_function_context (decl) != current_function_decl)
2572 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2574 && (ctx->region_type == ORT_WORKSHARE
2575 || ctx->region_type == ORT_SIMD
2576 || ctx->region_type == ORT_ACC))
2577 ctx = ctx->outer_context;
2578 if (!ctx && !nonlocal_vlas->add (decl))
2580 tree copy = copy_node (decl);
2582 lang_hooks.dup_lang_specific_decl (copy);
2583 SET_DECL_RTL (copy, 0);
2584 TREE_USED (copy) = 1;
2585 DECL_CHAIN (copy) = nonlocal_vla_vars;
2586 nonlocal_vla_vars = copy;
2587 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2588 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2592 *expr_p = unshare_expr (value_expr);
2599 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2602 recalculate_side_effects (tree t)
2604 enum tree_code code = TREE_CODE (t);
2605 int len = TREE_OPERAND_LENGTH (t);
2608 switch (TREE_CODE_CLASS (code))
2610 case tcc_expression:
2616 case PREDECREMENT_EXPR:
2617 case PREINCREMENT_EXPR:
2618 case POSTDECREMENT_EXPR:
2619 case POSTINCREMENT_EXPR:
2620 /* All of these have side-effects, no matter what their
2629 case tcc_comparison: /* a comparison expression */
2630 case tcc_unary: /* a unary arithmetic expression */
2631 case tcc_binary: /* a binary arithmetic expression */
2632 case tcc_reference: /* a reference */
2633 case tcc_vl_exp: /* a function call */
2634 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2635 for (i = 0; i < len; ++i)
2637 tree op = TREE_OPERAND (t, i);
2638 if (op && TREE_SIDE_EFFECTS (op))
2639 TREE_SIDE_EFFECTS (t) = 1;
2644 /* No side-effects. */
2652 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2656 : min_lval '[' val ']'
2658 | compound_lval '[' val ']'
2659 | compound_lval '.' ID
2661 This is not part of the original SIMPLE definition, which separates
2662 array and member references, but it seems reasonable to handle them
2663 together. Also, this way we don't run into problems with union
2664 aliasing; gcc requires that for accesses through a union to alias, the
2665 union reference must be explicit, which was not always the case when we
2666 were splitting up array and member refs.
2668 PRE_P points to the sequence where side effects that must happen before
2669 *EXPR_P should be stored.
2671 POST_P points to the sequence where side effects that must happen after
2672 *EXPR_P should be stored. */
2674 static enum gimplify_status
2675 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2676 fallback_t fallback)
2679 enum gimplify_status ret = GS_ALL_DONE, tret;
2681 location_t loc = EXPR_LOCATION (*expr_p);
2682 tree expr = *expr_p;
2684 /* Create a stack of the subexpressions so later we can walk them in
2685 order from inner to outer. */
2686 auto_vec<tree, 10> expr_stack;
2688 /* We can handle anything that get_inner_reference can deal with. */
2689 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2692 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2693 if (TREE_CODE (*p) == INDIRECT_REF)
2694 *p = fold_indirect_ref_loc (loc, *p);
2696 if (handled_component_p (*p))
2698 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2699 additional COMPONENT_REFs. */
2700 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2701 && gimplify_var_or_parm_decl (p) == GS_OK)
2706 expr_stack.safe_push (*p);
2709 gcc_assert (expr_stack.length ());
2711 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2712 walked through and P points to the innermost expression.
2714 Java requires that we elaborated nodes in source order. That
2715 means we must gimplify the inner expression followed by each of
2716 the indices, in order. But we can't gimplify the inner
2717 expression until we deal with any variable bounds, sizes, or
2718 positions in order to deal with PLACEHOLDER_EXPRs.
2720 So we do this in three steps. First we deal with the annotations
2721 for any variables in the components, then we gimplify the base,
2722 then we gimplify any indices, from left to right. */
2723 for (i = expr_stack.length () - 1; i >= 0; i--)
2725 tree t = expr_stack[i];
2727 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2729 /* Gimplify the low bound and element type size and put them into
2730 the ARRAY_REF. If these values are set, they have already been
2732 if (TREE_OPERAND (t, 2) == NULL_TREE)
2734 tree low = unshare_expr (array_ref_low_bound (t));
2735 if (!is_gimple_min_invariant (low))
2737 TREE_OPERAND (t, 2) = low;
2738 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2739 post_p, is_gimple_reg,
2741 ret = MIN (ret, tret);
2746 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2747 is_gimple_reg, fb_rvalue);
2748 ret = MIN (ret, tret);
2751 if (TREE_OPERAND (t, 3) == NULL_TREE)
2753 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2754 tree elmt_size = unshare_expr (array_ref_element_size (t));
2755 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2757 /* Divide the element size by the alignment of the element
2760 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2762 if (!is_gimple_min_invariant (elmt_size))
2764 TREE_OPERAND (t, 3) = elmt_size;
2765 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2766 post_p, is_gimple_reg,
2768 ret = MIN (ret, tret);
2773 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2774 is_gimple_reg, fb_rvalue);
2775 ret = MIN (ret, tret);
2778 else if (TREE_CODE (t) == COMPONENT_REF)
2780 /* Set the field offset into T and gimplify it. */
2781 if (TREE_OPERAND (t, 2) == NULL_TREE)
2783 tree offset = unshare_expr (component_ref_field_offset (t));
2784 tree field = TREE_OPERAND (t, 1);
2786 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2788 /* Divide the offset by its alignment. */
2789 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2791 if (!is_gimple_min_invariant (offset))
2793 TREE_OPERAND (t, 2) = offset;
2794 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2795 post_p, is_gimple_reg,
2797 ret = MIN (ret, tret);
2802 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2803 is_gimple_reg, fb_rvalue);
2804 ret = MIN (ret, tret);
2809 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2810 so as to match the min_lval predicate. Failure to do so may result
2811 in the creation of large aggregate temporaries. */
2812 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2813 fallback | fb_lvalue);
2814 ret = MIN (ret, tret);
2816 /* And finally, the indices and operands of ARRAY_REF. During this
2817 loop we also remove any useless conversions. */
2818 for (; expr_stack.length () > 0; )
2820 tree t = expr_stack.pop ();
2822 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2824 /* Gimplify the dimension. */
2825 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2827 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2828 is_gimple_val, fb_rvalue);
2829 ret = MIN (ret, tret);
2833 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2835 /* The innermost expression P may have originally had
2836 TREE_SIDE_EFFECTS set which would have caused all the outer
2837 expressions in *EXPR_P leading to P to also have had
2838 TREE_SIDE_EFFECTS set. */
2839 recalculate_side_effects (t);
2842 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2843 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2845 canonicalize_component_ref (expr_p);
2848 expr_stack.release ();
2850 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2855 /* Gimplify the self modifying expression pointed to by EXPR_P
2858 PRE_P points to the list where side effects that must happen before
2859 *EXPR_P should be stored.
2861 POST_P points to the list where side effects that must happen after
2862 *EXPR_P should be stored.
2864 WANT_VALUE is nonzero iff we want to use the value of this expression
2865 in another expression.
2867 ARITH_TYPE is the type the computation should be performed in. */
2869 enum gimplify_status
2870 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2871 bool want_value, tree arith_type)
2873 enum tree_code code;
2874 tree lhs, lvalue, rhs, t1;
2875 gimple_seq post = NULL, *orig_post_p = post_p;
2877 enum tree_code arith_code;
2878 enum gimplify_status ret;
2879 location_t loc = EXPR_LOCATION (*expr_p);
2881 code = TREE_CODE (*expr_p);
2883 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2884 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2886 /* Prefix or postfix? */
2887 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2888 /* Faster to treat as prefix if result is not used. */
2889 postfix = want_value;
2893 /* For postfix, make sure the inner expression's post side effects
2894 are executed after side effects from this expression. */
2898 /* Add or subtract? */
2899 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2900 arith_code = PLUS_EXPR;
2902 arith_code = MINUS_EXPR;
2904 /* Gimplify the LHS into a GIMPLE lvalue. */
2905 lvalue = TREE_OPERAND (*expr_p, 0);
2906 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2907 if (ret == GS_ERROR)
2910 /* Extract the operands to the arithmetic operation. */
2912 rhs = TREE_OPERAND (*expr_p, 1);
2914 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2915 that as the result value and in the postqueue operation. */
2918 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2919 if (ret == GS_ERROR)
2922 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2925 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2926 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2928 rhs = convert_to_ptrofftype_loc (loc, rhs);
2929 if (arith_code == MINUS_EXPR)
2930 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2931 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2934 t1 = fold_convert (TREE_TYPE (*expr_p),
2935 fold_build2 (arith_code, arith_type,
2936 fold_convert (arith_type, lhs),
2937 fold_convert (arith_type, rhs)));
2941 gimplify_assign (lvalue, t1, pre_p);
2942 gimplify_seq_add_seq (orig_post_p, post);
2948 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2953 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2956 maybe_with_size_expr (tree *expr_p)
2958 tree expr = *expr_p;
2959 tree type = TREE_TYPE (expr);
2962 /* If we've already wrapped this or the type is error_mark_node, we can't do
2964 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2965 || type == error_mark_node)
2968 /* If the size isn't known or is a constant, we have nothing to do. */
2969 size = TYPE_SIZE_UNIT (type);
2970 if (!size || TREE_CODE (size) == INTEGER_CST)
2973 /* Otherwise, make a WITH_SIZE_EXPR. */
2974 size = unshare_expr (size);
2975 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2976 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2979 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2980 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2983 enum gimplify_status
2984 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2986 bool (*test) (tree);
2989 /* In general, we allow lvalues for function arguments to avoid
2990 extra overhead of copying large aggregates out of even larger
2991 aggregates into temporaries only to copy the temporaries to
2992 the argument list. Make optimizers happy by pulling out to
2993 temporaries those types that fit in registers. */
2994 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2995 test = is_gimple_val, fb = fb_rvalue;
2998 test = is_gimple_lvalue, fb = fb_either;
2999 /* Also strip a TARGET_EXPR that would force an extra copy. */
3000 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3002 tree init = TARGET_EXPR_INITIAL (*arg_p);
3004 && !VOID_TYPE_P (TREE_TYPE (init)))
3009 /* If this is a variable sized type, we must remember the size. */
3010 maybe_with_size_expr (arg_p);
3012 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3013 /* Make sure arguments have the same location as the function call
3015 protected_set_expr_location (*arg_p, call_location);
3017 /* There is a sequence point before a function call. Side effects in
3018 the argument list must occur before the actual call. So, when
3019 gimplifying arguments, force gimplify_expr to use an internal
3020 post queue which is then appended to the end of PRE_P. */
3021 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
3024 /* Don't fold inside offloading or taskreg regions: it can break code by
3025 adding decl references that weren't in the source. We'll do it during
3026 omplower pass instead. */
3029 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3031 struct gimplify_omp_ctx *ctx;
3032 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3033 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3035 return fold_stmt (gsi);
3038 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3039 WANT_VALUE is true if the result of the call is desired. */
3041 static enum gimplify_status
3042 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3044 tree fndecl, parms, p, fnptrtype;
3045 enum gimplify_status ret;
3048 bool builtin_va_start_p = false;
3049 location_t loc = EXPR_LOCATION (*expr_p);
3051 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3053 /* For reliable diagnostics during inlining, it is necessary that
3054 every call_expr be annotated with file and line. */
3055 if (! EXPR_HAS_LOCATION (*expr_p))
3056 SET_EXPR_LOCATION (*expr_p, input_location);
3058 /* Gimplify internal functions created in the FEs. */
3059 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3064 nargs = call_expr_nargs (*expr_p);
3065 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3066 auto_vec<tree> vargs (nargs);
3068 /* Ugh, sometimes FE can optimize conditional thus it doesn't
3069 depend on ISAN_*_CHECK return value. Don't fold the check in this
3070 case, but we'll probably want introduce temporary here. */
3071 if (ifn == IFN_ISAN_CHECK_ADD || ifn == IFN_ISAN_CHECK_SUB
3072 || ifn == IFN_ISAN_CHECK_MUL)
3075 for (i = 0; i < nargs; i++)
3077 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3078 EXPR_LOCATION (*expr_p));
3079 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3081 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
3082 gimplify_seq_add_stmt (pre_p, call);
3086 /* This may be a call to a builtin function.
3088 Builtin function calls may be transformed into different
3089 (and more efficient) builtin function calls under certain
3090 circumstances. Unfortunately, gimplification can muck things
3091 up enough that the builtin expanders are not aware that certain
3092 transformations are still valid.
3094 So we attempt transformation/gimplification of the call before
3095 we gimplify the CALL_EXPR. At this time we do not manage to
3096 transform all calls in the same manner as the expanders do, but
3097 we do transform most of them. */
3098 fndecl = get_callee_fndecl (*expr_p);
3100 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3101 switch (DECL_FUNCTION_CODE (fndecl))
3103 case BUILT_IN_ALLOCA:
3104 case BUILT_IN_ALLOCA_WITH_ALIGN:
3105 /* If the call has been built for a variable-sized object, then we
3106 want to restore the stack level when the enclosing BIND_EXPR is
3107 exited to reclaim the allocated space; otherwise, we precisely
3108 need to do the opposite and preserve the latest stack level. */
3109 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3110 gimplify_ctxp->save_stack = true;
3112 gimplify_ctxp->keep_stack = true;
3115 case BUILT_IN_VA_START:
3117 builtin_va_start_p = TRUE;
3118 if (call_expr_nargs (*expr_p) < 2)
3120 error ("too few arguments to function %<va_start%>");
3121 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3125 if (fold_builtin_next_arg (*expr_p, true))
3127 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3134 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
3135 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
3140 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
3141 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
3144 case BUILT_IN_FUNCTION:
3146 const char *function;
3147 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
3148 *expr_p = build_string_literal (strlen (function) + 1, function);
3154 if (fndecl && DECL_BUILT_IN (fndecl))
3156 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3157 if (new_tree && new_tree != *expr_p)
3159 /* There was a transformation of this call which computes the
3160 same value, but in a more efficient way. Return and try
3167 /* Remember the original function pointer type. */
3168 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3170 /* There is a sequence point before the call, so any side effects in
3171 the calling expression must occur before the actual call. Force
3172 gimplify_expr to use an internal post queue. */
3173 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3174 is_gimple_call_addr, fb_rvalue);
3176 nargs = call_expr_nargs (*expr_p);
3178 /* Get argument types for verification. */
3179 fndecl = get_callee_fndecl (*expr_p);
3182 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3184 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3186 if (fndecl && DECL_ARGUMENTS (fndecl))
3187 p = DECL_ARGUMENTS (fndecl);
3192 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3195 /* If the last argument is __builtin_va_arg_pack () and it is not
3196 passed as a named argument, decrease the number of CALL_EXPR
3197 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3200 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3202 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3203 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3206 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3207 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3208 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3210 tree call = *expr_p;
3213 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3214 CALL_EXPR_FN (call),
3215 nargs, CALL_EXPR_ARGP (call));
3217 /* Copy all CALL_EXPR flags, location and block, except
3218 CALL_EXPR_VA_ARG_PACK flag. */
3219 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3220 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3221 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3222 = CALL_EXPR_RETURN_SLOT_OPT (call);
3223 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3224 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3226 /* Set CALL_EXPR_VA_ARG_PACK. */
3227 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3231 /* Gimplify the function arguments. */
3234 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3235 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3236 PUSH_ARGS_REVERSED ? i-- : i++)
3238 enum gimplify_status t;
3240 /* Avoid gimplifying the second argument to va_start, which needs to
3241 be the plain PARM_DECL. */
3242 if ((i != 1) || !builtin_va_start_p)
3244 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3245 EXPR_LOCATION (*expr_p));
3253 /* Gimplify the static chain. */
3254 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3256 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3257 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3260 enum gimplify_status t;
3261 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3262 EXPR_LOCATION (*expr_p));
3268 /* Verify the function result. */
3269 if (want_value && fndecl
3270 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3272 error_at (loc, "using result of function returning %<void%>");
3276 /* Try this again in case gimplification exposed something. */
3277 if (ret != GS_ERROR)
3279 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3281 if (new_tree && new_tree != *expr_p)
3283 /* There was a transformation of this call which computes the
3284 same value, but in a more efficient way. Return and try
3292 *expr_p = error_mark_node;
3296 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3297 decl. This allows us to eliminate redundant or useless
3298 calls to "const" functions. */
3299 if (TREE_CODE (*expr_p) == CALL_EXPR)
3301 int flags = call_expr_flags (*expr_p);
3302 if (flags & (ECF_CONST | ECF_PURE)
3303 /* An infinite loop is considered a side effect. */
3304 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3305 TREE_SIDE_EFFECTS (*expr_p) = 0;
3308 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3309 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3310 form and delegate the creation of a GIMPLE_CALL to
3311 gimplify_modify_expr. This is always possible because when
3312 WANT_VALUE is true, the caller wants the result of this call into
3313 a temporary, which means that we will emit an INIT_EXPR in
3314 internal_get_tmp_var which will then be handled by
3315 gimplify_modify_expr. */
3318 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3319 have to do is replicate it as a GIMPLE_CALL tuple. */
3320 gimple_stmt_iterator gsi;
3321 call = gimple_build_call_from_tree (*expr_p);
3322 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3323 notice_special_calls (call);
3324 gimplify_seq_add_stmt (pre_p, call);
3325 gsi = gsi_last (*pre_p);
3326 maybe_fold_stmt (&gsi);
3327 *expr_p = NULL_TREE;
3330 /* Remember the original function type. */
3331 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3332 CALL_EXPR_FN (*expr_p));
3337 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3338 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3340 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3341 condition is true or false, respectively. If null, we should generate
3342 our own to skip over the evaluation of this specific expression.
3344 LOCUS is the source location of the COND_EXPR.
3346 This function is the tree equivalent of do_jump.
3348 shortcut_cond_r should only be called by shortcut_cond_expr. */
3351 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3354 tree local_label = NULL_TREE;
3355 tree t, expr = NULL;
3357 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3358 retain the shortcut semantics. Just insert the gotos here;
3359 shortcut_cond_expr will append the real blocks later. */
3360 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3362 location_t new_locus;
3364 /* Turn if (a && b) into
3366 if (a); else goto no;
3367 if (b) goto yes; else goto no;
3370 if (false_label_p == NULL)
3371 false_label_p = &local_label;
3373 /* Keep the original source location on the first 'if'. */
3374 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3375 append_to_statement_list (t, &expr);
3377 /* Set the source location of the && on the second 'if'. */
3378 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3379 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3381 append_to_statement_list (t, &expr);
3383 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3385 location_t new_locus;
3387 /* Turn if (a || b) into
3390 if (b) goto yes; else goto no;
3393 if (true_label_p == NULL)
3394 true_label_p = &local_label;
3396 /* Keep the original source location on the first 'if'. */
3397 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3398 append_to_statement_list (t, &expr);
3400 /* Set the source location of the || on the second 'if'. */
3401 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3402 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3404 append_to_statement_list (t, &expr);
3406 else if (TREE_CODE (pred) == COND_EXPR
3407 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3408 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3410 location_t new_locus;
3412 /* As long as we're messing with gotos, turn if (a ? b : c) into
3414 if (b) goto yes; else goto no;
3416 if (c) goto yes; else goto no;
3418 Don't do this if one of the arms has void type, which can happen
3419 in C++ when the arm is throw. */
3421 /* Keep the original source location on the first 'if'. Set the source
3422 location of the ? on the second 'if'. */
3423 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3424 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3425 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3426 false_label_p, locus),
3427 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3428 false_label_p, new_locus));
3432 expr = build3 (COND_EXPR, void_type_node, pred,
3433 build_and_jump (true_label_p),
3434 build_and_jump (false_label_p));
3435 SET_EXPR_LOCATION (expr, locus);
3440 t = build1 (LABEL_EXPR, void_type_node, local_label);
3441 append_to_statement_list (t, &expr);
3447 /* Given a conditional expression EXPR with short-circuit boolean
3448 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3449 predicate apart into the equivalent sequence of conditionals. */
3452 shortcut_cond_expr (tree expr)
3454 tree pred = TREE_OPERAND (expr, 0);
3455 tree then_ = TREE_OPERAND (expr, 1);
3456 tree else_ = TREE_OPERAND (expr, 2);
3457 tree true_label, false_label, end_label, t;
3459 tree *false_label_p;
3460 bool emit_end, emit_false, jump_over_else;
3461 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3462 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3464 /* First do simple transformations. */
3467 /* If there is no 'else', turn
3470 if (a) if (b) then c. */
3471 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3473 /* Keep the original source location on the first 'if'. */
3474 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3475 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3476 /* Set the source location of the && on the second 'if'. */
3477 if (EXPR_HAS_LOCATION (pred))
3478 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3479 then_ = shortcut_cond_expr (expr);
3480 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3481 pred = TREE_OPERAND (pred, 0);
3482 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3483 SET_EXPR_LOCATION (expr, locus);
3489 /* If there is no 'then', turn
3492 if (a); else if (b); else d. */
3493 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3495 /* Keep the original source location on the first 'if'. */
3496 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3497 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3498 /* Set the source location of the || on the second 'if'. */
3499 if (EXPR_HAS_LOCATION (pred))
3500 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3501 else_ = shortcut_cond_expr (expr);
3502 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3503 pred = TREE_OPERAND (pred, 0);
3504 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3505 SET_EXPR_LOCATION (expr, locus);
3509 /* If we're done, great. */
3510 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3511 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3514 /* Otherwise we need to mess with gotos. Change
3517 if (a); else goto no;
3520 and recursively gimplify the condition. */
3522 true_label = false_label = end_label = NULL_TREE;
3524 /* If our arms just jump somewhere, hijack those labels so we don't
3525 generate jumps to jumps. */
3528 && TREE_CODE (then_) == GOTO_EXPR
3529 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3531 true_label = GOTO_DESTINATION (then_);
3537 && TREE_CODE (else_) == GOTO_EXPR
3538 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3540 false_label = GOTO_DESTINATION (else_);
3545 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3547 true_label_p = &true_label;
3549 true_label_p = NULL;
3551 /* The 'else' branch also needs a label if it contains interesting code. */
3552 if (false_label || else_se)
3553 false_label_p = &false_label;
3555 false_label_p = NULL;
3557 /* If there was nothing else in our arms, just forward the label(s). */
3558 if (!then_se && !else_se)
3559 return shortcut_cond_r (pred, true_label_p, false_label_p,
3560 EXPR_LOC_OR_LOC (expr, input_location));
3562 /* If our last subexpression already has a terminal label, reuse it. */
3564 t = expr_last (else_);
3566 t = expr_last (then_);
3569 if (t && TREE_CODE (t) == LABEL_EXPR)
3570 end_label = LABEL_EXPR_LABEL (t);
3572 /* If we don't care about jumping to the 'else' branch, jump to the end
3573 if the condition is false. */
3575 false_label_p = &end_label;
3577 /* We only want to emit these labels if we aren't hijacking them. */
3578 emit_end = (end_label == NULL_TREE);
3579 emit_false = (false_label == NULL_TREE);
3581 /* We only emit the jump over the else clause if we have to--if the
3582 then clause may fall through. Otherwise we can wind up with a
3583 useless jump and a useless label at the end of gimplified code,
3584 which will cause us to think that this conditional as a whole
3585 falls through even if it doesn't. If we then inline a function
3586 which ends with such a condition, that can cause us to issue an
3587 inappropriate warning about control reaching the end of a
3588 non-void function. */
3589 jump_over_else = block_may_fallthru (then_);
3591 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3592 EXPR_LOC_OR_LOC (expr, input_location));
3595 append_to_statement_list (pred, &expr);
3597 append_to_statement_list (then_, &expr);
3602 tree last = expr_last (expr);
3603 t = build_and_jump (&end_label);
3604 if (EXPR_HAS_LOCATION (last))
3605 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3606 append_to_statement_list (t, &expr);
3610 t = build1 (LABEL_EXPR, void_type_node, false_label);
3611 append_to_statement_list (t, &expr);
3613 append_to_statement_list (else_, &expr);
3615 if (emit_end && end_label)
3617 t = build1 (LABEL_EXPR, void_type_node, end_label);
3618 append_to_statement_list (t, &expr);
3624 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3627 gimple_boolify (tree expr)
3629 tree type = TREE_TYPE (expr);
3630 location_t loc = EXPR_LOCATION (expr);
3632 if (TREE_CODE (expr) == NE_EXPR
3633 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3634 && integer_zerop (TREE_OPERAND (expr, 1)))
3636 tree call = TREE_OPERAND (expr, 0);
3637 tree fn = get_callee_fndecl (call);
3639 /* For __builtin_expect ((long) (x), y) recurse into x as well
3640 if x is truth_value_p. */
3642 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3643 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3644 && call_expr_nargs (call) == 2)
3646 tree arg = CALL_EXPR_ARG (call, 0);
3649 if (TREE_CODE (arg) == NOP_EXPR
3650 && TREE_TYPE (arg) == TREE_TYPE (call))
3651 arg = TREE_OPERAND (arg, 0);
3652 if (truth_value_p (TREE_CODE (arg)))
3654 arg = gimple_boolify (arg);
3655 CALL_EXPR_ARG (call, 0)
3656 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3662 switch (TREE_CODE (expr))
3664 case TRUTH_AND_EXPR:
3666 case TRUTH_XOR_EXPR:
3667 case TRUTH_ANDIF_EXPR:
3668 case TRUTH_ORIF_EXPR:
3669 /* Also boolify the arguments of truth exprs. */
3670 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3673 case TRUTH_NOT_EXPR:
3674 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3676 /* These expressions always produce boolean results. */
3677 if (TREE_CODE (type) != BOOLEAN_TYPE)
3678 TREE_TYPE (expr) = boolean_type_node;
3682 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3684 case annot_expr_ivdep_kind:
3685 case annot_expr_no_vector_kind:
3686 case annot_expr_vector_kind:
3687 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3688 if (TREE_CODE (type) != BOOLEAN_TYPE)
3689 TREE_TYPE (expr) = boolean_type_node;
3696 if (COMPARISON_CLASS_P (expr))
3698 /* There expressions always prduce boolean results. */
3699 if (TREE_CODE (type) != BOOLEAN_TYPE)
3700 TREE_TYPE (expr) = boolean_type_node;
3703 /* Other expressions that get here must have boolean values, but
3704 might need to be converted to the appropriate mode. */
3705 if (TREE_CODE (type) == BOOLEAN_TYPE)
3707 return fold_convert_loc (loc, boolean_type_node, expr);
3711 /* Given a conditional expression *EXPR_P without side effects, gimplify
3712 its operands. New statements are inserted to PRE_P. */
3714 static enum gimplify_status
3715 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3717 tree expr = *expr_p, cond;
3718 enum gimplify_status ret, tret;
3719 enum tree_code code;
3721 cond = gimple_boolify (COND_EXPR_COND (expr));
3723 /* We need to handle && and || specially, as their gimplification
3724 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3725 code = TREE_CODE (cond);
3726 if (code == TRUTH_ANDIF_EXPR)
3727 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3728 else if (code == TRUTH_ORIF_EXPR)
3729 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3730 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3731 COND_EXPR_COND (*expr_p) = cond;
3733 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3734 is_gimple_val, fb_rvalue);
3735 ret = MIN (ret, tret);
3736 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3737 is_gimple_val, fb_rvalue);
3739 return MIN (ret, tret);
3742 /* Return true if evaluating EXPR could trap.
3743 EXPR is GENERIC, while tree_could_trap_p can be called
3747 generic_expr_could_trap_p (tree expr)
3751 if (!expr || is_gimple_val (expr))
3754 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3757 n = TREE_OPERAND_LENGTH (expr);
3758 for (i = 0; i < n; i++)
3759 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3765 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3774 The second form is used when *EXPR_P is of type void.
3776 PRE_P points to the list where side effects that must happen before
3777 *EXPR_P should be stored. */
3779 static enum gimplify_status
3780 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3782 tree expr = *expr_p;
3783 tree type = TREE_TYPE (expr);
3784 location_t loc = EXPR_LOCATION (expr);
3785 tree tmp, arm1, arm2;
3786 enum gimplify_status ret;
3787 tree label_true, label_false, label_cont;
3788 bool have_then_clause_p, have_else_clause_p;
3790 enum tree_code pred_code;
3791 gimple_seq seq = NULL;
3793 /* If this COND_EXPR has a value, copy the values into a temporary within
3795 if (!VOID_TYPE_P (type))
3797 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3800 /* If either an rvalue is ok or we do not require an lvalue, create the
3801 temporary. But we cannot do that if the type is addressable. */
3802 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3803 && !TREE_ADDRESSABLE (type))
3805 if (gimplify_ctxp->allow_rhs_cond_expr
3806 /* If either branch has side effects or could trap, it can't be
3807 evaluated unconditionally. */
3808 && !TREE_SIDE_EFFECTS (then_)
3809 && !generic_expr_could_trap_p (then_)
3810 && !TREE_SIDE_EFFECTS (else_)
3811 && !generic_expr_could_trap_p (else_))
3812 return gimplify_pure_cond_expr (expr_p, pre_p);
3814 tmp = create_tmp_var (type, "iftmp");
3818 /* Otherwise, only create and copy references to the values. */
3821 type = build_pointer_type (type);
3823 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3824 then_ = build_fold_addr_expr_loc (loc, then_);
3826 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3827 else_ = build_fold_addr_expr_loc (loc, else_);
3830 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3832 tmp = create_tmp_var (type, "iftmp");
3833 result = build_simple_mem_ref_loc (loc, tmp);
3836 /* Build the new then clause, `tmp = then_;'. But don't build the
3837 assignment if the value is void; in C++ it can be if it's a throw. */
3838 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3839 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3841 /* Similarly, build the new else clause, `tmp = else_;'. */
3842 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3843 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3845 TREE_TYPE (expr) = void_type_node;
3846 recalculate_side_effects (expr);
3848 /* Move the COND_EXPR to the prequeue. */
3849 gimplify_stmt (&expr, pre_p);
3855 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3856 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3857 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3858 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3860 /* Make sure the condition has BOOLEAN_TYPE. */
3861 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3863 /* Break apart && and || conditions. */
3864 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3865 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3867 expr = shortcut_cond_expr (expr);
3869 if (expr != *expr_p)
3873 /* We can't rely on gimplify_expr to re-gimplify the expanded
3874 form properly, as cleanups might cause the target labels to be
3875 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3876 set up a conditional context. */
3877 gimple_push_condition ();
3878 gimplify_stmt (expr_p, &seq);
3879 gimple_pop_condition (pre_p);
3880 gimple_seq_add_seq (pre_p, seq);
3886 /* Now do the normal gimplification. */
3888 /* Gimplify condition. */
3889 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3891 if (ret == GS_ERROR)
3893 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3895 gimple_push_condition ();
3897 have_then_clause_p = have_else_clause_p = false;
3898 if (TREE_OPERAND (expr, 1) != NULL
3899 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3900 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3901 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3902 == current_function_decl)
3903 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3904 have different locations, otherwise we end up with incorrect
3905 location information on the branches. */
3907 || !EXPR_HAS_LOCATION (expr)
3908 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3909 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3911 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3912 have_then_clause_p = true;
3915 label_true = create_artificial_label (UNKNOWN_LOCATION);
3916 if (TREE_OPERAND (expr, 2) != NULL
3917 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3918 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3919 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3920 == current_function_decl)
3921 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3922 have different locations, otherwise we end up with incorrect
3923 location information on the branches. */
3925 || !EXPR_HAS_LOCATION (expr)
3926 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3927 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3929 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3930 have_else_clause_p = true;
3933 label_false = create_artificial_label (UNKNOWN_LOCATION);
3935 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3937 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3939 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3940 gimplify_seq_add_stmt (&seq, cond_stmt);
3941 gimple_stmt_iterator gsi = gsi_last (seq);
3942 maybe_fold_stmt (&gsi);
3944 label_cont = NULL_TREE;
3945 if (!have_then_clause_p)
3947 /* For if (...) {} else { code; } put label_true after
3949 if (TREE_OPERAND (expr, 1) == NULL_TREE
3950 && !have_else_clause_p
3951 && TREE_OPERAND (expr, 2) != NULL_TREE)
3952 label_cont = label_true;
3955 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3956 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3957 /* For if (...) { code; } else {} or
3958 if (...) { code; } else goto label; or
3959 if (...) { code; return; } else { ... }
3960 label_cont isn't needed. */
3961 if (!have_else_clause_p
3962 && TREE_OPERAND (expr, 2) != NULL_TREE
3963 && gimple_seq_may_fallthru (seq))
3966 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3968 g = gimple_build_goto (label_cont);
3970 /* GIMPLE_COND's are very low level; they have embedded
3971 gotos. This particular embedded goto should not be marked
3972 with the location of the original COND_EXPR, as it would
3973 correspond to the COND_EXPR's condition, not the ELSE or the
3974 THEN arms. To avoid marking it with the wrong location, flag
3975 it as "no location". */
3976 gimple_set_do_not_emit_location (g);
3978 gimplify_seq_add_stmt (&seq, g);
3982 if (!have_else_clause_p)
3984 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3985 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3988 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3990 gimple_pop_condition (pre_p);
3991 gimple_seq_add_seq (pre_p, seq);
3993 if (ret == GS_ERROR)
3995 else if (have_then_clause_p || have_else_clause_p)
3999 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4000 expr = TREE_OPERAND (expr, 0);
4001 gimplify_stmt (&expr, pre_p);
4008 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4009 to be marked addressable.
4011 We cannot rely on such an expression being directly markable if a temporary
4012 has been created by the gimplification. In this case, we create another
4013 temporary and initialize it with a copy, which will become a store after we
4014 mark it addressable. This can happen if the front-end passed us something
4015 that it could not mark addressable yet, like a Fortran pass-by-reference
4016 parameter (int) floatvar. */
4019 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4021 while (handled_component_p (*expr_p))
4022 expr_p = &TREE_OPERAND (*expr_p, 0);
4023 if (is_gimple_reg (*expr_p))
4025 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
4026 DECL_GIMPLE_REG_P (var) = 0;
4031 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4032 a call to __builtin_memcpy. */
4034 static enum gimplify_status
4035 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4038 tree t, to, to_ptr, from, from_ptr;
4040 location_t loc = EXPR_LOCATION (*expr_p);
4042 to = TREE_OPERAND (*expr_p, 0);
4043 from = TREE_OPERAND (*expr_p, 1);
4045 /* Mark the RHS addressable. Beware that it may not be possible to do so
4046 directly if a temporary has been created by the gimplification. */
4047 prepare_gimple_addressable (&from, seq_p);
4049 mark_addressable (from);
4050 from_ptr = build_fold_addr_expr_loc (loc, from);
4051 gimplify_arg (&from_ptr, seq_p, loc);
4053 mark_addressable (to);
4054 to_ptr = build_fold_addr_expr_loc (loc, to);
4055 gimplify_arg (&to_ptr, seq_p, loc);
4057 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4059 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4063 /* tmp = memcpy() */
4064 t = create_tmp_var (TREE_TYPE (to_ptr));
4065 gimple_call_set_lhs (gs, t);
4066 gimplify_seq_add_stmt (seq_p, gs);
4068 *expr_p = build_simple_mem_ref (t);
4072 gimplify_seq_add_stmt (seq_p, gs);
4077 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4078 a call to __builtin_memset. In this case we know that the RHS is
4079 a CONSTRUCTOR with an empty element list. */
4081 static enum gimplify_status
4082 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4085 tree t, from, to, to_ptr;
4087 location_t loc = EXPR_LOCATION (*expr_p);
4089 /* Assert our assumptions, to abort instead of producing wrong code
4090 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4091 not be immediately exposed. */
4092 from = TREE_OPERAND (*expr_p, 1);
4093 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4094 from = TREE_OPERAND (from, 0);
4096 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4097 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4100 to = TREE_OPERAND (*expr_p, 0);
4102 to_ptr = build_fold_addr_expr_loc (loc, to);
4103 gimplify_arg (&to_ptr, seq_p, loc);
4104 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4106 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4110 /* tmp = memset() */
4111 t = create_tmp_var (TREE_TYPE (to_ptr));
4112 gimple_call_set_lhs (gs, t);
4113 gimplify_seq_add_stmt (seq_p, gs);
4115 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4119 gimplify_seq_add_stmt (seq_p, gs);
4124 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4125 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4126 assignment. Return non-null if we detect a potential overlap. */
4128 struct gimplify_init_ctor_preeval_data
4130 /* The base decl of the lhs object. May be NULL, in which case we
4131 have to assume the lhs is indirect. */
4134 /* The alias set of the lhs object. */
4135 alias_set_type lhs_alias_set;
4139 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4141 struct gimplify_init_ctor_preeval_data *data
4142 = (struct gimplify_init_ctor_preeval_data *) xdata;
4145 /* If we find the base object, obviously we have overlap. */
4146 if (data->lhs_base_decl == t)
4149 /* If the constructor component is indirect, determine if we have a
4150 potential overlap with the lhs. The only bits of information we
4151 have to go on at this point are addressability and alias sets. */
4152 if ((INDIRECT_REF_P (t)
4153 || TREE_CODE (t) == MEM_REF)
4154 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4155 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4158 /* If the constructor component is a call, determine if it can hide a
4159 potential overlap with the lhs through an INDIRECT_REF like above.
4160 ??? Ugh - this is completely broken. In fact this whole analysis
4161 doesn't look conservative. */
4162 if (TREE_CODE (t) == CALL_EXPR)
4164 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4166 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4167 if (POINTER_TYPE_P (TREE_VALUE (type))
4168 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4169 && alias_sets_conflict_p (data->lhs_alias_set,
4171 (TREE_TYPE (TREE_VALUE (type)))))
4175 if (IS_TYPE_OR_DECL_P (t))
4180 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4181 force values that overlap with the lhs (as described by *DATA)
4182 into temporaries. */
4185 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4186 struct gimplify_init_ctor_preeval_data *data)
4188 enum gimplify_status one;
4190 /* If the value is constant, then there's nothing to pre-evaluate. */
4191 if (TREE_CONSTANT (*expr_p))
4193 /* Ensure it does not have side effects, it might contain a reference to
4194 the object we're initializing. */
4195 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4199 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4200 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4203 /* Recurse for nested constructors. */
4204 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4206 unsigned HOST_WIDE_INT ix;
4207 constructor_elt *ce;
4208 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4210 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4211 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4216 /* If this is a variable sized type, we must remember the size. */
4217 maybe_with_size_expr (expr_p);
4219 /* Gimplify the constructor element to something appropriate for the rhs
4220 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4221 the gimplifier will consider this a store to memory. Doing this
4222 gimplification now means that we won't have to deal with complicated
4223 language-specific trees, nor trees like SAVE_EXPR that can induce
4224 exponential search behavior. */
4225 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4226 if (one == GS_ERROR)
4232 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4233 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4234 always be true for all scalars, since is_gimple_mem_rhs insists on a
4235 temporary variable for them. */
4236 if (DECL_P (*expr_p))
4239 /* If this is of variable size, we have no choice but to assume it doesn't
4240 overlap since we can't make a temporary for it. */
4241 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4244 /* Otherwise, we must search for overlap ... */
4245 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4248 /* ... and if found, force the value into a temporary. */
4249 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4252 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4253 a RANGE_EXPR in a CONSTRUCTOR for an array.
4257 object[var] = value;
4264 We increment var _after_ the loop exit check because we might otherwise
4265 fail if upper == TYPE_MAX_VALUE (type for upper).
4267 Note that we never have to deal with SAVE_EXPRs here, because this has
4268 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4270 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4271 gimple_seq *, bool);
4274 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4275 tree value, tree array_elt_type,
4276 gimple_seq *pre_p, bool cleared)
4278 tree loop_entry_label, loop_exit_label, fall_thru_label;
4279 tree var, var_type, cref, tmp;
4281 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4282 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4283 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4285 /* Create and initialize the index variable. */
4286 var_type = TREE_TYPE (upper);
4287 var = create_tmp_var (var_type);
4288 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4290 /* Add the loop entry label. */
4291 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4293 /* Build the reference. */
4294 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4295 var, NULL_TREE, NULL_TREE);
4297 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4298 the store. Otherwise just assign value to the reference. */
4300 if (TREE_CODE (value) == CONSTRUCTOR)
4301 /* NB we might have to call ourself recursively through
4302 gimplify_init_ctor_eval if the value is a constructor. */
4303 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4306 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4308 /* We exit the loop when the index var is equal to the upper bound. */
4309 gimplify_seq_add_stmt (pre_p,
4310 gimple_build_cond (EQ_EXPR, var, upper,
4311 loop_exit_label, fall_thru_label));
4313 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4315 /* Otherwise, increment the index var... */
4316 tmp = build2 (PLUS_EXPR, var_type, var,
4317 fold_convert (var_type, integer_one_node));
4318 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4320 /* ...and jump back to the loop entry. */
4321 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4323 /* Add the loop exit label. */
4324 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4327 /* Return true if FDECL is accessing a field that is zero sized. */
4330 zero_sized_field_decl (const_tree fdecl)
4332 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4333 && integer_zerop (DECL_SIZE (fdecl)))
4338 /* Return true if TYPE is zero sized. */
4341 zero_sized_type (const_tree type)
4343 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4344 && integer_zerop (TYPE_SIZE (type)))
4349 /* A subroutine of gimplify_init_constructor. Generate individual
4350 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4351 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4352 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4356 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4357 gimple_seq *pre_p, bool cleared)
4359 tree array_elt_type = NULL;
4360 unsigned HOST_WIDE_INT ix;
4361 tree purpose, value;
4363 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4364 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4366 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4370 /* NULL values are created above for gimplification errors. */
4374 if (cleared && initializer_zerop (value))
4377 /* ??? Here's to hoping the front end fills in all of the indices,
4378 so we don't have to figure out what's missing ourselves. */
4379 gcc_assert (purpose);
4381 /* Skip zero-sized fields, unless value has side-effects. This can
4382 happen with calls to functions returning a zero-sized type, which
4383 we shouldn't discard. As a number of downstream passes don't
4384 expect sets of zero-sized fields, we rely on the gimplification of
4385 the MODIFY_EXPR we make below to drop the assignment statement. */
4386 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4389 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4391 if (TREE_CODE (purpose) == RANGE_EXPR)
4393 tree lower = TREE_OPERAND (purpose, 0);
4394 tree upper = TREE_OPERAND (purpose, 1);
4396 /* If the lower bound is equal to upper, just treat it as if
4397 upper was the index. */
4398 if (simple_cst_equal (lower, upper))
4402 gimplify_init_ctor_eval_range (object, lower, upper, value,
4403 array_elt_type, pre_p, cleared);
4410 /* Do not use bitsizetype for ARRAY_REF indices. */
4411 if (TYPE_DOMAIN (TREE_TYPE (object)))
4413 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4415 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4416 purpose, NULL_TREE, NULL_TREE);
4420 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4421 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4422 unshare_expr (object), purpose, NULL_TREE);
4425 if (TREE_CODE (value) == CONSTRUCTOR
4426 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4427 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4431 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4432 gimplify_and_add (init, pre_p);
4438 /* Return the appropriate RHS predicate for this LHS. */
4441 rhs_predicate_for (tree lhs)
4443 if (is_gimple_reg (lhs))
4444 return is_gimple_reg_rhs_or_call;
4446 return is_gimple_mem_rhs_or_call;
4449 /* Gimplify a C99 compound literal expression. This just means adding
4450 the DECL_EXPR before the current statement and using its anonymous
4453 static enum gimplify_status
4454 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4455 bool (*gimple_test_f) (tree),
4456 fallback_t fallback)
4458 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4459 tree decl = DECL_EXPR_DECL (decl_s);
4460 tree init = DECL_INITIAL (decl);
4461 /* Mark the decl as addressable if the compound literal
4462 expression is addressable now, otherwise it is marked too late
4463 after we gimplify the initialization expression. */
4464 if (TREE_ADDRESSABLE (*expr_p))
4465 TREE_ADDRESSABLE (decl) = 1;
4466 /* Otherwise, if we don't need an lvalue and have a literal directly
4467 substitute it. Check if it matches the gimple predicate, as
4468 otherwise we'd generate a new temporary, and we can as well just
4469 use the decl we already have. */
4470 else if (!TREE_ADDRESSABLE (decl)
4472 && (fallback & fb_lvalue) == 0
4473 && gimple_test_f (init))
4479 /* Preliminarily mark non-addressed complex variables as eligible
4480 for promotion to gimple registers. We'll transform their uses
4482 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4483 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4484 && !TREE_THIS_VOLATILE (decl)
4485 && !needs_to_live_in_memory (decl))
4486 DECL_GIMPLE_REG_P (decl) = 1;
4488 /* If the decl is not addressable, then it is being used in some
4489 expression or on the right hand side of a statement, and it can
4490 be put into a readonly data section. */
4491 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4492 TREE_READONLY (decl) = 1;
4494 /* This decl isn't mentioned in the enclosing block, so add it to the
4495 list of temps. FIXME it seems a bit of a kludge to say that
4496 anonymous artificial vars aren't pushed, but everything else is. */
4497 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4498 gimple_add_tmp_var (decl);
4500 gimplify_and_add (decl_s, pre_p);
4505 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4506 return a new CONSTRUCTOR if something changed. */
4509 optimize_compound_literals_in_ctor (tree orig_ctor)
4511 tree ctor = orig_ctor;
4512 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4513 unsigned int idx, num = vec_safe_length (elts);
4515 for (idx = 0; idx < num; idx++)
4517 tree value = (*elts)[idx].value;
4518 tree newval = value;
4519 if (TREE_CODE (value) == CONSTRUCTOR)
4520 newval = optimize_compound_literals_in_ctor (value);
4521 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4523 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4524 tree decl = DECL_EXPR_DECL (decl_s);
4525 tree init = DECL_INITIAL (decl);
4527 if (!TREE_ADDRESSABLE (value)
4528 && !TREE_ADDRESSABLE (decl)
4530 && TREE_CODE (init) == CONSTRUCTOR)
4531 newval = optimize_compound_literals_in_ctor (init);
4533 if (newval == value)
4536 if (ctor == orig_ctor)
4538 ctor = copy_node (orig_ctor);
4539 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4540 elts = CONSTRUCTOR_ELTS (ctor);
4542 (*elts)[idx].value = newval;
4547 /* A subroutine of gimplify_modify_expr. Break out elements of a
4548 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4550 Note that we still need to clear any elements that don't have explicit
4551 initializers, so if not all elements are initialized we keep the
4552 original MODIFY_EXPR, we just remove all of the constructor elements.
4554 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4555 GS_ERROR if we would have to create a temporary when gimplifying
4556 this constructor. Otherwise, return GS_OK.
4558 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4560 static enum gimplify_status
4561 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4562 bool want_value, bool notify_temp_creation)
4564 tree object, ctor, type;
4565 enum gimplify_status ret;
4566 vec<constructor_elt, va_gc> *elts;
4568 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4570 if (!notify_temp_creation)
4572 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4573 is_gimple_lvalue, fb_lvalue);
4574 if (ret == GS_ERROR)
4578 object = TREE_OPERAND (*expr_p, 0);
4579 ctor = TREE_OPERAND (*expr_p, 1) =
4580 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4581 type = TREE_TYPE (ctor);
4582 elts = CONSTRUCTOR_ELTS (ctor);
4585 switch (TREE_CODE (type))
4589 case QUAL_UNION_TYPE:
4592 struct gimplify_init_ctor_preeval_data preeval_data;
4593 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4594 bool cleared, complete_p, valid_const_initializer;
4596 /* Aggregate types must lower constructors to initialization of
4597 individual elements. The exception is that a CONSTRUCTOR node
4598 with no elements indicates zero-initialization of the whole. */
4599 if (vec_safe_is_empty (elts))
4601 if (notify_temp_creation)
4606 /* Fetch information about the constructor to direct later processing.
4607 We might want to make static versions of it in various cases, and
4608 can only do so if it known to be a valid constant initializer. */
4609 valid_const_initializer
4610 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4611 &num_ctor_elements, &complete_p);
4613 /* If a const aggregate variable is being initialized, then it
4614 should never be a lose to promote the variable to be static. */
4615 if (valid_const_initializer
4616 && num_nonzero_elements > 1
4617 && TREE_READONLY (object)
4618 && TREE_CODE (object) == VAR_DECL
4619 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4621 if (notify_temp_creation)
4623 DECL_INITIAL (object) = ctor;
4624 TREE_STATIC (object) = 1;
4625 if (!DECL_NAME (object))
4626 DECL_NAME (object) = create_tmp_var_name ("C");
4627 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4629 /* ??? C++ doesn't automatically append a .<number> to the
4630 assembler name, and even when it does, it looks at FE private
4631 data structures to figure out what that number should be,
4632 which are not set for this variable. I suppose this is
4633 important for local statics for inline functions, which aren't
4634 "local" in the object file sense. So in order to get a unique
4635 TU-local symbol, we must invoke the lhd version now. */
4636 lhd_set_decl_assembler_name (object);
4638 *expr_p = NULL_TREE;
4642 /* If there are "lots" of initialized elements, even discounting
4643 those that are not address constants (and thus *must* be
4644 computed at runtime), then partition the constructor into
4645 constant and non-constant parts. Block copy the constant
4646 parts in, then generate code for the non-constant parts. */
4647 /* TODO. There's code in cp/typeck.c to do this. */
4649 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4650 /* store_constructor will ignore the clearing of variable-sized
4651 objects. Initializers for such objects must explicitly set
4652 every field that needs to be set. */
4654 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4655 /* If the constructor isn't complete, clear the whole object
4656 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4658 ??? This ought not to be needed. For any element not present
4659 in the initializer, we should simply set them to zero. Except
4660 we'd need to *find* the elements that are not present, and that
4661 requires trickery to avoid quadratic compile-time behavior in
4662 large cases or excessive memory use in small cases. */
4664 else if (num_ctor_elements - num_nonzero_elements
4665 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4666 && num_nonzero_elements < num_ctor_elements / 4)
4667 /* If there are "lots" of zeros, it's more efficient to clear
4668 the memory and then set the nonzero elements. */
4673 /* If there are "lots" of initialized elements, and all of them
4674 are valid address constants, then the entire initializer can
4675 be dropped to memory, and then memcpy'd out. Don't do this
4676 for sparse arrays, though, as it's more efficient to follow
4677 the standard CONSTRUCTOR behavior of memset followed by
4678 individual element initialization. Also don't do this for small
4679 all-zero initializers (which aren't big enough to merit
4680 clearing), and don't try to make bitwise copies of
4681 TREE_ADDRESSABLE types.
4683 We cannot apply such transformation when compiling chkp static
4684 initializer because creation of initializer image in the memory
4685 will require static initialization of bounds for it. It should
4686 result in another gimplification of similar initializer and we
4687 may fall into infinite loop. */
4688 if (valid_const_initializer
4689 && !(cleared || num_nonzero_elements == 0)
4690 && !TREE_ADDRESSABLE (type)
4691 && (!current_function_decl
4692 || !lookup_attribute ("chkp ctor",
4693 DECL_ATTRIBUTES (current_function_decl))))
4695 HOST_WIDE_INT size = int_size_in_bytes (type);
4698 /* ??? We can still get unbounded array types, at least
4699 from the C++ front end. This seems wrong, but attempt
4700 to work around it for now. */
4703 size = int_size_in_bytes (TREE_TYPE (object));
4705 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4708 /* Find the maximum alignment we can assume for the object. */
4709 /* ??? Make use of DECL_OFFSET_ALIGN. */
4710 if (DECL_P (object))
4711 align = DECL_ALIGN (object);
4713 align = TYPE_ALIGN (type);
4715 /* Do a block move either if the size is so small as to make
4716 each individual move a sub-unit move on average, or if it
4717 is so large as to make individual moves inefficient. */
4719 && num_nonzero_elements > 1
4720 && (size < num_nonzero_elements
4721 || !can_move_by_pieces (size, align)))
4723 if (notify_temp_creation)
4726 walk_tree (&ctor, force_labels_r, NULL, NULL);
4727 ctor = tree_output_constant_def (ctor);
4728 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4729 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4730 TREE_OPERAND (*expr_p, 1) = ctor;
4732 /* This is no longer an assignment of a CONSTRUCTOR, but
4733 we still may have processing to do on the LHS. So
4734 pretend we didn't do anything here to let that happen. */
4735 return GS_UNHANDLED;
4739 /* If the target is volatile, we have non-zero elements and more than
4740 one field to assign, initialize the target from a temporary. */
4741 if (TREE_THIS_VOLATILE (object)
4742 && !TREE_ADDRESSABLE (type)
4743 && num_nonzero_elements > 0
4744 && vec_safe_length (elts) > 1)
4746 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4747 TREE_OPERAND (*expr_p, 0) = temp;
4748 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4750 build2 (MODIFY_EXPR, void_type_node,
4755 if (notify_temp_creation)
4758 /* If there are nonzero elements and if needed, pre-evaluate to capture
4759 elements overlapping with the lhs into temporaries. We must do this
4760 before clearing to fetch the values before they are zeroed-out. */
4761 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4763 preeval_data.lhs_base_decl = get_base_address (object);
4764 if (!DECL_P (preeval_data.lhs_base_decl))
4765 preeval_data.lhs_base_decl = NULL;
4766 preeval_data.lhs_alias_set = get_alias_set (object);
4768 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4769 pre_p, post_p, &preeval_data);
4772 bool ctor_has_side_effects_p
4773 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4777 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4778 Note that we still have to gimplify, in order to handle the
4779 case of variable sized types. Avoid shared tree structures. */
4780 CONSTRUCTOR_ELTS (ctor) = NULL;
4781 TREE_SIDE_EFFECTS (ctor) = 0;
4782 object = unshare_expr (object);
4783 gimplify_stmt (expr_p, pre_p);
4786 /* If we have not block cleared the object, or if there are nonzero
4787 elements in the constructor, or if the constructor has side effects,
4788 add assignments to the individual scalar fields of the object. */
4790 || num_nonzero_elements > 0
4791 || ctor_has_side_effects_p)
4792 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4794 *expr_p = NULL_TREE;
4802 if (notify_temp_creation)
4805 /* Extract the real and imaginary parts out of the ctor. */
4806 gcc_assert (elts->length () == 2);
4807 r = (*elts)[0].value;
4808 i = (*elts)[1].value;
4809 if (r == NULL || i == NULL)
4811 tree zero = build_zero_cst (TREE_TYPE (type));
4818 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4819 represent creation of a complex value. */
4820 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4822 ctor = build_complex (type, r, i);
4823 TREE_OPERAND (*expr_p, 1) = ctor;
4827 ctor = build2 (COMPLEX_EXPR, type, r, i);
4828 TREE_OPERAND (*expr_p, 1) = ctor;
4829 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4832 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4840 unsigned HOST_WIDE_INT ix;
4841 constructor_elt *ce;
4843 if (notify_temp_creation)
4846 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4847 if (TREE_CONSTANT (ctor))
4849 bool constant_p = true;
4852 /* Even when ctor is constant, it might contain non-*_CST
4853 elements, such as addresses or trapping values like
4854 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4855 in VECTOR_CST nodes. */
4856 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4857 if (!CONSTANT_CLASS_P (value))
4865 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4869 TREE_CONSTANT (ctor) = 0;
4872 /* Vector types use CONSTRUCTOR all the way through gimple
4873 compilation as a general initializer. */
4874 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4876 enum gimplify_status tret;
4877 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4879 if (tret == GS_ERROR)
4881 else if (TREE_STATIC (ctor)
4882 && !initializer_constant_valid_p (ce->value,
4883 TREE_TYPE (ce->value)))
4884 TREE_STATIC (ctor) = 0;
4886 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4887 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4892 /* So how did we get a CONSTRUCTOR for a scalar type? */
4896 if (ret == GS_ERROR)
4898 /* If we have gimplified both sides of the initializer but have
4899 not emitted an assignment, do so now. */
4902 tree lhs = TREE_OPERAND (*expr_p, 0);
4903 tree rhs = TREE_OPERAND (*expr_p, 1);
4904 gassign *init = gimple_build_assign (lhs, rhs);
4905 gimplify_seq_add_stmt (pre_p, init);
4919 /* Given a pointer value OP0, return a simplified version of an
4920 indirection through OP0, or NULL_TREE if no simplification is
4921 possible. This may only be applied to a rhs of an expression.
4922 Note that the resulting type may be different from the type pointed
4923 to in the sense that it is still compatible from the langhooks
4927 gimple_fold_indirect_ref_rhs (tree t)
4929 return gimple_fold_indirect_ref (t);
4932 /* Subroutine of gimplify_modify_expr to do simplifications of
4933 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4934 something changes. */
4936 static enum gimplify_status
4937 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4938 gimple_seq *pre_p, gimple_seq *post_p,
4941 enum gimplify_status ret = GS_UNHANDLED;
4947 switch (TREE_CODE (*from_p))
4950 /* If we're assigning from a read-only variable initialized with
4951 a constructor, do the direct assignment from the constructor,
4952 but only if neither source nor target are volatile since this
4953 latter assignment might end up being done on a per-field basis. */
4954 if (DECL_INITIAL (*from_p)
4955 && TREE_READONLY (*from_p)
4956 && !TREE_THIS_VOLATILE (*from_p)
4957 && !TREE_THIS_VOLATILE (*to_p)
4958 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4960 tree old_from = *from_p;
4961 enum gimplify_status subret;
4963 /* Move the constructor into the RHS. */
4964 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4966 /* Let's see if gimplify_init_constructor will need to put
4968 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4970 if (subret == GS_ERROR)
4972 /* If so, revert the change. */
4984 /* If we have code like
4988 where the type of "x" is a (possibly cv-qualified variant
4989 of "A"), treat the entire expression as identical to "x".
4990 This kind of code arises in C++ when an object is bound
4991 to a const reference, and if "x" is a TARGET_EXPR we want
4992 to take advantage of the optimization below. */
4993 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4994 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4997 if (TREE_THIS_VOLATILE (t) != volatile_p)
5000 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5001 build_fold_addr_expr (t));
5002 if (REFERENCE_CLASS_P (t))
5003 TREE_THIS_VOLATILE (t) = volatile_p;
5014 /* If we are initializing something from a TARGET_EXPR, strip the
5015 TARGET_EXPR and initialize it directly, if possible. This can't
5016 be done if the initializer is void, since that implies that the
5017 temporary is set in some non-trivial way.
5019 ??? What about code that pulls out the temp and uses it
5020 elsewhere? I think that such code never uses the TARGET_EXPR as
5021 an initializer. If I'm wrong, we'll die because the temp won't
5022 have any RTL. In that case, I guess we'll need to replace
5023 references somehow. */
5024 tree init = TARGET_EXPR_INITIAL (*from_p);
5027 && !VOID_TYPE_P (TREE_TYPE (init)))
5037 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5039 gimplify_compound_expr (from_p, pre_p, true);
5045 /* If we already made some changes, let the front end have a
5046 crack at this before we break it down. */
5047 if (ret != GS_UNHANDLED)
5049 /* If we're initializing from a CONSTRUCTOR, break this into
5050 individual MODIFY_EXPRs. */
5051 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5055 /* If we're assigning to a non-register type, push the assignment
5056 down into the branches. This is mandatory for ADDRESSABLE types,
5057 since we cannot generate temporaries for such, but it saves a
5058 copy in other cases as well. */
5059 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5061 /* This code should mirror the code in gimplify_cond_expr. */
5062 enum tree_code code = TREE_CODE (*expr_p);
5063 tree cond = *from_p;
5064 tree result = *to_p;
5066 ret = gimplify_expr (&result, pre_p, post_p,
5067 is_gimple_lvalue, fb_lvalue);
5068 if (ret != GS_ERROR)
5071 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5072 TREE_OPERAND (cond, 1)
5073 = build2 (code, void_type_node, result,
5074 TREE_OPERAND (cond, 1));
5075 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5076 TREE_OPERAND (cond, 2)
5077 = build2 (code, void_type_node, unshare_expr (result),
5078 TREE_OPERAND (cond, 2));
5080 TREE_TYPE (cond) = void_type_node;
5081 recalculate_side_effects (cond);
5085 gimplify_and_add (cond, pre_p);
5086 *expr_p = unshare_expr (result);
5095 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5096 return slot so that we don't generate a temporary. */
5097 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5098 && aggregate_value_p (*from_p, *from_p))
5102 if (!(rhs_predicate_for (*to_p))(*from_p))
5103 /* If we need a temporary, *to_p isn't accurate. */
5105 /* It's OK to use the return slot directly unless it's an NRV. */
5106 else if (TREE_CODE (*to_p) == RESULT_DECL
5107 && DECL_NAME (*to_p) == NULL_TREE
5108 && needs_to_live_in_memory (*to_p))
5110 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5111 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5112 /* Don't force regs into memory. */
5114 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5115 /* It's OK to use the target directly if it's being
5118 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5120 /* Always use the target and thus RSO for variable-sized types.
5121 GIMPLE cannot deal with a variable-sized assignment
5122 embedded in a call statement. */
5124 else if (TREE_CODE (*to_p) != SSA_NAME
5125 && (!is_gimple_variable (*to_p)
5126 || needs_to_live_in_memory (*to_p)))
5127 /* Don't use the original target if it's already addressable;
5128 if its address escapes, and the called function uses the
5129 NRV optimization, a conforming program could see *to_p
5130 change before the called function returns; see c++/19317.
5131 When optimizing, the return_slot pass marks more functions
5132 as safe after we have escape info. */
5139 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5140 mark_addressable (*to_p);
5145 case WITH_SIZE_EXPR:
5146 /* Likewise for calls that return an aggregate of non-constant size,
5147 since we would not be able to generate a temporary at all. */
5148 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5150 *from_p = TREE_OPERAND (*from_p, 0);
5151 /* We don't change ret in this case because the
5152 WITH_SIZE_EXPR might have been added in
5153 gimplify_modify_expr, so returning GS_OK would lead to an
5159 /* If we're initializing from a container, push the initialization
5161 case CLEANUP_POINT_EXPR:
5163 case STATEMENT_LIST:
5165 tree wrap = *from_p;
5168 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5170 if (ret != GS_ERROR)
5173 t = voidify_wrapper_expr (wrap, *expr_p);
5174 gcc_assert (t == *expr_p);
5178 gimplify_and_add (wrap, pre_p);
5179 *expr_p = unshare_expr (*to_p);
5186 case COMPOUND_LITERAL_EXPR:
5188 tree complit = TREE_OPERAND (*expr_p, 1);
5189 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5190 tree decl = DECL_EXPR_DECL (decl_s);
5191 tree init = DECL_INITIAL (decl);
5193 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5194 into struct T x = { 0, 1, 2 } if the address of the
5195 compound literal has never been taken. */
5196 if (!TREE_ADDRESSABLE (complit)
5197 && !TREE_ADDRESSABLE (decl)
5200 *expr_p = copy_node (*expr_p);
5201 TREE_OPERAND (*expr_p, 1) = init;
5216 /* Return true if T looks like a valid GIMPLE statement. */
5219 is_gimple_stmt (tree t)
5221 const enum tree_code code = TREE_CODE (t);
5226 /* The only valid NOP_EXPR is the empty statement. */
5227 return IS_EMPTY_STMT (t);
5231 /* These are only valid if they're void. */
5232 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5238 case CASE_LABEL_EXPR:
5239 case TRY_CATCH_EXPR:
5240 case TRY_FINALLY_EXPR:
5241 case EH_FILTER_EXPR:
5244 case STATEMENT_LIST:
5248 case OACC_HOST_DATA:
5251 case OACC_ENTER_DATA:
5252 case OACC_EXIT_DATA:
5258 case OMP_DISTRIBUTE:
5269 case OMP_TARGET_DATA:
5270 case OMP_TARGET_UPDATE:
5271 case OMP_TARGET_ENTER_DATA:
5272 case OMP_TARGET_EXIT_DATA:
5275 /* These are always void. */
5281 /* These are valid regardless of their type. */
5290 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5291 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5292 DECL_GIMPLE_REG_P set.
5294 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5295 other, unmodified part of the complex object just before the total store.
5296 As a consequence, if the object is still uninitialized, an undefined value
5297 will be loaded into a register, which may result in a spurious exception
5298 if the register is floating-point and the value happens to be a signaling
5299 NaN for example. Then the fully-fledged complex operations lowering pass
5300 followed by a DCE pass are necessary in order to fix things up. */
5302 static enum gimplify_status
5303 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5306 enum tree_code code, ocode;
5307 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5309 lhs = TREE_OPERAND (*expr_p, 0);
5310 rhs = TREE_OPERAND (*expr_p, 1);
5311 code = TREE_CODE (lhs);
5312 lhs = TREE_OPERAND (lhs, 0);
5314 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5315 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5316 TREE_NO_WARNING (other) = 1;
5317 other = get_formal_tmp_var (other, pre_p);
5319 realpart = code == REALPART_EXPR ? rhs : other;
5320 imagpart = code == REALPART_EXPR ? other : rhs;
5322 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5323 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5325 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5327 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5328 *expr_p = (want_value) ? rhs : NULL_TREE;
5333 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5339 PRE_P points to the list where side effects that must happen before
5340 *EXPR_P should be stored.
5342 POST_P points to the list where side effects that must happen after
5343 *EXPR_P should be stored.
5345 WANT_VALUE is nonzero iff we want to use the value of this expression
5346 in another expression. */
5348 static enum gimplify_status
5349 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5352 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5353 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5354 enum gimplify_status ret = GS_UNHANDLED;
5356 location_t loc = EXPR_LOCATION (*expr_p);
5357 gimple_stmt_iterator gsi;
5359 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5360 || TREE_CODE (*expr_p) == INIT_EXPR);
5362 /* Trying to simplify a clobber using normal logic doesn't work,
5363 so handle it here. */
5364 if (TREE_CLOBBER_P (*from_p))
5366 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5367 if (ret == GS_ERROR)
5369 gcc_assert (!want_value
5370 && (TREE_CODE (*to_p) == VAR_DECL
5371 || TREE_CODE (*to_p) == MEM_REF));
5372 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5377 /* Insert pointer conversions required by the middle-end that are not
5378 required by the frontend. This fixes middle-end type checking for
5379 for example gcc.dg/redecl-6.c. */
5380 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5382 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5383 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5384 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5387 /* See if any simplifications can be done based on what the RHS is. */
5388 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5390 if (ret != GS_UNHANDLED)
5393 /* For zero sized types only gimplify the left hand side and right hand
5394 side as statements and throw away the assignment. Do this after
5395 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5397 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
5399 gimplify_stmt (from_p, pre_p);
5400 gimplify_stmt (to_p, pre_p);
5401 *expr_p = NULL_TREE;
5405 /* If the value being copied is of variable width, compute the length
5406 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5407 before gimplifying any of the operands so that we can resolve any
5408 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5409 the size of the expression to be copied, not of the destination, so
5410 that is what we must do here. */
5411 maybe_with_size_expr (from_p);
5413 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5414 if (ret == GS_ERROR)
5417 /* As a special case, we have to temporarily allow for assignments
5418 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5419 a toplevel statement, when gimplifying the GENERIC expression
5420 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5421 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5423 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5424 prevent gimplify_expr from trying to create a new temporary for
5425 foo's LHS, we tell it that it should only gimplify until it
5426 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5427 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5428 and all we need to do here is set 'a' to be its LHS. */
5429 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
5431 if (ret == GS_ERROR)
5434 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5435 size as argument to the call. */
5436 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5438 tree call = TREE_OPERAND (*from_p, 0);
5439 tree vlasize = TREE_OPERAND (*from_p, 1);
5441 if (TREE_CODE (call) == CALL_EXPR
5442 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5444 int nargs = call_expr_nargs (call);
5445 tree type = TREE_TYPE (call);
5446 tree ap = CALL_EXPR_ARG (call, 0);
5447 tree tag = CALL_EXPR_ARG (call, 1);
5448 tree aptag = CALL_EXPR_ARG (call, 2);
5449 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5453 TREE_OPERAND (*from_p, 0) = newcall;
5457 /* Now see if the above changed *from_p to something we handle specially. */
5458 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5460 if (ret != GS_UNHANDLED)
5463 /* If we've got a variable sized assignment between two lvalues (i.e. does
5464 not involve a call), then we can make things a bit more straightforward
5465 by converting the assignment to memcpy or memset. */
5466 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5468 tree from = TREE_OPERAND (*from_p, 0);
5469 tree size = TREE_OPERAND (*from_p, 1);
5471 if (TREE_CODE (from) == CONSTRUCTOR)
5472 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5474 if (is_gimple_addressable (from))
5477 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5482 /* Transform partial stores to non-addressable complex variables into
5483 total stores. This allows us to use real instead of virtual operands
5484 for these variables, which improves optimization. */
5485 if ((TREE_CODE (*to_p) == REALPART_EXPR
5486 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5487 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5488 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5490 /* Try to alleviate the effects of the gimplification creating artificial
5491 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5492 make sure not to create DECL_DEBUG_EXPR links across functions. */
5493 if (!gimplify_ctxp->into_ssa
5494 && TREE_CODE (*from_p) == VAR_DECL
5495 && DECL_IGNORED_P (*from_p)
5497 && !DECL_IGNORED_P (*to_p)
5498 && decl_function_context (*to_p) == current_function_decl)
5500 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5502 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5503 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5504 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5507 if (want_value && TREE_THIS_VOLATILE (*to_p))
5508 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5510 if (TREE_CODE (*from_p) == CALL_EXPR)
5512 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5513 instead of a GIMPLE_ASSIGN. */
5515 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5517 /* Gimplify internal functions created in the FEs. */
5518 int nargs = call_expr_nargs (*from_p), i;
5519 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5520 auto_vec<tree> vargs (nargs);
5522 for (i = 0; i < nargs; i++)
5524 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5525 EXPR_LOCATION (*from_p));
5526 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5528 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5529 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5533 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5534 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5535 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5536 tree fndecl = get_callee_fndecl (*from_p);
5538 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5539 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5540 && call_expr_nargs (*from_p) == 3)
5541 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5542 CALL_EXPR_ARG (*from_p, 0),
5543 CALL_EXPR_ARG (*from_p, 1),
5544 CALL_EXPR_ARG (*from_p, 2));
5547 call_stmt = gimple_build_call_from_tree (*from_p);
5548 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
5551 notice_special_calls (call_stmt);
5552 if (!gimple_call_noreturn_p (call_stmt)
5553 || TREE_ADDRESSABLE (TREE_TYPE (*to_p))
5554 || TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p))) != INTEGER_CST)
5555 gimple_call_set_lhs (call_stmt, *to_p);
5560 assign = gimple_build_assign (*to_p, *from_p);
5561 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5562 if (COMPARISON_CLASS_P (*from_p))
5563 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5566 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5568 /* We should have got an SSA name from the start. */
5569 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
5572 gimplify_seq_add_stmt (pre_p, assign);
5573 gsi = gsi_last (*pre_p);
5574 maybe_fold_stmt (&gsi);
5578 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5587 /* Gimplify a comparison between two variable-sized objects. Do this
5588 with a call to BUILT_IN_MEMCMP. */
5590 static enum gimplify_status
5591 gimplify_variable_sized_compare (tree *expr_p)
5593 location_t loc = EXPR_LOCATION (*expr_p);
5594 tree op0 = TREE_OPERAND (*expr_p, 0);
5595 tree op1 = TREE_OPERAND (*expr_p, 1);
5596 tree t, arg, dest, src, expr;
5598 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5599 arg = unshare_expr (arg);
5600 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5601 src = build_fold_addr_expr_loc (loc, op1);
5602 dest = build_fold_addr_expr_loc (loc, op0);
5603 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5604 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5607 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5608 SET_EXPR_LOCATION (expr, loc);
5614 /* Gimplify a comparison between two aggregate objects of integral scalar
5615 mode as a comparison between the bitwise equivalent scalar values. */
5617 static enum gimplify_status
5618 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5620 location_t loc = EXPR_LOCATION (*expr_p);
5621 tree op0 = TREE_OPERAND (*expr_p, 0);
5622 tree op1 = TREE_OPERAND (*expr_p, 1);
5624 tree type = TREE_TYPE (op0);
5625 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5627 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5628 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5631 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5636 /* Gimplify an expression sequence. This function gimplifies each
5637 expression and rewrites the original expression with the last
5638 expression of the sequence in GIMPLE form.
5640 PRE_P points to the list where the side effects for all the
5641 expressions in the sequence will be emitted.
5643 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5645 static enum gimplify_status
5646 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5652 tree *sub_p = &TREE_OPERAND (t, 0);
5654 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5655 gimplify_compound_expr (sub_p, pre_p, false);
5657 gimplify_stmt (sub_p, pre_p);
5659 t = TREE_OPERAND (t, 1);
5661 while (TREE_CODE (t) == COMPOUND_EXPR);
5668 gimplify_stmt (expr_p, pre_p);
5673 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5674 gimplify. After gimplification, EXPR_P will point to a new temporary
5675 that holds the original value of the SAVE_EXPR node.
5677 PRE_P points to the list where side effects that must happen before
5678 *EXPR_P should be stored. */
5680 static enum gimplify_status
5681 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5683 enum gimplify_status ret = GS_ALL_DONE;
5686 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5687 val = TREE_OPERAND (*expr_p, 0);
5689 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5690 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5692 /* The operand may be a void-valued expression such as SAVE_EXPRs
5693 generated by the Java frontend for class initialization. It is
5694 being executed only for its side-effects. */
5695 if (TREE_TYPE (val) == void_type_node)
5697 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5698 is_gimple_stmt, fb_none);
5702 val = get_initialized_tmp_var (val, pre_p, post_p);
5704 TREE_OPERAND (*expr_p, 0) = val;
5705 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5713 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5720 PRE_P points to the list where side effects that must happen before
5721 *EXPR_P should be stored.
5723 POST_P points to the list where side effects that must happen after
5724 *EXPR_P should be stored. */
5726 static enum gimplify_status
5727 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5729 tree expr = *expr_p;
5730 tree op0 = TREE_OPERAND (expr, 0);
5731 enum gimplify_status ret;
5732 location_t loc = EXPR_LOCATION (*expr_p);
5734 switch (TREE_CODE (op0))
5738 /* Check if we are dealing with an expression of the form '&*ptr'.
5739 While the front end folds away '&*ptr' into 'ptr', these
5740 expressions may be generated internally by the compiler (e.g.,
5741 builtins like __builtin_va_end). */
5742 /* Caution: the silent array decomposition semantics we allow for
5743 ADDR_EXPR means we can't always discard the pair. */
5744 /* Gimplification of the ADDR_EXPR operand may drop
5745 cv-qualification conversions, so make sure we add them if
5748 tree op00 = TREE_OPERAND (op0, 0);
5749 tree t_expr = TREE_TYPE (expr);
5750 tree t_op00 = TREE_TYPE (op00);
5752 if (!useless_type_conversion_p (t_expr, t_op00))
5753 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5759 case VIEW_CONVERT_EXPR:
5760 /* Take the address of our operand and then convert it to the type of
5763 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5764 all clear. The impact of this transformation is even less clear. */
5766 /* If the operand is a useless conversion, look through it. Doing so
5767 guarantees that the ADDR_EXPR and its operand will remain of the
5769 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5770 op0 = TREE_OPERAND (op0, 0);
5772 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5773 build_fold_addr_expr_loc (loc,
5774 TREE_OPERAND (op0, 0)));
5779 if (integer_zerop (TREE_OPERAND (op0, 1)))
5780 goto do_indirect_ref;
5785 /* If we see a call to a declared builtin or see its address
5786 being taken (we can unify those cases here) then we can mark
5787 the builtin for implicit generation by GCC. */
5788 if (TREE_CODE (op0) == FUNCTION_DECL
5789 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5790 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5791 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5793 /* We use fb_either here because the C frontend sometimes takes
5794 the address of a call that returns a struct; see
5795 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5796 the implied temporary explicit. */
5798 /* Make the operand addressable. */
5799 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5800 is_gimple_addressable, fb_either);
5801 if (ret == GS_ERROR)
5804 /* Then mark it. Beware that it may not be possible to do so directly
5805 if a temporary has been created by the gimplification. */
5806 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5808 op0 = TREE_OPERAND (expr, 0);
5810 /* For various reasons, the gimplification of the expression
5811 may have made a new INDIRECT_REF. */
5812 if (TREE_CODE (op0) == INDIRECT_REF)
5813 goto do_indirect_ref;
5815 mark_addressable (TREE_OPERAND (expr, 0));
5817 /* The FEs may end up building ADDR_EXPRs early on a decl with
5818 an incomplete type. Re-build ADDR_EXPRs in canonical form
5820 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5821 *expr_p = build_fold_addr_expr (op0);
5823 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5824 recompute_tree_invariant_for_addr_expr (*expr_p);
5826 /* If we re-built the ADDR_EXPR add a conversion to the original type
5828 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5829 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5837 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5838 value; output operands should be a gimple lvalue. */
5840 static enum gimplify_status
5841 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5845 const char **oconstraints;
5848 const char *constraint;
5849 bool allows_mem, allows_reg, is_inout;
5850 enum gimplify_status ret, tret;
5852 vec<tree, va_gc> *inputs;
5853 vec<tree, va_gc> *outputs;
5854 vec<tree, va_gc> *clobbers;
5855 vec<tree, va_gc> *labels;
5859 noutputs = list_length (ASM_OUTPUTS (expr));
5860 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5868 link_next = NULL_TREE;
5869 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5872 size_t constraint_len;
5874 link_next = TREE_CHAIN (link);
5878 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5879 constraint_len = strlen (constraint);
5880 if (constraint_len == 0)
5883 ok = parse_output_constraint (&constraint, i, 0, 0,
5884 &allows_mem, &allows_reg, &is_inout);
5891 if (!allows_reg && allows_mem)
5892 mark_addressable (TREE_VALUE (link));
5894 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5895 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5896 fb_lvalue | fb_mayfail);
5897 if (tret == GS_ERROR)
5899 error ("invalid lvalue in asm output %d", i);
5903 /* If the constraint does not allow memory make sure we gimplify
5904 it to a register if it is not already but its base is. This
5905 happens for complex and vector components. */
5908 tree op = TREE_VALUE (link);
5909 if (! is_gimple_val (op)
5910 && is_gimple_reg_type (TREE_TYPE (op))
5911 && is_gimple_reg (get_base_address (op)))
5913 tree tem = create_tmp_reg (TREE_TYPE (op));
5917 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5918 tem, unshare_expr (op));
5919 gimplify_and_add (ass, pre_p);
5921 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5922 gimplify_and_add (ass, post_p);
5924 TREE_VALUE (link) = tem;
5929 vec_safe_push (outputs, link);
5930 TREE_CHAIN (link) = NULL_TREE;
5934 /* An input/output operand. To give the optimizers more
5935 flexibility, split it into separate input and output
5940 /* Turn the in/out constraint into an output constraint. */
5941 char *p = xstrdup (constraint);
5943 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5945 /* And add a matching input constraint. */
5948 sprintf (buf, "%d", i);
5950 /* If there are multiple alternatives in the constraint,
5951 handle each of them individually. Those that allow register
5952 will be replaced with operand number, the others will stay
5954 if (strchr (p, ',') != NULL)
5956 size_t len = 0, buflen = strlen (buf);
5957 char *beg, *end, *str, *dst;
5961 end = strchr (beg, ',');
5963 end = strchr (beg, '\0');
5964 if ((size_t) (end - beg) < buflen)
5967 len += end - beg + 1;
5974 str = (char *) alloca (len);
5975 for (beg = p + 1, dst = str;;)
5978 bool mem_p, reg_p, inout_p;
5980 end = strchr (beg, ',');
5985 parse_output_constraint (&tem, i, 0, 0,
5986 &mem_p, ®_p, &inout_p);
5991 memcpy (dst, buf, buflen);
6000 memcpy (dst, beg, len);
6009 input = build_string (dst - str, str);
6012 input = build_string (strlen (buf), buf);
6015 input = build_string (constraint_len - 1, constraint + 1);
6019 input = build_tree_list (build_tree_list (NULL_TREE, input),
6020 unshare_expr (TREE_VALUE (link)));
6021 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6025 link_next = NULL_TREE;
6026 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6028 link_next = TREE_CHAIN (link);
6029 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6030 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6031 oconstraints, &allows_mem, &allows_reg);
6033 /* If we can't make copies, we can only accept memory. */
6034 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6040 error ("impossible constraint in %<asm%>");
6041 error ("non-memory input %d must stay in memory", i);
6046 /* If the operand is a memory input, it should be an lvalue. */
6047 if (!allows_reg && allows_mem)
6049 tree inputv = TREE_VALUE (link);
6050 STRIP_NOPS (inputv);
6051 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6052 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6053 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6054 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6055 || TREE_CODE (inputv) == MODIFY_EXPR)
6056 TREE_VALUE (link) = error_mark_node;
6057 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6058 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6059 if (tret != GS_ERROR)
6061 /* Unlike output operands, memory inputs are not guaranteed
6062 to be lvalues by the FE, and while the expressions are
6063 marked addressable there, if it is e.g. a statement
6064 expression, temporaries in it might not end up being
6065 addressable. They might be already used in the IL and thus
6066 it is too late to make them addressable now though. */
6067 tree x = TREE_VALUE (link);
6068 while (handled_component_p (x))
6069 x = TREE_OPERAND (x, 0);
6070 if (TREE_CODE (x) == MEM_REF
6071 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6072 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6073 if ((TREE_CODE (x) == VAR_DECL
6074 || TREE_CODE (x) == PARM_DECL
6075 || TREE_CODE (x) == RESULT_DECL)
6076 && !TREE_ADDRESSABLE (x)
6077 && is_gimple_reg (x))
6079 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6081 "memory input %d is not directly addressable",
6083 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6086 mark_addressable (TREE_VALUE (link));
6087 if (tret == GS_ERROR)
6089 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6090 "memory input %d is not directly addressable", i);
6096 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6097 is_gimple_asm_val, fb_rvalue);
6098 if (tret == GS_ERROR)
6102 TREE_CHAIN (link) = NULL_TREE;
6103 vec_safe_push (inputs, link);
6106 link_next = NULL_TREE;
6107 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6109 link_next = TREE_CHAIN (link);
6110 TREE_CHAIN (link) = NULL_TREE;
6111 vec_safe_push (clobbers, link);
6114 link_next = NULL_TREE;
6115 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6117 link_next = TREE_CHAIN (link);
6118 TREE_CHAIN (link) = NULL_TREE;
6119 vec_safe_push (labels, link);
6122 /* Do not add ASMs with errors to the gimple IL stream. */
6123 if (ret != GS_ERROR)
6125 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6126 inputs, outputs, clobbers, labels);
6128 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6129 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6131 gimplify_seq_add_stmt (pre_p, stmt);
6137 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6138 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6139 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6140 return to this function.
6142 FIXME should we complexify the prequeue handling instead? Or use flags
6143 for all the cleanups and let the optimizer tighten them up? The current
6144 code seems pretty fragile; it will break on a cleanup within any
6145 non-conditional nesting. But any such nesting would be broken, anyway;
6146 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6147 and continues out of it. We can do that at the RTL level, though, so
6148 having an optimizer to tighten up try/finally regions would be a Good
6151 static enum gimplify_status
6152 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6154 gimple_stmt_iterator iter;
6155 gimple_seq body_sequence = NULL;
6157 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6159 /* We only care about the number of conditions between the innermost
6160 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6161 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6162 int old_conds = gimplify_ctxp->conditions;
6163 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6164 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6165 gimplify_ctxp->conditions = 0;
6166 gimplify_ctxp->conditional_cleanups = NULL;
6167 gimplify_ctxp->in_cleanup_point_expr = true;
6169 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6171 gimplify_ctxp->conditions = old_conds;
6172 gimplify_ctxp->conditional_cleanups = old_cleanups;
6173 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6175 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6177 gimple *wce = gsi_stmt (iter);
6179 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6181 if (gsi_one_before_end_p (iter))
6183 /* Note that gsi_insert_seq_before and gsi_remove do not
6184 scan operands, unlike some other sequence mutators. */
6185 if (!gimple_wce_cleanup_eh_only (wce))
6186 gsi_insert_seq_before_without_update (&iter,
6187 gimple_wce_cleanup (wce),
6189 gsi_remove (&iter, true);
6196 enum gimple_try_flags kind;
6198 if (gimple_wce_cleanup_eh_only (wce))
6199 kind = GIMPLE_TRY_CATCH;
6201 kind = GIMPLE_TRY_FINALLY;
6202 seq = gsi_split_seq_after (iter);
6204 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6205 /* Do not use gsi_replace here, as it may scan operands.
6206 We want to do a simple structural modification only. */
6207 gsi_set_stmt (&iter, gtry);
6208 iter = gsi_start (gtry->eval);
6215 gimplify_seq_add_seq (pre_p, body_sequence);
6228 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6229 is the cleanup action required. EH_ONLY is true if the cleanup should
6230 only be executed if an exception is thrown, not on normal exit. */
6233 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6236 gimple_seq cleanup_stmts = NULL;
6238 /* Errors can result in improperly nested cleanups. Which results in
6239 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6243 if (gimple_conditional_context ())
6245 /* If we're in a conditional context, this is more complex. We only
6246 want to run the cleanup if we actually ran the initialization that
6247 necessitates it, but we want to run it after the end of the
6248 conditional context. So we wrap the try/finally around the
6249 condition and use a flag to determine whether or not to actually
6250 run the destructor. Thus
6254 becomes (approximately)
6258 if (test) { A::A(temp); flag = 1; val = f(temp); }
6261 if (flag) A::~A(temp);
6265 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6266 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6267 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6269 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6270 gimplify_stmt (&cleanup, &cleanup_stmts);
6271 wce = gimple_build_wce (cleanup_stmts);
6273 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6274 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6275 gimplify_seq_add_stmt (pre_p, ftrue);
6277 /* Because of this manipulation, and the EH edges that jump
6278 threading cannot redirect, the temporary (VAR) will appear
6279 to be used uninitialized. Don't warn. */
6280 TREE_NO_WARNING (var) = 1;
6284 gimplify_stmt (&cleanup, &cleanup_stmts);
6285 wce = gimple_build_wce (cleanup_stmts);
6286 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6287 gimplify_seq_add_stmt (pre_p, wce);
6291 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6293 static enum gimplify_status
6294 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6296 tree targ = *expr_p;
6297 tree temp = TARGET_EXPR_SLOT (targ);
6298 tree init = TARGET_EXPR_INITIAL (targ);
6299 enum gimplify_status ret;
6301 bool unpoison_empty_seq = false;
6302 gimple_stmt_iterator unpoison_it;
6306 tree cleanup = NULL_TREE;
6308 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6309 to the temps list. Handle also variable length TARGET_EXPRs. */
6310 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6312 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6313 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6314 gimplify_vla_decl (temp, pre_p);
6318 /* Save location where we need to place unpoisoning. It's possible
6319 that a variable will be converted to needs_to_live_in_memory. */
6320 unpoison_it = gsi_last (*pre_p);
6321 unpoison_empty_seq = gsi_end_p (unpoison_it);
6323 gimple_add_tmp_var (temp);
6326 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6327 expression is supposed to initialize the slot. */
6328 if (VOID_TYPE_P (TREE_TYPE (init)))
6329 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6332 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6334 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6336 ggc_free (init_expr);
6338 if (ret == GS_ERROR)
6340 /* PR c++/28266 Make sure this is expanded only once. */
6341 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6345 gimplify_and_add (init, pre_p);
6347 /* If needed, push the cleanup for the temp. */
6348 if (TARGET_EXPR_CLEANUP (targ))
6350 if (CLEANUP_EH_ONLY (targ))
6351 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6352 CLEANUP_EH_ONLY (targ), pre_p);
6354 cleanup = TARGET_EXPR_CLEANUP (targ);
6357 /* Add a clobber for the temporary going out of scope, like
6358 gimplify_bind_expr. */
6359 if (gimplify_ctxp->in_cleanup_point_expr
6360 && needs_to_live_in_memory (temp))
6362 if (flag_stack_reuse == SR_ALL)
6364 tree clobber = build_constructor (TREE_TYPE (temp),
6366 TREE_THIS_VOLATILE (clobber) = true;
6367 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6369 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
6374 if (asan_sanitize_use_after_scope ()
6375 && dbg_cnt (asan_use_after_scope))
6377 tree asan_cleanup = build_asan_poison_call_expr (temp);
6380 if (unpoison_empty_seq)
6381 unpoison_it = gsi_start (*pre_p);
6383 asan_poison_variable (temp, false, &unpoison_it,
6384 unpoison_empty_seq);
6385 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6390 gimple_push_cleanup (temp, cleanup, false, pre_p);
6392 /* Only expand this once. */
6393 TREE_OPERAND (targ, 3) = init;
6394 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6397 /* We should have expanded this before. */
6398 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6404 /* Gimplification of expression trees. */
6406 /* Gimplify an expression which appears at statement context. The
6407 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6408 NULL, a new sequence is allocated.
6410 Return true if we actually added a statement to the queue. */
6413 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6415 gimple_seq_node last;
6417 last = gimple_seq_last (*seq_p);
6418 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6419 return last != gimple_seq_last (*seq_p);
6422 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6423 to CTX. If entries already exist, force them to be some flavor of private.
6424 If there is no enclosing parallel, do nothing. */
6427 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6431 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6436 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6439 if (n->value & GOVD_SHARED)
6440 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6441 else if (n->value & GOVD_MAP)
6442 n->value |= GOVD_MAP_TO_ONLY;
6446 else if ((ctx->region_type & ORT_TARGET) != 0)
6448 if (ctx->target_map_scalars_firstprivate)
6449 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6451 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6453 else if (ctx->region_type != ORT_WORKSHARE
6454 && ctx->region_type != ORT_SIMD
6455 && ctx->region_type != ORT_ACC
6456 && !(ctx->region_type & ORT_TARGET_DATA))
6457 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6459 ctx = ctx->outer_context;
6464 /* Similarly for each of the type sizes of TYPE. */
6467 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6469 if (type == NULL || type == error_mark_node)
6471 type = TYPE_MAIN_VARIANT (type);
6473 if (ctx->privatized_types->add (type))
6476 switch (TREE_CODE (type))
6482 case FIXED_POINT_TYPE:
6483 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6484 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6488 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6489 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6494 case QUAL_UNION_TYPE:
6497 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6498 if (TREE_CODE (field) == FIELD_DECL)
6500 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6501 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6507 case REFERENCE_TYPE:
6508 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6515 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6516 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6517 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6520 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6523 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6526 unsigned int nflags;
6529 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6532 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6533 there are constructors involved somewhere. */
6534 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
6535 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
6538 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6539 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6541 /* We shouldn't be re-adding the decl with the same data
6543 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6544 nflags = n->value | flags;
6545 /* The only combination of data sharing classes we should see is
6546 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6547 reduction variables to be used in data sharing clauses. */
6548 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6549 || ((nflags & GOVD_DATA_SHARE_CLASS)
6550 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6551 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6556 /* When adding a variable-sized variable, we have to handle all sorts
6557 of additional bits of data: the pointer replacement variable, and
6558 the parameters of the type. */
6559 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6561 /* Add the pointer replacement variable as PRIVATE if the variable
6562 replacement is private, else FIRSTPRIVATE since we'll need the
6563 address of the original variable either for SHARED, or for the
6564 copy into or out of the context. */
6565 if (!(flags & GOVD_LOCAL))
6567 if (flags & GOVD_MAP)
6568 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6569 else if (flags & GOVD_PRIVATE)
6570 nflags = GOVD_PRIVATE;
6571 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6572 && (flags & GOVD_FIRSTPRIVATE))
6573 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6575 nflags = GOVD_FIRSTPRIVATE;
6576 nflags |= flags & GOVD_SEEN;
6577 t = DECL_VALUE_EXPR (decl);
6578 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6579 t = TREE_OPERAND (t, 0);
6580 gcc_assert (DECL_P (t));
6581 omp_add_variable (ctx, t, nflags);
6584 /* Add all of the variable and type parameters (which should have
6585 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6586 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6587 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6588 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6590 /* The variable-sized variable itself is never SHARED, only some form
6591 of PRIVATE. The sharing would take place via the pointer variable
6592 which we remapped above. */
6593 if (flags & GOVD_SHARED)
6594 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
6595 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6597 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6598 alloca statement we generate for the variable, so make sure it
6599 is available. This isn't automatically needed for the SHARED
6600 case, since we won't be allocating local storage then.
6601 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6602 in this case omp_notice_variable will be called later
6603 on when it is gimplified. */
6604 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6605 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6606 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6608 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6609 && lang_hooks.decls.omp_privatize_by_reference (decl))
6611 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6613 /* Similar to the direct variable sized case above, we'll need the
6614 size of references being privatized. */
6615 if ((flags & GOVD_SHARED) == 0)
6617 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6619 omp_notice_variable (ctx, t, true);
6626 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6628 /* For reductions clauses in OpenACC loop directives, by default create a
6629 copy clause on the enclosing parallel construct for carrying back the
6631 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6633 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6636 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6639 /* Ignore local variables and explicitly declared clauses. */
6640 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6642 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6644 /* According to the OpenACC spec, such a reduction variable
6645 should already have a copy map on a kernels construct,
6646 verify that here. */
6647 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6648 && (n->value & GOVD_MAP));
6650 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6652 /* Remove firstprivate and make it a copy map. */
6653 n->value &= ~GOVD_FIRSTPRIVATE;
6654 n->value |= GOVD_MAP;
6657 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6659 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6660 GOVD_MAP | GOVD_SEEN);
6663 outer_ctx = outer_ctx->outer_context;
6668 /* Notice a threadprivate variable DECL used in OMP context CTX.
6669 This just prints out diagnostics about threadprivate variable uses
6670 in untied tasks. If DECL2 is non-NULL, prevent this warning
6671 on that variable. */
6674 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6678 struct gimplify_omp_ctx *octx;
6680 for (octx = ctx; octx; octx = octx->outer_context)
6681 if ((octx->region_type & ORT_TARGET) != 0)
6683 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6686 error ("threadprivate variable %qE used in target region",
6688 error_at (octx->location, "enclosing target region");
6689 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6692 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6695 if (ctx->region_type != ORT_UNTIED_TASK)
6697 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6700 error ("threadprivate variable %qE used in untied task",
6702 error_at (ctx->location, "enclosing task");
6703 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6706 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6710 /* Return true if global var DECL is device resident. */
6713 device_resident_p (tree decl)
6715 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6720 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6722 tree c = TREE_VALUE (t);
6723 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6730 /* Determine outer default flags for DECL mentioned in an OMP region
6731 but not declared in an enclosing clause.
6733 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6734 remapped firstprivate instead of shared. To some extent this is
6735 addressed in omp_firstprivatize_type_sizes, but not
6739 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6740 bool in_code, unsigned flags)
6742 enum omp_clause_default_kind default_kind = ctx->default_kind;
6743 enum omp_clause_default_kind kind;
6745 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6746 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6747 default_kind = kind;
6749 switch (default_kind)
6751 case OMP_CLAUSE_DEFAULT_NONE:
6755 if (ctx->region_type & ORT_PARALLEL)
6757 else if (ctx->region_type & ORT_TASK)
6759 else if (ctx->region_type & ORT_TEAMS)
6764 error ("%qE not specified in enclosing %s",
6765 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6766 error_at (ctx->location, "enclosing %s", rtype);
6769 case OMP_CLAUSE_DEFAULT_SHARED:
6770 flags |= GOVD_SHARED;
6772 case OMP_CLAUSE_DEFAULT_PRIVATE:
6773 flags |= GOVD_PRIVATE;
6775 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6776 flags |= GOVD_FIRSTPRIVATE;
6778 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6779 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6780 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6781 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6783 omp_notice_variable (octx, decl, in_code);
6784 for (; octx; octx = octx->outer_context)
6788 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6789 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6790 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6792 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6794 flags |= GOVD_FIRSTPRIVATE;
6797 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6799 flags |= GOVD_SHARED;
6805 if (TREE_CODE (decl) == PARM_DECL
6806 || (!is_global_var (decl)
6807 && DECL_CONTEXT (decl) == current_function_decl))
6808 flags |= GOVD_FIRSTPRIVATE;
6810 flags |= GOVD_SHARED;
6822 /* Determine outer default flags for DECL mentioned in an OACC region
6823 but not declared in an enclosing clause. */
6826 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6829 bool on_device = false;
6830 tree type = TREE_TYPE (decl);
6832 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6833 type = TREE_TYPE (type);
6835 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6836 && is_global_var (decl)
6837 && device_resident_p (decl))
6840 flags |= GOVD_MAP_TO_ONLY;
6843 switch (ctx->region_type)
6848 case ORT_ACC_KERNELS:
6849 /* Scalars are default 'copy' under kernels, non-scalars are default
6850 'present_or_copy'. */
6852 if (!AGGREGATE_TYPE_P (type))
6853 flags |= GOVD_MAP_FORCE;
6858 case ORT_ACC_PARALLEL:
6860 if (on_device || AGGREGATE_TYPE_P (type))
6861 /* Aggregates default to 'present_or_copy'. */
6864 /* Scalars default to 'firstprivate'. */
6865 flags |= GOVD_FIRSTPRIVATE;
6871 if (DECL_ARTIFICIAL (decl))
6872 ; /* We can get compiler-generated decls, and should not complain
6874 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6876 error ("%qE not specified in enclosing OpenACC %qs construct",
6877 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6878 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6881 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6886 /* Record the fact that DECL was used within the OMP context CTX.
6887 IN_CODE is true when real code uses DECL, and false when we should
6888 merely emit default(none) errors. Return true if DECL is going to
6889 be remapped and thus DECL shouldn't be gimplified into its
6890 DECL_VALUE_EXPR (if any). */
6893 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6896 unsigned flags = in_code ? GOVD_SEEN : 0;
6897 bool ret = false, shared;
6899 if (error_operand_p (decl))
6902 if (ctx->region_type == ORT_NONE)
6903 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6905 if (is_global_var (decl))
6907 /* Threadprivate variables are predetermined. */
6908 if (DECL_THREAD_LOCAL_P (decl))
6909 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6911 if (DECL_HAS_VALUE_EXPR_P (decl))
6913 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6915 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6916 return omp_notice_threadprivate_variable (ctx, decl, value);
6919 if (gimplify_omp_ctxp->outer_context == NULL
6921 && get_oacc_fn_attrib (current_function_decl))
6923 location_t loc = DECL_SOURCE_LOCATION (decl);
6925 if (lookup_attribute ("omp declare target link",
6926 DECL_ATTRIBUTES (decl)))
6929 "%qE with %<link%> clause used in %<routine%> function",
6933 else if (!lookup_attribute ("omp declare target",
6934 DECL_ATTRIBUTES (decl)))
6937 "%qE requires a %<declare%> directive for use "
6938 "in a %<routine%> function", DECL_NAME (decl));
6944 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6945 if ((ctx->region_type & ORT_TARGET) != 0)
6947 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6950 unsigned nflags = flags;
6951 if (ctx->target_map_pointers_as_0len_arrays
6952 || ctx->target_map_scalars_firstprivate)
6954 bool is_declare_target = false;
6955 bool is_scalar = false;
6956 if (is_global_var (decl)
6957 && varpool_node::get_create (decl)->offloadable)
6959 struct gimplify_omp_ctx *octx;
6960 for (octx = ctx->outer_context;
6961 octx; octx = octx->outer_context)
6963 n = splay_tree_lookup (octx->variables,
6964 (splay_tree_key)decl);
6966 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6967 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6970 is_declare_target = octx == NULL;
6972 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6974 tree type = TREE_TYPE (decl);
6975 if (TREE_CODE (type) == REFERENCE_TYPE)
6976 type = TREE_TYPE (type);
6977 if (TREE_CODE (type) == COMPLEX_TYPE)
6978 type = TREE_TYPE (type);
6979 if (INTEGRAL_TYPE_P (type)
6980 || SCALAR_FLOAT_TYPE_P (type)
6981 || TREE_CODE (type) == POINTER_TYPE)
6984 if (is_declare_target)
6986 else if (ctx->target_map_pointers_as_0len_arrays
6987 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6988 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6989 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6991 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6993 nflags |= GOVD_FIRSTPRIVATE;
6996 struct gimplify_omp_ctx *octx = ctx->outer_context;
6997 if ((ctx->region_type & ORT_ACC) && octx)
6999 /* Look in outer OpenACC contexts, to see if there's a
7000 data attribute for this variable. */
7001 omp_notice_variable (octx, decl, in_code);
7003 for (; octx; octx = octx->outer_context)
7005 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7008 = splay_tree_lookup (octx->variables,
7009 (splay_tree_key) decl);
7012 if (octx->region_type == ORT_ACC_HOST_DATA)
7013 error ("variable %qE declared in enclosing "
7014 "%<host_data%> region", DECL_NAME (decl));
7022 tree type = TREE_TYPE (decl);
7025 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7026 && lang_hooks.decls.omp_privatize_by_reference (decl))
7027 type = TREE_TYPE (type);
7029 && !lang_hooks.types.omp_mappable_type (type))
7031 error ("%qD referenced in target region does not have "
7032 "a mappable type", decl);
7033 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7035 else if (nflags == flags)
7037 if ((ctx->region_type & ORT_ACC) != 0)
7038 nflags = oacc_default_clause (ctx, decl, flags);
7044 omp_add_variable (ctx, decl, nflags);
7048 /* If nothing changed, there's nothing left to do. */
7049 if ((n->value & flags) == flags)
7059 if (ctx->region_type == ORT_WORKSHARE
7060 || ctx->region_type == ORT_SIMD
7061 || ctx->region_type == ORT_ACC
7062 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7065 flags = omp_default_clause (ctx, decl, in_code, flags);
7067 if ((flags & GOVD_PRIVATE)
7068 && lang_hooks.decls.omp_private_outer_ref (decl))
7069 flags |= GOVD_PRIVATE_OUTER_REF;
7071 omp_add_variable (ctx, decl, flags);
7073 shared = (flags & GOVD_SHARED) != 0;
7074 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7078 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7079 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7080 && DECL_SIZE (decl))
7082 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7085 tree t = DECL_VALUE_EXPR (decl);
7086 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7087 t = TREE_OPERAND (t, 0);
7088 gcc_assert (DECL_P (t));
7089 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7090 n2->value |= GOVD_SEEN;
7092 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7093 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7094 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7098 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7099 gcc_assert (DECL_P (t));
7100 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7102 n2->value |= GOVD_SEEN;
7106 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7107 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7109 /* If nothing changed, there's nothing left to do. */
7110 if ((n->value & flags) == flags)
7116 /* If the variable is private in the current context, then we don't
7117 need to propagate anything to an outer context. */
7118 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7120 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7121 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7123 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7124 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7125 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7127 if (ctx->outer_context
7128 && omp_notice_variable (ctx->outer_context, decl, in_code))
7133 /* Verify that DECL is private within CTX. If there's specific information
7134 to the contrary in the innermost scope, generate an error. */
7137 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7141 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7144 if (n->value & GOVD_SHARED)
7146 if (ctx == gimplify_omp_ctxp)
7149 error ("iteration variable %qE is predetermined linear",
7152 error ("iteration variable %qE should be private",
7154 n->value = GOVD_PRIVATE;
7160 else if ((n->value & GOVD_EXPLICIT) != 0
7161 && (ctx == gimplify_omp_ctxp
7162 || (ctx->region_type == ORT_COMBINED_PARALLEL
7163 && gimplify_omp_ctxp->outer_context == ctx)))
7165 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7166 error ("iteration variable %qE should not be firstprivate",
7168 else if ((n->value & GOVD_REDUCTION) != 0)
7169 error ("iteration variable %qE should not be reduction",
7171 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7172 error ("iteration variable %qE should not be linear",
7174 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7175 error ("iteration variable %qE should not be lastprivate",
7177 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7178 error ("iteration variable %qE should not be private",
7180 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7181 error ("iteration variable %qE is predetermined linear",
7184 return (ctx == gimplify_omp_ctxp
7185 || (ctx->region_type == ORT_COMBINED_PARALLEL
7186 && gimplify_omp_ctxp->outer_context == ctx));
7189 if (ctx->region_type != ORT_WORKSHARE
7190 && ctx->region_type != ORT_SIMD
7191 && ctx->region_type != ORT_ACC)
7193 else if (ctx->outer_context)
7194 return omp_is_private (ctx->outer_context, decl, simd);
7198 /* Return true if DECL is private within a parallel region
7199 that binds to the current construct's context or in parallel
7200 region's REDUCTION clause. */
7203 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7209 ctx = ctx->outer_context;
7212 if (is_global_var (decl))
7215 /* References might be private, but might be shared too,
7216 when checking for copyprivate, assume they might be
7217 private, otherwise assume they might be shared. */
7221 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7224 /* Treat C++ privatized non-static data members outside
7225 of the privatization the same. */
7226 if (omp_member_access_dummy_var (decl))
7232 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7234 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7235 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7240 if ((n->value & GOVD_LOCAL) != 0
7241 && omp_member_access_dummy_var (decl))
7243 return (n->value & GOVD_SHARED) == 0;
7246 while (ctx->region_type == ORT_WORKSHARE
7247 || ctx->region_type == ORT_SIMD
7248 || ctx->region_type == ORT_ACC);
7252 /* Return true if the CTX is combined with distribute and thus
7253 lastprivate can't be supported. */
7256 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
7260 if (ctx->outer_context == NULL)
7262 ctx = ctx->outer_context;
7263 switch (ctx->region_type)
7266 if (!ctx->combined_loop)
7268 if (ctx->distribute)
7269 return lang_GNU_Fortran ();
7271 case ORT_COMBINED_PARALLEL:
7273 case ORT_COMBINED_TEAMS:
7274 return lang_GNU_Fortran ();
7282 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7285 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7289 /* If this node has been visited, unmark it and keep looking. */
7290 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7293 if (IS_TYPE_OR_DECL_P (t))
7298 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7299 and previous omp contexts. */
7302 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7303 enum omp_region_type region_type,
7304 enum tree_code code)
7306 struct gimplify_omp_ctx *ctx, *outer_ctx;
7308 hash_map<tree, tree> *struct_map_to_clause = NULL;
7309 tree *prev_list_p = NULL;
7311 ctx = new_omp_context (region_type);
7312 outer_ctx = ctx->outer_context;
7313 if (code == OMP_TARGET && !lang_GNU_Fortran ())
7315 ctx->target_map_pointers_as_0len_arrays = true;
7316 /* FIXME: For Fortran we want to set this too, when
7317 the Fortran FE is updated to OpenMP 4.5. */
7318 ctx->target_map_scalars_firstprivate = true;
7320 if (!lang_GNU_Fortran ())
7324 case OMP_TARGET_DATA:
7325 case OMP_TARGET_ENTER_DATA:
7326 case OMP_TARGET_EXIT_DATA:
7327 case OACC_HOST_DATA:
7328 ctx->target_firstprivatize_array_bases = true;
7333 while ((c = *list_p) != NULL)
7335 bool remove = false;
7336 bool notice_outer = true;
7337 const char *check_non_private = NULL;
7341 switch (OMP_CLAUSE_CODE (c))
7343 case OMP_CLAUSE_PRIVATE:
7344 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7345 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7347 flags |= GOVD_PRIVATE_OUTER_REF;
7348 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7351 notice_outer = false;
7353 case OMP_CLAUSE_SHARED:
7354 flags = GOVD_SHARED | GOVD_EXPLICIT;
7356 case OMP_CLAUSE_FIRSTPRIVATE:
7357 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7358 check_non_private = "firstprivate";
7360 case OMP_CLAUSE_LASTPRIVATE:
7361 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7362 check_non_private = "lastprivate";
7363 decl = OMP_CLAUSE_DECL (c);
7364 if (omp_no_lastprivate (ctx))
7366 notice_outer = false;
7367 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7369 else if (error_operand_p (decl))
7372 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7373 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7374 && splay_tree_lookup (outer_ctx->variables,
7375 (splay_tree_key) decl) == NULL)
7377 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7378 if (outer_ctx->outer_context)
7379 omp_notice_variable (outer_ctx->outer_context, decl, true);
7382 && (outer_ctx->region_type & ORT_TASK) != 0
7383 && outer_ctx->combined_loop
7384 && splay_tree_lookup (outer_ctx->variables,
7385 (splay_tree_key) decl) == NULL)
7387 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7388 if (outer_ctx->outer_context)
7389 omp_notice_variable (outer_ctx->outer_context, decl, true);
7392 && (outer_ctx->region_type == ORT_WORKSHARE
7393 || outer_ctx->region_type == ORT_ACC)
7394 && outer_ctx->combined_loop
7395 && splay_tree_lookup (outer_ctx->variables,
7396 (splay_tree_key) decl) == NULL
7397 && !omp_check_private (outer_ctx, decl, false))
7399 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7400 if (outer_ctx->outer_context
7401 && (outer_ctx->outer_context->region_type
7402 == ORT_COMBINED_PARALLEL)
7403 && splay_tree_lookup (outer_ctx->outer_context->variables,
7404 (splay_tree_key) decl) == NULL)
7406 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7407 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7408 if (octx->outer_context)
7409 omp_notice_variable (octx->outer_context, decl, true);
7411 else if (outer_ctx->outer_context)
7412 omp_notice_variable (outer_ctx->outer_context, decl, true);
7415 case OMP_CLAUSE_REDUCTION:
7416 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7417 /* OpenACC permits reductions on private variables. */
7418 if (!(region_type & ORT_ACC))
7419 check_non_private = "reduction";
7420 decl = OMP_CLAUSE_DECL (c);
7421 if (TREE_CODE (decl) == MEM_REF)
7423 tree type = TREE_TYPE (decl);
7424 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7425 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7430 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7433 omp_firstprivatize_variable (ctx, v);
7434 omp_notice_variable (ctx, v, true);
7436 decl = TREE_OPERAND (decl, 0);
7437 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7439 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7440 NULL, is_gimple_val, fb_rvalue)
7446 v = TREE_OPERAND (decl, 1);
7449 omp_firstprivatize_variable (ctx, v);
7450 omp_notice_variable (ctx, v, true);
7452 decl = TREE_OPERAND (decl, 0);
7454 if (TREE_CODE (decl) == ADDR_EXPR
7455 || TREE_CODE (decl) == INDIRECT_REF)
7456 decl = TREE_OPERAND (decl, 0);
7459 case OMP_CLAUSE_LINEAR:
7460 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7461 is_gimple_val, fb_rvalue) == GS_ERROR)
7468 if (code == OMP_SIMD
7469 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7471 struct gimplify_omp_ctx *octx = outer_ctx;
7473 && octx->region_type == ORT_WORKSHARE
7474 && octx->combined_loop
7475 && !octx->distribute)
7477 if (octx->outer_context
7478 && (octx->outer_context->region_type
7479 == ORT_COMBINED_PARALLEL))
7480 octx = octx->outer_context->outer_context;
7482 octx = octx->outer_context;
7485 && octx->region_type == ORT_WORKSHARE
7486 && octx->combined_loop
7488 && !lang_GNU_Fortran ())
7490 error_at (OMP_CLAUSE_LOCATION (c),
7491 "%<linear%> clause for variable other than "
7492 "loop iterator specified on construct "
7493 "combined with %<distribute%>");
7498 /* For combined #pragma omp parallel for simd, need to put
7499 lastprivate and perhaps firstprivate too on the
7500 parallel. Similarly for #pragma omp for simd. */
7501 struct gimplify_omp_ctx *octx = outer_ctx;
7503 if (omp_no_lastprivate (ctx))
7504 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
7507 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7508 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7510 decl = OMP_CLAUSE_DECL (c);
7511 if (error_operand_p (decl))
7517 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7518 flags |= GOVD_FIRSTPRIVATE;
7519 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7520 flags |= GOVD_LASTPRIVATE;
7522 && octx->region_type == ORT_WORKSHARE
7523 && octx->combined_loop)
7525 if (octx->outer_context
7526 && (octx->outer_context->region_type
7527 == ORT_COMBINED_PARALLEL))
7528 octx = octx->outer_context;
7529 else if (omp_check_private (octx, decl, false))
7533 && (octx->region_type & ORT_TASK) != 0
7534 && octx->combined_loop)
7537 && octx->region_type == ORT_COMBINED_PARALLEL
7538 && ctx->region_type == ORT_WORKSHARE
7539 && octx == outer_ctx)
7540 flags = GOVD_SEEN | GOVD_SHARED;
7542 && octx->region_type == ORT_COMBINED_TEAMS)
7543 flags = GOVD_SEEN | GOVD_SHARED;
7545 && octx->region_type == ORT_COMBINED_TARGET)
7547 flags &= ~GOVD_LASTPRIVATE;
7548 if (flags == GOVD_SEEN)
7554 = splay_tree_lookup (octx->variables,
7555 (splay_tree_key) decl);
7556 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7561 omp_add_variable (octx, decl, flags);
7562 if (octx->outer_context == NULL)
7564 octx = octx->outer_context;
7569 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7570 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7571 omp_notice_variable (octx, decl, true);
7573 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7574 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7575 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7577 notice_outer = false;
7578 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7582 case OMP_CLAUSE_MAP:
7583 decl = OMP_CLAUSE_DECL (c);
7584 if (error_operand_p (decl))
7590 case OMP_TARGET_DATA:
7591 case OMP_TARGET_ENTER_DATA:
7592 case OMP_TARGET_EXIT_DATA:
7593 case OACC_HOST_DATA:
7594 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7595 || (OMP_CLAUSE_MAP_KIND (c)
7596 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7597 /* For target {,enter ,exit }data only the array slice is
7598 mapped, but not the pointer to it. */
7606 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7608 struct gimplify_omp_ctx *octx;
7609 for (octx = outer_ctx; octx; octx = octx->outer_context)
7611 if (octx->region_type != ORT_ACC_HOST_DATA)
7614 = splay_tree_lookup (octx->variables,
7615 (splay_tree_key) decl);
7617 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7618 "declared in enclosing %<host_data%> region",
7622 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7623 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7624 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7625 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7626 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7631 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7632 || (OMP_CLAUSE_MAP_KIND (c)
7633 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7634 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7637 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
7638 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7639 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7644 if (TREE_CODE (d) == ARRAY_REF)
7646 while (TREE_CODE (d) == ARRAY_REF)
7647 d = TREE_OPERAND (d, 0);
7648 if (TREE_CODE (d) == COMPONENT_REF
7649 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7652 pd = &OMP_CLAUSE_DECL (c);
7654 && TREE_CODE (decl) == INDIRECT_REF
7655 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7656 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7659 pd = &TREE_OPERAND (decl, 0);
7660 decl = TREE_OPERAND (decl, 0);
7662 if (TREE_CODE (decl) == COMPONENT_REF)
7664 while (TREE_CODE (decl) == COMPONENT_REF)
7665 decl = TREE_OPERAND (decl, 0);
7666 if (TREE_CODE (decl) == INDIRECT_REF
7667 && DECL_P (TREE_OPERAND (decl, 0))
7668 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7670 decl = TREE_OPERAND (decl, 0);
7672 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7680 if (error_operand_p (decl))
7686 tree stype = TREE_TYPE (decl);
7687 if (TREE_CODE (stype) == REFERENCE_TYPE)
7688 stype = TREE_TYPE (stype);
7689 if (TYPE_SIZE_UNIT (stype) == NULL
7690 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7692 error_at (OMP_CLAUSE_LOCATION (c),
7693 "mapping field %qE of variable length "
7694 "structure", OMP_CLAUSE_DECL (c));
7699 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7701 /* Error recovery. */
7702 if (prev_list_p == NULL)
7707 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7709 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7710 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7719 HOST_WIDE_INT bitsize, bitpos;
7721 int unsignedp, reversep, volatilep = 0;
7722 tree base = OMP_CLAUSE_DECL (c);
7723 while (TREE_CODE (base) == ARRAY_REF)
7724 base = TREE_OPERAND (base, 0);
7725 if (TREE_CODE (base) == INDIRECT_REF)
7726 base = TREE_OPERAND (base, 0);
7727 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7728 &mode, &unsignedp, &reversep,
7730 tree orig_base = base;
7731 if ((TREE_CODE (base) == INDIRECT_REF
7732 || (TREE_CODE (base) == MEM_REF
7733 && integer_zerop (TREE_OPERAND (base, 1))))
7734 && DECL_P (TREE_OPERAND (base, 0))
7735 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7737 base = TREE_OPERAND (base, 0);
7738 gcc_assert (base == decl
7739 && (offset == NULL_TREE
7740 || TREE_CODE (offset) == INTEGER_CST));
7743 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7744 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7745 == GOMP_MAP_ALWAYS_POINTER);
7746 if (n == NULL || (n->value & GOVD_MAP) == 0)
7748 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7750 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7751 if (orig_base != base)
7752 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7754 OMP_CLAUSE_DECL (l) = decl;
7755 OMP_CLAUSE_SIZE (l) = size_int (1);
7756 if (struct_map_to_clause == NULL)
7757 struct_map_to_clause = new hash_map<tree, tree>;
7758 struct_map_to_clause->put (decl, l);
7761 enum gomp_map_kind mkind
7762 = code == OMP_TARGET_EXIT_DATA
7763 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7764 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7766 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7767 OMP_CLAUSE_DECL (c2)
7768 = unshare_expr (OMP_CLAUSE_DECL (c));
7769 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7770 OMP_CLAUSE_SIZE (c2)
7771 = TYPE_SIZE_UNIT (ptr_type_node);
7772 OMP_CLAUSE_CHAIN (l) = c2;
7773 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7775 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7777 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7779 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7780 OMP_CLAUSE_DECL (c3)
7781 = unshare_expr (OMP_CLAUSE_DECL (c4));
7782 OMP_CLAUSE_SIZE (c3)
7783 = TYPE_SIZE_UNIT (ptr_type_node);
7784 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7785 OMP_CLAUSE_CHAIN (c2) = c3;
7792 OMP_CLAUSE_CHAIN (l) = c;
7794 list_p = &OMP_CLAUSE_CHAIN (l);
7796 if (orig_base != base && code == OMP_TARGET)
7798 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7800 enum gomp_map_kind mkind
7801 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7802 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7803 OMP_CLAUSE_DECL (c2) = decl;
7804 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7805 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7806 OMP_CLAUSE_CHAIN (l) = c2;
7808 flags = GOVD_MAP | GOVD_EXPLICIT;
7809 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7815 tree *osc = struct_map_to_clause->get (decl);
7816 tree *sc = NULL, *scp = NULL;
7817 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7818 n->value |= GOVD_SEEN;
7821 o1 = wi::to_offset (offset);
7825 o1 = o1 + bitpos / BITS_PER_UNIT;
7826 sc = &OMP_CLAUSE_CHAIN (*osc);
7828 && (OMP_CLAUSE_MAP_KIND (*sc)
7829 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7830 sc = &OMP_CLAUSE_CHAIN (*sc);
7831 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7832 if (ptr && sc == prev_list_p)
7834 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7836 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7838 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7844 HOST_WIDE_INT bitsize2, bitpos2;
7845 base = OMP_CLAUSE_DECL (*sc);
7846 if (TREE_CODE (base) == ARRAY_REF)
7848 while (TREE_CODE (base) == ARRAY_REF)
7849 base = TREE_OPERAND (base, 0);
7850 if (TREE_CODE (base) != COMPONENT_REF
7851 || (TREE_CODE (TREE_TYPE (base))
7855 else if (TREE_CODE (base) == INDIRECT_REF
7856 && (TREE_CODE (TREE_OPERAND (base, 0))
7858 && (TREE_CODE (TREE_TYPE
7859 (TREE_OPERAND (base, 0)))
7861 base = TREE_OPERAND (base, 0);
7862 base = get_inner_reference (base, &bitsize2,
7865 &reversep, &volatilep,
7867 if ((TREE_CODE (base) == INDIRECT_REF
7868 || (TREE_CODE (base) == MEM_REF
7869 && integer_zerop (TREE_OPERAND (base,
7871 && DECL_P (TREE_OPERAND (base, 0))
7872 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7875 base = TREE_OPERAND (base, 0);
7880 gcc_assert (offset == NULL_TREE
7881 || TREE_CODE (offset) == INTEGER_CST);
7882 tree d1 = OMP_CLAUSE_DECL (*sc);
7883 tree d2 = OMP_CLAUSE_DECL (c);
7884 while (TREE_CODE (d1) == ARRAY_REF)
7885 d1 = TREE_OPERAND (d1, 0);
7886 while (TREE_CODE (d2) == ARRAY_REF)
7887 d2 = TREE_OPERAND (d2, 0);
7888 if (TREE_CODE (d1) == INDIRECT_REF)
7889 d1 = TREE_OPERAND (d1, 0);
7890 if (TREE_CODE (d2) == INDIRECT_REF)
7891 d2 = TREE_OPERAND (d2, 0);
7892 while (TREE_CODE (d1) == COMPONENT_REF)
7893 if (TREE_CODE (d2) == COMPONENT_REF
7894 && TREE_OPERAND (d1, 1)
7895 == TREE_OPERAND (d2, 1))
7897 d1 = TREE_OPERAND (d1, 0);
7898 d2 = TREE_OPERAND (d2, 0);
7904 error_at (OMP_CLAUSE_LOCATION (c),
7905 "%qE appears more than once in map "
7906 "clauses", OMP_CLAUSE_DECL (c));
7911 o2 = wi::to_offset (offset2);
7915 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7916 if (wi::ltu_p (o1, o2)
7917 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7927 OMP_CLAUSE_SIZE (*osc)
7928 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7932 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7934 tree cl = NULL_TREE;
7935 enum gomp_map_kind mkind
7936 = code == OMP_TARGET_EXIT_DATA
7937 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7938 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7939 OMP_CLAUSE_DECL (c2)
7940 = unshare_expr (OMP_CLAUSE_DECL (c));
7941 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7942 OMP_CLAUSE_SIZE (c2)
7943 = TYPE_SIZE_UNIT (ptr_type_node);
7944 cl = scp ? *prev_list_p : c2;
7945 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7947 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7949 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7951 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7952 OMP_CLAUSE_DECL (c3)
7953 = unshare_expr (OMP_CLAUSE_DECL (c4));
7954 OMP_CLAUSE_SIZE (c3)
7955 = TYPE_SIZE_UNIT (ptr_type_node);
7956 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7958 OMP_CLAUSE_CHAIN (c2) = c3;
7964 if (sc == prev_list_p)
7971 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7972 list_p = prev_list_p;
7974 OMP_CLAUSE_CHAIN (c) = *sc;
7981 *list_p = OMP_CLAUSE_CHAIN (c);
7982 OMP_CLAUSE_CHAIN (c) = *sc;
7989 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7990 && OMP_CLAUSE_CHAIN (c)
7991 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7992 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7993 == GOMP_MAP_ALWAYS_POINTER))
7994 prev_list_p = list_p;
7997 flags = GOVD_MAP | GOVD_EXPLICIT;
7998 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7999 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8000 flags |= GOVD_MAP_ALWAYS_TO;
8003 case OMP_CLAUSE_DEPEND:
8004 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
8005 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8007 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
8011 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8013 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8014 NULL, is_gimple_val, fb_rvalue);
8015 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8017 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8022 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8023 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8024 is_gimple_val, fb_rvalue) == GS_ERROR)
8032 case OMP_CLAUSE_FROM:
8033 case OMP_CLAUSE__CACHE_:
8034 decl = OMP_CLAUSE_DECL (c);
8035 if (error_operand_p (decl))
8040 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8041 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8042 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8043 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8044 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8051 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8052 NULL, is_gimple_lvalue, fb_lvalue)
8062 case OMP_CLAUSE_USE_DEVICE_PTR:
8063 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8065 case OMP_CLAUSE_IS_DEVICE_PTR:
8066 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8070 decl = OMP_CLAUSE_DECL (c);
8072 if (error_operand_p (decl))
8077 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8079 tree t = omp_member_access_dummy_var (decl);
8082 tree v = DECL_VALUE_EXPR (decl);
8083 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8085 omp_notice_variable (outer_ctx, t, true);
8088 omp_add_variable (ctx, decl, flags);
8089 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8090 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8092 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8093 GOVD_LOCAL | GOVD_SEEN);
8094 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8095 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8097 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8099 omp_add_variable (ctx,
8100 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8101 GOVD_LOCAL | GOVD_SEEN);
8102 gimplify_omp_ctxp = ctx;
8103 push_gimplify_context ();
8105 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8106 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8108 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8109 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8110 pop_gimplify_context
8111 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8112 push_gimplify_context ();
8113 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8114 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8115 pop_gimplify_context
8116 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8117 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8118 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8120 gimplify_omp_ctxp = outer_ctx;
8122 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8123 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8125 gimplify_omp_ctxp = ctx;
8126 push_gimplify_context ();
8127 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8129 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8131 TREE_SIDE_EFFECTS (bind) = 1;
8132 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8133 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8135 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8136 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8137 pop_gimplify_context
8138 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8139 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8141 gimplify_omp_ctxp = outer_ctx;
8143 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8144 && OMP_CLAUSE_LINEAR_STMT (c))
8146 gimplify_omp_ctxp = ctx;
8147 push_gimplify_context ();
8148 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8150 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8152 TREE_SIDE_EFFECTS (bind) = 1;
8153 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8154 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8156 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8157 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8158 pop_gimplify_context
8159 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8160 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8162 gimplify_omp_ctxp = outer_ctx;
8168 case OMP_CLAUSE_COPYIN:
8169 case OMP_CLAUSE_COPYPRIVATE:
8170 decl = OMP_CLAUSE_DECL (c);
8171 if (error_operand_p (decl))
8176 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8178 && !omp_check_private (ctx, decl, true))
8181 if (is_global_var (decl))
8183 if (DECL_THREAD_LOCAL_P (decl))
8185 else if (DECL_HAS_VALUE_EXPR_P (decl))
8187 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8191 && DECL_THREAD_LOCAL_P (value))
8196 error_at (OMP_CLAUSE_LOCATION (c),
8197 "copyprivate variable %qE is not threadprivate"
8198 " or private in outer context", DECL_NAME (decl));
8202 omp_notice_variable (outer_ctx, decl, true);
8203 if (check_non_private
8204 && region_type == ORT_WORKSHARE
8205 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8206 || decl == OMP_CLAUSE_DECL (c)
8207 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8208 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8210 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8211 == POINTER_PLUS_EXPR
8212 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8213 (OMP_CLAUSE_DECL (c), 0), 0))
8215 && omp_check_private (ctx, decl, false))
8217 error ("%s variable %qE is private in outer context",
8218 check_non_private, DECL_NAME (decl));
8224 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8225 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8228 for (int i = 0; i < 2; i++)
8229 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8231 case OMP_PARALLEL: p[i] = "parallel"; break;
8232 case OMP_TASK: p[i] = "task"; break;
8233 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8234 case OMP_TARGET_DATA: p[i] = "target data"; break;
8235 case OMP_TARGET: p[i] = "target"; break;
8236 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8237 case OMP_TARGET_ENTER_DATA:
8238 p[i] = "target enter data"; break;
8239 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8240 default: gcc_unreachable ();
8242 error_at (OMP_CLAUSE_LOCATION (c),
8243 "expected %qs %<if%> clause modifier rather than %qs",
8249 case OMP_CLAUSE_FINAL:
8250 OMP_CLAUSE_OPERAND (c, 0)
8251 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8254 case OMP_CLAUSE_SCHEDULE:
8255 case OMP_CLAUSE_NUM_THREADS:
8256 case OMP_CLAUSE_NUM_TEAMS:
8257 case OMP_CLAUSE_THREAD_LIMIT:
8258 case OMP_CLAUSE_DIST_SCHEDULE:
8259 case OMP_CLAUSE_DEVICE:
8260 case OMP_CLAUSE_PRIORITY:
8261 case OMP_CLAUSE_GRAINSIZE:
8262 case OMP_CLAUSE_NUM_TASKS:
8263 case OMP_CLAUSE_HINT:
8264 case OMP_CLAUSE__CILK_FOR_COUNT_:
8265 case OMP_CLAUSE_ASYNC:
8266 case OMP_CLAUSE_WAIT:
8267 case OMP_CLAUSE_NUM_GANGS:
8268 case OMP_CLAUSE_NUM_WORKERS:
8269 case OMP_CLAUSE_VECTOR_LENGTH:
8270 case OMP_CLAUSE_WORKER:
8271 case OMP_CLAUSE_VECTOR:
8272 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8273 is_gimple_val, fb_rvalue) == GS_ERROR)
8277 case OMP_CLAUSE_GANG:
8278 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8279 is_gimple_val, fb_rvalue) == GS_ERROR)
8281 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8282 is_gimple_val, fb_rvalue) == GS_ERROR)
8286 case OMP_CLAUSE_TILE:
8287 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
8288 list = TREE_CHAIN (list))
8290 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
8291 is_gimple_val, fb_rvalue) == GS_ERROR)
8296 case OMP_CLAUSE_DEVICE_RESIDENT:
8300 case OMP_CLAUSE_NOWAIT:
8301 case OMP_CLAUSE_ORDERED:
8302 case OMP_CLAUSE_UNTIED:
8303 case OMP_CLAUSE_COLLAPSE:
8304 case OMP_CLAUSE_AUTO:
8305 case OMP_CLAUSE_SEQ:
8306 case OMP_CLAUSE_INDEPENDENT:
8307 case OMP_CLAUSE_MERGEABLE:
8308 case OMP_CLAUSE_PROC_BIND:
8309 case OMP_CLAUSE_SAFELEN:
8310 case OMP_CLAUSE_SIMDLEN:
8311 case OMP_CLAUSE_NOGROUP:
8312 case OMP_CLAUSE_THREADS:
8313 case OMP_CLAUSE_SIMD:
8316 case OMP_CLAUSE_DEFAULTMAP:
8317 ctx->target_map_scalars_firstprivate = false;
8320 case OMP_CLAUSE_ALIGNED:
8321 decl = OMP_CLAUSE_DECL (c);
8322 if (error_operand_p (decl))
8327 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8328 is_gimple_val, fb_rvalue) == GS_ERROR)
8333 if (!is_global_var (decl)
8334 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8335 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8338 case OMP_CLAUSE_DEFAULT:
8339 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8347 *list_p = OMP_CLAUSE_CHAIN (c);
8349 list_p = &OMP_CLAUSE_CHAIN (c);
8352 gimplify_omp_ctxp = ctx;
8353 if (struct_map_to_clause)
8354 delete struct_map_to_clause;
8357 /* Return true if DECL is a candidate for shared to firstprivate
8358 optimization. We only consider non-addressable scalars, not
8359 too big, and not references. */
8362 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8364 if (TREE_ADDRESSABLE (decl))
8366 tree type = TREE_TYPE (decl);
8367 if (!is_gimple_reg_type (type)
8368 || TREE_CODE (type) == REFERENCE_TYPE
8369 || TREE_ADDRESSABLE (type))
8371 /* Don't optimize too large decls, as each thread/task will have
8373 HOST_WIDE_INT len = int_size_in_bytes (type);
8374 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8376 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8381 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8382 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8383 GOVD_WRITTEN in outer contexts. */
8386 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8388 for (; ctx; ctx = ctx->outer_context)
8390 splay_tree_node n = splay_tree_lookup (ctx->variables,
8391 (splay_tree_key) decl);
8394 else if (n->value & GOVD_SHARED)
8396 n->value |= GOVD_WRITTEN;
8399 else if (n->value & GOVD_DATA_SHARE_CLASS)
8404 /* Helper callback for walk_gimple_seq to discover possible stores
8405 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8406 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8410 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8412 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8421 if (handled_component_p (op))
8422 op = TREE_OPERAND (op, 0);
8423 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8424 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8425 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8430 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8433 omp_mark_stores (gimplify_omp_ctxp, op);
8437 /* Helper callback for walk_gimple_seq to discover possible stores
8438 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8439 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8443 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8444 bool *handled_ops_p,
8445 struct walk_stmt_info *wi)
8447 gimple *stmt = gsi_stmt (*gsi_p);
8448 switch (gimple_code (stmt))
8450 /* Don't recurse on OpenMP constructs for which
8451 gimplify_adjust_omp_clauses already handled the bodies,
8452 except handle gimple_omp_for_pre_body. */
8453 case GIMPLE_OMP_FOR:
8454 *handled_ops_p = true;
8455 if (gimple_omp_for_pre_body (stmt))
8456 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8457 omp_find_stores_stmt, omp_find_stores_op, wi);
8459 case GIMPLE_OMP_PARALLEL:
8460 case GIMPLE_OMP_TASK:
8461 case GIMPLE_OMP_SECTIONS:
8462 case GIMPLE_OMP_SINGLE:
8463 case GIMPLE_OMP_TARGET:
8464 case GIMPLE_OMP_TEAMS:
8465 case GIMPLE_OMP_CRITICAL:
8466 *handled_ops_p = true;
8474 struct gimplify_adjust_omp_clauses_data
8480 /* For all variables that were not actually used within the context,
8481 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8484 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8486 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8488 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8489 tree decl = (tree) n->key;
8490 unsigned flags = n->value;
8491 enum omp_clause_code code;
8495 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8497 if ((flags & GOVD_SEEN) == 0)
8499 if (flags & GOVD_DEBUG_PRIVATE)
8501 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
8502 private_debug = true;
8504 else if (flags & GOVD_MAP)
8505 private_debug = false;
8508 = lang_hooks.decls.omp_private_debug_clause (decl,
8509 !!(flags & GOVD_SHARED));
8511 code = OMP_CLAUSE_PRIVATE;
8512 else if (flags & GOVD_MAP)
8513 code = OMP_CLAUSE_MAP;
8514 else if (flags & GOVD_SHARED)
8516 if (is_global_var (decl))
8518 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8522 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8523 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8524 | GOVD_PRIVATE | GOVD_REDUCTION
8525 | GOVD_LINEAR | GOVD_MAP)) != 0)
8527 ctx = ctx->outer_context;
8532 code = OMP_CLAUSE_SHARED;
8534 else if (flags & GOVD_PRIVATE)
8535 code = OMP_CLAUSE_PRIVATE;
8536 else if (flags & GOVD_FIRSTPRIVATE)
8537 code = OMP_CLAUSE_FIRSTPRIVATE;
8538 else if (flags & GOVD_LASTPRIVATE)
8539 code = OMP_CLAUSE_LASTPRIVATE;
8540 else if (flags & GOVD_ALIGNED)
8545 if (((flags & GOVD_LASTPRIVATE)
8546 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8547 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8548 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8550 tree chain = *list_p;
8551 clause = build_omp_clause (input_location, code);
8552 OMP_CLAUSE_DECL (clause) = decl;
8553 OMP_CLAUSE_CHAIN (clause) = chain;
8555 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8556 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8557 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8558 else if (code == OMP_CLAUSE_SHARED
8559 && (flags & GOVD_WRITTEN) == 0
8560 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8561 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8562 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8563 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8564 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8566 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8567 OMP_CLAUSE_DECL (nc) = decl;
8568 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8569 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8570 OMP_CLAUSE_DECL (clause)
8571 = build_simple_mem_ref_loc (input_location, decl);
8572 OMP_CLAUSE_DECL (clause)
8573 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8574 build_int_cst (build_pointer_type (char_type_node), 0));
8575 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8576 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8577 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8578 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8579 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8580 OMP_CLAUSE_CHAIN (nc) = chain;
8581 OMP_CLAUSE_CHAIN (clause) = nc;
8582 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8583 gimplify_omp_ctxp = ctx->outer_context;
8584 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8585 pre_p, NULL, is_gimple_val, fb_rvalue);
8586 gimplify_omp_ctxp = ctx;
8588 else if (code == OMP_CLAUSE_MAP)
8590 int kind = (flags & GOVD_MAP_TO_ONLY
8593 if (flags & GOVD_MAP_FORCE)
8594 kind |= GOMP_MAP_FLAG_FORCE;
8595 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8596 if (DECL_SIZE (decl)
8597 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8599 tree decl2 = DECL_VALUE_EXPR (decl);
8600 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8601 decl2 = TREE_OPERAND (decl2, 0);
8602 gcc_assert (DECL_P (decl2));
8603 tree mem = build_simple_mem_ref (decl2);
8604 OMP_CLAUSE_DECL (clause) = mem;
8605 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8606 if (gimplify_omp_ctxp->outer_context)
8608 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8609 omp_notice_variable (ctx, decl2, true);
8610 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8612 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8614 OMP_CLAUSE_DECL (nc) = decl;
8615 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8616 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8617 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8619 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8620 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8621 OMP_CLAUSE_CHAIN (clause) = nc;
8623 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8624 && lang_hooks.decls.omp_privatize_by_reference (decl))
8626 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8627 OMP_CLAUSE_SIZE (clause)
8628 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8629 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8630 gimplify_omp_ctxp = ctx->outer_context;
8631 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8632 pre_p, NULL, is_gimple_val, fb_rvalue);
8633 gimplify_omp_ctxp = ctx;
8634 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8636 OMP_CLAUSE_DECL (nc) = decl;
8637 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8638 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8639 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8640 OMP_CLAUSE_CHAIN (clause) = nc;
8643 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8645 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8647 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8648 OMP_CLAUSE_DECL (nc) = decl;
8649 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8650 OMP_CLAUSE_CHAIN (nc) = chain;
8651 OMP_CLAUSE_CHAIN (clause) = nc;
8652 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8653 gimplify_omp_ctxp = ctx->outer_context;
8654 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8655 gimplify_omp_ctxp = ctx;
8658 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8659 gimplify_omp_ctxp = ctx->outer_context;
8660 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8661 if (gimplify_omp_ctxp)
8662 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8663 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8664 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8665 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8667 gimplify_omp_ctxp = ctx;
8672 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8673 enum tree_code code)
8675 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8680 struct gimplify_omp_ctx *octx;
8681 for (octx = ctx; octx; octx = octx->outer_context)
8682 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8686 struct walk_stmt_info wi;
8687 memset (&wi, 0, sizeof (wi));
8688 walk_gimple_seq (body, omp_find_stores_stmt,
8689 omp_find_stores_op, &wi);
8692 while ((c = *list_p) != NULL)
8695 bool remove = false;
8697 switch (OMP_CLAUSE_CODE (c))
8699 case OMP_CLAUSE_PRIVATE:
8700 case OMP_CLAUSE_SHARED:
8701 case OMP_CLAUSE_FIRSTPRIVATE:
8702 case OMP_CLAUSE_LINEAR:
8703 decl = OMP_CLAUSE_DECL (c);
8704 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8705 remove = !(n->value & GOVD_SEEN);
8708 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8709 if ((n->value & GOVD_DEBUG_PRIVATE)
8710 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8712 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8713 || ((n->value & GOVD_DATA_SHARE_CLASS)
8715 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8716 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8718 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8719 && (n->value & GOVD_WRITTEN) == 0
8721 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8722 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8723 else if (DECL_P (decl)
8724 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8725 && (n->value & GOVD_WRITTEN) != 1)
8726 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8727 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8728 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8729 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8733 case OMP_CLAUSE_LASTPRIVATE:
8734 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8735 accurately reflect the presence of a FIRSTPRIVATE clause. */
8736 decl = OMP_CLAUSE_DECL (c);
8737 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8738 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8739 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8740 if (omp_no_lastprivate (ctx))
8742 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8745 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
8747 else if (code == OMP_DISTRIBUTE
8748 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8751 error_at (OMP_CLAUSE_LOCATION (c),
8752 "same variable used in %<firstprivate%> and "
8753 "%<lastprivate%> clauses on %<distribute%> "
8757 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8759 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8760 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8763 case OMP_CLAUSE_ALIGNED:
8764 decl = OMP_CLAUSE_DECL (c);
8765 if (!is_global_var (decl))
8767 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8768 remove = n == NULL || !(n->value & GOVD_SEEN);
8769 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8771 struct gimplify_omp_ctx *octx;
8773 && (n->value & (GOVD_DATA_SHARE_CLASS
8774 & ~GOVD_FIRSTPRIVATE)))
8777 for (octx = ctx->outer_context; octx;
8778 octx = octx->outer_context)
8780 n = splay_tree_lookup (octx->variables,
8781 (splay_tree_key) decl);
8784 if (n->value & GOVD_LOCAL)
8786 /* We have to avoid assigning a shared variable
8787 to itself when trying to add
8788 __builtin_assume_aligned. */
8789 if (n->value & GOVD_SHARED)
8797 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8799 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8800 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8805 case OMP_CLAUSE_MAP:
8806 if (code == OMP_TARGET_EXIT_DATA
8807 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8812 decl = OMP_CLAUSE_DECL (c);
8813 /* Data clasues associated with acc parallel reductions must be
8814 compatible with present_or_copy. Warn and adjust the clause
8815 if that is not the case. */
8816 if (ctx->region_type == ORT_ACC_PARALLEL)
8818 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8822 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8824 if (n && (n->value & GOVD_REDUCTION))
8826 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8828 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8829 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8830 && kind != GOMP_MAP_FORCE_PRESENT
8831 && kind != GOMP_MAP_POINTER)
8833 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8834 "incompatible data clause with reduction "
8835 "on %qE; promoting to present_or_copy",
8837 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
8843 if ((ctx->region_type & ORT_TARGET) != 0
8844 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8846 if (TREE_CODE (decl) == INDIRECT_REF
8847 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8848 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8850 decl = TREE_OPERAND (decl, 0);
8851 if (TREE_CODE (decl) == COMPONENT_REF)
8853 while (TREE_CODE (decl) == COMPONENT_REF)
8854 decl = TREE_OPERAND (decl, 0);
8857 n = splay_tree_lookup (ctx->variables,
8858 (splay_tree_key) decl);
8859 if (!(n->value & GOVD_SEEN))
8866 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8867 if ((ctx->region_type & ORT_TARGET) != 0
8868 && !(n->value & GOVD_SEEN)
8869 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
8870 && !lookup_attribute ("omp declare target link",
8871 DECL_ATTRIBUTES (decl)))
8874 /* For struct element mapping, if struct is never referenced
8875 in target block and none of the mapping has always modifier,
8876 remove all the struct element mappings, which immediately
8877 follow the GOMP_MAP_STRUCT map clause. */
8878 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
8880 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
8882 OMP_CLAUSE_CHAIN (c)
8883 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
8886 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
8887 && code == OMP_TARGET_EXIT_DATA)
8889 else if (DECL_SIZE (decl)
8890 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
8891 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
8892 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
8893 && (OMP_CLAUSE_MAP_KIND (c)
8894 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8896 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8897 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8899 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
8901 tree decl2 = DECL_VALUE_EXPR (decl);
8902 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8903 decl2 = TREE_OPERAND (decl2, 0);
8904 gcc_assert (DECL_P (decl2));
8905 tree mem = build_simple_mem_ref (decl2);
8906 OMP_CLAUSE_DECL (c) = mem;
8907 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8908 if (ctx->outer_context)
8910 omp_notice_variable (ctx->outer_context, decl2, true);
8911 omp_notice_variable (ctx->outer_context,
8912 OMP_CLAUSE_SIZE (c), true);
8914 if (((ctx->region_type & ORT_TARGET) != 0
8915 || !ctx->target_firstprivatize_array_bases)
8916 && ((n->value & GOVD_SEEN) == 0
8917 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8919 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8921 OMP_CLAUSE_DECL (nc) = decl;
8922 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8923 if (ctx->target_firstprivatize_array_bases)
8924 OMP_CLAUSE_SET_MAP_KIND (nc,
8925 GOMP_MAP_FIRSTPRIVATE_POINTER);
8927 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8928 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8929 OMP_CLAUSE_CHAIN (c) = nc;
8935 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8936 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8937 gcc_assert ((n->value & GOVD_SEEN) == 0
8938 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8944 case OMP_CLAUSE_FROM:
8945 case OMP_CLAUSE__CACHE_:
8946 decl = OMP_CLAUSE_DECL (c);
8949 if (DECL_SIZE (decl)
8950 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8952 tree decl2 = DECL_VALUE_EXPR (decl);
8953 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8954 decl2 = TREE_OPERAND (decl2, 0);
8955 gcc_assert (DECL_P (decl2));
8956 tree mem = build_simple_mem_ref (decl2);
8957 OMP_CLAUSE_DECL (c) = mem;
8958 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8959 if (ctx->outer_context)
8961 omp_notice_variable (ctx->outer_context, decl2, true);
8962 omp_notice_variable (ctx->outer_context,
8963 OMP_CLAUSE_SIZE (c), true);
8966 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8967 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8970 case OMP_CLAUSE_REDUCTION:
8971 decl = OMP_CLAUSE_DECL (c);
8972 /* OpenACC reductions need a present_or_copy data clause.
8973 Add one if necessary. Error is the reduction is private. */
8974 if (ctx->region_type == ORT_ACC_PARALLEL)
8976 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8977 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8978 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
8979 "reduction on %qE", DECL_NAME (decl));
8980 else if ((n->value & GOVD_MAP) == 0)
8982 tree next = OMP_CLAUSE_CHAIN (c);
8983 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
8984 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
8985 OMP_CLAUSE_DECL (nc) = decl;
8986 OMP_CLAUSE_CHAIN (c) = nc;
8987 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8990 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
8991 if (OMP_CLAUSE_CHAIN (nc) == NULL)
8993 nc = OMP_CLAUSE_CHAIN (nc);
8995 OMP_CLAUSE_CHAIN (nc) = next;
8996 n->value |= GOVD_MAP;
9000 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9001 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9003 case OMP_CLAUSE_COPYIN:
9004 case OMP_CLAUSE_COPYPRIVATE:
9006 case OMP_CLAUSE_NUM_THREADS:
9007 case OMP_CLAUSE_NUM_TEAMS:
9008 case OMP_CLAUSE_THREAD_LIMIT:
9009 case OMP_CLAUSE_DIST_SCHEDULE:
9010 case OMP_CLAUSE_DEVICE:
9011 case OMP_CLAUSE_SCHEDULE:
9012 case OMP_CLAUSE_NOWAIT:
9013 case OMP_CLAUSE_ORDERED:
9014 case OMP_CLAUSE_DEFAULT:
9015 case OMP_CLAUSE_UNTIED:
9016 case OMP_CLAUSE_COLLAPSE:
9017 case OMP_CLAUSE_FINAL:
9018 case OMP_CLAUSE_MERGEABLE:
9019 case OMP_CLAUSE_PROC_BIND:
9020 case OMP_CLAUSE_SAFELEN:
9021 case OMP_CLAUSE_SIMDLEN:
9022 case OMP_CLAUSE_DEPEND:
9023 case OMP_CLAUSE_PRIORITY:
9024 case OMP_CLAUSE_GRAINSIZE:
9025 case OMP_CLAUSE_NUM_TASKS:
9026 case OMP_CLAUSE_NOGROUP:
9027 case OMP_CLAUSE_THREADS:
9028 case OMP_CLAUSE_SIMD:
9029 case OMP_CLAUSE_HINT:
9030 case OMP_CLAUSE_DEFAULTMAP:
9031 case OMP_CLAUSE_USE_DEVICE_PTR:
9032 case OMP_CLAUSE_IS_DEVICE_PTR:
9033 case OMP_CLAUSE__CILK_FOR_COUNT_:
9034 case OMP_CLAUSE_ASYNC:
9035 case OMP_CLAUSE_WAIT:
9036 case OMP_CLAUSE_DEVICE_RESIDENT:
9037 case OMP_CLAUSE_INDEPENDENT:
9038 case OMP_CLAUSE_NUM_GANGS:
9039 case OMP_CLAUSE_NUM_WORKERS:
9040 case OMP_CLAUSE_VECTOR_LENGTH:
9041 case OMP_CLAUSE_GANG:
9042 case OMP_CLAUSE_WORKER:
9043 case OMP_CLAUSE_VECTOR:
9044 case OMP_CLAUSE_AUTO:
9045 case OMP_CLAUSE_SEQ:
9048 case OMP_CLAUSE_TILE:
9049 /* We're not yet making use of the information provided by OpenACC
9050 tile clauses. Discard these here, to simplify later middle end
9060 *list_p = OMP_CLAUSE_CHAIN (c);
9062 list_p = &OMP_CLAUSE_CHAIN (c);
9065 /* Add in any implicit data sharing. */
9066 struct gimplify_adjust_omp_clauses_data data;
9067 data.list_p = list_p;
9069 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9071 gimplify_omp_ctxp = ctx->outer_context;
9072 delete_omp_context (ctx);
9075 /* Gimplify OACC_CACHE. */
9078 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9080 tree expr = *expr_p;
9082 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9084 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9087 /* TODO: Do something sensible with this information. */
9089 *expr_p = NULL_TREE;
9092 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9093 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9094 kind. The entry kind will replace the one in CLAUSE, while the exit
9095 kind will be used in a new omp_clause and returned to the caller. */
9098 gimplify_oacc_declare_1 (tree clause)
9100 HOST_WIDE_INT kind, new_op;
9104 kind = OMP_CLAUSE_MAP_KIND (clause);
9108 case GOMP_MAP_ALLOC:
9109 case GOMP_MAP_FORCE_ALLOC:
9110 case GOMP_MAP_FORCE_TO:
9111 new_op = GOMP_MAP_DELETE;
9115 case GOMP_MAP_FORCE_FROM:
9116 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9117 new_op = GOMP_MAP_FORCE_FROM;
9121 case GOMP_MAP_FORCE_TOFROM:
9122 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9123 new_op = GOMP_MAP_FORCE_FROM;
9128 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9129 new_op = GOMP_MAP_FROM;
9133 case GOMP_MAP_TOFROM:
9134 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9135 new_op = GOMP_MAP_FROM;
9139 case GOMP_MAP_DEVICE_RESIDENT:
9140 case GOMP_MAP_FORCE_DEVICEPTR:
9141 case GOMP_MAP_FORCE_PRESENT:
9143 case GOMP_MAP_POINTER:
9154 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9155 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9156 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9162 /* Gimplify OACC_DECLARE. */
9165 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9167 tree expr = *expr_p;
9171 clauses = OACC_DECLARE_CLAUSES (expr);
9173 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9175 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9177 tree decl = OMP_CLAUSE_DECL (t);
9179 if (TREE_CODE (decl) == MEM_REF)
9182 if (TREE_CODE (decl) == VAR_DECL
9183 && !is_global_var (decl)
9184 && DECL_CONTEXT (decl) == current_function_decl)
9186 tree c = gimplify_oacc_declare_1 (t);
9189 if (oacc_declare_returns == NULL)
9190 oacc_declare_returns = new hash_map<tree, tree>;
9192 oacc_declare_returns->put (decl, c);
9196 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9199 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9202 gimplify_seq_add_stmt (pre_p, stmt);
9204 *expr_p = NULL_TREE;
9207 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9208 gimplification of the body, as well as scanning the body for used
9209 variables. We need to do this scan now, because variable-sized
9210 decls will be decomposed during gimplification. */
9213 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9215 tree expr = *expr_p;
9217 gimple_seq body = NULL;
9219 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9220 OMP_PARALLEL_COMBINED (expr)
9221 ? ORT_COMBINED_PARALLEL
9222 : ORT_PARALLEL, OMP_PARALLEL);
9224 push_gimplify_context ();
9226 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9227 if (gimple_code (g) == GIMPLE_BIND)
9228 pop_gimplify_context (g);
9230 pop_gimplify_context (NULL);
9232 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9235 g = gimple_build_omp_parallel (body,
9236 OMP_PARALLEL_CLAUSES (expr),
9237 NULL_TREE, NULL_TREE);
9238 if (OMP_PARALLEL_COMBINED (expr))
9239 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9240 gimplify_seq_add_stmt (pre_p, g);
9241 *expr_p = NULL_TREE;
9244 /* Gimplify the contents of an OMP_TASK statement. This involves
9245 gimplification of the body, as well as scanning the body for used
9246 variables. We need to do this scan now, because variable-sized
9247 decls will be decomposed during gimplification. */
9250 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9252 tree expr = *expr_p;
9254 gimple_seq body = NULL;
9256 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9257 find_omp_clause (OMP_TASK_CLAUSES (expr),
9259 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9261 push_gimplify_context ();
9263 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9264 if (gimple_code (g) == GIMPLE_BIND)
9265 pop_gimplify_context (g);
9267 pop_gimplify_context (NULL);
9269 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9272 g = gimple_build_omp_task (body,
9273 OMP_TASK_CLAUSES (expr),
9274 NULL_TREE, NULL_TREE,
9275 NULL_TREE, NULL_TREE, NULL_TREE);
9276 gimplify_seq_add_stmt (pre_p, g);
9277 *expr_p = NULL_TREE;
9280 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9281 with non-NULL OMP_FOR_INIT. */
9284 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9287 switch (TREE_CODE (*tp))
9293 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9297 case STATEMENT_LIST:
9307 /* Gimplify the gross structure of an OMP_FOR statement. */
9309 static enum gimplify_status
9310 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9312 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9313 enum gimplify_status ret = GS_ALL_DONE;
9314 enum gimplify_status tret;
9316 gimple_seq for_body, for_pre_body;
9318 bitmap has_decl_expr = NULL;
9319 enum omp_region_type ort = ORT_WORKSHARE;
9321 orig_for_stmt = for_stmt = *expr_p;
9323 switch (TREE_CODE (for_stmt))
9327 case OMP_DISTRIBUTE:
9333 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9334 ort = ORT_UNTIED_TASK;
9346 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9347 clause for the IV. */
9348 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9350 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9351 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9352 decl = TREE_OPERAND (t, 0);
9353 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9354 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9355 && OMP_CLAUSE_DECL (c) == decl)
9357 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9362 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9364 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9365 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9366 find_combined_omp_for, NULL, NULL);
9367 if (inner_for_stmt == NULL_TREE)
9369 gcc_assert (seen_error ());
9370 *expr_p = NULL_TREE;
9375 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9376 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9377 TREE_CODE (for_stmt));
9379 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9380 gimplify_omp_ctxp->distribute = true;
9382 /* Handle OMP_FOR_INIT. */
9383 for_pre_body = NULL;
9384 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9386 has_decl_expr = BITMAP_ALLOC (NULL);
9387 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9388 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9391 t = OMP_FOR_PRE_BODY (for_stmt);
9392 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9394 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9396 tree_stmt_iterator si;
9397 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9401 if (TREE_CODE (t) == DECL_EXPR
9402 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9403 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9407 if (OMP_FOR_PRE_BODY (for_stmt))
9409 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9410 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9413 struct gimplify_omp_ctx ctx;
9414 memset (&ctx, 0, sizeof (ctx));
9415 ctx.region_type = ORT_NONE;
9416 gimplify_omp_ctxp = &ctx;
9417 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9418 gimplify_omp_ctxp = NULL;
9421 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9423 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9424 for_stmt = inner_for_stmt;
9426 /* For taskloop, need to gimplify the start, end and step before the
9427 taskloop, outside of the taskloop omp context. */
9428 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9430 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9432 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9433 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9436 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9438 tree c = build_omp_clause (input_location,
9439 OMP_CLAUSE_FIRSTPRIVATE);
9440 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9441 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9442 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9445 /* Handle OMP_FOR_COND. */
9446 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9447 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9450 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9451 gimple_seq_empty_p (for_pre_body)
9452 ? pre_p : &for_pre_body, NULL);
9453 tree c = build_omp_clause (input_location,
9454 OMP_CLAUSE_FIRSTPRIVATE);
9455 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9456 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9457 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9460 /* Handle OMP_FOR_INCR. */
9461 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9462 if (TREE_CODE (t) == MODIFY_EXPR)
9464 decl = TREE_OPERAND (t, 0);
9465 t = TREE_OPERAND (t, 1);
9466 tree *tp = &TREE_OPERAND (t, 1);
9467 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9468 tp = &TREE_OPERAND (t, 0);
9470 if (!is_gimple_constant (*tp))
9472 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9473 ? pre_p : &for_pre_body;
9474 *tp = get_initialized_tmp_var (*tp, seq, NULL);
9475 tree c = build_omp_clause (input_location,
9476 OMP_CLAUSE_FIRSTPRIVATE);
9477 OMP_CLAUSE_DECL (c) = *tp;
9478 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9479 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9484 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9488 if (orig_for_stmt != for_stmt)
9489 gimplify_omp_ctxp->combined_loop = true;
9492 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9493 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9494 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9495 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9497 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9498 bool is_doacross = false;
9499 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9502 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9503 (OMP_FOR_INIT (for_stmt))
9507 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9509 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9510 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9512 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9513 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9514 decl = TREE_OPERAND (t, 0);
9515 gcc_assert (DECL_P (decl));
9516 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9517 || POINTER_TYPE_P (TREE_TYPE (decl)));
9520 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9521 gimplify_omp_ctxp->loop_iter_var.quick_push
9522 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9524 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9525 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9528 /* Make sure the iteration variable is private. */
9530 tree c2 = NULL_TREE;
9531 if (orig_for_stmt != for_stmt)
9532 /* Do this only on innermost construct for combined ones. */;
9533 else if (ort == ORT_SIMD)
9535 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9536 (splay_tree_key) decl);
9537 omp_is_private (gimplify_omp_ctxp, decl,
9538 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9540 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9541 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9542 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9544 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9545 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9546 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9548 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9549 || omp_no_lastprivate (gimplify_omp_ctxp))
9551 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9552 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9554 struct gimplify_omp_ctx *outer
9555 = gimplify_omp_ctxp->outer_context;
9556 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9558 if (outer->region_type == ORT_WORKSHARE
9559 && outer->combined_loop)
9561 n = splay_tree_lookup (outer->variables,
9562 (splay_tree_key)decl);
9563 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9565 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9566 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9570 struct gimplify_omp_ctx *octx = outer->outer_context;
9572 && octx->region_type == ORT_COMBINED_PARALLEL
9573 && octx->outer_context
9574 && (octx->outer_context->region_type
9576 && octx->outer_context->combined_loop)
9578 octx = octx->outer_context;
9579 n = splay_tree_lookup (octx->variables,
9580 (splay_tree_key)decl);
9581 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9583 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9584 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9591 OMP_CLAUSE_DECL (c) = decl;
9592 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9593 OMP_FOR_CLAUSES (for_stmt) = c;
9594 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9595 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9597 if (outer->region_type == ORT_WORKSHARE
9598 && outer->combined_loop)
9600 if (outer->outer_context
9601 && (outer->outer_context->region_type
9602 == ORT_COMBINED_PARALLEL))
9603 outer = outer->outer_context;
9604 else if (omp_check_private (outer, decl, false))
9607 else if (((outer->region_type & ORT_TASK) != 0)
9608 && outer->combined_loop
9609 && !omp_check_private (gimplify_omp_ctxp,
9612 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9614 omp_notice_variable (outer, decl, true);
9619 n = splay_tree_lookup (outer->variables,
9620 (splay_tree_key)decl);
9621 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9623 omp_add_variable (outer, decl,
9624 GOVD_LASTPRIVATE | GOVD_SEEN);
9625 if (outer->region_type == ORT_COMBINED_PARALLEL
9626 && outer->outer_context
9627 && (outer->outer_context->region_type
9629 && outer->outer_context->combined_loop)
9631 outer = outer->outer_context;
9632 n = splay_tree_lookup (outer->variables,
9633 (splay_tree_key)decl);
9634 if (omp_check_private (outer, decl, false))
9637 || ((n->value & GOVD_DATA_SHARE_CLASS)
9639 omp_add_variable (outer, decl,
9645 if (outer && outer->outer_context
9646 && (outer->outer_context->region_type
9647 == ORT_COMBINED_TEAMS))
9649 outer = outer->outer_context;
9650 n = splay_tree_lookup (outer->variables,
9651 (splay_tree_key)decl);
9653 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9654 omp_add_variable (outer, decl,
9655 GOVD_SHARED | GOVD_SEEN);
9659 if (outer && outer->outer_context)
9660 omp_notice_variable (outer->outer_context, decl,
9670 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9671 && !omp_no_lastprivate (gimplify_omp_ctxp);
9672 struct gimplify_omp_ctx *outer
9673 = gimplify_omp_ctxp->outer_context;
9674 if (outer && lastprivate)
9676 if (outer->region_type == ORT_WORKSHARE
9677 && outer->combined_loop)
9679 n = splay_tree_lookup (outer->variables,
9680 (splay_tree_key)decl);
9681 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9683 lastprivate = false;
9686 else if (outer->outer_context
9687 && (outer->outer_context->region_type
9688 == ORT_COMBINED_PARALLEL))
9689 outer = outer->outer_context;
9690 else if (omp_check_private (outer, decl, false))
9693 else if (((outer->region_type & ORT_TASK) != 0)
9694 && outer->combined_loop
9695 && !omp_check_private (gimplify_omp_ctxp,
9698 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9700 omp_notice_variable (outer, decl, true);
9705 n = splay_tree_lookup (outer->variables,
9706 (splay_tree_key)decl);
9707 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9709 omp_add_variable (outer, decl,
9710 GOVD_LASTPRIVATE | GOVD_SEEN);
9711 if (outer->region_type == ORT_COMBINED_PARALLEL
9712 && outer->outer_context
9713 && (outer->outer_context->region_type
9715 && outer->outer_context->combined_loop)
9717 outer = outer->outer_context;
9718 n = splay_tree_lookup (outer->variables,
9719 (splay_tree_key)decl);
9720 if (omp_check_private (outer, decl, false))
9723 || ((n->value & GOVD_DATA_SHARE_CLASS)
9725 omp_add_variable (outer, decl,
9731 if (outer && outer->outer_context
9732 && (outer->outer_context->region_type
9733 == ORT_COMBINED_TEAMS))
9735 outer = outer->outer_context;
9736 n = splay_tree_lookup (outer->variables,
9737 (splay_tree_key)decl);
9739 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9740 omp_add_variable (outer, decl,
9741 GOVD_SHARED | GOVD_SEEN);
9745 if (outer && outer->outer_context)
9746 omp_notice_variable (outer->outer_context, decl,
9752 c = build_omp_clause (input_location,
9753 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9754 : OMP_CLAUSE_PRIVATE);
9755 OMP_CLAUSE_DECL (c) = decl;
9756 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9757 OMP_FOR_CLAUSES (for_stmt) = c;
9758 omp_add_variable (gimplify_omp_ctxp, decl,
9759 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9760 | GOVD_EXPLICIT | GOVD_SEEN);
9764 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9765 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9767 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9769 /* If DECL is not a gimple register, create a temporary variable to act
9770 as an iteration counter. This is valid, since DECL cannot be
9771 modified in the body of the loop. Similarly for any iteration vars
9772 in simd with collapse > 1 where the iterator vars must be
9774 if (orig_for_stmt != for_stmt)
9776 else if (!is_gimple_reg (decl)
9778 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9780 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9781 /* Make sure omp_add_variable is not called on it prematurely.
9782 We call it ourselves a few lines later. */
9783 gimplify_omp_ctxp = NULL;
9784 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9785 gimplify_omp_ctxp = ctx;
9786 TREE_OPERAND (t, 0) = var;
9788 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9791 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9793 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9794 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9795 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9796 OMP_CLAUSE_DECL (c2) = var;
9797 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9798 OMP_FOR_CLAUSES (for_stmt) = c2;
9799 omp_add_variable (gimplify_omp_ctxp, var,
9800 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9808 omp_add_variable (gimplify_omp_ctxp, var,
9809 GOVD_PRIVATE | GOVD_SEEN);
9814 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9815 is_gimple_val, fb_rvalue);
9816 ret = MIN (ret, tret);
9817 if (ret == GS_ERROR)
9820 /* Handle OMP_FOR_COND. */
9821 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9822 gcc_assert (COMPARISON_CLASS_P (t));
9823 gcc_assert (TREE_OPERAND (t, 0) == decl);
9825 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9826 is_gimple_val, fb_rvalue);
9827 ret = MIN (ret, tret);
9829 /* Handle OMP_FOR_INCR. */
9830 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9831 switch (TREE_CODE (t))
9833 case PREINCREMENT_EXPR:
9834 case POSTINCREMENT_EXPR:
9836 tree decl = TREE_OPERAND (t, 0);
9837 /* c_omp_for_incr_canonicalize_ptr() should have been
9838 called to massage things appropriately. */
9839 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9841 if (orig_for_stmt != for_stmt)
9843 t = build_int_cst (TREE_TYPE (decl), 1);
9845 OMP_CLAUSE_LINEAR_STEP (c) = t;
9846 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9847 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9848 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9852 case PREDECREMENT_EXPR:
9853 case POSTDECREMENT_EXPR:
9854 /* c_omp_for_incr_canonicalize_ptr() should have been
9855 called to massage things appropriately. */
9856 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9857 if (orig_for_stmt != for_stmt)
9859 t = build_int_cst (TREE_TYPE (decl), -1);
9861 OMP_CLAUSE_LINEAR_STEP (c) = t;
9862 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9863 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9864 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9868 gcc_assert (TREE_OPERAND (t, 0) == decl);
9869 TREE_OPERAND (t, 0) = var;
9871 t = TREE_OPERAND (t, 1);
9872 switch (TREE_CODE (t))
9875 if (TREE_OPERAND (t, 1) == decl)
9877 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
9878 TREE_OPERAND (t, 0) = var;
9884 case POINTER_PLUS_EXPR:
9885 gcc_assert (TREE_OPERAND (t, 0) == decl);
9886 TREE_OPERAND (t, 0) = var;
9892 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9893 is_gimple_val, fb_rvalue);
9894 ret = MIN (ret, tret);
9897 tree step = TREE_OPERAND (t, 1);
9898 tree stept = TREE_TYPE (decl);
9899 if (POINTER_TYPE_P (stept))
9901 step = fold_convert (stept, step);
9902 if (TREE_CODE (t) == MINUS_EXPR)
9903 step = fold_build1 (NEGATE_EXPR, stept, step);
9904 OMP_CLAUSE_LINEAR_STEP (c) = step;
9905 if (step != TREE_OPERAND (t, 1))
9907 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
9908 &for_pre_body, NULL,
9909 is_gimple_val, fb_rvalue);
9910 ret = MIN (ret, tret);
9922 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
9925 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
9927 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
9928 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9929 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
9930 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9931 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
9932 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
9933 && OMP_CLAUSE_DECL (c) == decl)
9935 if (is_doacross && (collapse == 1 || i >= collapse))
9939 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9940 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9941 gcc_assert (TREE_OPERAND (t, 0) == var);
9942 t = TREE_OPERAND (t, 1);
9943 gcc_assert (TREE_CODE (t) == PLUS_EXPR
9944 || TREE_CODE (t) == MINUS_EXPR
9945 || TREE_CODE (t) == POINTER_PLUS_EXPR);
9946 gcc_assert (TREE_OPERAND (t, 0) == var);
9947 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
9948 is_doacross ? var : decl,
9949 TREE_OPERAND (t, 1));
9952 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9953 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
9955 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
9956 gimplify_assign (decl, t, seq);
9961 BITMAP_FREE (has_decl_expr);
9963 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9965 push_gimplify_context ();
9966 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
9968 OMP_FOR_BODY (orig_for_stmt)
9969 = build3 (BIND_EXPR, void_type_node, NULL,
9970 OMP_FOR_BODY (orig_for_stmt), NULL);
9971 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
9975 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
9978 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9980 if (gimple_code (g) == GIMPLE_BIND)
9981 pop_gimplify_context (g);
9983 pop_gimplify_context (NULL);
9986 if (orig_for_stmt != for_stmt)
9987 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9989 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9990 decl = TREE_OPERAND (t, 0);
9991 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9992 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9993 gimplify_omp_ctxp = ctx->outer_context;
9994 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9995 gimplify_omp_ctxp = ctx;
9996 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
9997 TREE_OPERAND (t, 0) = var;
9998 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9999 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10000 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10003 gimplify_adjust_omp_clauses (pre_p, for_body,
10004 &OMP_FOR_CLAUSES (orig_for_stmt),
10005 TREE_CODE (orig_for_stmt));
10008 switch (TREE_CODE (orig_for_stmt))
10010 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10011 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10012 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10013 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10014 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10015 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10016 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10018 gcc_unreachable ();
10020 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10021 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10023 if (orig_for_stmt != for_stmt)
10024 gimple_omp_for_set_combined_p (gfor, true);
10025 if (gimplify_omp_ctxp
10026 && (gimplify_omp_ctxp->combined_loop
10027 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10028 && gimplify_omp_ctxp->outer_context
10029 && gimplify_omp_ctxp->outer_context->combined_loop)))
10031 gimple_omp_for_set_combined_into_p (gfor, true);
10032 if (gimplify_omp_ctxp->combined_loop)
10033 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10035 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10038 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10040 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10041 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10042 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10043 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10044 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10045 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10046 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10047 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10050 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10051 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10052 The outer taskloop stands for computing the number of iterations,
10053 counts for collapsed loops and holding taskloop specific clauses.
10054 The task construct stands for the effect of data sharing on the
10055 explicit task it creates and the inner taskloop stands for expansion
10056 of the static loop inside of the explicit task construct. */
10057 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10059 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10060 tree task_clauses = NULL_TREE;
10061 tree c = *gfor_clauses_ptr;
10062 tree *gtask_clauses_ptr = &task_clauses;
10063 tree outer_for_clauses = NULL_TREE;
10064 tree *gforo_clauses_ptr = &outer_for_clauses;
10065 for (; c; c = OMP_CLAUSE_CHAIN (c))
10066 switch (OMP_CLAUSE_CODE (c))
10068 /* These clauses are allowed on task, move them there. */
10069 case OMP_CLAUSE_SHARED:
10070 case OMP_CLAUSE_FIRSTPRIVATE:
10071 case OMP_CLAUSE_DEFAULT:
10072 case OMP_CLAUSE_IF:
10073 case OMP_CLAUSE_UNTIED:
10074 case OMP_CLAUSE_FINAL:
10075 case OMP_CLAUSE_MERGEABLE:
10076 case OMP_CLAUSE_PRIORITY:
10077 *gtask_clauses_ptr = c;
10078 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10080 case OMP_CLAUSE_PRIVATE:
10081 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10083 /* We want private on outer for and firstprivate
10086 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10087 OMP_CLAUSE_FIRSTPRIVATE);
10088 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10089 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10090 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10091 *gforo_clauses_ptr = c;
10092 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10096 *gtask_clauses_ptr = c;
10097 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10100 /* These clauses go into outer taskloop clauses. */
10101 case OMP_CLAUSE_GRAINSIZE:
10102 case OMP_CLAUSE_NUM_TASKS:
10103 case OMP_CLAUSE_NOGROUP:
10104 *gforo_clauses_ptr = c;
10105 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10107 /* Taskloop clause we duplicate on both taskloops. */
10108 case OMP_CLAUSE_COLLAPSE:
10109 *gfor_clauses_ptr = c;
10110 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10111 *gforo_clauses_ptr = copy_node (c);
10112 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10114 /* For lastprivate, keep the clause on inner taskloop, and add
10115 a shared clause on task. If the same decl is also firstprivate,
10116 add also firstprivate clause on the inner taskloop. */
10117 case OMP_CLAUSE_LASTPRIVATE:
10118 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10120 /* For taskloop C++ lastprivate IVs, we want:
10121 1) private on outer taskloop
10122 2) firstprivate and shared on task
10123 3) lastprivate on inner taskloop */
10125 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10126 OMP_CLAUSE_FIRSTPRIVATE);
10127 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10128 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10129 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10130 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10131 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10132 OMP_CLAUSE_PRIVATE);
10133 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10134 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10135 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10136 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10138 *gfor_clauses_ptr = c;
10139 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10141 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10142 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10143 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10144 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10146 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10149 gcc_unreachable ();
10151 *gfor_clauses_ptr = NULL_TREE;
10152 *gtask_clauses_ptr = NULL_TREE;
10153 *gforo_clauses_ptr = NULL_TREE;
10154 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10155 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10156 NULL_TREE, NULL_TREE, NULL_TREE);
10157 gimple_omp_task_set_taskloop_p (g, true);
10158 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10160 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10161 gimple_omp_for_collapse (gfor),
10162 gimple_omp_for_pre_body (gfor));
10163 gimple_omp_for_set_pre_body (gfor, NULL);
10164 gimple_omp_for_set_combined_p (gforo, true);
10165 gimple_omp_for_set_combined_into_p (gfor, true);
10166 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10168 t = unshare_expr (gimple_omp_for_index (gfor, i));
10169 gimple_omp_for_set_index (gforo, i, t);
10170 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10171 gimple_omp_for_set_initial (gforo, i, t);
10172 gimple_omp_for_set_cond (gforo, i,
10173 gimple_omp_for_cond (gfor, i));
10174 t = unshare_expr (gimple_omp_for_final (gfor, i));
10175 gimple_omp_for_set_final (gforo, i, t);
10176 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10177 gimple_omp_for_set_incr (gforo, i, t);
10179 gimplify_seq_add_stmt (pre_p, gforo);
10182 gimplify_seq_add_stmt (pre_p, gfor);
10183 if (ret != GS_ALL_DONE)
10185 *expr_p = NULL_TREE;
10186 return GS_ALL_DONE;
10189 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10190 of OMP_TARGET's body. */
10193 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10195 *walk_subtrees = 0;
10196 switch (TREE_CODE (*tp))
10201 case STATEMENT_LIST:
10202 *walk_subtrees = 1;
10210 /* Helper function of optimize_target_teams, determine if the expression
10211 can be computed safely before the target construct on the host. */
10214 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10220 *walk_subtrees = 0;
10223 switch (TREE_CODE (*tp))
10228 *walk_subtrees = 0;
10229 if (error_operand_p (*tp)
10230 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10231 || DECL_HAS_VALUE_EXPR_P (*tp)
10232 || DECL_THREAD_LOCAL_P (*tp)
10233 || TREE_SIDE_EFFECTS (*tp)
10234 || TREE_THIS_VOLATILE (*tp))
10236 if (is_global_var (*tp)
10237 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10238 || lookup_attribute ("omp declare target link",
10239 DECL_ATTRIBUTES (*tp))))
10241 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10242 (splay_tree_key) *tp);
10245 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10249 else if (n->value & GOVD_LOCAL)
10251 else if (n->value & GOVD_FIRSTPRIVATE)
10253 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10254 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10258 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10262 if (TARGET_EXPR_INITIAL (*tp)
10263 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10265 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10266 walk_subtrees, NULL);
10267 /* Allow some reasonable subset of integral arithmetics. */
10271 case TRUNC_DIV_EXPR:
10272 case CEIL_DIV_EXPR:
10273 case FLOOR_DIV_EXPR:
10274 case ROUND_DIV_EXPR:
10275 case TRUNC_MOD_EXPR:
10276 case CEIL_MOD_EXPR:
10277 case FLOOR_MOD_EXPR:
10278 case ROUND_MOD_EXPR:
10280 case EXACT_DIV_EXPR:
10291 case NON_LVALUE_EXPR:
10293 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10296 /* And disallow anything else, except for comparisons. */
10298 if (COMPARISON_CLASS_P (*tp))
10304 /* Try to determine if the num_teams and/or thread_limit expressions
10305 can have their values determined already before entering the
10307 INTEGER_CSTs trivially are,
10308 integral decls that are firstprivate (explicitly or implicitly)
10309 or explicitly map(always, to:) or map(always, tofrom:) on the target
10310 region too, and expressions involving simple arithmetics on those
10311 too, function calls are not ok, dereferencing something neither etc.
10312 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10313 EXPR based on what we find:
10314 0 stands for clause not specified at all, use implementation default
10315 -1 stands for value that can't be determined easily before entering
10316 the target construct.
10317 If teams construct is not present at all, use 1 for num_teams
10318 and 0 for thread_limit (only one team is involved, and the thread
10319 limit is implementation defined. */
10322 optimize_target_teams (tree target, gimple_seq *pre_p)
10324 tree body = OMP_BODY (target);
10325 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10326 tree num_teams = integer_zero_node;
10327 tree thread_limit = integer_zero_node;
10328 location_t num_teams_loc = EXPR_LOCATION (target);
10329 location_t thread_limit_loc = EXPR_LOCATION (target);
10331 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10333 if (teams == NULL_TREE)
10334 num_teams = integer_one_node;
10336 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10338 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10341 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10343 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10346 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10350 expr = OMP_CLAUSE_OPERAND (c, 0);
10351 if (TREE_CODE (expr) == INTEGER_CST)
10356 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10358 *p = integer_minus_one_node;
10362 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10363 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
10366 gimplify_omp_ctxp = target_ctx;
10367 *p = integer_minus_one_node;
10370 gimplify_omp_ctxp = target_ctx;
10371 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10372 OMP_CLAUSE_OPERAND (c, 0) = *p;
10374 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10375 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10376 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10377 OMP_TARGET_CLAUSES (target) = c;
10378 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10379 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10380 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10381 OMP_TARGET_CLAUSES (target) = c;
10384 /* Gimplify the gross structure of several OMP constructs. */
10387 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10389 tree expr = *expr_p;
10391 gimple_seq body = NULL;
10392 enum omp_region_type ort;
10394 switch (TREE_CODE (expr))
10398 ort = ORT_WORKSHARE;
10401 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10404 ort = ORT_ACC_KERNELS;
10406 case OACC_PARALLEL:
10407 ort = ORT_ACC_PARALLEL;
10410 ort = ORT_ACC_DATA;
10412 case OMP_TARGET_DATA:
10413 ort = ORT_TARGET_DATA;
10416 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10418 case OACC_HOST_DATA:
10419 ort = ORT_ACC_HOST_DATA;
10422 gcc_unreachable ();
10424 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10426 if (TREE_CODE (expr) == OMP_TARGET)
10427 optimize_target_teams (expr, pre_p);
10428 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10430 push_gimplify_context ();
10431 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10432 if (gimple_code (g) == GIMPLE_BIND)
10433 pop_gimplify_context (g);
10435 pop_gimplify_context (NULL);
10436 if ((ort & ORT_TARGET_DATA) != 0)
10438 enum built_in_function end_ix;
10439 switch (TREE_CODE (expr))
10442 case OACC_HOST_DATA:
10443 end_ix = BUILT_IN_GOACC_DATA_END;
10445 case OMP_TARGET_DATA:
10446 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10449 gcc_unreachable ();
10451 tree fn = builtin_decl_explicit (end_ix);
10452 g = gimple_build_call (fn, 0);
10453 gimple_seq cleanup = NULL;
10454 gimple_seq_add_stmt (&cleanup, g);
10455 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10457 gimple_seq_add_stmt (&body, g);
10461 gimplify_and_add (OMP_BODY (expr), &body);
10462 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10465 switch (TREE_CODE (expr))
10468 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10469 OMP_CLAUSES (expr));
10472 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10473 OMP_CLAUSES (expr));
10475 case OACC_HOST_DATA:
10476 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10477 OMP_CLAUSES (expr));
10479 case OACC_PARALLEL:
10480 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10481 OMP_CLAUSES (expr));
10484 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10487 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10490 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10491 OMP_CLAUSES (expr));
10493 case OMP_TARGET_DATA:
10494 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10495 OMP_CLAUSES (expr));
10498 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10501 gcc_unreachable ();
10504 gimplify_seq_add_stmt (pre_p, stmt);
10505 *expr_p = NULL_TREE;
10508 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10509 target update constructs. */
10512 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10514 tree expr = *expr_p;
10517 enum omp_region_type ort = ORT_WORKSHARE;
10519 switch (TREE_CODE (expr))
10521 case OACC_ENTER_DATA:
10522 case OACC_EXIT_DATA:
10523 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10527 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10530 case OMP_TARGET_UPDATE:
10531 kind = GF_OMP_TARGET_KIND_UPDATE;
10533 case OMP_TARGET_ENTER_DATA:
10534 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10536 case OMP_TARGET_EXIT_DATA:
10537 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10540 gcc_unreachable ();
10542 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10543 ort, TREE_CODE (expr));
10544 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10546 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10548 gimplify_seq_add_stmt (pre_p, stmt);
10549 *expr_p = NULL_TREE;
10552 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10553 stabilized the lhs of the atomic operation as *ADDR. Return true if
10554 EXPR is this stabilized form. */
10557 goa_lhs_expr_p (tree expr, tree addr)
10559 /* Also include casts to other type variants. The C front end is fond
10560 of adding these for e.g. volatile variables. This is like
10561 STRIP_TYPE_NOPS but includes the main variant lookup. */
10562 STRIP_USELESS_TYPE_CONVERSION (expr);
10564 if (TREE_CODE (expr) == INDIRECT_REF)
10566 expr = TREE_OPERAND (expr, 0);
10567 while (expr != addr
10568 && (CONVERT_EXPR_P (expr)
10569 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10570 && TREE_CODE (expr) == TREE_CODE (addr)
10571 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10573 expr = TREE_OPERAND (expr, 0);
10574 addr = TREE_OPERAND (addr, 0);
10578 return (TREE_CODE (addr) == ADDR_EXPR
10579 && TREE_CODE (expr) == ADDR_EXPR
10580 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10582 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10587 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10588 expression does not involve the lhs, evaluate it into a temporary.
10589 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10590 or -1 if an error was encountered. */
10593 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10596 tree expr = *expr_p;
10599 if (goa_lhs_expr_p (expr, lhs_addr))
10604 if (is_gimple_val (expr))
10608 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10611 case tcc_comparison:
10612 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10616 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10619 case tcc_expression:
10620 switch (TREE_CODE (expr))
10622 case TRUTH_ANDIF_EXPR:
10623 case TRUTH_ORIF_EXPR:
10624 case TRUTH_AND_EXPR:
10625 case TRUTH_OR_EXPR:
10626 case TRUTH_XOR_EXPR:
10627 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10628 lhs_addr, lhs_var);
10630 case TRUTH_NOT_EXPR:
10631 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10632 lhs_addr, lhs_var);
10634 case COMPOUND_EXPR:
10635 /* Break out any preevaluations from cp_build_modify_expr. */
10636 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10637 expr = TREE_OPERAND (expr, 1))
10638 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10640 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10651 enum gimplify_status gs;
10652 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10653 if (gs != GS_ALL_DONE)
10660 /* Gimplify an OMP_ATOMIC statement. */
10662 static enum gimplify_status
10663 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10665 tree addr = TREE_OPERAND (*expr_p, 0);
10666 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10667 ? NULL : TREE_OPERAND (*expr_p, 1);
10668 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10670 gomp_atomic_load *loadstmt;
10671 gomp_atomic_store *storestmt;
10673 tmp_load = create_tmp_reg (type);
10674 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10677 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10681 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10682 gimplify_seq_add_stmt (pre_p, loadstmt);
10683 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10687 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10689 storestmt = gimple_build_omp_atomic_store (rhs);
10690 gimplify_seq_add_stmt (pre_p, storestmt);
10691 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10693 gimple_omp_atomic_set_seq_cst (loadstmt);
10694 gimple_omp_atomic_set_seq_cst (storestmt);
10696 switch (TREE_CODE (*expr_p))
10698 case OMP_ATOMIC_READ:
10699 case OMP_ATOMIC_CAPTURE_OLD:
10700 *expr_p = tmp_load;
10701 gimple_omp_atomic_set_need_value (loadstmt);
10703 case OMP_ATOMIC_CAPTURE_NEW:
10705 gimple_omp_atomic_set_need_value (storestmt);
10712 return GS_ALL_DONE;
10715 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10716 body, and adding some EH bits. */
10718 static enum gimplify_status
10719 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10721 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10723 gtransaction *trans_stmt;
10724 gimple_seq body = NULL;
10727 /* Wrap the transaction body in a BIND_EXPR so we have a context
10728 where to put decls for OMP. */
10729 if (TREE_CODE (tbody) != BIND_EXPR)
10731 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10732 TREE_SIDE_EFFECTS (bind) = 1;
10733 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10734 TRANSACTION_EXPR_BODY (expr) = bind;
10737 push_gimplify_context ();
10738 temp = voidify_wrapper_expr (*expr_p, NULL);
10740 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10741 pop_gimplify_context (body_stmt);
10743 trans_stmt = gimple_build_transaction (body);
10744 if (TRANSACTION_EXPR_OUTER (expr))
10745 subcode = GTMA_IS_OUTER;
10746 else if (TRANSACTION_EXPR_RELAXED (expr))
10747 subcode = GTMA_IS_RELAXED;
10748 gimple_transaction_set_subcode (trans_stmt, subcode);
10750 gimplify_seq_add_stmt (pre_p, trans_stmt);
10758 *expr_p = NULL_TREE;
10759 return GS_ALL_DONE;
10762 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10763 is the OMP_BODY of the original EXPR (which has already been
10764 gimplified so it's not present in the EXPR).
10766 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10769 gimplify_omp_ordered (tree expr, gimple_seq body)
10774 tree source_c = NULL_TREE;
10775 tree sink_c = NULL_TREE;
10777 if (gimplify_omp_ctxp)
10779 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10781 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10782 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10783 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10785 error_at (OMP_CLAUSE_LOCATION (c),
10786 "%<ordered%> construct with %<depend%> clause must be "
10787 "closely nested inside a loop with %<ordered%> clause "
10788 "with a parameter");
10791 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10792 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10795 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10796 decls && TREE_CODE (decls) == TREE_LIST;
10797 decls = TREE_CHAIN (decls), ++i)
10798 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10800 else if (TREE_VALUE (decls)
10801 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10803 error_at (OMP_CLAUSE_LOCATION (c),
10804 "variable %qE is not an iteration "
10805 "of outermost loop %d, expected %qE",
10806 TREE_VALUE (decls), i + 1,
10807 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10813 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10814 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10816 error_at (OMP_CLAUSE_LOCATION (c),
10817 "number of variables in %<depend(sink)%> "
10818 "clause does not match number of "
10819 "iteration variables");
10824 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10825 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
10829 error_at (OMP_CLAUSE_LOCATION (c),
10830 "more than one %<depend(source)%> clause on an "
10831 "%<ordered%> construct");
10838 if (source_c && sink_c)
10840 error_at (OMP_CLAUSE_LOCATION (source_c),
10841 "%<depend(source)%> clause specified together with "
10842 "%<depend(sink:)%> clauses on the same construct");
10847 return gimple_build_nop ();
10848 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
10851 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10852 expression produces a value to be used as an operand inside a GIMPLE
10853 statement, the value will be stored back in *EXPR_P. This value will
10854 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10855 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10856 emitted in PRE_P and POST_P.
10858 Additionally, this process may overwrite parts of the input
10859 expression during gimplification. Ideally, it should be
10860 possible to do non-destructive gimplification.
10862 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10863 the expression needs to evaluate to a value to be used as
10864 an operand in a GIMPLE statement, this value will be stored in
10865 *EXPR_P on exit. This happens when the caller specifies one
10866 of fb_lvalue or fb_rvalue fallback flags.
10868 PRE_P will contain the sequence of GIMPLE statements corresponding
10869 to the evaluation of EXPR and all the side-effects that must
10870 be executed before the main expression. On exit, the last
10871 statement of PRE_P is the core statement being gimplified. For
10872 instance, when gimplifying 'if (++a)' the last statement in
10873 PRE_P will be 'if (t.1)' where t.1 is the result of
10874 pre-incrementing 'a'.
10876 POST_P will contain the sequence of GIMPLE statements corresponding
10877 to the evaluation of all the side-effects that must be executed
10878 after the main expression. If this is NULL, the post
10879 side-effects are stored at the end of PRE_P.
10881 The reason why the output is split in two is to handle post
10882 side-effects explicitly. In some cases, an expression may have
10883 inner and outer post side-effects which need to be emitted in
10884 an order different from the one given by the recursive
10885 traversal. For instance, for the expression (*p--)++ the post
10886 side-effects of '--' must actually occur *after* the post
10887 side-effects of '++'. However, gimplification will first visit
10888 the inner expression, so if a separate POST sequence was not
10889 used, the resulting sequence would be:
10896 However, the post-decrement operation in line #2 must not be
10897 evaluated until after the store to *p at line #4, so the
10898 correct sequence should be:
10905 So, by specifying a separate post queue, it is possible
10906 to emit the post side-effects in the correct order.
10907 If POST_P is NULL, an internal queue will be used. Before
10908 returning to the caller, the sequence POST_P is appended to
10909 the main output sequence PRE_P.
10911 GIMPLE_TEST_F points to a function that takes a tree T and
10912 returns nonzero if T is in the GIMPLE form requested by the
10913 caller. The GIMPLE predicates are in gimple.c.
10915 FALLBACK tells the function what sort of a temporary we want if
10916 gimplification cannot produce an expression that complies with
10919 fb_none means that no temporary should be generated
10920 fb_rvalue means that an rvalue is OK to generate
10921 fb_lvalue means that an lvalue is OK to generate
10922 fb_either means that either is OK, but an lvalue is preferable.
10923 fb_mayfail means that gimplification may fail (in which case
10924 GS_ERROR will be returned)
10926 The return value is either GS_ERROR or GS_ALL_DONE, since this
10927 function iterates until EXPR is completely gimplified or an error
10930 enum gimplify_status
10931 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10932 bool (*gimple_test_f) (tree), fallback_t fallback)
10935 gimple_seq internal_pre = NULL;
10936 gimple_seq internal_post = NULL;
10939 location_t saved_location;
10940 enum gimplify_status ret;
10941 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
10944 save_expr = *expr_p;
10945 if (save_expr == NULL_TREE)
10946 return GS_ALL_DONE;
10948 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
10949 is_statement = gimple_test_f == is_gimple_stmt;
10951 gcc_assert (pre_p);
10953 /* Consistency checks. */
10954 if (gimple_test_f == is_gimple_reg)
10955 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10956 else if (gimple_test_f == is_gimple_val
10957 || gimple_test_f == is_gimple_call_addr
10958 || gimple_test_f == is_gimple_condexpr
10959 || gimple_test_f == is_gimple_mem_rhs
10960 || gimple_test_f == is_gimple_mem_rhs_or_call
10961 || gimple_test_f == is_gimple_reg_rhs
10962 || gimple_test_f == is_gimple_reg_rhs_or_call
10963 || gimple_test_f == is_gimple_asm_val
10964 || gimple_test_f == is_gimple_mem_ref_addr)
10965 gcc_assert (fallback & fb_rvalue);
10966 else if (gimple_test_f == is_gimple_min_lval
10967 || gimple_test_f == is_gimple_lvalue)
10968 gcc_assert (fallback & fb_lvalue);
10969 else if (gimple_test_f == is_gimple_addressable)
10970 gcc_assert (fallback & fb_either);
10971 else if (gimple_test_f == is_gimple_stmt)
10972 gcc_assert (fallback == fb_none);
10975 /* We should have recognized the GIMPLE_TEST_F predicate to
10976 know what kind of fallback to use in case a temporary is
10977 needed to hold the value or address of *EXPR_P. */
10978 gcc_unreachable ();
10981 /* We used to check the predicate here and return immediately if it
10982 succeeds. This is wrong; the design is for gimplification to be
10983 idempotent, and for the predicates to only test for valid forms, not
10984 whether they are fully simplified. */
10986 pre_p = &internal_pre;
10988 if (post_p == NULL)
10989 post_p = &internal_post;
10991 /* Remember the last statements added to PRE_P and POST_P. Every
10992 new statement added by the gimplification helpers needs to be
10993 annotated with location information. To centralize the
10994 responsibility, we remember the last statement that had been
10995 added to both queues before gimplifying *EXPR_P. If
10996 gimplification produces new statements in PRE_P and POST_P, those
10997 statements will be annotated with the same location information
10999 pre_last_gsi = gsi_last (*pre_p);
11000 post_last_gsi = gsi_last (*post_p);
11002 saved_location = input_location;
11003 if (save_expr != error_mark_node
11004 && EXPR_HAS_LOCATION (*expr_p))
11005 input_location = EXPR_LOCATION (*expr_p);
11007 /* Loop over the specific gimplifiers until the toplevel node
11008 remains the same. */
11011 /* Strip away as many useless type conversions as possible
11012 at the toplevel. */
11013 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11015 /* Remember the expr. */
11016 save_expr = *expr_p;
11018 /* Die, die, die, my darling. */
11019 if (save_expr == error_mark_node
11020 || (TREE_TYPE (save_expr)
11021 && TREE_TYPE (save_expr) == error_mark_node))
11027 /* Do any language-specific gimplification. */
11028 ret = ((enum gimplify_status)
11029 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11032 if (*expr_p == NULL_TREE)
11034 if (*expr_p != save_expr)
11037 else if (ret != GS_UNHANDLED)
11040 /* Make sure that all the cases set 'ret' appropriately. */
11041 ret = GS_UNHANDLED;
11042 switch (TREE_CODE (*expr_p))
11044 /* First deal with the special cases. */
11046 case POSTINCREMENT_EXPR:
11047 case POSTDECREMENT_EXPR:
11048 case PREINCREMENT_EXPR:
11049 case PREDECREMENT_EXPR:
11050 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11051 fallback != fb_none,
11052 TREE_TYPE (*expr_p));
11055 case VIEW_CONVERT_EXPR:
11056 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11057 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11059 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11060 post_p, is_gimple_val, fb_rvalue);
11061 recalculate_side_effects (*expr_p);
11067 case ARRAY_RANGE_REF:
11068 case REALPART_EXPR:
11069 case IMAGPART_EXPR:
11070 case COMPONENT_REF:
11071 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11072 fallback ? fallback : fb_rvalue);
11076 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11078 /* C99 code may assign to an array in a structure value of a
11079 conditional expression, and this has undefined behavior
11080 only on execution, so create a temporary if an lvalue is
11082 if (fallback == fb_lvalue)
11084 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
11085 mark_addressable (*expr_p);
11091 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11093 /* C99 code may assign to an array in a structure returned
11094 from a function, and this has undefined behavior only on
11095 execution, so create a temporary if an lvalue is
11097 if (fallback == fb_lvalue)
11099 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
11100 mark_addressable (*expr_p);
11106 gcc_unreachable ();
11108 case COMPOUND_EXPR:
11109 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11112 case COMPOUND_LITERAL_EXPR:
11113 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11114 gimple_test_f, fallback);
11119 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11120 fallback != fb_none);
11123 case TRUTH_ANDIF_EXPR:
11124 case TRUTH_ORIF_EXPR:
11126 /* Preserve the original type of the expression and the
11127 source location of the outer expression. */
11128 tree org_type = TREE_TYPE (*expr_p);
11129 *expr_p = gimple_boolify (*expr_p);
11130 *expr_p = build3_loc (input_location, COND_EXPR,
11134 org_type, boolean_true_node),
11137 org_type, boolean_false_node));
11142 case TRUTH_NOT_EXPR:
11144 tree type = TREE_TYPE (*expr_p);
11145 /* The parsers are careful to generate TRUTH_NOT_EXPR
11146 only with operands that are always zero or one.
11147 We do not fold here but handle the only interesting case
11148 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11149 *expr_p = gimple_boolify (*expr_p);
11150 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11151 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11152 TREE_TYPE (*expr_p),
11153 TREE_OPERAND (*expr_p, 0));
11155 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11156 TREE_TYPE (*expr_p),
11157 TREE_OPERAND (*expr_p, 0),
11158 build_int_cst (TREE_TYPE (*expr_p), 1));
11159 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11160 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11166 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11169 case ANNOTATE_EXPR:
11171 tree cond = TREE_OPERAND (*expr_p, 0);
11172 tree kind = TREE_OPERAND (*expr_p, 1);
11173 tree type = TREE_TYPE (cond);
11174 if (!INTEGRAL_TYPE_P (type))
11180 tree tmp = create_tmp_var (type);
11181 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11183 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11184 gimple_call_set_lhs (call, tmp);
11185 gimplify_seq_add_stmt (pre_p, call);
11192 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11196 if (IS_EMPTY_STMT (*expr_p))
11202 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11203 || fallback == fb_none)
11205 /* Just strip a conversion to void (or in void context) and
11207 *expr_p = TREE_OPERAND (*expr_p, 0);
11212 ret = gimplify_conversion (expr_p);
11213 if (ret == GS_ERROR)
11215 if (*expr_p != save_expr)
11219 case FIX_TRUNC_EXPR:
11220 /* unary_expr: ... | '(' cast ')' val | ... */
11221 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11222 is_gimple_val, fb_rvalue);
11223 recalculate_side_effects (*expr_p);
11228 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11229 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11230 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11232 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11233 if (*expr_p != save_expr)
11239 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11240 is_gimple_reg, fb_rvalue);
11241 if (ret == GS_ERROR)
11244 recalculate_side_effects (*expr_p);
11245 *expr_p = fold_build2_loc (input_location, MEM_REF,
11246 TREE_TYPE (*expr_p),
11247 TREE_OPERAND (*expr_p, 0),
11248 build_int_cst (saved_ptr_type, 0));
11249 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11250 TREE_THIS_NOTRAP (*expr_p) = notrap;
11255 /* We arrive here through the various re-gimplifcation paths. */
11257 /* First try re-folding the whole thing. */
11258 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11259 TREE_OPERAND (*expr_p, 0),
11260 TREE_OPERAND (*expr_p, 1));
11263 REF_REVERSE_STORAGE_ORDER (tmp)
11264 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11266 recalculate_side_effects (*expr_p);
11270 /* Avoid re-gimplifying the address operand if it is already
11271 in suitable form. Re-gimplifying would mark the address
11272 operand addressable. Always gimplify when not in SSA form
11273 as we still may have to gimplify decls with value-exprs. */
11274 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
11275 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11277 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11278 is_gimple_mem_ref_addr, fb_rvalue);
11279 if (ret == GS_ERROR)
11282 recalculate_side_effects (*expr_p);
11286 /* Constants need not be gimplified. */
11293 /* Drop the overflow flag on constants, we do not want
11294 that in the GIMPLE IL. */
11295 if (TREE_OVERFLOW_P (*expr_p))
11296 *expr_p = drop_tree_overflow (*expr_p);
11301 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11302 CONST_DECL node. Otherwise the decl is replaceable by its
11304 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11305 if (fallback & fb_lvalue)
11309 *expr_p = DECL_INITIAL (*expr_p);
11315 ret = gimplify_decl_expr (expr_p, pre_p);
11319 ret = gimplify_bind_expr (expr_p, pre_p);
11323 ret = gimplify_loop_expr (expr_p, pre_p);
11327 ret = gimplify_switch_expr (expr_p, pre_p);
11331 ret = gimplify_exit_expr (expr_p);
11335 /* If the target is not LABEL, then it is a computed jump
11336 and the target needs to be gimplified. */
11337 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11339 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11340 NULL, is_gimple_val, fb_rvalue);
11341 if (ret == GS_ERROR)
11344 gimplify_seq_add_stmt (pre_p,
11345 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11350 gimplify_seq_add_stmt (pre_p,
11351 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11352 PREDICT_EXPR_OUTCOME (*expr_p)));
11357 ret = gimplify_label_expr (expr_p, pre_p);
11358 label = LABEL_EXPR_LABEL (*expr_p);
11359 gcc_assert (decl_function_context (label) == current_function_decl);
11361 /* If the label is used in a goto statement, or address of the label
11362 is taken, we need to unpoison all variables that were seen so far.
11363 Doing so would prevent us from reporting a false positives. */
11364 if (asan_sanitize_use_after_scope ()
11365 && asan_used_labels != NULL
11366 && asan_used_labels->contains (label))
11367 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11370 case CASE_LABEL_EXPR:
11371 ret = gimplify_case_label_expr (expr_p, pre_p);
11373 if (gimplify_ctxp->live_switch_vars)
11374 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11379 ret = gimplify_return_expr (*expr_p, pre_p);
11383 /* Don't reduce this in place; let gimplify_init_constructor work its
11384 magic. Buf if we're just elaborating this for side effects, just
11385 gimplify any element that has side-effects. */
11386 if (fallback == fb_none)
11388 unsigned HOST_WIDE_INT ix;
11390 tree temp = NULL_TREE;
11391 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11392 if (TREE_SIDE_EFFECTS (val))
11393 append_to_statement_list (val, &temp);
11396 ret = temp ? GS_OK : GS_ALL_DONE;
11398 /* C99 code may assign to an array in a constructed
11399 structure or union, and this has undefined behavior only
11400 on execution, so create a temporary if an lvalue is
11402 else if (fallback == fb_lvalue)
11404 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
11405 mark_addressable (*expr_p);
11412 /* The following are special cases that are not handled by the
11413 original GIMPLE grammar. */
11415 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11418 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11421 case BIT_FIELD_REF:
11422 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11423 post_p, is_gimple_lvalue, fb_either);
11424 recalculate_side_effects (*expr_p);
11427 case TARGET_MEM_REF:
11429 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11431 if (TMR_BASE (*expr_p))
11432 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11433 post_p, is_gimple_mem_ref_addr, fb_either);
11434 if (TMR_INDEX (*expr_p))
11435 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11436 post_p, is_gimple_val, fb_rvalue);
11437 if (TMR_INDEX2 (*expr_p))
11438 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11439 post_p, is_gimple_val, fb_rvalue);
11440 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11441 ret = MIN (r0, r1);
11445 case NON_LVALUE_EXPR:
11446 /* This should have been stripped above. */
11447 gcc_unreachable ();
11450 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11453 case TRY_FINALLY_EXPR:
11454 case TRY_CATCH_EXPR:
11456 gimple_seq eval, cleanup;
11459 /* Calls to destructors are generated automatically in FINALLY/CATCH
11460 block. They should have location as UNKNOWN_LOCATION. However,
11461 gimplify_call_expr will reset these call stmts to input_location
11462 if it finds stmt's location is unknown. To prevent resetting for
11463 destructors, we set the input_location to unknown.
11464 Note that this only affects the destructor calls in FINALLY/CATCH
11465 block, and will automatically reset to its original value by the
11466 end of gimplify_expr. */
11467 input_location = UNKNOWN_LOCATION;
11468 eval = cleanup = NULL;
11469 location_t finally_loc = 0;
11470 /* The cleanup location can be extracted from STATEMENT_LIST_END
11471 location added especially for this purpose. */
11472 if (TREE_OPERAND (*expr_p, 0) &&
11473 TREE_CODE (TREE_OPERAND (*expr_p, 0)) == STATEMENT_LIST)
11475 const tree_statement_list_node* last_node =
11476 STATEMENT_LIST_TAIL(TREE_OPERAND (*expr_p, 0));
11479 TREE_CODE (last_node->stmt) == STATEMENT_LIST_END)
11480 finally_loc = EXPR_LOCATION(last_node->stmt);
11482 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11483 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11484 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11485 if (gimple_seq_empty_p (cleanup))
11487 gimple_seq_add_seq (pre_p, eval);
11491 try_ = gimple_build_try (eval, cleanup,
11492 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11493 ? GIMPLE_TRY_FINALLY
11494 : GIMPLE_TRY_CATCH);
11495 if (EXPR_HAS_LOCATION (save_expr))
11496 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11497 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11498 gimple_set_location (try_, saved_location);
11499 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11500 gimple_try_set_catch_is_cleanup (try_,
11501 TRY_CATCH_IS_CLEANUP (*expr_p));
11503 gimple *last_in_seq = gimple_seq_last_stmt (cleanup);
11504 gimple_set_location(last_in_seq, finally_loc);
11506 gimplify_seq_add_stmt (pre_p, try_);
11511 case CLEANUP_POINT_EXPR:
11512 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11516 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11522 gimple_seq handler = NULL;
11523 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11524 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11525 gimplify_seq_add_stmt (pre_p, c);
11530 case EH_FILTER_EXPR:
11533 gimple_seq failure = NULL;
11535 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11536 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11537 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11538 gimplify_seq_add_stmt (pre_p, ehf);
11545 enum gimplify_status r0, r1;
11546 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11547 post_p, is_gimple_val, fb_rvalue);
11548 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11549 post_p, is_gimple_val, fb_rvalue);
11550 TREE_SIDE_EFFECTS (*expr_p) = 0;
11551 ret = MIN (r0, r1);
11556 /* We get here when taking the address of a label. We mark
11557 the label as "forced"; meaning it can never be removed and
11558 it is a potential target for any computed goto. */
11559 FORCED_LABEL (*expr_p) = 1;
11563 case STATEMENT_LIST:
11564 ret = gimplify_statement_list (expr_p, pre_p);
11567 case STATEMENT_LIST_END:
11571 case WITH_SIZE_EXPR:
11573 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11574 post_p == &internal_post ? NULL : post_p,
11575 gimple_test_f, fallback);
11576 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11577 is_gimple_val, fb_rvalue);
11584 ret = gimplify_var_or_parm_decl (expr_p);
11588 /* When within an OMP context, notice uses of variables. */
11589 if (gimplify_omp_ctxp)
11590 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11595 /* Allow callbacks into the gimplifier during optimization. */
11600 gimplify_omp_parallel (expr_p, pre_p);
11605 gimplify_omp_task (expr_p, pre_p);
11613 case OMP_DISTRIBUTE:
11616 ret = gimplify_omp_for (expr_p, pre_p);
11620 gimplify_oacc_cache (expr_p, pre_p);
11625 gimplify_oacc_declare (expr_p, pre_p);
11629 case OACC_HOST_DATA:
11632 case OACC_PARALLEL:
11636 case OMP_TARGET_DATA:
11638 gimplify_omp_workshare (expr_p, pre_p);
11642 case OACC_ENTER_DATA:
11643 case OACC_EXIT_DATA:
11645 case OMP_TARGET_UPDATE:
11646 case OMP_TARGET_ENTER_DATA:
11647 case OMP_TARGET_EXIT_DATA:
11648 gimplify_omp_target_update (expr_p, pre_p);
11654 case OMP_TASKGROUP:
11658 gimple_seq body = NULL;
11661 gimplify_and_add (OMP_BODY (*expr_p), &body);
11662 switch (TREE_CODE (*expr_p))
11665 g = gimple_build_omp_section (body);
11668 g = gimple_build_omp_master (body);
11670 case OMP_TASKGROUP:
11672 gimple_seq cleanup = NULL;
11674 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11675 g = gimple_build_call (fn, 0);
11676 gimple_seq_add_stmt (&cleanup, g);
11677 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11679 gimple_seq_add_stmt (&body, g);
11680 g = gimple_build_omp_taskgroup (body);
11684 g = gimplify_omp_ordered (*expr_p, body);
11687 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11688 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11689 gimplify_adjust_omp_clauses (pre_p, body,
11690 &OMP_CRITICAL_CLAUSES (*expr_p),
11692 g = gimple_build_omp_critical (body,
11693 OMP_CRITICAL_NAME (*expr_p),
11694 OMP_CRITICAL_CLAUSES (*expr_p));
11697 gcc_unreachable ();
11699 gimplify_seq_add_stmt (pre_p, g);
11705 case OMP_ATOMIC_READ:
11706 case OMP_ATOMIC_CAPTURE_OLD:
11707 case OMP_ATOMIC_CAPTURE_NEW:
11708 ret = gimplify_omp_atomic (expr_p, pre_p);
11711 case TRANSACTION_EXPR:
11712 ret = gimplify_transaction (expr_p, pre_p);
11715 case TRUTH_AND_EXPR:
11716 case TRUTH_OR_EXPR:
11717 case TRUTH_XOR_EXPR:
11719 tree orig_type = TREE_TYPE (*expr_p);
11720 tree new_type, xop0, xop1;
11721 *expr_p = gimple_boolify (*expr_p);
11722 new_type = TREE_TYPE (*expr_p);
11723 if (!useless_type_conversion_p (orig_type, new_type))
11725 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11730 /* Boolified binary truth expressions are semantically equivalent
11731 to bitwise binary expressions. Canonicalize them to the
11732 bitwise variant. */
11733 switch (TREE_CODE (*expr_p))
11735 case TRUTH_AND_EXPR:
11736 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11738 case TRUTH_OR_EXPR:
11739 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11741 case TRUTH_XOR_EXPR:
11742 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11747 /* Now make sure that operands have compatible type to
11748 expression's new_type. */
11749 xop0 = TREE_OPERAND (*expr_p, 0);
11750 xop1 = TREE_OPERAND (*expr_p, 1);
11751 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11752 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11755 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11756 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11759 /* Continue classified as tcc_binary. */
11763 case VEC_COND_EXPR:
11765 enum gimplify_status r0, r1, r2;
11767 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11768 post_p, is_gimple_condexpr, fb_rvalue);
11769 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11770 post_p, is_gimple_val, fb_rvalue);
11771 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11772 post_p, is_gimple_val, fb_rvalue);
11774 ret = MIN (MIN (r0, r1), r2);
11775 recalculate_side_effects (*expr_p);
11780 case VEC_PERM_EXPR:
11781 /* Classified as tcc_expression. */
11784 case POINTER_PLUS_EXPR:
11786 enum gimplify_status r0, r1;
11787 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11788 post_p, is_gimple_val, fb_rvalue);
11789 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11790 post_p, is_gimple_val, fb_rvalue);
11791 recalculate_side_effects (*expr_p);
11792 ret = MIN (r0, r1);
11796 case CILK_SYNC_STMT:
11798 if (!fn_contains_cilk_spawn_p (cfun))
11800 error_at (EXPR_LOCATION (*expr_p),
11801 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11806 gimplify_cilk_sync (expr_p, pre_p);
11813 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11815 case tcc_comparison:
11816 /* Handle comparison of objects of non scalar mode aggregates
11817 with a call to memcmp. It would be nice to only have to do
11818 this for variable-sized objects, but then we'd have to allow
11819 the same nest of reference nodes we allow for MODIFY_EXPR and
11820 that's too complex.
11822 Compare scalar mode aggregates as scalar mode values. Using
11823 memcmp for them would be very inefficient at best, and is
11824 plain wrong if bitfields are involved. */
11826 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11828 /* Vector comparisons need no boolification. */
11829 if (TREE_CODE (type) == VECTOR_TYPE)
11831 else if (!AGGREGATE_TYPE_P (type))
11833 tree org_type = TREE_TYPE (*expr_p);
11834 *expr_p = gimple_boolify (*expr_p);
11835 if (!useless_type_conversion_p (org_type,
11836 TREE_TYPE (*expr_p)))
11838 *expr_p = fold_convert_loc (input_location,
11839 org_type, *expr_p);
11845 else if (TYPE_MODE (type) != BLKmode)
11846 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11848 ret = gimplify_variable_sized_compare (expr_p);
11853 /* If *EXPR_P does not need to be special-cased, handle it
11854 according to its class. */
11856 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11857 post_p, is_gimple_val, fb_rvalue);
11863 enum gimplify_status r0, r1;
11865 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11866 post_p, is_gimple_val, fb_rvalue);
11867 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11868 post_p, is_gimple_val, fb_rvalue);
11870 ret = MIN (r0, r1);
11876 enum gimplify_status r0, r1, r2;
11878 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11879 post_p, is_gimple_val, fb_rvalue);
11880 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11881 post_p, is_gimple_val, fb_rvalue);
11882 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11883 post_p, is_gimple_val, fb_rvalue);
11885 ret = MIN (MIN (r0, r1), r2);
11889 case tcc_declaration:
11892 goto dont_recalculate;
11895 gcc_unreachable ();
11898 recalculate_side_effects (*expr_p);
11904 gcc_assert (*expr_p || ret != GS_OK);
11906 while (ret == GS_OK);
11908 /* If we encountered an error_mark somewhere nested inside, either
11909 stub out the statement or propagate the error back out. */
11910 if (ret == GS_ERROR)
11917 /* This was only valid as a return value from the langhook, which
11918 we handled. Make sure it doesn't escape from any other context. */
11919 gcc_assert (ret != GS_UNHANDLED);
11921 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
11923 /* We aren't looking for a value, and we don't have a valid
11924 statement. If it doesn't have side-effects, throw it away. */
11925 if (!TREE_SIDE_EFFECTS (*expr_p))
11927 else if (!TREE_THIS_VOLATILE (*expr_p))
11929 /* This is probably a _REF that contains something nested that
11930 has side effects. Recurse through the operands to find it. */
11931 enum tree_code code = TREE_CODE (*expr_p);
11935 case COMPONENT_REF:
11936 case REALPART_EXPR:
11937 case IMAGPART_EXPR:
11938 case VIEW_CONVERT_EXPR:
11939 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11940 gimple_test_f, fallback);
11944 case ARRAY_RANGE_REF:
11945 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11946 gimple_test_f, fallback);
11947 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11948 gimple_test_f, fallback);
11952 /* Anything else with side-effects must be converted to
11953 a valid statement before we get here. */
11954 gcc_unreachable ();
11959 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
11960 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
11962 /* Historically, the compiler has treated a bare reference
11963 to a non-BLKmode volatile lvalue as forcing a load. */
11964 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
11966 /* Normally, we do not want to create a temporary for a
11967 TREE_ADDRESSABLE type because such a type should not be
11968 copied by bitwise-assignment. However, we make an
11969 exception here, as all we are doing here is ensuring that
11970 we read the bytes that make up the type. We use
11971 create_tmp_var_raw because create_tmp_var will abort when
11972 given a TREE_ADDRESSABLE type. */
11973 tree tmp = create_tmp_var_raw (type, "vol");
11974 gimple_add_tmp_var (tmp);
11975 gimplify_assign (tmp, *expr_p, pre_p);
11979 /* We can't do anything useful with a volatile reference to
11980 an incomplete type, so just throw it away. Likewise for
11981 a BLKmode type, since any implicit inner load should
11982 already have been turned into an explicit one by the
11983 gimplification process. */
11987 /* If we are gimplifying at the statement level, we're done. Tack
11988 everything together and return. */
11989 if (fallback == fb_none || is_statement)
11991 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11992 it out for GC to reclaim it. */
11993 *expr_p = NULL_TREE;
11995 if (!gimple_seq_empty_p (internal_pre)
11996 || !gimple_seq_empty_p (internal_post))
11998 gimplify_seq_add_seq (&internal_pre, internal_post);
11999 gimplify_seq_add_seq (pre_p, internal_pre);
12002 /* The result of gimplifying *EXPR_P is going to be the last few
12003 statements in *PRE_P and *POST_P. Add location information
12004 to all the statements that were added by the gimplification
12006 if (!gimple_seq_empty_p (*pre_p))
12007 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12009 if (!gimple_seq_empty_p (*post_p))
12010 annotate_all_with_location_after (*post_p, post_last_gsi,
12016 #ifdef ENABLE_GIMPLE_CHECKING
12019 enum tree_code code = TREE_CODE (*expr_p);
12020 /* These expressions should already be in gimple IR form. */
12021 gcc_assert (code != MODIFY_EXPR
12022 && code != ASM_EXPR
12023 && code != BIND_EXPR
12024 && code != CATCH_EXPR
12025 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12026 && code != EH_FILTER_EXPR
12027 && code != GOTO_EXPR
12028 && code != LABEL_EXPR
12029 && code != LOOP_EXPR
12030 && code != SWITCH_EXPR
12031 && code != TRY_FINALLY_EXPR
12032 && code != OACC_PARALLEL
12033 && code != OACC_KERNELS
12034 && code != OACC_DATA
12035 && code != OACC_HOST_DATA
12036 && code != OACC_DECLARE
12037 && code != OACC_UPDATE
12038 && code != OACC_ENTER_DATA
12039 && code != OACC_EXIT_DATA
12040 && code != OACC_CACHE
12041 && code != OMP_CRITICAL
12043 && code != OACC_LOOP
12044 && code != OMP_MASTER
12045 && code != OMP_TASKGROUP
12046 && code != OMP_ORDERED
12047 && code != OMP_PARALLEL
12048 && code != OMP_SECTIONS
12049 && code != OMP_SECTION
12050 && code != OMP_SINGLE);
12054 /* Otherwise we're gimplifying a subexpression, so the resulting
12055 value is interesting. If it's a valid operand that matches
12056 GIMPLE_TEST_F, we're done. Unless we are handling some
12057 post-effects internally; if that's the case, we need to copy into
12058 a temporary before adding the post-effects to POST_P. */
12059 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12062 /* Otherwise, we need to create a new temporary for the gimplified
12065 /* We can't return an lvalue if we have an internal postqueue. The
12066 object the lvalue refers to would (probably) be modified by the
12067 postqueue; we need to copy the value out first, which means an
12069 if ((fallback & fb_lvalue)
12070 && gimple_seq_empty_p (internal_post)
12071 && is_gimple_addressable (*expr_p))
12073 /* An lvalue will do. Take the address of the expression, store it
12074 in a temporary, and replace the expression with an INDIRECT_REF of
12076 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12077 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12078 *expr_p = build_simple_mem_ref (tmp);
12080 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12082 /* An rvalue will do. Assign the gimplified expression into a
12083 new temporary TMP and replace the original expression with
12084 TMP. First, make sure that the expression has a type so that
12085 it can be assigned into a temporary. */
12086 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12087 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12091 #ifdef ENABLE_GIMPLE_CHECKING
12092 if (!(fallback & fb_mayfail))
12094 fprintf (stderr, "gimplification failed:\n");
12095 print_generic_expr (stderr, *expr_p, 0);
12096 debug_tree (*expr_p);
12097 internal_error ("gimplification failed");
12100 gcc_assert (fallback & fb_mayfail);
12102 /* If this is an asm statement, and the user asked for the
12103 impossible, don't die. Fail and let gimplify_asm_expr
12109 /* Make sure the temporary matches our predicate. */
12110 gcc_assert ((*gimple_test_f) (*expr_p));
12112 if (!gimple_seq_empty_p (internal_post))
12114 annotate_all_with_location (internal_post, input_location);
12115 gimplify_seq_add_seq (pre_p, internal_post);
12119 input_location = saved_location;
12123 /* Look through TYPE for variable-sized objects and gimplify each such
12124 size that we find. Add to LIST_P any statements generated. */
12127 gimplify_type_sizes (tree type, gimple_seq *list_p)
12131 if (type == NULL || type == error_mark_node)
12134 /* We first do the main variant, then copy into any other variants. */
12135 type = TYPE_MAIN_VARIANT (type);
12137 /* Avoid infinite recursion. */
12138 if (TYPE_SIZES_GIMPLIFIED (type))
12141 TYPE_SIZES_GIMPLIFIED (type) = 1;
12143 switch (TREE_CODE (type))
12146 case ENUMERAL_TYPE:
12149 case FIXED_POINT_TYPE:
12150 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12151 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12153 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12155 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12156 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12161 /* These types may not have declarations, so handle them here. */
12162 gimplify_type_sizes (TREE_TYPE (type), list_p);
12163 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12164 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12165 with assigned stack slots, for -O1+ -g they should be tracked
12167 if (!(TYPE_NAME (type)
12168 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12169 && DECL_IGNORED_P (TYPE_NAME (type)))
12170 && TYPE_DOMAIN (type)
12171 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12173 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12174 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
12175 DECL_IGNORED_P (t) = 0;
12176 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12177 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
12178 DECL_IGNORED_P (t) = 0;
12184 case QUAL_UNION_TYPE:
12185 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12186 if (TREE_CODE (field) == FIELD_DECL)
12188 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12189 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12190 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12191 gimplify_type_sizes (TREE_TYPE (field), list_p);
12196 case REFERENCE_TYPE:
12197 /* We used to recurse on the pointed-to type here, which turned out to
12198 be incorrect because its definition might refer to variables not
12199 yet initialized at this point if a forward declaration is involved.
12201 It was actually useful for anonymous pointed-to types to ensure
12202 that the sizes evaluation dominates every possible later use of the
12203 values. Restricting to such types here would be safe since there
12204 is no possible forward declaration around, but would introduce an
12205 undesirable middle-end semantic to anonymity. We then defer to
12206 front-ends the responsibility of ensuring that the sizes are
12207 evaluated both early and late enough, e.g. by attaching artificial
12208 type declarations to the tree. */
12215 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12216 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12218 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12220 TYPE_SIZE (t) = TYPE_SIZE (type);
12221 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12222 TYPE_SIZES_GIMPLIFIED (t) = 1;
12226 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12227 a size or position, has had all of its SAVE_EXPRs evaluated.
12228 We add any required statements to *STMT_P. */
12231 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12233 tree expr = *expr_p;
12235 /* We don't do anything if the value isn't there, is constant, or contains
12236 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12237 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12238 will want to replace it with a new variable, but that will cause problems
12239 if this type is from outside the function. It's OK to have that here. */
12240 if (is_gimple_sizepos (expr))
12243 *expr_p = unshare_expr (expr);
12245 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
12248 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12249 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12250 is true, also gimplify the parameters. */
12253 gimplify_body (tree fndecl, bool do_parms)
12255 location_t saved_location = input_location;
12256 gimple_seq parm_stmts, seq;
12257 gimple *outer_stmt;
12259 struct cgraph_node *cgn;
12261 timevar_push (TV_TREE_GIMPLIFY);
12263 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12265 default_rtl_profile ();
12267 gcc_assert (gimplify_ctxp == NULL);
12268 push_gimplify_context ();
12270 if (flag_openacc || flag_openmp)
12272 gcc_assert (gimplify_omp_ctxp == NULL);
12273 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12274 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12277 /* Unshare most shared trees in the body and in that of any nested functions.
12278 It would seem we don't have to do this for nested functions because
12279 they are supposed to be output and then the outer function gimplified
12280 first, but the g++ front end doesn't always do it that way. */
12281 unshare_body (fndecl);
12282 unvisit_body (fndecl);
12284 cgn = cgraph_node::get (fndecl);
12285 if (cgn && cgn->origin)
12286 nonlocal_vlas = new hash_set<tree>;
12288 /* Make sure input_location isn't set to something weird. */
12289 input_location = DECL_SOURCE_LOCATION (fndecl);
12291 /* Resolve callee-copies. This has to be done before processing
12292 the body so that DECL_VALUE_EXPR gets processed correctly. */
12293 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12295 /* Gimplify the function's body. */
12297 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12298 outer_stmt = gimple_seq_first_stmt (seq);
12301 outer_stmt = gimple_build_nop ();
12302 gimplify_seq_add_stmt (&seq, outer_stmt);
12305 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12306 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12307 if (gimple_code (outer_stmt) == GIMPLE_BIND
12308 && gimple_seq_first (seq) == gimple_seq_last (seq))
12309 outer_bind = as_a <gbind *> (outer_stmt);
12311 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12313 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12315 /* If we had callee-copies statements, insert them at the beginning
12316 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12317 if (!gimple_seq_empty_p (parm_stmts))
12321 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12322 gimple_bind_set_body (outer_bind, parm_stmts);
12324 for (parm = DECL_ARGUMENTS (current_function_decl);
12325 parm; parm = DECL_CHAIN (parm))
12326 if (DECL_HAS_VALUE_EXPR_P (parm))
12328 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12329 DECL_IGNORED_P (parm) = 0;
12335 if (nonlocal_vla_vars)
12337 /* tree-nested.c may later on call declare_vars (..., true);
12338 which relies on BLOCK_VARS chain to be the tail of the
12339 gimple_bind_vars chain. Ensure we don't violate that
12341 if (gimple_bind_block (outer_bind)
12342 == DECL_INITIAL (current_function_decl))
12343 declare_vars (nonlocal_vla_vars, outer_bind, true);
12345 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12346 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12347 nonlocal_vla_vars);
12348 nonlocal_vla_vars = NULL_TREE;
12350 delete nonlocal_vlas;
12351 nonlocal_vlas = NULL;
12354 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12355 && gimplify_omp_ctxp)
12357 delete_omp_context (gimplify_omp_ctxp);
12358 gimplify_omp_ctxp = NULL;
12361 pop_gimplify_context (outer_bind);
12362 gcc_assert (gimplify_ctxp == NULL);
12364 if (flag_checking && !seen_error ())
12365 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12367 timevar_pop (TV_TREE_GIMPLIFY);
12368 input_location = saved_location;
12373 typedef char *char_p; /* For DEF_VEC_P. */
12375 /* Return whether we should exclude FNDECL from instrumentation. */
12378 flag_instrument_functions_exclude_p (tree fndecl)
12382 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12383 if (v && v->length () > 0)
12389 name = lang_hooks.decl_printable_name (fndecl, 0);
12390 FOR_EACH_VEC_ELT (*v, i, s)
12391 if (strstr (name, s) != NULL)
12395 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12396 if (v && v->length () > 0)
12402 name = DECL_SOURCE_FILE (fndecl);
12403 FOR_EACH_VEC_ELT (*v, i, s)
12404 if (strstr (name, s) != NULL)
12411 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12412 node for the function we want to gimplify.
12414 Return the sequence of GIMPLE statements corresponding to the body
12418 gimplify_function_tree (tree fndecl)
12424 gcc_assert (!gimple_body (fndecl));
12426 if (DECL_STRUCT_FUNCTION (fndecl))
12427 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12429 push_struct_function (fndecl);
12431 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12433 cfun->curr_properties |= PROP_gimple_lva;
12435 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12437 /* Preliminarily mark non-addressed complex variables as eligible
12438 for promotion to gimple registers. We'll transform their uses
12439 as we find them. */
12440 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12441 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12442 && !TREE_THIS_VOLATILE (parm)
12443 && !needs_to_live_in_memory (parm))
12444 DECL_GIMPLE_REG_P (parm) = 1;
12447 ret = DECL_RESULT (fndecl);
12448 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12449 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12450 && !needs_to_live_in_memory (ret))
12451 DECL_GIMPLE_REG_P (ret) = 1;
12453 asan_poisoned_variables = new hash_set<tree> ();
12454 bind = gimplify_body (fndecl, true);
12455 delete asan_poisoned_variables;
12456 asan_poisoned_variables = NULL;
12458 /* The tree body of the function is no longer needed, replace it
12459 with the new GIMPLE body. */
12461 gimple_seq_add_stmt (&seq, bind);
12462 gimple_set_body (fndecl, seq);
12464 /* If we're instrumenting function entry/exit, then prepend the call to
12465 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12466 catch the exit hook. */
12467 /* ??? Add some way to ignore exceptions for this TFE. */
12468 if (flag_instrument_function_entry_exit
12469 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12470 /* Do not instrument extern inline functions. */
12471 && !(DECL_DECLARED_INLINE_P (fndecl)
12472 && DECL_EXTERNAL (fndecl)
12473 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12474 && !flag_instrument_functions_exclude_p (fndecl))
12479 gimple_seq cleanup = NULL, body = NULL;
12483 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12484 call = gimple_build_call (x, 1, integer_zero_node);
12485 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12486 gimple_call_set_lhs (call, tmp_var);
12487 gimplify_seq_add_stmt (&cleanup, call);
12488 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12489 call = gimple_build_call (x, 2,
12490 build_fold_addr_expr (current_function_decl),
12492 gimplify_seq_add_stmt (&cleanup, call);
12493 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12495 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12496 call = gimple_build_call (x, 1, integer_zero_node);
12497 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12498 gimple_call_set_lhs (call, tmp_var);
12499 gimplify_seq_add_stmt (&body, call);
12500 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12501 call = gimple_build_call (x, 2,
12502 build_fold_addr_expr (current_function_decl),
12504 gimplify_seq_add_stmt (&body, call);
12505 gimplify_seq_add_stmt (&body, tf);
12506 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
12507 /* Clear the block for BIND, since it is no longer directly inside
12508 the function, but within a try block. */
12509 gimple_bind_set_block (bind, NULL);
12511 /* Replace the current function body with the body
12512 wrapped in the try/finally TF. */
12514 gimple_seq_add_stmt (&seq, new_bind);
12515 gimple_set_body (fndecl, seq);
12519 if ((flag_sanitize & SANITIZE_THREAD) != 0
12520 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
12522 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12523 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12524 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
12525 /* Clear the block for BIND, since it is no longer directly inside
12526 the function, but within a try block. */
12527 gimple_bind_set_block (bind, NULL);
12528 /* Replace the current function body with the body
12529 wrapped in the try/finally TF. */
12531 gimple_seq_add_stmt (&seq, new_bind);
12532 gimple_set_body (fndecl, seq);
12535 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12536 cfun->curr_properties |= PROP_gimple_any;
12540 dump_function (TDI_generic, fndecl);
12543 /* Return a dummy expression of type TYPE in order to keep going after an
12547 dummy_object (tree type)
12549 tree t = build_int_cst (build_pointer_type (type), 0);
12550 return build2 (MEM_REF, type, t, t);
12553 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12554 builtin function, but a very special sort of operator. */
12556 enum gimplify_status
12557 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12558 gimple_seq *post_p ATTRIBUTE_UNUSED)
12560 tree promoted_type, have_va_type;
12561 tree valist = TREE_OPERAND (*expr_p, 0);
12562 tree type = TREE_TYPE (*expr_p);
12563 tree t, tag, aptag;
12564 location_t loc = EXPR_LOCATION (*expr_p);
12566 /* Verify that valist is of the proper type. */
12567 have_va_type = TREE_TYPE (valist);
12568 if (have_va_type == error_mark_node)
12570 have_va_type = targetm.canonical_va_list_type (have_va_type);
12572 if (have_va_type == NULL_TREE)
12574 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
12578 /* Generate a diagnostic for requesting data of a type that cannot
12579 be passed through `...' due to type promotion at the call site. */
12580 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12583 static bool gave_help;
12585 /* Use the expansion point to handle cases such as passing bool (defined
12586 in a system header) through `...'. */
12587 source_location xloc
12588 = expansion_point_location_if_in_system_header (loc);
12590 /* Unfortunately, this is merely undefined, rather than a constraint
12591 violation, so we cannot make this an error. If this call is never
12592 executed, the program is still strictly conforming. */
12593 warned = warning_at (xloc, 0,
12594 "%qT is promoted to %qT when passed through %<...%>",
12595 type, promoted_type);
12596 if (!gave_help && warned)
12599 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12600 promoted_type, type);
12603 /* We can, however, treat "undefined" any way we please.
12604 Call abort to encourage the user to fix the program. */
12606 inform (xloc, "if this code is reached, the program will abort");
12607 /* Before the abort, allow the evaluation of the va_list
12608 expression to exit or longjmp. */
12609 gimplify_and_add (valist, pre_p);
12610 t = build_call_expr_loc (loc,
12611 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12612 gimplify_and_add (t, pre_p);
12614 /* This is dead code, but go ahead and finish so that the
12615 mode of the result comes out right. */
12616 *expr_p = dummy_object (type);
12617 return GS_ALL_DONE;
12620 tag = build_int_cst (build_pointer_type (type), 0);
12621 aptag = build_int_cst (TREE_TYPE (valist), 0);
12623 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12624 valist, tag, aptag);
12626 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12627 needs to be expanded. */
12628 cfun->curr_properties &= ~PROP_gimple_lva;
12633 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12635 DST/SRC are the destination and source respectively. You can pass
12636 ungimplified trees in DST or SRC, in which case they will be
12637 converted to a gimple operand if necessary.
12639 This function returns the newly created GIMPLE_ASSIGN tuple. */
12642 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12644 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12645 gimplify_and_add (t, seq_p);
12647 return gimple_seq_last_stmt (*seq_p);
12651 gimplify_hasher::hash (const elt_t *p)
12654 return iterative_hash_expr (t, 0);
12658 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12662 enum tree_code code = TREE_CODE (t1);
12664 if (TREE_CODE (t2) != code
12665 || TREE_TYPE (t1) != TREE_TYPE (t2))
12668 if (!operand_equal_p (t1, t2, 0))
12671 /* Only allow them to compare equal if they also hash equal; otherwise
12672 results are nondeterminate, and we fail bootstrap comparison. */
12673 gcc_checking_assert (hash (p1) == hash (p2));