1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
38 #include "fold-const.h"
43 #include "gimple-fold.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
55 #include "gimple-low.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59 #include "gimple-walk.h"
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 enum gimplify_omp_var_data
69 GOVD_FIRSTPRIVATE = 16,
70 GOVD_LASTPRIVATE = 32,
74 GOVD_DEBUG_PRIVATE = 512,
75 GOVD_PRIVATE_OUTER_REF = 1024,
79 /* Flag for GOVD_MAP: don't copy back. */
80 GOVD_MAP_TO_ONLY = 8192,
82 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
83 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
85 GOVD_MAP_0LEN_ARRAY = 32768,
87 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
88 GOVD_MAP_ALWAYS_TO = 65536,
90 /* Flag for shared vars that are or might be stored to in the region. */
91 GOVD_WRITTEN = 131072,
93 /* Flag for GOVD_MAP, if it is a forced mapping. */
94 GOVD_MAP_FORCE = 262144,
96 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
97 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
104 ORT_WORKSHARE = 0x00,
108 ORT_COMBINED_PARALLEL = 0x03,
111 ORT_UNTIED_TASK = 0x05,
114 ORT_COMBINED_TEAMS = 0x09,
117 ORT_TARGET_DATA = 0x10,
119 /* Data region with offloading. */
121 ORT_COMBINED_TARGET = 0x21,
123 /* OpenACC variants. */
124 ORT_ACC = 0x40, /* A generic OpenACC region. */
125 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
126 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
127 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */
128 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */
130 /* Dummy OpenMP region, used to disable expansion of
131 DECL_VALUE_EXPRs in taskloop pre body. */
135 /* Gimplify hashtable helper. */
137 struct gimplify_hasher : free_ptr_hash <elt_t>
139 static inline hashval_t hash (const elt_t *);
140 static inline bool equal (const elt_t *, const elt_t *);
145 struct gimplify_ctx *prev_context;
147 vec<gbind *> bind_expr_stack;
149 gimple_seq conditional_cleanups;
153 vec<tree> case_labels;
154 /* The formal temporary table. Should this be persistent? */
155 hash_table<gimplify_hasher> *temp_htab;
158 unsigned into_ssa : 1;
159 unsigned allow_rhs_cond_expr : 1;
160 unsigned in_cleanup_point_expr : 1;
161 unsigned keep_stack : 1;
162 unsigned save_stack : 1;
165 struct gimplify_omp_ctx
167 struct gimplify_omp_ctx *outer_context;
168 splay_tree variables;
169 hash_set<tree> *privatized_types;
170 /* Iteration variables in an OMP_FOR. */
171 vec<tree> loop_iter_var;
173 enum omp_clause_default_kind default_kind;
174 enum omp_region_type region_type;
177 bool target_map_scalars_firstprivate;
178 bool target_map_pointers_as_0len_arrays;
179 bool target_firstprivatize_array_bases;
182 static struct gimplify_ctx *gimplify_ctxp;
183 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
185 /* Forward declaration. */
186 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
187 static hash_map<tree, tree> *oacc_declare_returns;
189 /* Shorter alias name for the above function for use in gimplify.c
193 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
195 gimple_seq_add_stmt_without_update (seq_p, gs);
198 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
199 NULL, a new sequence is allocated. This function is
200 similar to gimple_seq_add_seq, but does not scan the operands.
201 During gimplification, we need to manipulate statement sequences
202 before the def/use vectors have been constructed. */
205 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
207 gimple_stmt_iterator si;
212 si = gsi_last (*dst_p);
213 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
217 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
218 and popping gimplify contexts. */
220 static struct gimplify_ctx *ctx_pool = NULL;
222 /* Return a gimplify context struct from the pool. */
224 static inline struct gimplify_ctx *
227 struct gimplify_ctx * c = ctx_pool;
230 ctx_pool = c->prev_context;
232 c = XNEW (struct gimplify_ctx);
234 memset (c, '\0', sizeof (*c));
238 /* Put gimplify context C back into the pool. */
241 ctx_free (struct gimplify_ctx *c)
243 c->prev_context = ctx_pool;
247 /* Free allocated ctx stack memory. */
250 free_gimplify_stack (void)
252 struct gimplify_ctx *c;
254 while ((c = ctx_pool))
256 ctx_pool = c->prev_context;
262 /* Set up a context for the gimplifier. */
265 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
267 struct gimplify_ctx *c = ctx_alloc ();
269 c->prev_context = gimplify_ctxp;
271 gimplify_ctxp->into_ssa = in_ssa;
272 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
275 /* Tear down a context for the gimplifier. If BODY is non-null, then
276 put the temporaries into the outer BIND_EXPR. Otherwise, put them
279 BODY is not a sequence, but the first tuple in a sequence. */
282 pop_gimplify_context (gimple *body)
284 struct gimplify_ctx *c = gimplify_ctxp;
287 && (!c->bind_expr_stack.exists ()
288 || c->bind_expr_stack.is_empty ()));
289 c->bind_expr_stack.release ();
290 gimplify_ctxp = c->prev_context;
293 declare_vars (c->temps, body, false);
295 record_vars (c->temps);
302 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
305 gimple_push_bind_expr (gbind *bind_stmt)
307 gimplify_ctxp->bind_expr_stack.reserve (8);
308 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
311 /* Pop the first element off the stack of bindings. */
314 gimple_pop_bind_expr (void)
316 gimplify_ctxp->bind_expr_stack.pop ();
319 /* Return the first element of the stack of bindings. */
322 gimple_current_bind_expr (void)
324 return gimplify_ctxp->bind_expr_stack.last ();
327 /* Return the stack of bindings created during gimplification. */
330 gimple_bind_expr_stack (void)
332 return gimplify_ctxp->bind_expr_stack;
335 /* Return true iff there is a COND_EXPR between us and the innermost
336 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
339 gimple_conditional_context (void)
341 return gimplify_ctxp->conditions > 0;
344 /* Note that we've entered a COND_EXPR. */
347 gimple_push_condition (void)
349 #ifdef ENABLE_GIMPLE_CHECKING
350 if (gimplify_ctxp->conditions == 0)
351 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
353 ++(gimplify_ctxp->conditions);
356 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
357 now, add any conditional cleanups we've seen to the prequeue. */
360 gimple_pop_condition (gimple_seq *pre_p)
362 int conds = --(gimplify_ctxp->conditions);
364 gcc_assert (conds >= 0);
367 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
368 gimplify_ctxp->conditional_cleanups = NULL;
372 /* A stable comparison routine for use with splay trees and DECLs. */
375 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
380 return DECL_UID (a) - DECL_UID (b);
383 /* Create a new omp construct that deals with variable remapping. */
385 static struct gimplify_omp_ctx *
386 new_omp_context (enum omp_region_type region_type)
388 struct gimplify_omp_ctx *c;
390 c = XCNEW (struct gimplify_omp_ctx);
391 c->outer_context = gimplify_omp_ctxp;
392 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
393 c->privatized_types = new hash_set<tree>;
394 c->location = input_location;
395 c->region_type = region_type;
396 if ((region_type & ORT_TASK) == 0)
397 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
399 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
404 /* Destroy an omp construct that deals with variable remapping. */
407 delete_omp_context (struct gimplify_omp_ctx *c)
409 splay_tree_delete (c->variables);
410 delete c->privatized_types;
411 c->loop_iter_var.release ();
415 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
416 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
418 /* Both gimplify the statement T and append it to *SEQ_P. This function
419 behaves exactly as gimplify_stmt, but you don't have to pass T as a
423 gimplify_and_add (tree t, gimple_seq *seq_p)
425 gimplify_stmt (&t, seq_p);
428 /* Gimplify statement T into sequence *SEQ_P, and return the first
429 tuple in the sequence of generated tuples for this statement.
430 Return NULL if gimplifying T produced no tuples. */
433 gimplify_and_return_first (tree t, gimple_seq *seq_p)
435 gimple_stmt_iterator last = gsi_last (*seq_p);
437 gimplify_and_add (t, seq_p);
439 if (!gsi_end_p (last))
442 return gsi_stmt (last);
445 return gimple_seq_first_stmt (*seq_p);
448 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
449 LHS, or for a call argument. */
452 is_gimple_mem_rhs (tree t)
454 /* If we're dealing with a renamable type, either source or dest must be
455 a renamed variable. */
456 if (is_gimple_reg_type (TREE_TYPE (t)))
457 return is_gimple_val (t);
459 return is_gimple_val (t) || is_gimple_lvalue (t);
462 /* Return true if T is a CALL_EXPR or an expression that can be
463 assigned to a temporary. Note that this predicate should only be
464 used during gimplification. See the rationale for this in
465 gimplify_modify_expr. */
468 is_gimple_reg_rhs_or_call (tree t)
470 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
471 || TREE_CODE (t) == CALL_EXPR);
474 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
475 this predicate should only be used during gimplification. See the
476 rationale for this in gimplify_modify_expr. */
479 is_gimple_mem_rhs_or_call (tree t)
481 /* If we're dealing with a renamable type, either source or dest must be
482 a renamed variable. */
483 if (is_gimple_reg_type (TREE_TYPE (t)))
484 return is_gimple_val (t);
486 return (is_gimple_val (t) || is_gimple_lvalue (t)
487 || TREE_CODE (t) == CALL_EXPR);
490 /* Create a temporary with a name derived from VAL. Subroutine of
491 lookup_tmp_var; nobody else should call this function. */
494 create_tmp_from_val (tree val)
496 /* Drop all qualifiers and address-space information from the value type. */
497 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
498 tree var = create_tmp_var (type, get_name (val));
499 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
500 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
501 DECL_GIMPLE_REG_P (var) = 1;
505 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
506 an existing expression temporary. */
509 lookup_tmp_var (tree val, bool is_formal)
513 /* If not optimizing, never really reuse a temporary. local-alloc
514 won't allocate any variable that is used in more than one basic
515 block, which means it will go into memory, causing much extra
516 work in reload and final and poorer code generation, outweighing
517 the extra memory allocation here. */
518 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
519 ret = create_tmp_from_val (val);
526 if (!gimplify_ctxp->temp_htab)
527 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
528 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
531 elt_p = XNEW (elt_t);
533 elt_p->temp = ret = create_tmp_from_val (val);
546 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
549 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
554 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
555 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
556 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
559 if (gimplify_ctxp->into_ssa
560 && is_gimple_reg_type (TREE_TYPE (val)))
561 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
563 t = lookup_tmp_var (val, is_formal);
565 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
567 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
569 /* gimplify_modify_expr might want to reduce this further. */
570 gimplify_and_add (mod, pre_p);
576 /* Return a formal temporary variable initialized with VAL. PRE_P is as
577 in gimplify_expr. Only use this function if:
579 1) The value of the unfactored expression represented by VAL will not
580 change between the initialization and use of the temporary, and
581 2) The temporary will not be otherwise modified.
583 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
584 and #2 means it is inappropriate for && temps.
586 For other cases, use get_initialized_tmp_var instead. */
589 get_formal_tmp_var (tree val, gimple_seq *pre_p)
591 return internal_get_tmp_var (val, pre_p, NULL, true);
594 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
595 are as in gimplify_expr. */
598 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
600 return internal_get_tmp_var (val, pre_p, post_p, false);
603 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
604 generate debug info for them; otherwise don't. */
607 declare_vars (tree vars, gimple *gs, bool debug_info)
614 gbind *scope = as_a <gbind *> (gs);
616 temps = nreverse (last);
618 block = gimple_bind_block (scope);
619 gcc_assert (!block || TREE_CODE (block) == BLOCK);
620 if (!block || !debug_info)
622 DECL_CHAIN (last) = gimple_bind_vars (scope);
623 gimple_bind_set_vars (scope, temps);
627 /* We need to attach the nodes both to the BIND_EXPR and to its
628 associated BLOCK for debugging purposes. The key point here
629 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
630 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
631 if (BLOCK_VARS (block))
632 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
635 gimple_bind_set_vars (scope,
636 chainon (gimple_bind_vars (scope), temps));
637 BLOCK_VARS (block) = temps;
643 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
644 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
645 no such upper bound can be obtained. */
648 force_constant_size (tree var)
650 /* The only attempt we make is by querying the maximum size of objects
651 of the variable's type. */
653 HOST_WIDE_INT max_size;
655 gcc_assert (TREE_CODE (var) == VAR_DECL);
657 max_size = max_int_size_in_bytes (TREE_TYPE (var));
659 gcc_assert (max_size >= 0);
662 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
664 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
667 /* Push the temporary variable TMP into the current binding. */
670 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
672 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
674 /* Later processing assumes that the object size is constant, which might
675 not be true at this point. Force the use of a constant upper bound in
677 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
678 force_constant_size (tmp);
680 DECL_CONTEXT (tmp) = fn->decl;
681 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
683 record_vars_into (tmp, fn->decl);
686 /* Push the temporary variable TMP into the current binding. */
689 gimple_add_tmp_var (tree tmp)
691 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
693 /* Later processing assumes that the object size is constant, which might
694 not be true at this point. Force the use of a constant upper bound in
696 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
697 force_constant_size (tmp);
699 DECL_CONTEXT (tmp) = current_function_decl;
700 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
704 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
705 gimplify_ctxp->temps = tmp;
707 /* Mark temporaries local within the nearest enclosing parallel. */
708 if (gimplify_omp_ctxp)
710 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
712 && (ctx->region_type == ORT_WORKSHARE
713 || ctx->region_type == ORT_SIMD
714 || ctx->region_type == ORT_ACC))
715 ctx = ctx->outer_context;
717 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
726 /* This case is for nested functions. We need to expose the locals
728 body_seq = gimple_body (current_function_decl);
729 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
735 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
736 nodes that are referenced more than once in GENERIC functions. This is
737 necessary because gimplification (translation into GIMPLE) is performed
738 by modifying tree nodes in-place, so gimplication of a shared node in a
739 first context could generate an invalid GIMPLE form in a second context.
741 This is achieved with a simple mark/copy/unmark algorithm that walks the
742 GENERIC representation top-down, marks nodes with TREE_VISITED the first
743 time it encounters them, duplicates them if they already have TREE_VISITED
744 set, and finally removes the TREE_VISITED marks it has set.
746 The algorithm works only at the function level, i.e. it generates a GENERIC
747 representation of a function with no nodes shared within the function when
748 passed a GENERIC function (except for nodes that are allowed to be shared).
750 At the global level, it is also necessary to unshare tree nodes that are
751 referenced in more than one function, for the same aforementioned reason.
752 This requires some cooperation from the front-end. There are 2 strategies:
754 1. Manual unsharing. The front-end needs to call unshare_expr on every
755 expression that might end up being shared across functions.
757 2. Deep unsharing. This is an extension of regular unsharing. Instead
758 of calling unshare_expr on expressions that might be shared across
759 functions, the front-end pre-marks them with TREE_VISITED. This will
760 ensure that they are unshared on the first reference within functions
761 when the regular unsharing algorithm runs. The counterpart is that
762 this algorithm must look deeper than for manual unsharing, which is
763 specified by LANG_HOOKS_DEEP_UNSHARING.
765 If there are only few specific cases of node sharing across functions, it is
766 probably easier for a front-end to unshare the expressions manually. On the
767 contrary, if the expressions generated at the global level are as widespread
768 as expressions generated within functions, deep unsharing is very likely the
771 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
772 These nodes model computations that must be done once. If we were to
773 unshare something like SAVE_EXPR(i++), the gimplification process would
774 create wrong code. However, if DATA is non-null, it must hold a pointer
775 set that is used to unshare the subtrees of these nodes. */
778 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
781 enum tree_code code = TREE_CODE (t);
783 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
784 copy their subtrees if we can make sure to do it only once. */
785 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
787 if (data && !((hash_set<tree> *)data)->add (t))
793 /* Stop at types, decls, constants like copy_tree_r. */
794 else if (TREE_CODE_CLASS (code) == tcc_type
795 || TREE_CODE_CLASS (code) == tcc_declaration
796 || TREE_CODE_CLASS (code) == tcc_constant
797 /* We can't do anything sensible with a BLOCK used as an
798 expression, but we also can't just die when we see it
799 because of non-expression uses. So we avert our eyes
800 and cross our fingers. Silly Java. */
804 /* Cope with the statement expression extension. */
805 else if (code == STATEMENT_LIST)
808 /* Leave the bulk of the work to copy_tree_r itself. */
810 copy_tree_r (tp, walk_subtrees, NULL);
815 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
816 If *TP has been visited already, then *TP is deeply copied by calling
817 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
820 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
823 enum tree_code code = TREE_CODE (t);
825 /* Skip types, decls, and constants. But we do want to look at their
826 types and the bounds of types. Mark them as visited so we properly
827 unmark their subtrees on the unmark pass. If we've already seen them,
828 don't look down further. */
829 if (TREE_CODE_CLASS (code) == tcc_type
830 || TREE_CODE_CLASS (code) == tcc_declaration
831 || TREE_CODE_CLASS (code) == tcc_constant)
833 if (TREE_VISITED (t))
836 TREE_VISITED (t) = 1;
839 /* If this node has been visited already, unshare it and don't look
841 else if (TREE_VISITED (t))
843 walk_tree (tp, mostly_copy_tree_r, data, NULL);
847 /* Otherwise, mark the node as visited and keep looking. */
849 TREE_VISITED (t) = 1;
854 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
855 copy_if_shared_r callback unmodified. */
858 copy_if_shared (tree *tp, void *data)
860 walk_tree (tp, copy_if_shared_r, data, NULL);
863 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
864 any nested functions. */
867 unshare_body (tree fndecl)
869 struct cgraph_node *cgn = cgraph_node::get (fndecl);
870 /* If the language requires deep unsharing, we need a pointer set to make
871 sure we don't repeatedly unshare subtrees of unshareable nodes. */
872 hash_set<tree> *visited
873 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
875 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
876 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
877 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
882 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
883 unshare_body (cgn->decl);
886 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
887 Subtrees are walked until the first unvisited node is encountered. */
890 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
894 /* If this node has been visited, unmark it and keep looking. */
895 if (TREE_VISITED (t))
896 TREE_VISITED (t) = 0;
898 /* Otherwise, don't look any deeper. */
905 /* Unmark the visited trees rooted at *TP. */
908 unmark_visited (tree *tp)
910 walk_tree (tp, unmark_visited_r, NULL, NULL);
913 /* Likewise, but mark all trees as not visited. */
916 unvisit_body (tree fndecl)
918 struct cgraph_node *cgn = cgraph_node::get (fndecl);
920 unmark_visited (&DECL_SAVED_TREE (fndecl));
921 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
922 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
925 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
926 unvisit_body (cgn->decl);
929 /* Unconditionally make an unshared copy of EXPR. This is used when using
930 stored expressions which span multiple functions, such as BINFO_VTABLE,
931 as the normal unsharing process can't tell that they're shared. */
934 unshare_expr (tree expr)
936 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
940 /* Worker for unshare_expr_without_location. */
943 prune_expr_location (tree *tp, int *walk_subtrees, void *)
946 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
952 /* Similar to unshare_expr but also prune all expression locations
956 unshare_expr_without_location (tree expr)
958 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
960 walk_tree (&expr, prune_expr_location, NULL, NULL);
964 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
965 contain statements and have a value. Assign its value to a temporary
966 and give it void_type_node. Return the temporary, or NULL_TREE if
967 WRAPPER was already void. */
970 voidify_wrapper_expr (tree wrapper, tree temp)
972 tree type = TREE_TYPE (wrapper);
973 if (type && !VOID_TYPE_P (type))
977 /* Set p to point to the body of the wrapper. Loop until we find
978 something that isn't a wrapper. */
979 for (p = &wrapper; p && *p; )
981 switch (TREE_CODE (*p))
984 TREE_SIDE_EFFECTS (*p) = 1;
985 TREE_TYPE (*p) = void_type_node;
986 /* For a BIND_EXPR, the body is operand 1. */
987 p = &BIND_EXPR_BODY (*p);
990 case CLEANUP_POINT_EXPR:
991 case TRY_FINALLY_EXPR:
993 TREE_SIDE_EFFECTS (*p) = 1;
994 TREE_TYPE (*p) = void_type_node;
995 p = &TREE_OPERAND (*p, 0);
1000 tree_stmt_iterator i = tsi_last (*p);
1001 if (TREE_CODE(*tsi_stmt_ptr (i)) == STATEMENT_LIST_END)
1006 TREE_SIDE_EFFECTS (*p) = 1;
1007 TREE_TYPE (*p) = void_type_node;
1008 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1013 /* Advance to the last statement. Set all container types to
1015 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1017 TREE_SIDE_EFFECTS (*p) = 1;
1018 TREE_TYPE (*p) = void_type_node;
1022 case TRANSACTION_EXPR:
1023 TREE_SIDE_EFFECTS (*p) = 1;
1024 TREE_TYPE (*p) = void_type_node;
1025 p = &TRANSACTION_EXPR_BODY (*p);
1029 /* Assume that any tree upon which voidify_wrapper_expr is
1030 directly called is a wrapper, and that its body is op0. */
1033 TREE_SIDE_EFFECTS (*p) = 1;
1034 TREE_TYPE (*p) = void_type_node;
1035 p = &TREE_OPERAND (*p, 0);
1043 if (p == NULL || IS_EMPTY_STMT (*p))
1047 /* The wrapper is on the RHS of an assignment that we're pushing
1049 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1050 || TREE_CODE (temp) == MODIFY_EXPR);
1051 TREE_OPERAND (temp, 1) = *p;
1056 temp = create_tmp_var (type, "retval");
1057 *p = build2 (INIT_EXPR, type, temp, *p);
1066 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1067 a temporary through which they communicate. */
1070 build_stack_save_restore (gcall **save, gcall **restore)
1074 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1075 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1076 gimple_call_set_lhs (*save, tmp_var);
1079 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1083 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1085 static enum gimplify_status
1086 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1088 tree bind_expr = *expr_p;
1089 bool old_keep_stack = gimplify_ctxp->keep_stack;
1090 bool old_save_stack = gimplify_ctxp->save_stack;
1093 gimple_seq body, cleanup;
1095 location_t start_locus = 0, end_locus = 0;
1096 tree ret_clauses = NULL;
1098 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1100 /* Mark variables seen in this bind expr. */
1101 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1103 if (TREE_CODE (t) == VAR_DECL)
1105 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1107 /* Mark variable as local. */
1108 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1109 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1110 || splay_tree_lookup (ctx->variables,
1111 (splay_tree_key) t) == NULL))
1113 if (ctx->region_type == ORT_SIMD
1114 && TREE_ADDRESSABLE (t)
1115 && !TREE_STATIC (t))
1116 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1118 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1121 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1123 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1124 cfun->has_local_explicit_reg_vars = true;
1127 /* Preliminarily mark non-addressed complex variables as eligible
1128 for promotion to gimple registers. We'll transform their uses
1130 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1131 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1132 && !TREE_THIS_VOLATILE (t)
1133 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1134 && !needs_to_live_in_memory (t))
1135 DECL_GIMPLE_REG_P (t) = 1;
1138 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1139 BIND_EXPR_BLOCK (bind_expr));
1140 gimple_push_bind_expr (bind_stmt);
1142 gimplify_ctxp->keep_stack = false;
1143 gimplify_ctxp->save_stack = false;
1145 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1147 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1148 gimple_bind_set_body (bind_stmt, body);
1150 /* Source location wise, the cleanup code (stack_restore and clobbers)
1151 belongs to the end of the block, so propagate what we have. The
1152 stack_save operation belongs to the beginning of block, which we can
1153 infer from the bind_expr directly if the block has no explicit
1155 if (BIND_EXPR_BLOCK (bind_expr))
1157 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1158 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1160 if (start_locus == 0)
1161 start_locus = EXPR_LOCATION (bind_expr);
1166 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1167 the stack space allocated to the VLAs. */
1168 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1170 gcall *stack_restore;
1172 /* Save stack on entry and restore it on exit. Add a try_finally
1173 block to achieve this. */
1174 build_stack_save_restore (&stack_save, &stack_restore);
1176 gimple_set_location (stack_save, start_locus);
1177 gimple_set_location (stack_restore, end_locus);
1179 gimplify_seq_add_stmt (&cleanup, stack_restore);
1182 /* Add clobbers for all variables that go out of scope. */
1183 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1185 if (TREE_CODE (t) == VAR_DECL
1186 && !is_global_var (t)
1187 && DECL_CONTEXT (t) == current_function_decl
1188 && !DECL_HARD_REGISTER (t)
1189 && !TREE_THIS_VOLATILE (t)
1190 && !DECL_HAS_VALUE_EXPR_P (t)
1191 /* Only care for variables that have to be in memory. Others
1192 will be rewritten into SSA names, hence moved to the top-level. */
1193 && !is_gimple_reg (t)
1194 && flag_stack_reuse != SR_NONE)
1196 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1197 gimple *clobber_stmt;
1198 TREE_THIS_VOLATILE (clobber) = 1;
1199 clobber_stmt = gimple_build_assign (t, clobber);
1200 gimple_set_location (clobber_stmt, end_locus);
1201 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1203 if (flag_openacc && oacc_declare_returns != NULL)
1205 tree *c = oacc_declare_returns->get (t);
1209 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1213 oacc_declare_returns->remove (t);
1215 if (oacc_declare_returns->elements () == 0)
1217 delete oacc_declare_returns;
1218 oacc_declare_returns = NULL;
1228 gimple_stmt_iterator si = gsi_start (cleanup);
1230 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1232 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1238 gimple_seq new_body;
1241 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1242 GIMPLE_TRY_FINALLY);
1245 gimplify_seq_add_stmt (&new_body, stack_save);
1246 gimplify_seq_add_stmt (&new_body, gs);
1247 gimple_bind_set_body (bind_stmt, new_body);
1250 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1251 if (!gimplify_ctxp->keep_stack)
1252 gimplify_ctxp->keep_stack = old_keep_stack;
1253 gimplify_ctxp->save_stack = old_save_stack;
1255 gimple_pop_bind_expr ();
1257 gimplify_seq_add_stmt (pre_p, bind_stmt);
1265 *expr_p = NULL_TREE;
1269 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1270 GIMPLE value, it is assigned to a new temporary and the statement is
1271 re-written to return the temporary.
1273 PRE_P points to the sequence where side effects that must happen before
1274 STMT should be stored. */
1276 static enum gimplify_status
1277 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1280 tree ret_expr = TREE_OPERAND (stmt, 0);
1281 tree result_decl, result;
1283 if (ret_expr == error_mark_node)
1286 /* Implicit _Cilk_sync must be inserted right before any return statement
1287 if there is a _Cilk_spawn in the function. If the user has provided a
1288 _Cilk_sync, the optimizer should remove this duplicate one. */
1289 if (fn_contains_cilk_spawn_p (cfun))
1291 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1292 gimplify_and_add (impl_sync, pre_p);
1296 || TREE_CODE (ret_expr) == RESULT_DECL
1297 || ret_expr == error_mark_node)
1299 greturn *ret = gimple_build_return (ret_expr);
1300 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1301 gimplify_seq_add_stmt (pre_p, ret);
1305 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1306 result_decl = NULL_TREE;
1309 result_decl = TREE_OPERAND (ret_expr, 0);
1311 /* See through a return by reference. */
1312 if (TREE_CODE (result_decl) == INDIRECT_REF)
1313 result_decl = TREE_OPERAND (result_decl, 0);
1315 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1316 || TREE_CODE (ret_expr) == INIT_EXPR)
1317 && TREE_CODE (result_decl) == RESULT_DECL);
1320 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1321 Recall that aggregate_value_p is FALSE for any aggregate type that is
1322 returned in registers. If we're returning values in registers, then
1323 we don't want to extend the lifetime of the RESULT_DECL, particularly
1324 across another call. In addition, for those aggregates for which
1325 hard_function_value generates a PARALLEL, we'll die during normal
1326 expansion of structure assignments; there's special code in expand_return
1327 to handle this case that does not exist in expand_expr. */
1330 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1332 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1334 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1335 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1336 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1337 should be effectively allocated by the caller, i.e. all calls to
1338 this function must be subject to the Return Slot Optimization. */
1339 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1340 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1342 result = result_decl;
1344 else if (gimplify_ctxp->return_temp)
1345 result = gimplify_ctxp->return_temp;
1348 result = create_tmp_reg (TREE_TYPE (result_decl));
1350 /* ??? With complex control flow (usually involving abnormal edges),
1351 we can wind up warning about an uninitialized value for this. Due
1352 to how this variable is constructed and initialized, this is never
1353 true. Give up and never warn. */
1354 TREE_NO_WARNING (result) = 1;
1356 gimplify_ctxp->return_temp = result;
1359 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1360 Then gimplify the whole thing. */
1361 if (result != result_decl)
1362 TREE_OPERAND (ret_expr, 0) = result;
1364 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1366 ret = gimple_build_return (result);
1367 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1368 gimplify_seq_add_stmt (pre_p, ret);
1373 /* Gimplify a variable-length array DECL. */
1376 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1378 /* This is a variable-sized decl. Simplify its size and mark it
1379 for deferred expansion. */
1380 tree t, addr, ptr_type;
1382 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1383 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1385 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1386 if (DECL_HAS_VALUE_EXPR_P (decl))
1389 /* All occurrences of this decl in final gimplified code will be
1390 replaced by indirection. Setting DECL_VALUE_EXPR does two
1391 things: First, it lets the rest of the gimplifier know what
1392 replacement to use. Second, it lets the debug info know
1393 where to find the value. */
1394 ptr_type = build_pointer_type (TREE_TYPE (decl));
1395 addr = create_tmp_var (ptr_type, get_name (decl));
1396 DECL_IGNORED_P (addr) = 0;
1397 t = build_fold_indirect_ref (addr);
1398 TREE_THIS_NOTRAP (t) = 1;
1399 SET_DECL_VALUE_EXPR (decl, t);
1400 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1402 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1403 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1404 size_int (DECL_ALIGN (decl)));
1405 /* The call has been built for a variable-sized object. */
1406 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1407 t = fold_convert (ptr_type, t);
1408 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1410 gimplify_and_add (t, seq_p);
1413 /* A helper function to be called via walk_tree. Mark all labels under *TP
1414 as being forced. To be called for DECL_INITIAL of static variables. */
1417 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1421 if (TREE_CODE (*tp) == LABEL_DECL)
1423 FORCED_LABEL (*tp) = 1;
1424 cfun->has_forced_label_in_static = 1;
1430 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1431 and initialization explicit. */
1433 static enum gimplify_status
1434 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1436 tree stmt = *stmt_p;
1437 tree decl = DECL_EXPR_DECL (stmt);
1439 *stmt_p = NULL_TREE;
1441 if (TREE_TYPE (decl) == error_mark_node)
1444 if ((TREE_CODE (decl) == TYPE_DECL
1445 || TREE_CODE (decl) == VAR_DECL)
1446 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1448 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1449 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1450 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1453 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1454 in case its size expressions contain problematic nodes like CALL_EXPR. */
1455 if (TREE_CODE (decl) == TYPE_DECL
1456 && DECL_ORIGINAL_TYPE (decl)
1457 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1459 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1460 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1461 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1464 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1466 tree init = DECL_INITIAL (decl);
1468 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1469 || (!TREE_STATIC (decl)
1470 && flag_stack_check == GENERIC_STACK_CHECK
1471 && compare_tree_int (DECL_SIZE_UNIT (decl),
1472 STACK_CHECK_MAX_VAR_SIZE) > 0))
1473 gimplify_vla_decl (decl, seq_p);
1475 /* Some front ends do not explicitly declare all anonymous
1476 artificial variables. We compensate here by declaring the
1477 variables, though it would be better if the front ends would
1478 explicitly declare them. */
1479 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1480 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1481 gimple_add_tmp_var (decl);
1483 if (init && init != error_mark_node)
1485 if (!TREE_STATIC (decl))
1487 DECL_INITIAL (decl) = NULL_TREE;
1488 init = build2 (INIT_EXPR, void_type_node, decl, init);
1489 gimplify_and_add (init, seq_p);
1493 /* We must still examine initializers for static variables
1494 as they may contain a label address. */
1495 walk_tree (&init, force_labels_r, NULL, NULL);
1502 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1503 and replacing the LOOP_EXPR with goto, but if the loop contains an
1504 EXIT_EXPR, we need to append a label for it to jump to. */
1506 static enum gimplify_status
1507 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1509 tree saved_label = gimplify_ctxp->exit_label;
1510 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1512 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1514 gimplify_ctxp->exit_label = NULL_TREE;
1516 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1518 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1520 if (gimplify_ctxp->exit_label)
1521 gimplify_seq_add_stmt (pre_p,
1522 gimple_build_label (gimplify_ctxp->exit_label));
1524 gimplify_ctxp->exit_label = saved_label;
1530 /* Gimplify a statement list onto a sequence. These may be created either
1531 by an enlightened front-end, or by shortcut_cond_expr. */
1533 static enum gimplify_status
1534 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1536 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1538 tree_stmt_iterator i = tsi_start (*expr_p);
1540 while (!tsi_end_p (i))
1542 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1556 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1559 static enum gimplify_status
1560 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1562 tree switch_expr = *expr_p;
1563 gimple_seq switch_body_seq = NULL;
1564 enum gimplify_status ret;
1565 tree index_type = TREE_TYPE (switch_expr);
1566 if (index_type == NULL_TREE)
1567 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1569 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1571 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1574 if (SWITCH_BODY (switch_expr))
1577 vec<tree> saved_labels;
1578 tree default_case = NULL_TREE;
1579 gswitch *switch_stmt;
1581 /* If someone can be bothered to fill in the labels, they can
1582 be bothered to null out the body too. */
1583 gcc_assert (!SWITCH_LABELS (switch_expr));
1585 /* Save old labels, get new ones from body, then restore the old
1586 labels. Save all the things from the switch body to append after. */
1587 saved_labels = gimplify_ctxp->case_labels;
1588 gimplify_ctxp->case_labels.create (8);
1590 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1591 labels = gimplify_ctxp->case_labels;
1592 gimplify_ctxp->case_labels = saved_labels;
1594 preprocess_case_label_vec_for_gimple (labels, index_type,
1599 glabel *new_default;
1602 = build_case_label (NULL_TREE, NULL_TREE,
1603 create_artificial_label (UNKNOWN_LOCATION));
1604 new_default = gimple_build_label (CASE_LABEL (default_case));
1605 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1608 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
1609 default_case, labels);
1610 gimplify_seq_add_stmt (pre_p, switch_stmt);
1611 gimplify_seq_add_seq (pre_p, switch_body_seq);
1615 gcc_assert (SWITCH_LABELS (switch_expr));
1620 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1622 static enum gimplify_status
1623 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1625 struct gimplify_ctx *ctxp;
1628 /* Invalid programs can play Duff's Device type games with, for example,
1629 #pragma omp parallel. At least in the C front end, we don't
1630 detect such invalid branches until after gimplification, in the
1631 diagnose_omp_blocks pass. */
1632 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1633 if (ctxp->case_labels.exists ())
1636 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1637 ctxp->case_labels.safe_push (*expr_p);
1638 gimplify_seq_add_stmt (pre_p, label_stmt);
1643 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1647 build_and_jump (tree *label_p)
1649 if (label_p == NULL)
1650 /* If there's nowhere to jump, just fall through. */
1653 if (*label_p == NULL_TREE)
1655 tree label = create_artificial_label (UNKNOWN_LOCATION);
1659 return build1 (GOTO_EXPR, void_type_node, *label_p);
1662 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1663 This also involves building a label to jump to and communicating it to
1664 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1666 static enum gimplify_status
1667 gimplify_exit_expr (tree *expr_p)
1669 tree cond = TREE_OPERAND (*expr_p, 0);
1672 expr = build_and_jump (&gimplify_ctxp->exit_label);
1673 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1679 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1680 different from its canonical type, wrap the whole thing inside a
1681 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1684 The canonical type of a COMPONENT_REF is the type of the field being
1685 referenced--unless the field is a bit-field which can be read directly
1686 in a smaller mode, in which case the canonical type is the
1687 sign-appropriate type corresponding to that mode. */
1690 canonicalize_component_ref (tree *expr_p)
1692 tree expr = *expr_p;
1695 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1697 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1698 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1700 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1702 /* One could argue that all the stuff below is not necessary for
1703 the non-bitfield case and declare it a FE error if type
1704 adjustment would be needed. */
1705 if (TREE_TYPE (expr) != type)
1707 #ifdef ENABLE_TYPES_CHECKING
1708 tree old_type = TREE_TYPE (expr);
1712 /* We need to preserve qualifiers and propagate them from
1714 type_quals = TYPE_QUALS (type)
1715 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1716 if (TYPE_QUALS (type) != type_quals)
1717 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1719 /* Set the type of the COMPONENT_REF to the underlying type. */
1720 TREE_TYPE (expr) = type;
1722 #ifdef ENABLE_TYPES_CHECKING
1723 /* It is now a FE error, if the conversion from the canonical
1724 type to the original expression type is not useless. */
1725 gcc_assert (useless_type_conversion_p (old_type, type));
1730 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1731 to foo, embed that change in the ADDR_EXPR by converting
1736 where L is the lower bound. For simplicity, only do this for constant
1738 The constraint is that the type of &array[L] is trivially convertible
1742 canonicalize_addr_expr (tree *expr_p)
1744 tree expr = *expr_p;
1745 tree addr_expr = TREE_OPERAND (expr, 0);
1746 tree datype, ddatype, pddatype;
1748 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1749 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1750 || TREE_CODE (addr_expr) != ADDR_EXPR)
1753 /* The addr_expr type should be a pointer to an array. */
1754 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1755 if (TREE_CODE (datype) != ARRAY_TYPE)
1758 /* The pointer to element type shall be trivially convertible to
1759 the expression pointer type. */
1760 ddatype = TREE_TYPE (datype);
1761 pddatype = build_pointer_type (ddatype);
1762 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1766 /* The lower bound and element sizes must be constant. */
1767 if (!TYPE_SIZE_UNIT (ddatype)
1768 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1769 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1770 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1773 /* All checks succeeded. Build a new node to merge the cast. */
1774 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1775 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1776 NULL_TREE, NULL_TREE);
1777 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1779 /* We can have stripped a required restrict qualifier above. */
1780 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1781 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1784 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1785 underneath as appropriate. */
1787 static enum gimplify_status
1788 gimplify_conversion (tree *expr_p)
1790 location_t loc = EXPR_LOCATION (*expr_p);
1791 gcc_assert (CONVERT_EXPR_P (*expr_p));
1793 /* Then strip away all but the outermost conversion. */
1794 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1796 /* And remove the outermost conversion if it's useless. */
1797 if (tree_ssa_useless_type_conversion (*expr_p))
1798 *expr_p = TREE_OPERAND (*expr_p, 0);
1800 /* If we still have a conversion at the toplevel,
1801 then canonicalize some constructs. */
1802 if (CONVERT_EXPR_P (*expr_p))
1804 tree sub = TREE_OPERAND (*expr_p, 0);
1806 /* If a NOP conversion is changing the type of a COMPONENT_REF
1807 expression, then canonicalize its type now in order to expose more
1808 redundant conversions. */
1809 if (TREE_CODE (sub) == COMPONENT_REF)
1810 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1812 /* If a NOP conversion is changing a pointer to array of foo
1813 to a pointer to foo, embed that change in the ADDR_EXPR. */
1814 else if (TREE_CODE (sub) == ADDR_EXPR)
1815 canonicalize_addr_expr (expr_p);
1818 /* If we have a conversion to a non-register type force the
1819 use of a VIEW_CONVERT_EXPR instead. */
1820 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1821 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1822 TREE_OPERAND (*expr_p, 0));
1824 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1825 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1826 TREE_SET_CODE (*expr_p, NOP_EXPR);
1831 /* Nonlocal VLAs seen in the current function. */
1832 static hash_set<tree> *nonlocal_vlas;
1834 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1835 static tree nonlocal_vla_vars;
1837 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1838 DECL_VALUE_EXPR, and it's worth re-examining things. */
1840 static enum gimplify_status
1841 gimplify_var_or_parm_decl (tree *expr_p)
1843 tree decl = *expr_p;
1845 /* ??? If this is a local variable, and it has not been seen in any
1846 outer BIND_EXPR, then it's probably the result of a duplicate
1847 declaration, for which we've already issued an error. It would
1848 be really nice if the front end wouldn't leak these at all.
1849 Currently the only known culprit is C++ destructors, as seen
1850 in g++.old-deja/g++.jason/binding.C. */
1851 if (TREE_CODE (decl) == VAR_DECL
1852 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1853 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1854 && decl_function_context (decl) == current_function_decl)
1856 gcc_assert (seen_error ());
1860 /* When within an OMP context, notice uses of variables. */
1861 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1864 /* If the decl is an alias for another expression, substitute it now. */
1865 if (DECL_HAS_VALUE_EXPR_P (decl))
1867 tree value_expr = DECL_VALUE_EXPR (decl);
1869 /* For referenced nonlocal VLAs add a decl for debugging purposes
1870 to the current function. */
1871 if (TREE_CODE (decl) == VAR_DECL
1872 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1873 && nonlocal_vlas != NULL
1874 && TREE_CODE (value_expr) == INDIRECT_REF
1875 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1876 && decl_function_context (decl) != current_function_decl)
1878 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1880 && (ctx->region_type == ORT_WORKSHARE
1881 || ctx->region_type == ORT_SIMD
1882 || ctx->region_type == ORT_ACC))
1883 ctx = ctx->outer_context;
1884 if (!ctx && !nonlocal_vlas->add (decl))
1886 tree copy = copy_node (decl);
1888 lang_hooks.dup_lang_specific_decl (copy);
1889 SET_DECL_RTL (copy, 0);
1890 TREE_USED (copy) = 1;
1891 DECL_CHAIN (copy) = nonlocal_vla_vars;
1892 nonlocal_vla_vars = copy;
1893 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1894 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1898 *expr_p = unshare_expr (value_expr);
1905 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1908 recalculate_side_effects (tree t)
1910 enum tree_code code = TREE_CODE (t);
1911 int len = TREE_OPERAND_LENGTH (t);
1914 switch (TREE_CODE_CLASS (code))
1916 case tcc_expression:
1922 case PREDECREMENT_EXPR:
1923 case PREINCREMENT_EXPR:
1924 case POSTDECREMENT_EXPR:
1925 case POSTINCREMENT_EXPR:
1926 /* All of these have side-effects, no matter what their
1935 case tcc_comparison: /* a comparison expression */
1936 case tcc_unary: /* a unary arithmetic expression */
1937 case tcc_binary: /* a binary arithmetic expression */
1938 case tcc_reference: /* a reference */
1939 case tcc_vl_exp: /* a function call */
1940 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1941 for (i = 0; i < len; ++i)
1943 tree op = TREE_OPERAND (t, i);
1944 if (op && TREE_SIDE_EFFECTS (op))
1945 TREE_SIDE_EFFECTS (t) = 1;
1950 /* No side-effects. */
1958 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1962 : min_lval '[' val ']'
1964 | compound_lval '[' val ']'
1965 | compound_lval '.' ID
1967 This is not part of the original SIMPLE definition, which separates
1968 array and member references, but it seems reasonable to handle them
1969 together. Also, this way we don't run into problems with union
1970 aliasing; gcc requires that for accesses through a union to alias, the
1971 union reference must be explicit, which was not always the case when we
1972 were splitting up array and member refs.
1974 PRE_P points to the sequence where side effects that must happen before
1975 *EXPR_P should be stored.
1977 POST_P points to the sequence where side effects that must happen after
1978 *EXPR_P should be stored. */
1980 static enum gimplify_status
1981 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1982 fallback_t fallback)
1985 enum gimplify_status ret = GS_ALL_DONE, tret;
1987 location_t loc = EXPR_LOCATION (*expr_p);
1988 tree expr = *expr_p;
1990 /* Create a stack of the subexpressions so later we can walk them in
1991 order from inner to outer. */
1992 auto_vec<tree, 10> expr_stack;
1994 /* We can handle anything that get_inner_reference can deal with. */
1995 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1998 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1999 if (TREE_CODE (*p) == INDIRECT_REF)
2000 *p = fold_indirect_ref_loc (loc, *p);
2002 if (handled_component_p (*p))
2004 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2005 additional COMPONENT_REFs. */
2006 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2007 && gimplify_var_or_parm_decl (p) == GS_OK)
2012 expr_stack.safe_push (*p);
2015 gcc_assert (expr_stack.length ());
2017 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2018 walked through and P points to the innermost expression.
2020 Java requires that we elaborated nodes in source order. That
2021 means we must gimplify the inner expression followed by each of
2022 the indices, in order. But we can't gimplify the inner
2023 expression until we deal with any variable bounds, sizes, or
2024 positions in order to deal with PLACEHOLDER_EXPRs.
2026 So we do this in three steps. First we deal with the annotations
2027 for any variables in the components, then we gimplify the base,
2028 then we gimplify any indices, from left to right. */
2029 for (i = expr_stack.length () - 1; i >= 0; i--)
2031 tree t = expr_stack[i];
2033 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2035 /* Gimplify the low bound and element type size and put them into
2036 the ARRAY_REF. If these values are set, they have already been
2038 if (TREE_OPERAND (t, 2) == NULL_TREE)
2040 tree low = unshare_expr (array_ref_low_bound (t));
2041 if (!is_gimple_min_invariant (low))
2043 TREE_OPERAND (t, 2) = low;
2044 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2045 post_p, is_gimple_reg,
2047 ret = MIN (ret, tret);
2052 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2053 is_gimple_reg, fb_rvalue);
2054 ret = MIN (ret, tret);
2057 if (TREE_OPERAND (t, 3) == NULL_TREE)
2059 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2060 tree elmt_size = unshare_expr (array_ref_element_size (t));
2061 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2063 /* Divide the element size by the alignment of the element
2066 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2068 if (!is_gimple_min_invariant (elmt_size))
2070 TREE_OPERAND (t, 3) = elmt_size;
2071 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2072 post_p, is_gimple_reg,
2074 ret = MIN (ret, tret);
2079 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2080 is_gimple_reg, fb_rvalue);
2081 ret = MIN (ret, tret);
2084 else if (TREE_CODE (t) == COMPONENT_REF)
2086 /* Set the field offset into T and gimplify it. */
2087 if (TREE_OPERAND (t, 2) == NULL_TREE)
2089 tree offset = unshare_expr (component_ref_field_offset (t));
2090 tree field = TREE_OPERAND (t, 1);
2092 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2094 /* Divide the offset by its alignment. */
2095 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2097 if (!is_gimple_min_invariant (offset))
2099 TREE_OPERAND (t, 2) = offset;
2100 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2101 post_p, is_gimple_reg,
2103 ret = MIN (ret, tret);
2108 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2109 is_gimple_reg, fb_rvalue);
2110 ret = MIN (ret, tret);
2115 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2116 so as to match the min_lval predicate. Failure to do so may result
2117 in the creation of large aggregate temporaries. */
2118 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2119 fallback | fb_lvalue);
2120 ret = MIN (ret, tret);
2122 /* And finally, the indices and operands of ARRAY_REF. During this
2123 loop we also remove any useless conversions. */
2124 for (; expr_stack.length () > 0; )
2126 tree t = expr_stack.pop ();
2128 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2130 /* Gimplify the dimension. */
2131 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2133 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2134 is_gimple_val, fb_rvalue);
2135 ret = MIN (ret, tret);
2139 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2141 /* The innermost expression P may have originally had
2142 TREE_SIDE_EFFECTS set which would have caused all the outer
2143 expressions in *EXPR_P leading to P to also have had
2144 TREE_SIDE_EFFECTS set. */
2145 recalculate_side_effects (t);
2148 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2149 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2151 canonicalize_component_ref (expr_p);
2154 expr_stack.release ();
2156 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2161 /* Gimplify the self modifying expression pointed to by EXPR_P
2164 PRE_P points to the list where side effects that must happen before
2165 *EXPR_P should be stored.
2167 POST_P points to the list where side effects that must happen after
2168 *EXPR_P should be stored.
2170 WANT_VALUE is nonzero iff we want to use the value of this expression
2171 in another expression.
2173 ARITH_TYPE is the type the computation should be performed in. */
2175 enum gimplify_status
2176 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2177 bool want_value, tree arith_type)
2179 enum tree_code code;
2180 tree lhs, lvalue, rhs, t1;
2181 gimple_seq post = NULL, *orig_post_p = post_p;
2183 enum tree_code arith_code;
2184 enum gimplify_status ret;
2185 location_t loc = EXPR_LOCATION (*expr_p);
2187 code = TREE_CODE (*expr_p);
2189 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2190 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2192 /* Prefix or postfix? */
2193 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2194 /* Faster to treat as prefix if result is not used. */
2195 postfix = want_value;
2199 /* For postfix, make sure the inner expression's post side effects
2200 are executed after side effects from this expression. */
2204 /* Add or subtract? */
2205 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2206 arith_code = PLUS_EXPR;
2208 arith_code = MINUS_EXPR;
2210 /* Gimplify the LHS into a GIMPLE lvalue. */
2211 lvalue = TREE_OPERAND (*expr_p, 0);
2212 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2213 if (ret == GS_ERROR)
2216 /* Extract the operands to the arithmetic operation. */
2218 rhs = TREE_OPERAND (*expr_p, 1);
2220 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2221 that as the result value and in the postqueue operation. */
2224 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2225 if (ret == GS_ERROR)
2228 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2231 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2232 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2234 rhs = convert_to_ptrofftype_loc (loc, rhs);
2235 if (arith_code == MINUS_EXPR)
2236 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2237 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2240 t1 = fold_convert (TREE_TYPE (*expr_p),
2241 fold_build2 (arith_code, arith_type,
2242 fold_convert (arith_type, lhs),
2243 fold_convert (arith_type, rhs)));
2247 gimplify_assign (lvalue, t1, pre_p);
2248 gimplify_seq_add_seq (orig_post_p, post);
2254 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2259 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2262 maybe_with_size_expr (tree *expr_p)
2264 tree expr = *expr_p;
2265 tree type = TREE_TYPE (expr);
2268 /* If we've already wrapped this or the type is error_mark_node, we can't do
2270 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2271 || type == error_mark_node)
2274 /* If the size isn't known or is a constant, we have nothing to do. */
2275 size = TYPE_SIZE_UNIT (type);
2276 if (!size || TREE_CODE (size) == INTEGER_CST)
2279 /* Otherwise, make a WITH_SIZE_EXPR. */
2280 size = unshare_expr (size);
2281 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2282 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2285 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2286 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2289 enum gimplify_status
2290 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2292 bool (*test) (tree);
2295 /* In general, we allow lvalues for function arguments to avoid
2296 extra overhead of copying large aggregates out of even larger
2297 aggregates into temporaries only to copy the temporaries to
2298 the argument list. Make optimizers happy by pulling out to
2299 temporaries those types that fit in registers. */
2300 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2301 test = is_gimple_val, fb = fb_rvalue;
2304 test = is_gimple_lvalue, fb = fb_either;
2305 /* Also strip a TARGET_EXPR that would force an extra copy. */
2306 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2308 tree init = TARGET_EXPR_INITIAL (*arg_p);
2310 && !VOID_TYPE_P (TREE_TYPE (init)))
2315 /* If this is a variable sized type, we must remember the size. */
2316 maybe_with_size_expr (arg_p);
2318 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2319 /* Make sure arguments have the same location as the function call
2321 protected_set_expr_location (*arg_p, call_location);
2323 /* There is a sequence point before a function call. Side effects in
2324 the argument list must occur before the actual call. So, when
2325 gimplifying arguments, force gimplify_expr to use an internal
2326 post queue which is then appended to the end of PRE_P. */
2327 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2330 /* Don't fold inside offloading or taskreg regions: it can break code by
2331 adding decl references that weren't in the source. We'll do it during
2332 omplower pass instead. */
2335 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2337 struct gimplify_omp_ctx *ctx;
2338 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2339 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2341 return fold_stmt (gsi);
2344 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2345 WANT_VALUE is true if the result of the call is desired. */
2347 static enum gimplify_status
2348 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2350 tree fndecl, parms, p, fnptrtype;
2351 enum gimplify_status ret;
2354 bool builtin_va_start_p = false;
2355 location_t loc = EXPR_LOCATION (*expr_p);
2357 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2359 /* For reliable diagnostics during inlining, it is necessary that
2360 every call_expr be annotated with file and line. */
2361 if (! EXPR_HAS_LOCATION (*expr_p))
2362 SET_EXPR_LOCATION (*expr_p, input_location);
2364 /* Gimplify internal functions created in the FEs. */
2365 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2370 nargs = call_expr_nargs (*expr_p);
2371 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2372 auto_vec<tree> vargs (nargs);
2374 for (i = 0; i < nargs; i++)
2376 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2377 EXPR_LOCATION (*expr_p));
2378 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2380 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2381 gimplify_seq_add_stmt (pre_p, call);
2385 /* This may be a call to a builtin function.
2387 Builtin function calls may be transformed into different
2388 (and more efficient) builtin function calls under certain
2389 circumstances. Unfortunately, gimplification can muck things
2390 up enough that the builtin expanders are not aware that certain
2391 transformations are still valid.
2393 So we attempt transformation/gimplification of the call before
2394 we gimplify the CALL_EXPR. At this time we do not manage to
2395 transform all calls in the same manner as the expanders do, but
2396 we do transform most of them. */
2397 fndecl = get_callee_fndecl (*expr_p);
2399 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2400 switch (DECL_FUNCTION_CODE (fndecl))
2402 case BUILT_IN_ALLOCA:
2403 case BUILT_IN_ALLOCA_WITH_ALIGN:
2404 /* If the call has been built for a variable-sized object, then we
2405 want to restore the stack level when the enclosing BIND_EXPR is
2406 exited to reclaim the allocated space; otherwise, we precisely
2407 need to do the opposite and preserve the latest stack level. */
2408 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2409 gimplify_ctxp->save_stack = true;
2411 gimplify_ctxp->keep_stack = true;
2414 case BUILT_IN_VA_START:
2416 builtin_va_start_p = TRUE;
2417 if (call_expr_nargs (*expr_p) < 2)
2419 error ("too few arguments to function %<va_start%>");
2420 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2424 if (fold_builtin_next_arg (*expr_p, true))
2426 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2433 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2434 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
2439 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2440 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
2443 case BUILT_IN_FUNCTION:
2445 const char *function;
2446 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2447 *expr_p = build_string_literal (strlen (function) + 1, function);
2453 if (fndecl && DECL_BUILT_IN (fndecl))
2455 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2456 if (new_tree && new_tree != *expr_p)
2458 /* There was a transformation of this call which computes the
2459 same value, but in a more efficient way. Return and try
2466 /* Remember the original function pointer type. */
2467 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2469 /* There is a sequence point before the call, so any side effects in
2470 the calling expression must occur before the actual call. Force
2471 gimplify_expr to use an internal post queue. */
2472 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2473 is_gimple_call_addr, fb_rvalue);
2475 nargs = call_expr_nargs (*expr_p);
2477 /* Get argument types for verification. */
2478 fndecl = get_callee_fndecl (*expr_p);
2481 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2483 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2485 if (fndecl && DECL_ARGUMENTS (fndecl))
2486 p = DECL_ARGUMENTS (fndecl);
2491 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2494 /* If the last argument is __builtin_va_arg_pack () and it is not
2495 passed as a named argument, decrease the number of CALL_EXPR
2496 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2499 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2501 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2502 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2505 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2506 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2507 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2509 tree call = *expr_p;
2512 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2513 CALL_EXPR_FN (call),
2514 nargs, CALL_EXPR_ARGP (call));
2516 /* Copy all CALL_EXPR flags, location and block, except
2517 CALL_EXPR_VA_ARG_PACK flag. */
2518 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2519 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2520 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2521 = CALL_EXPR_RETURN_SLOT_OPT (call);
2522 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2523 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2525 /* Set CALL_EXPR_VA_ARG_PACK. */
2526 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2530 /* Gimplify the function arguments. */
2533 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2534 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2535 PUSH_ARGS_REVERSED ? i-- : i++)
2537 enum gimplify_status t;
2539 /* Avoid gimplifying the second argument to va_start, which needs to
2540 be the plain PARM_DECL. */
2541 if ((i != 1) || !builtin_va_start_p)
2543 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2544 EXPR_LOCATION (*expr_p));
2552 /* Gimplify the static chain. */
2553 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2555 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2556 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2559 enum gimplify_status t;
2560 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2561 EXPR_LOCATION (*expr_p));
2567 /* Verify the function result. */
2568 if (want_value && fndecl
2569 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2571 error_at (loc, "using result of function returning %<void%>");
2575 /* Try this again in case gimplification exposed something. */
2576 if (ret != GS_ERROR)
2578 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2580 if (new_tree && new_tree != *expr_p)
2582 /* There was a transformation of this call which computes the
2583 same value, but in a more efficient way. Return and try
2591 *expr_p = error_mark_node;
2595 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2596 decl. This allows us to eliminate redundant or useless
2597 calls to "const" functions. */
2598 if (TREE_CODE (*expr_p) == CALL_EXPR)
2600 int flags = call_expr_flags (*expr_p);
2601 if (flags & (ECF_CONST | ECF_PURE)
2602 /* An infinite loop is considered a side effect. */
2603 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2604 TREE_SIDE_EFFECTS (*expr_p) = 0;
2607 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2608 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2609 form and delegate the creation of a GIMPLE_CALL to
2610 gimplify_modify_expr. This is always possible because when
2611 WANT_VALUE is true, the caller wants the result of this call into
2612 a temporary, which means that we will emit an INIT_EXPR in
2613 internal_get_tmp_var which will then be handled by
2614 gimplify_modify_expr. */
2617 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2618 have to do is replicate it as a GIMPLE_CALL tuple. */
2619 gimple_stmt_iterator gsi;
2620 call = gimple_build_call_from_tree (*expr_p);
2621 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2622 notice_special_calls (call);
2623 gimplify_seq_add_stmt (pre_p, call);
2624 gsi = gsi_last (*pre_p);
2625 maybe_fold_stmt (&gsi);
2626 *expr_p = NULL_TREE;
2629 /* Remember the original function type. */
2630 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2631 CALL_EXPR_FN (*expr_p));
2636 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2637 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2639 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2640 condition is true or false, respectively. If null, we should generate
2641 our own to skip over the evaluation of this specific expression.
2643 LOCUS is the source location of the COND_EXPR.
2645 This function is the tree equivalent of do_jump.
2647 shortcut_cond_r should only be called by shortcut_cond_expr. */
2650 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2653 tree local_label = NULL_TREE;
2654 tree t, expr = NULL;
2656 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2657 retain the shortcut semantics. Just insert the gotos here;
2658 shortcut_cond_expr will append the real blocks later. */
2659 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2661 location_t new_locus;
2663 /* Turn if (a && b) into
2665 if (a); else goto no;
2666 if (b) goto yes; else goto no;
2669 if (false_label_p == NULL)
2670 false_label_p = &local_label;
2672 /* Keep the original source location on the first 'if'. */
2673 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2674 append_to_statement_list (t, &expr);
2676 /* Set the source location of the && on the second 'if'. */
2677 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2678 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2680 append_to_statement_list (t, &expr);
2682 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2684 location_t new_locus;
2686 /* Turn if (a || b) into
2689 if (b) goto yes; else goto no;
2692 if (true_label_p == NULL)
2693 true_label_p = &local_label;
2695 /* Keep the original source location on the first 'if'. */
2696 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2697 append_to_statement_list (t, &expr);
2699 /* Set the source location of the || on the second 'if'. */
2700 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2701 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2703 append_to_statement_list (t, &expr);
2705 else if (TREE_CODE (pred) == COND_EXPR
2706 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2707 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2709 location_t new_locus;
2711 /* As long as we're messing with gotos, turn if (a ? b : c) into
2713 if (b) goto yes; else goto no;
2715 if (c) goto yes; else goto no;
2717 Don't do this if one of the arms has void type, which can happen
2718 in C++ when the arm is throw. */
2720 /* Keep the original source location on the first 'if'. Set the source
2721 location of the ? on the second 'if'. */
2722 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2723 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2724 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2725 false_label_p, locus),
2726 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2727 false_label_p, new_locus));
2731 expr = build3 (COND_EXPR, void_type_node, pred,
2732 build_and_jump (true_label_p),
2733 build_and_jump (false_label_p));
2734 SET_EXPR_LOCATION (expr, locus);
2739 t = build1 (LABEL_EXPR, void_type_node, local_label);
2740 append_to_statement_list (t, &expr);
2746 /* Given a conditional expression EXPR with short-circuit boolean
2747 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2748 predicate apart into the equivalent sequence of conditionals. */
2751 shortcut_cond_expr (tree expr)
2753 tree pred = TREE_OPERAND (expr, 0);
2754 tree then_ = TREE_OPERAND (expr, 1);
2755 tree else_ = TREE_OPERAND (expr, 2);
2756 tree true_label, false_label, end_label, t;
2758 tree *false_label_p;
2759 bool emit_end, emit_false, jump_over_else;
2760 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2761 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2763 /* First do simple transformations. */
2766 /* If there is no 'else', turn
2769 if (a) if (b) then c. */
2770 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2772 /* Keep the original source location on the first 'if'. */
2773 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2774 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2775 /* Set the source location of the && on the second 'if'. */
2776 if (EXPR_HAS_LOCATION (pred))
2777 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2778 then_ = shortcut_cond_expr (expr);
2779 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2780 pred = TREE_OPERAND (pred, 0);
2781 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2782 SET_EXPR_LOCATION (expr, locus);
2788 /* If there is no 'then', turn
2791 if (a); else if (b); else d. */
2792 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2794 /* Keep the original source location on the first 'if'. */
2795 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2796 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2797 /* Set the source location of the || on the second 'if'. */
2798 if (EXPR_HAS_LOCATION (pred))
2799 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2800 else_ = shortcut_cond_expr (expr);
2801 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2802 pred = TREE_OPERAND (pred, 0);
2803 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2804 SET_EXPR_LOCATION (expr, locus);
2808 /* If we're done, great. */
2809 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2810 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2813 /* Otherwise we need to mess with gotos. Change
2816 if (a); else goto no;
2819 and recursively gimplify the condition. */
2821 true_label = false_label = end_label = NULL_TREE;
2823 /* If our arms just jump somewhere, hijack those labels so we don't
2824 generate jumps to jumps. */
2827 && TREE_CODE (then_) == GOTO_EXPR
2828 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2830 true_label = GOTO_DESTINATION (then_);
2836 && TREE_CODE (else_) == GOTO_EXPR
2837 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2839 false_label = GOTO_DESTINATION (else_);
2844 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2846 true_label_p = &true_label;
2848 true_label_p = NULL;
2850 /* The 'else' branch also needs a label if it contains interesting code. */
2851 if (false_label || else_se)
2852 false_label_p = &false_label;
2854 false_label_p = NULL;
2856 /* If there was nothing else in our arms, just forward the label(s). */
2857 if (!then_se && !else_se)
2858 return shortcut_cond_r (pred, true_label_p, false_label_p,
2859 EXPR_LOC_OR_LOC (expr, input_location));
2861 /* If our last subexpression already has a terminal label, reuse it. */
2863 t = expr_last (else_);
2865 t = expr_last (then_);
2868 if (t && TREE_CODE (t) == LABEL_EXPR)
2869 end_label = LABEL_EXPR_LABEL (t);
2871 /* If we don't care about jumping to the 'else' branch, jump to the end
2872 if the condition is false. */
2874 false_label_p = &end_label;
2876 /* We only want to emit these labels if we aren't hijacking them. */
2877 emit_end = (end_label == NULL_TREE);
2878 emit_false = (false_label == NULL_TREE);
2880 /* We only emit the jump over the else clause if we have to--if the
2881 then clause may fall through. Otherwise we can wind up with a
2882 useless jump and a useless label at the end of gimplified code,
2883 which will cause us to think that this conditional as a whole
2884 falls through even if it doesn't. If we then inline a function
2885 which ends with such a condition, that can cause us to issue an
2886 inappropriate warning about control reaching the end of a
2887 non-void function. */
2888 jump_over_else = block_may_fallthru (then_);
2890 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2891 EXPR_LOC_OR_LOC (expr, input_location));
2894 append_to_statement_list (pred, &expr);
2896 append_to_statement_list (then_, &expr);
2901 tree last = expr_last (expr);
2902 t = build_and_jump (&end_label);
2903 if (EXPR_HAS_LOCATION (last))
2904 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2905 append_to_statement_list (t, &expr);
2909 t = build1 (LABEL_EXPR, void_type_node, false_label);
2910 append_to_statement_list (t, &expr);
2912 append_to_statement_list (else_, &expr);
2914 if (emit_end && end_label)
2916 t = build1 (LABEL_EXPR, void_type_node, end_label);
2917 append_to_statement_list (t, &expr);
2923 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2926 gimple_boolify (tree expr)
2928 tree type = TREE_TYPE (expr);
2929 location_t loc = EXPR_LOCATION (expr);
2931 if (TREE_CODE (expr) == NE_EXPR
2932 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2933 && integer_zerop (TREE_OPERAND (expr, 1)))
2935 tree call = TREE_OPERAND (expr, 0);
2936 tree fn = get_callee_fndecl (call);
2938 /* For __builtin_expect ((long) (x), y) recurse into x as well
2939 if x is truth_value_p. */
2941 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2942 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2943 && call_expr_nargs (call) == 2)
2945 tree arg = CALL_EXPR_ARG (call, 0);
2948 if (TREE_CODE (arg) == NOP_EXPR
2949 && TREE_TYPE (arg) == TREE_TYPE (call))
2950 arg = TREE_OPERAND (arg, 0);
2951 if (truth_value_p (TREE_CODE (arg)))
2953 arg = gimple_boolify (arg);
2954 CALL_EXPR_ARG (call, 0)
2955 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2961 switch (TREE_CODE (expr))
2963 case TRUTH_AND_EXPR:
2965 case TRUTH_XOR_EXPR:
2966 case TRUTH_ANDIF_EXPR:
2967 case TRUTH_ORIF_EXPR:
2968 /* Also boolify the arguments of truth exprs. */
2969 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2972 case TRUTH_NOT_EXPR:
2973 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2975 /* These expressions always produce boolean results. */
2976 if (TREE_CODE (type) != BOOLEAN_TYPE)
2977 TREE_TYPE (expr) = boolean_type_node;
2981 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2983 case annot_expr_ivdep_kind:
2984 case annot_expr_no_vector_kind:
2985 case annot_expr_vector_kind:
2986 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2987 if (TREE_CODE (type) != BOOLEAN_TYPE)
2988 TREE_TYPE (expr) = boolean_type_node;
2995 if (COMPARISON_CLASS_P (expr))
2997 /* There expressions always prduce boolean results. */
2998 if (TREE_CODE (type) != BOOLEAN_TYPE)
2999 TREE_TYPE (expr) = boolean_type_node;
3002 /* Other expressions that get here must have boolean values, but
3003 might need to be converted to the appropriate mode. */
3004 if (TREE_CODE (type) == BOOLEAN_TYPE)
3006 return fold_convert_loc (loc, boolean_type_node, expr);
3010 /* Given a conditional expression *EXPR_P without side effects, gimplify
3011 its operands. New statements are inserted to PRE_P. */
3013 static enum gimplify_status
3014 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3016 tree expr = *expr_p, cond;
3017 enum gimplify_status ret, tret;
3018 enum tree_code code;
3020 cond = gimple_boolify (COND_EXPR_COND (expr));
3022 /* We need to handle && and || specially, as their gimplification
3023 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3024 code = TREE_CODE (cond);
3025 if (code == TRUTH_ANDIF_EXPR)
3026 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3027 else if (code == TRUTH_ORIF_EXPR)
3028 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3029 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3030 COND_EXPR_COND (*expr_p) = cond;
3032 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3033 is_gimple_val, fb_rvalue);
3034 ret = MIN (ret, tret);
3035 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3036 is_gimple_val, fb_rvalue);
3038 return MIN (ret, tret);
3041 /* Return true if evaluating EXPR could trap.
3042 EXPR is GENERIC, while tree_could_trap_p can be called
3046 generic_expr_could_trap_p (tree expr)
3050 if (!expr || is_gimple_val (expr))
3053 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3056 n = TREE_OPERAND_LENGTH (expr);
3057 for (i = 0; i < n; i++)
3058 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3064 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3073 The second form is used when *EXPR_P is of type void.
3075 PRE_P points to the list where side effects that must happen before
3076 *EXPR_P should be stored. */
3078 static enum gimplify_status
3079 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3081 tree expr = *expr_p;
3082 tree type = TREE_TYPE (expr);
3083 location_t loc = EXPR_LOCATION (expr);
3084 tree tmp, arm1, arm2;
3085 enum gimplify_status ret;
3086 tree label_true, label_false, label_cont;
3087 bool have_then_clause_p, have_else_clause_p;
3089 enum tree_code pred_code;
3090 gimple_seq seq = NULL;
3092 /* If this COND_EXPR has a value, copy the values into a temporary within
3094 if (!VOID_TYPE_P (type))
3096 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3099 /* If either an rvalue is ok or we do not require an lvalue, create the
3100 temporary. But we cannot do that if the type is addressable. */
3101 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3102 && !TREE_ADDRESSABLE (type))
3104 if (gimplify_ctxp->allow_rhs_cond_expr
3105 /* If either branch has side effects or could trap, it can't be
3106 evaluated unconditionally. */
3107 && !TREE_SIDE_EFFECTS (then_)
3108 && !generic_expr_could_trap_p (then_)
3109 && !TREE_SIDE_EFFECTS (else_)
3110 && !generic_expr_could_trap_p (else_))
3111 return gimplify_pure_cond_expr (expr_p, pre_p);
3113 tmp = create_tmp_var (type, "iftmp");
3117 /* Otherwise, only create and copy references to the values. */
3120 type = build_pointer_type (type);
3122 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3123 then_ = build_fold_addr_expr_loc (loc, then_);
3125 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3126 else_ = build_fold_addr_expr_loc (loc, else_);
3129 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3131 tmp = create_tmp_var (type, "iftmp");
3132 result = build_simple_mem_ref_loc (loc, tmp);
3135 /* Build the new then clause, `tmp = then_;'. But don't build the
3136 assignment if the value is void; in C++ it can be if it's a throw. */
3137 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3138 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3140 /* Similarly, build the new else clause, `tmp = else_;'. */
3141 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3142 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3144 TREE_TYPE (expr) = void_type_node;
3145 recalculate_side_effects (expr);
3147 /* Move the COND_EXPR to the prequeue. */
3148 gimplify_stmt (&expr, pre_p);
3154 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3155 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3156 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3157 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3159 /* Make sure the condition has BOOLEAN_TYPE. */
3160 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3162 /* Break apart && and || conditions. */
3163 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3164 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3166 expr = shortcut_cond_expr (expr);
3168 if (expr != *expr_p)
3172 /* We can't rely on gimplify_expr to re-gimplify the expanded
3173 form properly, as cleanups might cause the target labels to be
3174 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3175 set up a conditional context. */
3176 gimple_push_condition ();
3177 gimplify_stmt (expr_p, &seq);
3178 gimple_pop_condition (pre_p);
3179 gimple_seq_add_seq (pre_p, seq);
3185 /* Now do the normal gimplification. */
3187 /* Gimplify condition. */
3188 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3190 if (ret == GS_ERROR)
3192 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3194 gimple_push_condition ();
3196 have_then_clause_p = have_else_clause_p = false;
3197 if (TREE_OPERAND (expr, 1) != NULL
3198 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3199 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3200 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3201 == current_function_decl)
3202 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3203 have different locations, otherwise we end up with incorrect
3204 location information on the branches. */
3206 || !EXPR_HAS_LOCATION (expr)
3207 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3208 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3210 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3211 have_then_clause_p = true;
3214 label_true = create_artificial_label (UNKNOWN_LOCATION);
3215 if (TREE_OPERAND (expr, 2) != NULL
3216 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3217 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3218 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3219 == current_function_decl)
3220 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3221 have different locations, otherwise we end up with incorrect
3222 location information on the branches. */
3224 || !EXPR_HAS_LOCATION (expr)
3225 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3226 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3228 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3229 have_else_clause_p = true;
3232 label_false = create_artificial_label (UNKNOWN_LOCATION);
3234 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3236 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3238 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3239 gimplify_seq_add_stmt (&seq, cond_stmt);
3240 gimple_stmt_iterator gsi = gsi_last (seq);
3241 maybe_fold_stmt (&gsi);
3243 label_cont = NULL_TREE;
3244 if (!have_then_clause_p)
3246 /* For if (...) {} else { code; } put label_true after
3248 if (TREE_OPERAND (expr, 1) == NULL_TREE
3249 && !have_else_clause_p
3250 && TREE_OPERAND (expr, 2) != NULL_TREE)
3251 label_cont = label_true;
3254 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3255 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3256 /* For if (...) { code; } else {} or
3257 if (...) { code; } else goto label; or
3258 if (...) { code; return; } else { ... }
3259 label_cont isn't needed. */
3260 if (!have_else_clause_p
3261 && TREE_OPERAND (expr, 2) != NULL_TREE
3262 && gimple_seq_may_fallthru (seq))
3265 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3267 g = gimple_build_goto (label_cont);
3269 /* GIMPLE_COND's are very low level; they have embedded
3270 gotos. This particular embedded goto should not be marked
3271 with the location of the original COND_EXPR, as it would
3272 correspond to the COND_EXPR's condition, not the ELSE or the
3273 THEN arms. To avoid marking it with the wrong location, flag
3274 it as "no location". */
3275 gimple_set_do_not_emit_location (g);
3277 gimplify_seq_add_stmt (&seq, g);
3281 if (!have_else_clause_p)
3283 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3284 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3287 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3289 gimple_pop_condition (pre_p);
3290 gimple_seq_add_seq (pre_p, seq);
3292 if (ret == GS_ERROR)
3294 else if (have_then_clause_p || have_else_clause_p)
3298 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3299 expr = TREE_OPERAND (expr, 0);
3300 gimplify_stmt (&expr, pre_p);
3307 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3308 to be marked addressable.
3310 We cannot rely on such an expression being directly markable if a temporary
3311 has been created by the gimplification. In this case, we create another
3312 temporary and initialize it with a copy, which will become a store after we
3313 mark it addressable. This can happen if the front-end passed us something
3314 that it could not mark addressable yet, like a Fortran pass-by-reference
3315 parameter (int) floatvar. */
3318 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3320 while (handled_component_p (*expr_p))
3321 expr_p = &TREE_OPERAND (*expr_p, 0);
3322 if (is_gimple_reg (*expr_p))
3324 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3325 DECL_GIMPLE_REG_P (var) = 0;
3330 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3331 a call to __builtin_memcpy. */
3333 static enum gimplify_status
3334 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3337 tree t, to, to_ptr, from, from_ptr;
3339 location_t loc = EXPR_LOCATION (*expr_p);
3341 to = TREE_OPERAND (*expr_p, 0);
3342 from = TREE_OPERAND (*expr_p, 1);
3344 /* Mark the RHS addressable. Beware that it may not be possible to do so
3345 directly if a temporary has been created by the gimplification. */
3346 prepare_gimple_addressable (&from, seq_p);
3348 mark_addressable (from);
3349 from_ptr = build_fold_addr_expr_loc (loc, from);
3350 gimplify_arg (&from_ptr, seq_p, loc);
3352 mark_addressable (to);
3353 to_ptr = build_fold_addr_expr_loc (loc, to);
3354 gimplify_arg (&to_ptr, seq_p, loc);
3356 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3358 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3362 /* tmp = memcpy() */
3363 t = create_tmp_var (TREE_TYPE (to_ptr));
3364 gimple_call_set_lhs (gs, t);
3365 gimplify_seq_add_stmt (seq_p, gs);
3367 *expr_p = build_simple_mem_ref (t);
3371 gimplify_seq_add_stmt (seq_p, gs);
3376 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3377 a call to __builtin_memset. In this case we know that the RHS is
3378 a CONSTRUCTOR with an empty element list. */
3380 static enum gimplify_status
3381 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3384 tree t, from, to, to_ptr;
3386 location_t loc = EXPR_LOCATION (*expr_p);
3388 /* Assert our assumptions, to abort instead of producing wrong code
3389 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3390 not be immediately exposed. */
3391 from = TREE_OPERAND (*expr_p, 1);
3392 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3393 from = TREE_OPERAND (from, 0);
3395 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3396 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3399 to = TREE_OPERAND (*expr_p, 0);
3401 to_ptr = build_fold_addr_expr_loc (loc, to);
3402 gimplify_arg (&to_ptr, seq_p, loc);
3403 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3405 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3409 /* tmp = memset() */
3410 t = create_tmp_var (TREE_TYPE (to_ptr));
3411 gimple_call_set_lhs (gs, t);
3412 gimplify_seq_add_stmt (seq_p, gs);
3414 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3418 gimplify_seq_add_stmt (seq_p, gs);
3423 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3424 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3425 assignment. Return non-null if we detect a potential overlap. */
3427 struct gimplify_init_ctor_preeval_data
3429 /* The base decl of the lhs object. May be NULL, in which case we
3430 have to assume the lhs is indirect. */
3433 /* The alias set of the lhs object. */
3434 alias_set_type lhs_alias_set;
3438 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3440 struct gimplify_init_ctor_preeval_data *data
3441 = (struct gimplify_init_ctor_preeval_data *) xdata;
3444 /* If we find the base object, obviously we have overlap. */
3445 if (data->lhs_base_decl == t)
3448 /* If the constructor component is indirect, determine if we have a
3449 potential overlap with the lhs. The only bits of information we
3450 have to go on at this point are addressability and alias sets. */
3451 if ((INDIRECT_REF_P (t)
3452 || TREE_CODE (t) == MEM_REF)
3453 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3454 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3457 /* If the constructor component is a call, determine if it can hide a
3458 potential overlap with the lhs through an INDIRECT_REF like above.
3459 ??? Ugh - this is completely broken. In fact this whole analysis
3460 doesn't look conservative. */
3461 if (TREE_CODE (t) == CALL_EXPR)
3463 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3465 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3466 if (POINTER_TYPE_P (TREE_VALUE (type))
3467 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3468 && alias_sets_conflict_p (data->lhs_alias_set,
3470 (TREE_TYPE (TREE_VALUE (type)))))
3474 if (IS_TYPE_OR_DECL_P (t))
3479 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3480 force values that overlap with the lhs (as described by *DATA)
3481 into temporaries. */
3484 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3485 struct gimplify_init_ctor_preeval_data *data)
3487 enum gimplify_status one;
3489 /* If the value is constant, then there's nothing to pre-evaluate. */
3490 if (TREE_CONSTANT (*expr_p))
3492 /* Ensure it does not have side effects, it might contain a reference to
3493 the object we're initializing. */
3494 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3498 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3499 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3502 /* Recurse for nested constructors. */
3503 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3505 unsigned HOST_WIDE_INT ix;
3506 constructor_elt *ce;
3507 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3509 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3510 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3515 /* If this is a variable sized type, we must remember the size. */
3516 maybe_with_size_expr (expr_p);
3518 /* Gimplify the constructor element to something appropriate for the rhs
3519 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3520 the gimplifier will consider this a store to memory. Doing this
3521 gimplification now means that we won't have to deal with complicated
3522 language-specific trees, nor trees like SAVE_EXPR that can induce
3523 exponential search behavior. */
3524 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3525 if (one == GS_ERROR)
3531 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3532 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3533 always be true for all scalars, since is_gimple_mem_rhs insists on a
3534 temporary variable for them. */
3535 if (DECL_P (*expr_p))
3538 /* If this is of variable size, we have no choice but to assume it doesn't
3539 overlap since we can't make a temporary for it. */
3540 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3543 /* Otherwise, we must search for overlap ... */
3544 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3547 /* ... and if found, force the value into a temporary. */
3548 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3551 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3552 a RANGE_EXPR in a CONSTRUCTOR for an array.
3556 object[var] = value;
3563 We increment var _after_ the loop exit check because we might otherwise
3564 fail if upper == TYPE_MAX_VALUE (type for upper).
3566 Note that we never have to deal with SAVE_EXPRs here, because this has
3567 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3569 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3570 gimple_seq *, bool);
3573 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3574 tree value, tree array_elt_type,
3575 gimple_seq *pre_p, bool cleared)
3577 tree loop_entry_label, loop_exit_label, fall_thru_label;
3578 tree var, var_type, cref, tmp;
3580 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3581 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3582 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3584 /* Create and initialize the index variable. */
3585 var_type = TREE_TYPE (upper);
3586 var = create_tmp_var (var_type);
3587 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3589 /* Add the loop entry label. */
3590 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3592 /* Build the reference. */
3593 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3594 var, NULL_TREE, NULL_TREE);
3596 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3597 the store. Otherwise just assign value to the reference. */
3599 if (TREE_CODE (value) == CONSTRUCTOR)
3600 /* NB we might have to call ourself recursively through
3601 gimplify_init_ctor_eval if the value is a constructor. */
3602 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3605 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3607 /* We exit the loop when the index var is equal to the upper bound. */
3608 gimplify_seq_add_stmt (pre_p,
3609 gimple_build_cond (EQ_EXPR, var, upper,
3610 loop_exit_label, fall_thru_label));
3612 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3614 /* Otherwise, increment the index var... */
3615 tmp = build2 (PLUS_EXPR, var_type, var,
3616 fold_convert (var_type, integer_one_node));
3617 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3619 /* ...and jump back to the loop entry. */
3620 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3622 /* Add the loop exit label. */
3623 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3626 /* Return true if FDECL is accessing a field that is zero sized. */
3629 zero_sized_field_decl (const_tree fdecl)
3631 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3632 && integer_zerop (DECL_SIZE (fdecl)))
3637 /* Return true if TYPE is zero sized. */
3640 zero_sized_type (const_tree type)
3642 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3643 && integer_zerop (TYPE_SIZE (type)))
3648 /* A subroutine of gimplify_init_constructor. Generate individual
3649 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3650 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3651 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3655 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3656 gimple_seq *pre_p, bool cleared)
3658 tree array_elt_type = NULL;
3659 unsigned HOST_WIDE_INT ix;
3660 tree purpose, value;
3662 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3663 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3665 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3669 /* NULL values are created above for gimplification errors. */
3673 if (cleared && initializer_zerop (value))
3676 /* ??? Here's to hoping the front end fills in all of the indices,
3677 so we don't have to figure out what's missing ourselves. */
3678 gcc_assert (purpose);
3680 /* Skip zero-sized fields, unless value has side-effects. This can
3681 happen with calls to functions returning a zero-sized type, which
3682 we shouldn't discard. As a number of downstream passes don't
3683 expect sets of zero-sized fields, we rely on the gimplification of
3684 the MODIFY_EXPR we make below to drop the assignment statement. */
3685 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3688 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3690 if (TREE_CODE (purpose) == RANGE_EXPR)
3692 tree lower = TREE_OPERAND (purpose, 0);
3693 tree upper = TREE_OPERAND (purpose, 1);
3695 /* If the lower bound is equal to upper, just treat it as if
3696 upper was the index. */
3697 if (simple_cst_equal (lower, upper))
3701 gimplify_init_ctor_eval_range (object, lower, upper, value,
3702 array_elt_type, pre_p, cleared);
3709 /* Do not use bitsizetype for ARRAY_REF indices. */
3710 if (TYPE_DOMAIN (TREE_TYPE (object)))
3712 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3714 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3715 purpose, NULL_TREE, NULL_TREE);
3719 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3720 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3721 unshare_expr (object), purpose, NULL_TREE);
3724 if (TREE_CODE (value) == CONSTRUCTOR
3725 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3726 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3730 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3731 gimplify_and_add (init, pre_p);
3737 /* Return the appropriate RHS predicate for this LHS. */
3740 rhs_predicate_for (tree lhs)
3742 if (is_gimple_reg (lhs))
3743 return is_gimple_reg_rhs_or_call;
3745 return is_gimple_mem_rhs_or_call;
3748 /* Gimplify a C99 compound literal expression. This just means adding
3749 the DECL_EXPR before the current statement and using its anonymous
3752 static enum gimplify_status
3753 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3754 bool (*gimple_test_f) (tree),
3755 fallback_t fallback)
3757 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3758 tree decl = DECL_EXPR_DECL (decl_s);
3759 tree init = DECL_INITIAL (decl);
3760 /* Mark the decl as addressable if the compound literal
3761 expression is addressable now, otherwise it is marked too late
3762 after we gimplify the initialization expression. */
3763 if (TREE_ADDRESSABLE (*expr_p))
3764 TREE_ADDRESSABLE (decl) = 1;
3765 /* Otherwise, if we don't need an lvalue and have a literal directly
3766 substitute it. Check if it matches the gimple predicate, as
3767 otherwise we'd generate a new temporary, and we can as well just
3768 use the decl we already have. */
3769 else if (!TREE_ADDRESSABLE (decl)
3771 && (fallback & fb_lvalue) == 0
3772 && gimple_test_f (init))
3778 /* Preliminarily mark non-addressed complex variables as eligible
3779 for promotion to gimple registers. We'll transform their uses
3781 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3782 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3783 && !TREE_THIS_VOLATILE (decl)
3784 && !needs_to_live_in_memory (decl))
3785 DECL_GIMPLE_REG_P (decl) = 1;
3787 /* If the decl is not addressable, then it is being used in some
3788 expression or on the right hand side of a statement, and it can
3789 be put into a readonly data section. */
3790 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3791 TREE_READONLY (decl) = 1;
3793 /* This decl isn't mentioned in the enclosing block, so add it to the
3794 list of temps. FIXME it seems a bit of a kludge to say that
3795 anonymous artificial vars aren't pushed, but everything else is. */
3796 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3797 gimple_add_tmp_var (decl);
3799 gimplify_and_add (decl_s, pre_p);
3804 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3805 return a new CONSTRUCTOR if something changed. */
3808 optimize_compound_literals_in_ctor (tree orig_ctor)
3810 tree ctor = orig_ctor;
3811 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3812 unsigned int idx, num = vec_safe_length (elts);
3814 for (idx = 0; idx < num; idx++)
3816 tree value = (*elts)[idx].value;
3817 tree newval = value;
3818 if (TREE_CODE (value) == CONSTRUCTOR)
3819 newval = optimize_compound_literals_in_ctor (value);
3820 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3822 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3823 tree decl = DECL_EXPR_DECL (decl_s);
3824 tree init = DECL_INITIAL (decl);
3826 if (!TREE_ADDRESSABLE (value)
3827 && !TREE_ADDRESSABLE (decl)
3829 && TREE_CODE (init) == CONSTRUCTOR)
3830 newval = optimize_compound_literals_in_ctor (init);
3832 if (newval == value)
3835 if (ctor == orig_ctor)
3837 ctor = copy_node (orig_ctor);
3838 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3839 elts = CONSTRUCTOR_ELTS (ctor);
3841 (*elts)[idx].value = newval;
3846 /* A subroutine of gimplify_modify_expr. Break out elements of a
3847 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3849 Note that we still need to clear any elements that don't have explicit
3850 initializers, so if not all elements are initialized we keep the
3851 original MODIFY_EXPR, we just remove all of the constructor elements.
3853 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3854 GS_ERROR if we would have to create a temporary when gimplifying
3855 this constructor. Otherwise, return GS_OK.
3857 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3859 static enum gimplify_status
3860 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3861 bool want_value, bool notify_temp_creation)
3863 tree object, ctor, type;
3864 enum gimplify_status ret;
3865 vec<constructor_elt, va_gc> *elts;
3867 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3869 if (!notify_temp_creation)
3871 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3872 is_gimple_lvalue, fb_lvalue);
3873 if (ret == GS_ERROR)
3877 object = TREE_OPERAND (*expr_p, 0);
3878 ctor = TREE_OPERAND (*expr_p, 1) =
3879 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3880 type = TREE_TYPE (ctor);
3881 elts = CONSTRUCTOR_ELTS (ctor);
3884 switch (TREE_CODE (type))
3888 case QUAL_UNION_TYPE:
3891 struct gimplify_init_ctor_preeval_data preeval_data;
3892 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3893 bool cleared, complete_p, valid_const_initializer;
3895 /* Aggregate types must lower constructors to initialization of
3896 individual elements. The exception is that a CONSTRUCTOR node
3897 with no elements indicates zero-initialization of the whole. */
3898 if (vec_safe_is_empty (elts))
3900 if (notify_temp_creation)
3905 /* Fetch information about the constructor to direct later processing.
3906 We might want to make static versions of it in various cases, and
3907 can only do so if it known to be a valid constant initializer. */
3908 valid_const_initializer
3909 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3910 &num_ctor_elements, &complete_p);
3912 /* If a const aggregate variable is being initialized, then it
3913 should never be a lose to promote the variable to be static. */
3914 if (valid_const_initializer
3915 && num_nonzero_elements > 1
3916 && TREE_READONLY (object)
3917 && TREE_CODE (object) == VAR_DECL
3918 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3920 if (notify_temp_creation)
3922 DECL_INITIAL (object) = ctor;
3923 TREE_STATIC (object) = 1;
3924 if (!DECL_NAME (object))
3925 DECL_NAME (object) = create_tmp_var_name ("C");
3926 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3928 /* ??? C++ doesn't automatically append a .<number> to the
3929 assembler name, and even when it does, it looks at FE private
3930 data structures to figure out what that number should be,
3931 which are not set for this variable. I suppose this is
3932 important for local statics for inline functions, which aren't
3933 "local" in the object file sense. So in order to get a unique
3934 TU-local symbol, we must invoke the lhd version now. */
3935 lhd_set_decl_assembler_name (object);
3937 *expr_p = NULL_TREE;
3941 /* If there are "lots" of initialized elements, even discounting
3942 those that are not address constants (and thus *must* be
3943 computed at runtime), then partition the constructor into
3944 constant and non-constant parts. Block copy the constant
3945 parts in, then generate code for the non-constant parts. */
3946 /* TODO. There's code in cp/typeck.c to do this. */
3948 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3949 /* store_constructor will ignore the clearing of variable-sized
3950 objects. Initializers for such objects must explicitly set
3951 every field that needs to be set. */
3953 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3954 /* If the constructor isn't complete, clear the whole object
3955 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3957 ??? This ought not to be needed. For any element not present
3958 in the initializer, we should simply set them to zero. Except
3959 we'd need to *find* the elements that are not present, and that
3960 requires trickery to avoid quadratic compile-time behavior in
3961 large cases or excessive memory use in small cases. */
3963 else if (num_ctor_elements - num_nonzero_elements
3964 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3965 && num_nonzero_elements < num_ctor_elements / 4)
3966 /* If there are "lots" of zeros, it's more efficient to clear
3967 the memory and then set the nonzero elements. */
3972 /* If there are "lots" of initialized elements, and all of them
3973 are valid address constants, then the entire initializer can
3974 be dropped to memory, and then memcpy'd out. Don't do this
3975 for sparse arrays, though, as it's more efficient to follow
3976 the standard CONSTRUCTOR behavior of memset followed by
3977 individual element initialization. Also don't do this for small
3978 all-zero initializers (which aren't big enough to merit
3979 clearing), and don't try to make bitwise copies of
3980 TREE_ADDRESSABLE types.
3982 We cannot apply such transformation when compiling chkp static
3983 initializer because creation of initializer image in the memory
3984 will require static initialization of bounds for it. It should
3985 result in another gimplification of similar initializer and we
3986 may fall into infinite loop. */
3987 if (valid_const_initializer
3988 && !(cleared || num_nonzero_elements == 0)
3989 && !TREE_ADDRESSABLE (type)
3990 && (!current_function_decl
3991 || !lookup_attribute ("chkp ctor",
3992 DECL_ATTRIBUTES (current_function_decl))))
3994 HOST_WIDE_INT size = int_size_in_bytes (type);
3997 /* ??? We can still get unbounded array types, at least
3998 from the C++ front end. This seems wrong, but attempt
3999 to work around it for now. */
4002 size = int_size_in_bytes (TREE_TYPE (object));
4004 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4007 /* Find the maximum alignment we can assume for the object. */
4008 /* ??? Make use of DECL_OFFSET_ALIGN. */
4009 if (DECL_P (object))
4010 align = DECL_ALIGN (object);
4012 align = TYPE_ALIGN (type);
4014 /* Do a block move either if the size is so small as to make
4015 each individual move a sub-unit move on average, or if it
4016 is so large as to make individual moves inefficient. */
4018 && num_nonzero_elements > 1
4019 && (size < num_nonzero_elements
4020 || !can_move_by_pieces (size, align)))
4022 if (notify_temp_creation)
4025 walk_tree (&ctor, force_labels_r, NULL, NULL);
4026 ctor = tree_output_constant_def (ctor);
4027 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4028 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4029 TREE_OPERAND (*expr_p, 1) = ctor;
4031 /* This is no longer an assignment of a CONSTRUCTOR, but
4032 we still may have processing to do on the LHS. So
4033 pretend we didn't do anything here to let that happen. */
4034 return GS_UNHANDLED;
4038 /* If the target is volatile, we have non-zero elements and more than
4039 one field to assign, initialize the target from a temporary. */
4040 if (TREE_THIS_VOLATILE (object)
4041 && !TREE_ADDRESSABLE (type)
4042 && num_nonzero_elements > 0
4043 && vec_safe_length (elts) > 1)
4045 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4046 TREE_OPERAND (*expr_p, 0) = temp;
4047 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4049 build2 (MODIFY_EXPR, void_type_node,
4054 if (notify_temp_creation)
4057 /* If there are nonzero elements and if needed, pre-evaluate to capture
4058 elements overlapping with the lhs into temporaries. We must do this
4059 before clearing to fetch the values before they are zeroed-out. */
4060 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4062 preeval_data.lhs_base_decl = get_base_address (object);
4063 if (!DECL_P (preeval_data.lhs_base_decl))
4064 preeval_data.lhs_base_decl = NULL;
4065 preeval_data.lhs_alias_set = get_alias_set (object);
4067 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4068 pre_p, post_p, &preeval_data);
4071 bool ctor_has_side_effects_p
4072 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4076 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4077 Note that we still have to gimplify, in order to handle the
4078 case of variable sized types. Avoid shared tree structures. */
4079 CONSTRUCTOR_ELTS (ctor) = NULL;
4080 TREE_SIDE_EFFECTS (ctor) = 0;
4081 object = unshare_expr (object);
4082 gimplify_stmt (expr_p, pre_p);
4085 /* If we have not block cleared the object, or if there are nonzero
4086 elements in the constructor, or if the constructor has side effects,
4087 add assignments to the individual scalar fields of the object. */
4089 || num_nonzero_elements > 0
4090 || ctor_has_side_effects_p)
4091 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4093 *expr_p = NULL_TREE;
4101 if (notify_temp_creation)
4104 /* Extract the real and imaginary parts out of the ctor. */
4105 gcc_assert (elts->length () == 2);
4106 r = (*elts)[0].value;
4107 i = (*elts)[1].value;
4108 if (r == NULL || i == NULL)
4110 tree zero = build_zero_cst (TREE_TYPE (type));
4117 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4118 represent creation of a complex value. */
4119 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4121 ctor = build_complex (type, r, i);
4122 TREE_OPERAND (*expr_p, 1) = ctor;
4126 ctor = build2 (COMPLEX_EXPR, type, r, i);
4127 TREE_OPERAND (*expr_p, 1) = ctor;
4128 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4131 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4139 unsigned HOST_WIDE_INT ix;
4140 constructor_elt *ce;
4142 if (notify_temp_creation)
4145 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4146 if (TREE_CONSTANT (ctor))
4148 bool constant_p = true;
4151 /* Even when ctor is constant, it might contain non-*_CST
4152 elements, such as addresses or trapping values like
4153 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4154 in VECTOR_CST nodes. */
4155 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4156 if (!CONSTANT_CLASS_P (value))
4164 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4168 TREE_CONSTANT (ctor) = 0;
4171 /* Vector types use CONSTRUCTOR all the way through gimple
4172 compilation as a general initializer. */
4173 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4175 enum gimplify_status tret;
4176 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4178 if (tret == GS_ERROR)
4180 else if (TREE_STATIC (ctor)
4181 && !initializer_constant_valid_p (ce->value,
4182 TREE_TYPE (ce->value)))
4183 TREE_STATIC (ctor) = 0;
4185 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4186 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4191 /* So how did we get a CONSTRUCTOR for a scalar type? */
4195 if (ret == GS_ERROR)
4197 /* If we have gimplified both sides of the initializer but have
4198 not emitted an assignment, do so now. */
4201 tree lhs = TREE_OPERAND (*expr_p, 0);
4202 tree rhs = TREE_OPERAND (*expr_p, 1);
4203 gassign *init = gimple_build_assign (lhs, rhs);
4204 gimplify_seq_add_stmt (pre_p, init);
4218 /* Given a pointer value OP0, return a simplified version of an
4219 indirection through OP0, or NULL_TREE if no simplification is
4220 possible. This may only be applied to a rhs of an expression.
4221 Note that the resulting type may be different from the type pointed
4222 to in the sense that it is still compatible from the langhooks
4226 gimple_fold_indirect_ref_rhs (tree t)
4228 return gimple_fold_indirect_ref (t);
4231 /* Subroutine of gimplify_modify_expr to do simplifications of
4232 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4233 something changes. */
4235 static enum gimplify_status
4236 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4237 gimple_seq *pre_p, gimple_seq *post_p,
4240 enum gimplify_status ret = GS_UNHANDLED;
4246 switch (TREE_CODE (*from_p))
4249 /* If we're assigning from a read-only variable initialized with
4250 a constructor, do the direct assignment from the constructor,
4251 but only if neither source nor target are volatile since this
4252 latter assignment might end up being done on a per-field basis. */
4253 if (DECL_INITIAL (*from_p)
4254 && TREE_READONLY (*from_p)
4255 && !TREE_THIS_VOLATILE (*from_p)
4256 && !TREE_THIS_VOLATILE (*to_p)
4257 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4259 tree old_from = *from_p;
4260 enum gimplify_status subret;
4262 /* Move the constructor into the RHS. */
4263 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4265 /* Let's see if gimplify_init_constructor will need to put
4267 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4269 if (subret == GS_ERROR)
4271 /* If so, revert the change. */
4283 /* If we have code like
4287 where the type of "x" is a (possibly cv-qualified variant
4288 of "A"), treat the entire expression as identical to "x".
4289 This kind of code arises in C++ when an object is bound
4290 to a const reference, and if "x" is a TARGET_EXPR we want
4291 to take advantage of the optimization below. */
4292 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4293 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4296 if (TREE_THIS_VOLATILE (t) != volatile_p)
4299 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4300 build_fold_addr_expr (t));
4301 if (REFERENCE_CLASS_P (t))
4302 TREE_THIS_VOLATILE (t) = volatile_p;
4313 /* If we are initializing something from a TARGET_EXPR, strip the
4314 TARGET_EXPR and initialize it directly, if possible. This can't
4315 be done if the initializer is void, since that implies that the
4316 temporary is set in some non-trivial way.
4318 ??? What about code that pulls out the temp and uses it
4319 elsewhere? I think that such code never uses the TARGET_EXPR as
4320 an initializer. If I'm wrong, we'll die because the temp won't
4321 have any RTL. In that case, I guess we'll need to replace
4322 references somehow. */
4323 tree init = TARGET_EXPR_INITIAL (*from_p);
4326 && !VOID_TYPE_P (TREE_TYPE (init)))
4336 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4338 gimplify_compound_expr (from_p, pre_p, true);
4344 /* If we already made some changes, let the front end have a
4345 crack at this before we break it down. */
4346 if (ret != GS_UNHANDLED)
4348 /* If we're initializing from a CONSTRUCTOR, break this into
4349 individual MODIFY_EXPRs. */
4350 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4354 /* If we're assigning to a non-register type, push the assignment
4355 down into the branches. This is mandatory for ADDRESSABLE types,
4356 since we cannot generate temporaries for such, but it saves a
4357 copy in other cases as well. */
4358 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4360 /* This code should mirror the code in gimplify_cond_expr. */
4361 enum tree_code code = TREE_CODE (*expr_p);
4362 tree cond = *from_p;
4363 tree result = *to_p;
4365 ret = gimplify_expr (&result, pre_p, post_p,
4366 is_gimple_lvalue, fb_lvalue);
4367 if (ret != GS_ERROR)
4370 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4371 TREE_OPERAND (cond, 1)
4372 = build2 (code, void_type_node, result,
4373 TREE_OPERAND (cond, 1));
4374 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4375 TREE_OPERAND (cond, 2)
4376 = build2 (code, void_type_node, unshare_expr (result),
4377 TREE_OPERAND (cond, 2));
4379 TREE_TYPE (cond) = void_type_node;
4380 recalculate_side_effects (cond);
4384 gimplify_and_add (cond, pre_p);
4385 *expr_p = unshare_expr (result);
4394 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4395 return slot so that we don't generate a temporary. */
4396 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4397 && aggregate_value_p (*from_p, *from_p))
4401 if (!(rhs_predicate_for (*to_p))(*from_p))
4402 /* If we need a temporary, *to_p isn't accurate. */
4404 /* It's OK to use the return slot directly unless it's an NRV. */
4405 else if (TREE_CODE (*to_p) == RESULT_DECL
4406 && DECL_NAME (*to_p) == NULL_TREE
4407 && needs_to_live_in_memory (*to_p))
4409 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4410 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4411 /* Don't force regs into memory. */
4413 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4414 /* It's OK to use the target directly if it's being
4417 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4419 /* Always use the target and thus RSO for variable-sized types.
4420 GIMPLE cannot deal with a variable-sized assignment
4421 embedded in a call statement. */
4423 else if (TREE_CODE (*to_p) != SSA_NAME
4424 && (!is_gimple_variable (*to_p)
4425 || needs_to_live_in_memory (*to_p)))
4426 /* Don't use the original target if it's already addressable;
4427 if its address escapes, and the called function uses the
4428 NRV optimization, a conforming program could see *to_p
4429 change before the called function returns; see c++/19317.
4430 When optimizing, the return_slot pass marks more functions
4431 as safe after we have escape info. */
4438 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4439 mark_addressable (*to_p);
4444 case WITH_SIZE_EXPR:
4445 /* Likewise for calls that return an aggregate of non-constant size,
4446 since we would not be able to generate a temporary at all. */
4447 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4449 *from_p = TREE_OPERAND (*from_p, 0);
4450 /* We don't change ret in this case because the
4451 WITH_SIZE_EXPR might have been added in
4452 gimplify_modify_expr, so returning GS_OK would lead to an
4458 /* If we're initializing from a container, push the initialization
4460 case CLEANUP_POINT_EXPR:
4462 case STATEMENT_LIST:
4464 tree wrap = *from_p;
4467 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4469 if (ret != GS_ERROR)
4472 t = voidify_wrapper_expr (wrap, *expr_p);
4473 gcc_assert (t == *expr_p);
4477 gimplify_and_add (wrap, pre_p);
4478 *expr_p = unshare_expr (*to_p);
4485 case COMPOUND_LITERAL_EXPR:
4487 tree complit = TREE_OPERAND (*expr_p, 1);
4488 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4489 tree decl = DECL_EXPR_DECL (decl_s);
4490 tree init = DECL_INITIAL (decl);
4492 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4493 into struct T x = { 0, 1, 2 } if the address of the
4494 compound literal has never been taken. */
4495 if (!TREE_ADDRESSABLE (complit)
4496 && !TREE_ADDRESSABLE (decl)
4499 *expr_p = copy_node (*expr_p);
4500 TREE_OPERAND (*expr_p, 1) = init;
4515 /* Return true if T looks like a valid GIMPLE statement. */
4518 is_gimple_stmt (tree t)
4520 const enum tree_code code = TREE_CODE (t);
4525 /* The only valid NOP_EXPR is the empty statement. */
4526 return IS_EMPTY_STMT (t);
4530 /* These are only valid if they're void. */
4531 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4537 case CASE_LABEL_EXPR:
4538 case TRY_CATCH_EXPR:
4539 case TRY_FINALLY_EXPR:
4540 case EH_FILTER_EXPR:
4543 case STATEMENT_LIST:
4547 case OACC_HOST_DATA:
4550 case OACC_ENTER_DATA:
4551 case OACC_EXIT_DATA:
4557 case OMP_DISTRIBUTE:
4568 case OMP_TARGET_DATA:
4569 case OMP_TARGET_UPDATE:
4570 case OMP_TARGET_ENTER_DATA:
4571 case OMP_TARGET_EXIT_DATA:
4574 /* These are always void. */
4580 /* These are valid regardless of their type. */
4589 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4590 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4591 DECL_GIMPLE_REG_P set.
4593 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4594 other, unmodified part of the complex object just before the total store.
4595 As a consequence, if the object is still uninitialized, an undefined value
4596 will be loaded into a register, which may result in a spurious exception
4597 if the register is floating-point and the value happens to be a signaling
4598 NaN for example. Then the fully-fledged complex operations lowering pass
4599 followed by a DCE pass are necessary in order to fix things up. */
4601 static enum gimplify_status
4602 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4605 enum tree_code code, ocode;
4606 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4608 lhs = TREE_OPERAND (*expr_p, 0);
4609 rhs = TREE_OPERAND (*expr_p, 1);
4610 code = TREE_CODE (lhs);
4611 lhs = TREE_OPERAND (lhs, 0);
4613 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4614 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4615 TREE_NO_WARNING (other) = 1;
4616 other = get_formal_tmp_var (other, pre_p);
4618 realpart = code == REALPART_EXPR ? rhs : other;
4619 imagpart = code == REALPART_EXPR ? other : rhs;
4621 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4622 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4624 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4626 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4627 *expr_p = (want_value) ? rhs : NULL_TREE;
4632 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4638 PRE_P points to the list where side effects that must happen before
4639 *EXPR_P should be stored.
4641 POST_P points to the list where side effects that must happen after
4642 *EXPR_P should be stored.
4644 WANT_VALUE is nonzero iff we want to use the value of this expression
4645 in another expression. */
4647 static enum gimplify_status
4648 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4651 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4652 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4653 enum gimplify_status ret = GS_UNHANDLED;
4655 location_t loc = EXPR_LOCATION (*expr_p);
4656 gimple_stmt_iterator gsi;
4658 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4659 || TREE_CODE (*expr_p) == INIT_EXPR);
4661 /* Trying to simplify a clobber using normal logic doesn't work,
4662 so handle it here. */
4663 if (TREE_CLOBBER_P (*from_p))
4665 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4666 if (ret == GS_ERROR)
4668 gcc_assert (!want_value
4669 && (TREE_CODE (*to_p) == VAR_DECL
4670 || TREE_CODE (*to_p) == MEM_REF));
4671 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4676 /* Insert pointer conversions required by the middle-end that are not
4677 required by the frontend. This fixes middle-end type checking for
4678 for example gcc.dg/redecl-6.c. */
4679 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4681 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4682 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4683 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4686 /* See if any simplifications can be done based on what the RHS is. */
4687 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4689 if (ret != GS_UNHANDLED)
4692 /* For zero sized types only gimplify the left hand side and right hand
4693 side as statements and throw away the assignment. Do this after
4694 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4696 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4698 gimplify_stmt (from_p, pre_p);
4699 gimplify_stmt (to_p, pre_p);
4700 *expr_p = NULL_TREE;
4704 /* If the value being copied is of variable width, compute the length
4705 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4706 before gimplifying any of the operands so that we can resolve any
4707 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4708 the size of the expression to be copied, not of the destination, so
4709 that is what we must do here. */
4710 maybe_with_size_expr (from_p);
4712 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4713 if (ret == GS_ERROR)
4716 /* As a special case, we have to temporarily allow for assignments
4717 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4718 a toplevel statement, when gimplifying the GENERIC expression
4719 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4720 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4722 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4723 prevent gimplify_expr from trying to create a new temporary for
4724 foo's LHS, we tell it that it should only gimplify until it
4725 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4726 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4727 and all we need to do here is set 'a' to be its LHS. */
4728 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4730 if (ret == GS_ERROR)
4733 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
4734 size as argument to the call. */
4735 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4737 tree call = TREE_OPERAND (*from_p, 0);
4738 tree vlasize = TREE_OPERAND (*from_p, 1);
4740 if (TREE_CODE (call) == CALL_EXPR
4741 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4743 int nargs = call_expr_nargs (call);
4744 tree type = TREE_TYPE (call);
4745 tree ap = CALL_EXPR_ARG (call, 0);
4746 tree tag = CALL_EXPR_ARG (call, 1);
4747 tree aptag = CALL_EXPR_ARG (call, 2);
4748 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
4752 TREE_OPERAND (*from_p, 0) = newcall;
4756 /* Now see if the above changed *from_p to something we handle specially. */
4757 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4759 if (ret != GS_UNHANDLED)
4762 /* If we've got a variable sized assignment between two lvalues (i.e. does
4763 not involve a call), then we can make things a bit more straightforward
4764 by converting the assignment to memcpy or memset. */
4765 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4767 tree from = TREE_OPERAND (*from_p, 0);
4768 tree size = TREE_OPERAND (*from_p, 1);
4770 if (TREE_CODE (from) == CONSTRUCTOR)
4771 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4773 if (is_gimple_addressable (from))
4776 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4781 /* Transform partial stores to non-addressable complex variables into
4782 total stores. This allows us to use real instead of virtual operands
4783 for these variables, which improves optimization. */
4784 if ((TREE_CODE (*to_p) == REALPART_EXPR
4785 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4786 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4787 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4789 /* Try to alleviate the effects of the gimplification creating artificial
4790 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4791 make sure not to create DECL_DEBUG_EXPR links across functions. */
4792 if (!gimplify_ctxp->into_ssa
4793 && TREE_CODE (*from_p) == VAR_DECL
4794 && DECL_IGNORED_P (*from_p)
4796 && !DECL_IGNORED_P (*to_p)
4797 && decl_function_context (*to_p) == current_function_decl)
4799 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4801 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4802 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4803 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4806 if (want_value && TREE_THIS_VOLATILE (*to_p))
4807 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4809 if (TREE_CODE (*from_p) == CALL_EXPR)
4811 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4812 instead of a GIMPLE_ASSIGN. */
4814 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4816 /* Gimplify internal functions created in the FEs. */
4817 int nargs = call_expr_nargs (*from_p), i;
4818 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4819 auto_vec<tree> vargs (nargs);
4821 for (i = 0; i < nargs; i++)
4823 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4824 EXPR_LOCATION (*from_p));
4825 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4827 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4828 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4832 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4833 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4834 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4835 tree fndecl = get_callee_fndecl (*from_p);
4837 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4838 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4839 && call_expr_nargs (*from_p) == 3)
4840 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4841 CALL_EXPR_ARG (*from_p, 0),
4842 CALL_EXPR_ARG (*from_p, 1),
4843 CALL_EXPR_ARG (*from_p, 2));
4846 call_stmt = gimple_build_call_from_tree (*from_p);
4847 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4850 notice_special_calls (call_stmt);
4851 if (!gimple_call_noreturn_p (call_stmt)
4852 || TREE_ADDRESSABLE (TREE_TYPE (*to_p))
4853 || TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p))) != INTEGER_CST)
4854 gimple_call_set_lhs (call_stmt, *to_p);
4859 assign = gimple_build_assign (*to_p, *from_p);
4860 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4861 if (COMPARISON_CLASS_P (*from_p))
4862 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
4865 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4867 /* We should have got an SSA name from the start. */
4868 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4871 gimplify_seq_add_stmt (pre_p, assign);
4872 gsi = gsi_last (*pre_p);
4873 maybe_fold_stmt (&gsi);
4877 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4886 /* Gimplify a comparison between two variable-sized objects. Do this
4887 with a call to BUILT_IN_MEMCMP. */
4889 static enum gimplify_status
4890 gimplify_variable_sized_compare (tree *expr_p)
4892 location_t loc = EXPR_LOCATION (*expr_p);
4893 tree op0 = TREE_OPERAND (*expr_p, 0);
4894 tree op1 = TREE_OPERAND (*expr_p, 1);
4895 tree t, arg, dest, src, expr;
4897 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4898 arg = unshare_expr (arg);
4899 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4900 src = build_fold_addr_expr_loc (loc, op1);
4901 dest = build_fold_addr_expr_loc (loc, op0);
4902 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4903 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4906 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4907 SET_EXPR_LOCATION (expr, loc);
4913 /* Gimplify a comparison between two aggregate objects of integral scalar
4914 mode as a comparison between the bitwise equivalent scalar values. */
4916 static enum gimplify_status
4917 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4919 location_t loc = EXPR_LOCATION (*expr_p);
4920 tree op0 = TREE_OPERAND (*expr_p, 0);
4921 tree op1 = TREE_OPERAND (*expr_p, 1);
4923 tree type = TREE_TYPE (op0);
4924 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4926 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4927 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4930 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4935 /* Gimplify an expression sequence. This function gimplifies each
4936 expression and rewrites the original expression with the last
4937 expression of the sequence in GIMPLE form.
4939 PRE_P points to the list where the side effects for all the
4940 expressions in the sequence will be emitted.
4942 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4944 static enum gimplify_status
4945 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4951 tree *sub_p = &TREE_OPERAND (t, 0);
4953 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4954 gimplify_compound_expr (sub_p, pre_p, false);
4956 gimplify_stmt (sub_p, pre_p);
4958 t = TREE_OPERAND (t, 1);
4960 while (TREE_CODE (t) == COMPOUND_EXPR);
4967 gimplify_stmt (expr_p, pre_p);
4972 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4973 gimplify. After gimplification, EXPR_P will point to a new temporary
4974 that holds the original value of the SAVE_EXPR node.
4976 PRE_P points to the list where side effects that must happen before
4977 *EXPR_P should be stored. */
4979 static enum gimplify_status
4980 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4982 enum gimplify_status ret = GS_ALL_DONE;
4985 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4986 val = TREE_OPERAND (*expr_p, 0);
4988 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4989 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4991 /* The operand may be a void-valued expression such as SAVE_EXPRs
4992 generated by the Java frontend for class initialization. It is
4993 being executed only for its side-effects. */
4994 if (TREE_TYPE (val) == void_type_node)
4996 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4997 is_gimple_stmt, fb_none);
5001 val = get_initialized_tmp_var (val, pre_p, post_p);
5003 TREE_OPERAND (*expr_p, 0) = val;
5004 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5012 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5019 PRE_P points to the list where side effects that must happen before
5020 *EXPR_P should be stored.
5022 POST_P points to the list where side effects that must happen after
5023 *EXPR_P should be stored. */
5025 static enum gimplify_status
5026 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5028 tree expr = *expr_p;
5029 tree op0 = TREE_OPERAND (expr, 0);
5030 enum gimplify_status ret;
5031 location_t loc = EXPR_LOCATION (*expr_p);
5033 switch (TREE_CODE (op0))
5037 /* Check if we are dealing with an expression of the form '&*ptr'.
5038 While the front end folds away '&*ptr' into 'ptr', these
5039 expressions may be generated internally by the compiler (e.g.,
5040 builtins like __builtin_va_end). */
5041 /* Caution: the silent array decomposition semantics we allow for
5042 ADDR_EXPR means we can't always discard the pair. */
5043 /* Gimplification of the ADDR_EXPR operand may drop
5044 cv-qualification conversions, so make sure we add them if
5047 tree op00 = TREE_OPERAND (op0, 0);
5048 tree t_expr = TREE_TYPE (expr);
5049 tree t_op00 = TREE_TYPE (op00);
5051 if (!useless_type_conversion_p (t_expr, t_op00))
5052 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5058 case VIEW_CONVERT_EXPR:
5059 /* Take the address of our operand and then convert it to the type of
5062 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5063 all clear. The impact of this transformation is even less clear. */
5065 /* If the operand is a useless conversion, look through it. Doing so
5066 guarantees that the ADDR_EXPR and its operand will remain of the
5068 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5069 op0 = TREE_OPERAND (op0, 0);
5071 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5072 build_fold_addr_expr_loc (loc,
5073 TREE_OPERAND (op0, 0)));
5078 if (integer_zerop (TREE_OPERAND (op0, 1)))
5079 goto do_indirect_ref;
5081 /* ... fall through ... */
5084 /* If we see a call to a declared builtin or see its address
5085 being taken (we can unify those cases here) then we can mark
5086 the builtin for implicit generation by GCC. */
5087 if (TREE_CODE (op0) == FUNCTION_DECL
5088 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5089 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5090 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5092 /* We use fb_either here because the C frontend sometimes takes
5093 the address of a call that returns a struct; see
5094 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5095 the implied temporary explicit. */
5097 /* Make the operand addressable. */
5098 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5099 is_gimple_addressable, fb_either);
5100 if (ret == GS_ERROR)
5103 /* Then mark it. Beware that it may not be possible to do so directly
5104 if a temporary has been created by the gimplification. */
5105 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5107 op0 = TREE_OPERAND (expr, 0);
5109 /* For various reasons, the gimplification of the expression
5110 may have made a new INDIRECT_REF. */
5111 if (TREE_CODE (op0) == INDIRECT_REF)
5112 goto do_indirect_ref;
5114 mark_addressable (TREE_OPERAND (expr, 0));
5116 /* The FEs may end up building ADDR_EXPRs early on a decl with
5117 an incomplete type. Re-build ADDR_EXPRs in canonical form
5119 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5120 *expr_p = build_fold_addr_expr (op0);
5122 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5123 recompute_tree_invariant_for_addr_expr (*expr_p);
5125 /* If we re-built the ADDR_EXPR add a conversion to the original type
5127 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5128 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5136 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5137 value; output operands should be a gimple lvalue. */
5139 static enum gimplify_status
5140 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5144 const char **oconstraints;
5147 const char *constraint;
5148 bool allows_mem, allows_reg, is_inout;
5149 enum gimplify_status ret, tret;
5151 vec<tree, va_gc> *inputs;
5152 vec<tree, va_gc> *outputs;
5153 vec<tree, va_gc> *clobbers;
5154 vec<tree, va_gc> *labels;
5158 noutputs = list_length (ASM_OUTPUTS (expr));
5159 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5167 link_next = NULL_TREE;
5168 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5171 size_t constraint_len;
5173 link_next = TREE_CHAIN (link);
5177 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5178 constraint_len = strlen (constraint);
5179 if (constraint_len == 0)
5182 ok = parse_output_constraint (&constraint, i, 0, 0,
5183 &allows_mem, &allows_reg, &is_inout);
5190 if (!allows_reg && allows_mem)
5191 mark_addressable (TREE_VALUE (link));
5193 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5194 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5195 fb_lvalue | fb_mayfail);
5196 if (tret == GS_ERROR)
5198 error ("invalid lvalue in asm output %d", i);
5202 /* If the constraint does not allow memory make sure we gimplify
5203 it to a register if it is not already but its base is. This
5204 happens for complex and vector components. */
5207 tree op = TREE_VALUE (link);
5208 if (! is_gimple_val (op)
5209 && is_gimple_reg_type (TREE_TYPE (op))
5210 && is_gimple_reg (get_base_address (op)))
5212 tree tem = create_tmp_reg (TREE_TYPE (op));
5216 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5217 tem, unshare_expr (op));
5218 gimplify_and_add (ass, pre_p);
5220 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5221 gimplify_and_add (ass, post_p);
5223 TREE_VALUE (link) = tem;
5228 vec_safe_push (outputs, link);
5229 TREE_CHAIN (link) = NULL_TREE;
5233 /* An input/output operand. To give the optimizers more
5234 flexibility, split it into separate input and output
5239 /* Turn the in/out constraint into an output constraint. */
5240 char *p = xstrdup (constraint);
5242 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5244 /* And add a matching input constraint. */
5247 sprintf (buf, "%d", i);
5249 /* If there are multiple alternatives in the constraint,
5250 handle each of them individually. Those that allow register
5251 will be replaced with operand number, the others will stay
5253 if (strchr (p, ',') != NULL)
5255 size_t len = 0, buflen = strlen (buf);
5256 char *beg, *end, *str, *dst;
5260 end = strchr (beg, ',');
5262 end = strchr (beg, '\0');
5263 if ((size_t) (end - beg) < buflen)
5266 len += end - beg + 1;
5273 str = (char *) alloca (len);
5274 for (beg = p + 1, dst = str;;)
5277 bool mem_p, reg_p, inout_p;
5279 end = strchr (beg, ',');
5284 parse_output_constraint (&tem, i, 0, 0,
5285 &mem_p, ®_p, &inout_p);
5290 memcpy (dst, buf, buflen);
5299 memcpy (dst, beg, len);
5308 input = build_string (dst - str, str);
5311 input = build_string (strlen (buf), buf);
5314 input = build_string (constraint_len - 1, constraint + 1);
5318 input = build_tree_list (build_tree_list (NULL_TREE, input),
5319 unshare_expr (TREE_VALUE (link)));
5320 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5324 link_next = NULL_TREE;
5325 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5327 link_next = TREE_CHAIN (link);
5328 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5329 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5330 oconstraints, &allows_mem, &allows_reg);
5332 /* If we can't make copies, we can only accept memory. */
5333 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5339 error ("impossible constraint in %<asm%>");
5340 error ("non-memory input %d must stay in memory", i);
5345 /* If the operand is a memory input, it should be an lvalue. */
5346 if (!allows_reg && allows_mem)
5348 tree inputv = TREE_VALUE (link);
5349 STRIP_NOPS (inputv);
5350 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5351 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5352 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5353 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5354 || TREE_CODE (inputv) == MODIFY_EXPR)
5355 TREE_VALUE (link) = error_mark_node;
5356 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5357 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5358 if (tret != GS_ERROR)
5360 /* Unlike output operands, memory inputs are not guaranteed
5361 to be lvalues by the FE, and while the expressions are
5362 marked addressable there, if it is e.g. a statement
5363 expression, temporaries in it might not end up being
5364 addressable. They might be already used in the IL and thus
5365 it is too late to make them addressable now though. */
5366 tree x = TREE_VALUE (link);
5367 while (handled_component_p (x))
5368 x = TREE_OPERAND (x, 0);
5369 if (TREE_CODE (x) == MEM_REF
5370 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5371 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5372 if ((TREE_CODE (x) == VAR_DECL
5373 || TREE_CODE (x) == PARM_DECL
5374 || TREE_CODE (x) == RESULT_DECL)
5375 && !TREE_ADDRESSABLE (x)
5376 && is_gimple_reg (x))
5378 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5380 "memory input %d is not directly addressable",
5382 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5385 mark_addressable (TREE_VALUE (link));
5386 if (tret == GS_ERROR)
5388 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5389 "memory input %d is not directly addressable", i);
5395 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5396 is_gimple_asm_val, fb_rvalue);
5397 if (tret == GS_ERROR)
5401 TREE_CHAIN (link) = NULL_TREE;
5402 vec_safe_push (inputs, link);
5405 link_next = NULL_TREE;
5406 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5408 link_next = TREE_CHAIN (link);
5409 TREE_CHAIN (link) = NULL_TREE;
5410 vec_safe_push (clobbers, link);
5413 link_next = NULL_TREE;
5414 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5416 link_next = TREE_CHAIN (link);
5417 TREE_CHAIN (link) = NULL_TREE;
5418 vec_safe_push (labels, link);
5421 /* Do not add ASMs with errors to the gimple IL stream. */
5422 if (ret != GS_ERROR)
5424 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5425 inputs, outputs, clobbers, labels);
5427 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5428 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5430 gimplify_seq_add_stmt (pre_p, stmt);
5436 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5437 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5438 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5439 return to this function.
5441 FIXME should we complexify the prequeue handling instead? Or use flags
5442 for all the cleanups and let the optimizer tighten them up? The current
5443 code seems pretty fragile; it will break on a cleanup within any
5444 non-conditional nesting. But any such nesting would be broken, anyway;
5445 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5446 and continues out of it. We can do that at the RTL level, though, so
5447 having an optimizer to tighten up try/finally regions would be a Good
5450 static enum gimplify_status
5451 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5453 gimple_stmt_iterator iter;
5454 gimple_seq body_sequence = NULL;
5456 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5458 /* We only care about the number of conditions between the innermost
5459 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5460 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5461 int old_conds = gimplify_ctxp->conditions;
5462 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5463 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5464 gimplify_ctxp->conditions = 0;
5465 gimplify_ctxp->conditional_cleanups = NULL;
5466 gimplify_ctxp->in_cleanup_point_expr = true;
5468 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5470 gimplify_ctxp->conditions = old_conds;
5471 gimplify_ctxp->conditional_cleanups = old_cleanups;
5472 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5474 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5476 gimple *wce = gsi_stmt (iter);
5478 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5480 if (gsi_one_before_end_p (iter))
5482 /* Note that gsi_insert_seq_before and gsi_remove do not
5483 scan operands, unlike some other sequence mutators. */
5484 if (!gimple_wce_cleanup_eh_only (wce))
5485 gsi_insert_seq_before_without_update (&iter,
5486 gimple_wce_cleanup (wce),
5488 gsi_remove (&iter, true);
5495 enum gimple_try_flags kind;
5497 if (gimple_wce_cleanup_eh_only (wce))
5498 kind = GIMPLE_TRY_CATCH;
5500 kind = GIMPLE_TRY_FINALLY;
5501 seq = gsi_split_seq_after (iter);
5503 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5504 /* Do not use gsi_replace here, as it may scan operands.
5505 We want to do a simple structural modification only. */
5506 gsi_set_stmt (&iter, gtry);
5507 iter = gsi_start (gtry->eval);
5514 gimplify_seq_add_seq (pre_p, body_sequence);
5527 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5528 is the cleanup action required. EH_ONLY is true if the cleanup should
5529 only be executed if an exception is thrown, not on normal exit. */
5532 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5535 gimple_seq cleanup_stmts = NULL;
5537 /* Errors can result in improperly nested cleanups. Which results in
5538 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5542 if (gimple_conditional_context ())
5544 /* If we're in a conditional context, this is more complex. We only
5545 want to run the cleanup if we actually ran the initialization that
5546 necessitates it, but we want to run it after the end of the
5547 conditional context. So we wrap the try/finally around the
5548 condition and use a flag to determine whether or not to actually
5549 run the destructor. Thus
5553 becomes (approximately)
5557 if (test) { A::A(temp); flag = 1; val = f(temp); }
5560 if (flag) A::~A(temp);
5564 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5565 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5566 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5568 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5569 gimplify_stmt (&cleanup, &cleanup_stmts);
5570 wce = gimple_build_wce (cleanup_stmts);
5572 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5573 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5574 gimplify_seq_add_stmt (pre_p, ftrue);
5576 /* Because of this manipulation, and the EH edges that jump
5577 threading cannot redirect, the temporary (VAR) will appear
5578 to be used uninitialized. Don't warn. */
5579 TREE_NO_WARNING (var) = 1;
5583 gimplify_stmt (&cleanup, &cleanup_stmts);
5584 wce = gimple_build_wce (cleanup_stmts);
5585 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5586 gimplify_seq_add_stmt (pre_p, wce);
5590 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5592 static enum gimplify_status
5593 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5595 tree targ = *expr_p;
5596 tree temp = TARGET_EXPR_SLOT (targ);
5597 tree init = TARGET_EXPR_INITIAL (targ);
5598 enum gimplify_status ret;
5602 tree cleanup = NULL_TREE;
5604 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5605 to the temps list. Handle also variable length TARGET_EXPRs. */
5606 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5608 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5609 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5610 gimplify_vla_decl (temp, pre_p);
5613 gimple_add_tmp_var (temp);
5615 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5616 expression is supposed to initialize the slot. */
5617 if (VOID_TYPE_P (TREE_TYPE (init)))
5618 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5621 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5623 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5625 ggc_free (init_expr);
5627 if (ret == GS_ERROR)
5629 /* PR c++/28266 Make sure this is expanded only once. */
5630 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5634 gimplify_and_add (init, pre_p);
5636 /* If needed, push the cleanup for the temp. */
5637 if (TARGET_EXPR_CLEANUP (targ))
5639 if (CLEANUP_EH_ONLY (targ))
5640 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5641 CLEANUP_EH_ONLY (targ), pre_p);
5643 cleanup = TARGET_EXPR_CLEANUP (targ);
5646 /* Add a clobber for the temporary going out of scope, like
5647 gimplify_bind_expr. */
5648 if (gimplify_ctxp->in_cleanup_point_expr
5649 && needs_to_live_in_memory (temp)
5650 && flag_stack_reuse == SR_ALL)
5652 tree clobber = build_constructor (TREE_TYPE (temp),
5654 TREE_THIS_VOLATILE (clobber) = true;
5655 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5657 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5664 gimple_push_cleanup (temp, cleanup, false, pre_p);
5666 /* Only expand this once. */
5667 TREE_OPERAND (targ, 3) = init;
5668 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5671 /* We should have expanded this before. */
5672 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5678 /* Gimplification of expression trees. */
5680 /* Gimplify an expression which appears at statement context. The
5681 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5682 NULL, a new sequence is allocated.
5684 Return true if we actually added a statement to the queue. */
5687 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5689 gimple_seq_node last;
5691 last = gimple_seq_last (*seq_p);
5692 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5693 return last != gimple_seq_last (*seq_p);
5696 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5697 to CTX. If entries already exist, force them to be some flavor of private.
5698 If there is no enclosing parallel, do nothing. */
5701 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5705 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5710 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5713 if (n->value & GOVD_SHARED)
5714 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5715 else if (n->value & GOVD_MAP)
5716 n->value |= GOVD_MAP_TO_ONLY;
5720 else if ((ctx->region_type & ORT_TARGET) != 0)
5722 if (ctx->target_map_scalars_firstprivate)
5723 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5725 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5727 else if (ctx->region_type != ORT_WORKSHARE
5728 && ctx->region_type != ORT_SIMD
5729 && ctx->region_type != ORT_ACC
5730 && !(ctx->region_type & ORT_TARGET_DATA))
5731 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5733 ctx = ctx->outer_context;
5738 /* Similarly for each of the type sizes of TYPE. */
5741 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5743 if (type == NULL || type == error_mark_node)
5745 type = TYPE_MAIN_VARIANT (type);
5747 if (ctx->privatized_types->add (type))
5750 switch (TREE_CODE (type))
5756 case FIXED_POINT_TYPE:
5757 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5758 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5762 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5763 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5768 case QUAL_UNION_TYPE:
5771 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5772 if (TREE_CODE (field) == FIELD_DECL)
5774 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5775 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5781 case REFERENCE_TYPE:
5782 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5789 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5790 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5791 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5794 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5797 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5800 unsigned int nflags;
5803 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5806 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5807 there are constructors involved somewhere. */
5808 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5809 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5812 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5813 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5815 /* We shouldn't be re-adding the decl with the same data
5817 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5818 nflags = n->value | flags;
5819 /* The only combination of data sharing classes we should see is
5820 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5821 reduction variables to be used in data sharing clauses. */
5822 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5823 || ((nflags & GOVD_DATA_SHARE_CLASS)
5824 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5825 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5830 /* When adding a variable-sized variable, we have to handle all sorts
5831 of additional bits of data: the pointer replacement variable, and
5832 the parameters of the type. */
5833 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5835 /* Add the pointer replacement variable as PRIVATE if the variable
5836 replacement is private, else FIRSTPRIVATE since we'll need the
5837 address of the original variable either for SHARED, or for the
5838 copy into or out of the context. */
5839 if (!(flags & GOVD_LOCAL))
5841 if (flags & GOVD_MAP)
5842 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5843 else if (flags & GOVD_PRIVATE)
5844 nflags = GOVD_PRIVATE;
5845 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5846 && (flags & GOVD_FIRSTPRIVATE))
5847 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5849 nflags = GOVD_FIRSTPRIVATE;
5850 nflags |= flags & GOVD_SEEN;
5851 t = DECL_VALUE_EXPR (decl);
5852 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5853 t = TREE_OPERAND (t, 0);
5854 gcc_assert (DECL_P (t));
5855 omp_add_variable (ctx, t, nflags);
5858 /* Add all of the variable and type parameters (which should have
5859 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5860 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5861 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5862 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5864 /* The variable-sized variable itself is never SHARED, only some form
5865 of PRIVATE. The sharing would take place via the pointer variable
5866 which we remapped above. */
5867 if (flags & GOVD_SHARED)
5868 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5869 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5871 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5872 alloca statement we generate for the variable, so make sure it
5873 is available. This isn't automatically needed for the SHARED
5874 case, since we won't be allocating local storage then.
5875 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5876 in this case omp_notice_variable will be called later
5877 on when it is gimplified. */
5878 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5879 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5880 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5882 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5883 && lang_hooks.decls.omp_privatize_by_reference (decl))
5885 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5887 /* Similar to the direct variable sized case above, we'll need the
5888 size of references being privatized. */
5889 if ((flags & GOVD_SHARED) == 0)
5891 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5893 omp_notice_variable (ctx, t, true);
5900 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5902 /* For reductions clauses in OpenACC loop directives, by default create a
5903 copy clause on the enclosing parallel construct for carrying back the
5905 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
5907 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
5910 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
5913 /* Ignore local variables and explicitly declared clauses. */
5914 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
5916 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
5918 /* According to the OpenACC spec, such a reduction variable
5919 should already have a copy map on a kernels construct,
5920 verify that here. */
5921 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
5922 && (n->value & GOVD_MAP));
5924 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
5926 /* Remove firstprivate and make it a copy map. */
5927 n->value &= ~GOVD_FIRSTPRIVATE;
5928 n->value |= GOVD_MAP;
5931 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
5933 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
5934 GOVD_MAP | GOVD_SEEN);
5937 outer_ctx = outer_ctx->outer_context;
5942 /* Notice a threadprivate variable DECL used in OMP context CTX.
5943 This just prints out diagnostics about threadprivate variable uses
5944 in untied tasks. If DECL2 is non-NULL, prevent this warning
5945 on that variable. */
5948 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5952 struct gimplify_omp_ctx *octx;
5954 for (octx = ctx; octx; octx = octx->outer_context)
5955 if ((octx->region_type & ORT_TARGET) != 0)
5957 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5960 error ("threadprivate variable %qE used in target region",
5962 error_at (octx->location, "enclosing target region");
5963 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5966 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5969 if (ctx->region_type != ORT_UNTIED_TASK)
5971 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5974 error ("threadprivate variable %qE used in untied task",
5976 error_at (ctx->location, "enclosing task");
5977 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5980 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5984 /* Return true if global var DECL is device resident. */
5987 device_resident_p (tree decl)
5989 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5994 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5996 tree c = TREE_VALUE (t);
5997 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6004 /* Determine outer default flags for DECL mentioned in an OMP region
6005 but not declared in an enclosing clause.
6007 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6008 remapped firstprivate instead of shared. To some extent this is
6009 addressed in omp_firstprivatize_type_sizes, but not
6013 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6014 bool in_code, unsigned flags)
6016 enum omp_clause_default_kind default_kind = ctx->default_kind;
6017 enum omp_clause_default_kind kind;
6019 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6020 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6021 default_kind = kind;
6023 switch (default_kind)
6025 case OMP_CLAUSE_DEFAULT_NONE:
6029 if (ctx->region_type & ORT_PARALLEL)
6031 else if (ctx->region_type & ORT_TASK)
6033 else if (ctx->region_type & ORT_TEAMS)
6038 error ("%qE not specified in enclosing %s",
6039 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6040 error_at (ctx->location, "enclosing %s", rtype);
6043 case OMP_CLAUSE_DEFAULT_SHARED:
6044 flags |= GOVD_SHARED;
6046 case OMP_CLAUSE_DEFAULT_PRIVATE:
6047 flags |= GOVD_PRIVATE;
6049 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6050 flags |= GOVD_FIRSTPRIVATE;
6052 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6053 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6054 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6055 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6057 omp_notice_variable (octx, decl, in_code);
6058 for (; octx; octx = octx->outer_context)
6062 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6063 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6064 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6066 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6068 flags |= GOVD_FIRSTPRIVATE;
6071 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6073 flags |= GOVD_SHARED;
6079 if (TREE_CODE (decl) == PARM_DECL
6080 || (!is_global_var (decl)
6081 && DECL_CONTEXT (decl) == current_function_decl))
6082 flags |= GOVD_FIRSTPRIVATE;
6084 flags |= GOVD_SHARED;
6096 /* Determine outer default flags for DECL mentioned in an OACC region
6097 but not declared in an enclosing clause. */
6100 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6103 bool on_device = false;
6104 tree type = TREE_TYPE (decl);
6106 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6107 type = TREE_TYPE (type);
6109 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6110 && is_global_var (decl)
6111 && device_resident_p (decl))
6114 flags |= GOVD_MAP_TO_ONLY;
6117 switch (ctx->region_type)
6122 case ORT_ACC_KERNELS:
6123 /* Scalars are default 'copy' under kernels, non-scalars are default
6124 'present_or_copy'. */
6126 if (!AGGREGATE_TYPE_P (type))
6127 flags |= GOVD_MAP_FORCE;
6132 case ORT_ACC_PARALLEL:
6134 if (on_device || AGGREGATE_TYPE_P (type))
6135 /* Aggregates default to 'present_or_copy'. */
6138 /* Scalars default to 'firstprivate'. */
6139 flags |= GOVD_FIRSTPRIVATE;
6145 if (DECL_ARTIFICIAL (decl))
6146 ; /* We can get compiler-generated decls, and should not complain
6148 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6150 error ("%qE not specified in enclosing OpenACC %qs construct",
6151 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6152 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6155 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6160 /* Record the fact that DECL was used within the OMP context CTX.
6161 IN_CODE is true when real code uses DECL, and false when we should
6162 merely emit default(none) errors. Return true if DECL is going to
6163 be remapped and thus DECL shouldn't be gimplified into its
6164 DECL_VALUE_EXPR (if any). */
6167 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6170 unsigned flags = in_code ? GOVD_SEEN : 0;
6171 bool ret = false, shared;
6173 if (error_operand_p (decl))
6176 if (ctx->region_type == ORT_NONE)
6177 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6179 if (is_global_var (decl))
6181 /* Threadprivate variables are predetermined. */
6182 if (DECL_THREAD_LOCAL_P (decl))
6183 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6185 if (DECL_HAS_VALUE_EXPR_P (decl))
6187 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6189 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6190 return omp_notice_threadprivate_variable (ctx, decl, value);
6193 if (gimplify_omp_ctxp->outer_context == NULL
6195 && get_oacc_fn_attrib (current_function_decl))
6197 location_t loc = DECL_SOURCE_LOCATION (decl);
6199 if (lookup_attribute ("omp declare target link",
6200 DECL_ATTRIBUTES (decl)))
6203 "%qE with %<link%> clause used in %<routine%> function",
6207 else if (!lookup_attribute ("omp declare target",
6208 DECL_ATTRIBUTES (decl)))
6211 "%qE requires a %<declare%> directive for use "
6212 "in a %<routine%> function", DECL_NAME (decl));
6218 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6219 if ((ctx->region_type & ORT_TARGET) != 0)
6221 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6224 unsigned nflags = flags;
6225 if (ctx->target_map_pointers_as_0len_arrays
6226 || ctx->target_map_scalars_firstprivate)
6228 bool is_declare_target = false;
6229 bool is_scalar = false;
6230 if (is_global_var (decl)
6231 && varpool_node::get_create (decl)->offloadable)
6233 struct gimplify_omp_ctx *octx;
6234 for (octx = ctx->outer_context;
6235 octx; octx = octx->outer_context)
6237 n = splay_tree_lookup (octx->variables,
6238 (splay_tree_key)decl);
6240 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6241 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6244 is_declare_target = octx == NULL;
6246 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6248 tree type = TREE_TYPE (decl);
6249 if (TREE_CODE (type) == REFERENCE_TYPE)
6250 type = TREE_TYPE (type);
6251 if (TREE_CODE (type) == COMPLEX_TYPE)
6252 type = TREE_TYPE (type);
6253 if (INTEGRAL_TYPE_P (type)
6254 || SCALAR_FLOAT_TYPE_P (type)
6255 || TREE_CODE (type) == POINTER_TYPE)
6258 if (is_declare_target)
6260 else if (ctx->target_map_pointers_as_0len_arrays
6261 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6262 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6263 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6265 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6267 nflags |= GOVD_FIRSTPRIVATE;
6270 struct gimplify_omp_ctx *octx = ctx->outer_context;
6271 if ((ctx->region_type & ORT_ACC) && octx)
6273 /* Look in outer OpenACC contexts, to see if there's a
6274 data attribute for this variable. */
6275 omp_notice_variable (octx, decl, in_code);
6277 for (; octx; octx = octx->outer_context)
6279 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6282 = splay_tree_lookup (octx->variables,
6283 (splay_tree_key) decl);
6286 if (octx->region_type == ORT_ACC_HOST_DATA)
6287 error ("variable %qE declared in enclosing "
6288 "%<host_data%> region", DECL_NAME (decl));
6296 tree type = TREE_TYPE (decl);
6299 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6300 && lang_hooks.decls.omp_privatize_by_reference (decl))
6301 type = TREE_TYPE (type);
6303 && !lang_hooks.types.omp_mappable_type (type))
6305 error ("%qD referenced in target region does not have "
6306 "a mappable type", decl);
6307 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6309 else if (nflags == flags)
6311 if ((ctx->region_type & ORT_ACC) != 0)
6312 nflags = oacc_default_clause (ctx, decl, flags);
6318 omp_add_variable (ctx, decl, nflags);
6322 /* If nothing changed, there's nothing left to do. */
6323 if ((n->value & flags) == flags)
6333 if (ctx->region_type == ORT_WORKSHARE
6334 || ctx->region_type == ORT_SIMD
6335 || ctx->region_type == ORT_ACC
6336 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6339 flags = omp_default_clause (ctx, decl, in_code, flags);
6341 if ((flags & GOVD_PRIVATE)
6342 && lang_hooks.decls.omp_private_outer_ref (decl))
6343 flags |= GOVD_PRIVATE_OUTER_REF;
6345 omp_add_variable (ctx, decl, flags);
6347 shared = (flags & GOVD_SHARED) != 0;
6348 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6352 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6353 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6354 && DECL_SIZE (decl))
6356 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6359 tree t = DECL_VALUE_EXPR (decl);
6360 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6361 t = TREE_OPERAND (t, 0);
6362 gcc_assert (DECL_P (t));
6363 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6364 n2->value |= GOVD_SEEN;
6366 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
6367 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
6368 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
6372 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6373 gcc_assert (DECL_P (t));
6374 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6376 n2->value |= GOVD_SEEN;
6380 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6381 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6383 /* If nothing changed, there's nothing left to do. */
6384 if ((n->value & flags) == flags)
6390 /* If the variable is private in the current context, then we don't
6391 need to propagate anything to an outer context. */
6392 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6394 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6395 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6397 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6398 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6399 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6401 if (ctx->outer_context
6402 && omp_notice_variable (ctx->outer_context, decl, in_code))
6407 /* Verify that DECL is private within CTX. If there's specific information
6408 to the contrary in the innermost scope, generate an error. */
6411 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6415 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6418 if (n->value & GOVD_SHARED)
6420 if (ctx == gimplify_omp_ctxp)
6423 error ("iteration variable %qE is predetermined linear",
6426 error ("iteration variable %qE should be private",
6428 n->value = GOVD_PRIVATE;
6434 else if ((n->value & GOVD_EXPLICIT) != 0
6435 && (ctx == gimplify_omp_ctxp
6436 || (ctx->region_type == ORT_COMBINED_PARALLEL
6437 && gimplify_omp_ctxp->outer_context == ctx)))
6439 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6440 error ("iteration variable %qE should not be firstprivate",
6442 else if ((n->value & GOVD_REDUCTION) != 0)
6443 error ("iteration variable %qE should not be reduction",
6445 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6446 error ("iteration variable %qE should not be linear",
6448 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6449 error ("iteration variable %qE should not be lastprivate",
6451 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6452 error ("iteration variable %qE should not be private",
6454 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6455 error ("iteration variable %qE is predetermined linear",
6458 return (ctx == gimplify_omp_ctxp
6459 || (ctx->region_type == ORT_COMBINED_PARALLEL
6460 && gimplify_omp_ctxp->outer_context == ctx));
6463 if (ctx->region_type != ORT_WORKSHARE
6464 && ctx->region_type != ORT_SIMD
6465 && ctx->region_type != ORT_ACC)
6467 else if (ctx->outer_context)
6468 return omp_is_private (ctx->outer_context, decl, simd);
6472 /* Return true if DECL is private within a parallel region
6473 that binds to the current construct's context or in parallel
6474 region's REDUCTION clause. */
6477 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6483 ctx = ctx->outer_context;
6486 if (is_global_var (decl))
6489 /* References might be private, but might be shared too,
6490 when checking for copyprivate, assume they might be
6491 private, otherwise assume they might be shared. */
6495 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6498 /* Treat C++ privatized non-static data members outside
6499 of the privatization the same. */
6500 if (omp_member_access_dummy_var (decl))
6506 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6508 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6509 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6514 if ((n->value & GOVD_LOCAL) != 0
6515 && omp_member_access_dummy_var (decl))
6517 return (n->value & GOVD_SHARED) == 0;
6520 while (ctx->region_type == ORT_WORKSHARE
6521 || ctx->region_type == ORT_SIMD
6522 || ctx->region_type == ORT_ACC);
6526 /* Return true if the CTX is combined with distribute and thus
6527 lastprivate can't be supported. */
6530 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6534 if (ctx->outer_context == NULL)
6536 ctx = ctx->outer_context;
6537 switch (ctx->region_type)
6540 if (!ctx->combined_loop)
6542 if (ctx->distribute)
6543 return lang_GNU_Fortran ();
6545 case ORT_COMBINED_PARALLEL:
6547 case ORT_COMBINED_TEAMS:
6548 return lang_GNU_Fortran ();
6556 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6559 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6563 /* If this node has been visited, unmark it and keep looking. */
6564 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6567 if (IS_TYPE_OR_DECL_P (t))
6572 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6573 and previous omp contexts. */
6576 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6577 enum omp_region_type region_type,
6578 enum tree_code code)
6580 struct gimplify_omp_ctx *ctx, *outer_ctx;
6582 hash_map<tree, tree> *struct_map_to_clause = NULL;
6583 tree *prev_list_p = NULL;
6585 ctx = new_omp_context (region_type);
6586 outer_ctx = ctx->outer_context;
6587 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6589 ctx->target_map_pointers_as_0len_arrays = true;
6590 /* FIXME: For Fortran we want to set this too, when
6591 the Fortran FE is updated to OpenMP 4.5. */
6592 ctx->target_map_scalars_firstprivate = true;
6594 if (!lang_GNU_Fortran ())
6598 case OMP_TARGET_DATA:
6599 case OMP_TARGET_ENTER_DATA:
6600 case OMP_TARGET_EXIT_DATA:
6601 case OACC_HOST_DATA:
6602 ctx->target_firstprivatize_array_bases = true;
6607 while ((c = *list_p) != NULL)
6609 bool remove = false;
6610 bool notice_outer = true;
6611 const char *check_non_private = NULL;
6615 switch (OMP_CLAUSE_CODE (c))
6617 case OMP_CLAUSE_PRIVATE:
6618 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6619 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6621 flags |= GOVD_PRIVATE_OUTER_REF;
6622 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6625 notice_outer = false;
6627 case OMP_CLAUSE_SHARED:
6628 flags = GOVD_SHARED | GOVD_EXPLICIT;
6630 case OMP_CLAUSE_FIRSTPRIVATE:
6631 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6632 check_non_private = "firstprivate";
6634 case OMP_CLAUSE_LASTPRIVATE:
6635 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6636 check_non_private = "lastprivate";
6637 decl = OMP_CLAUSE_DECL (c);
6638 if (omp_no_lastprivate (ctx))
6640 notice_outer = false;
6641 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6643 else if (error_operand_p (decl))
6646 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6647 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6648 && splay_tree_lookup (outer_ctx->variables,
6649 (splay_tree_key) decl) == NULL)
6651 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6652 if (outer_ctx->outer_context)
6653 omp_notice_variable (outer_ctx->outer_context, decl, true);
6656 && (outer_ctx->region_type & ORT_TASK) != 0
6657 && outer_ctx->combined_loop
6658 && splay_tree_lookup (outer_ctx->variables,
6659 (splay_tree_key) decl) == NULL)
6661 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6662 if (outer_ctx->outer_context)
6663 omp_notice_variable (outer_ctx->outer_context, decl, true);
6666 && (outer_ctx->region_type == ORT_WORKSHARE
6667 || outer_ctx->region_type == ORT_ACC)
6668 && outer_ctx->combined_loop
6669 && splay_tree_lookup (outer_ctx->variables,
6670 (splay_tree_key) decl) == NULL
6671 && !omp_check_private (outer_ctx, decl, false))
6673 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6674 if (outer_ctx->outer_context
6675 && (outer_ctx->outer_context->region_type
6676 == ORT_COMBINED_PARALLEL)
6677 && splay_tree_lookup (outer_ctx->outer_context->variables,
6678 (splay_tree_key) decl) == NULL)
6680 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6681 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6682 if (octx->outer_context)
6683 omp_notice_variable (octx->outer_context, decl, true);
6685 else if (outer_ctx->outer_context)
6686 omp_notice_variable (outer_ctx->outer_context, decl, true);
6689 case OMP_CLAUSE_REDUCTION:
6690 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6691 /* OpenACC permits reductions on private variables. */
6692 if (!(region_type & ORT_ACC))
6693 check_non_private = "reduction";
6694 decl = OMP_CLAUSE_DECL (c);
6695 if (TREE_CODE (decl) == MEM_REF)
6697 tree type = TREE_TYPE (decl);
6698 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6699 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6704 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6707 omp_firstprivatize_variable (ctx, v);
6708 omp_notice_variable (ctx, v, true);
6710 decl = TREE_OPERAND (decl, 0);
6711 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6713 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6714 NULL, is_gimple_val, fb_rvalue)
6720 v = TREE_OPERAND (decl, 1);
6723 omp_firstprivatize_variable (ctx, v);
6724 omp_notice_variable (ctx, v, true);
6726 decl = TREE_OPERAND (decl, 0);
6728 if (TREE_CODE (decl) == ADDR_EXPR
6729 || TREE_CODE (decl) == INDIRECT_REF)
6730 decl = TREE_OPERAND (decl, 0);
6733 case OMP_CLAUSE_LINEAR:
6734 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6735 is_gimple_val, fb_rvalue) == GS_ERROR)
6742 if (code == OMP_SIMD
6743 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6745 struct gimplify_omp_ctx *octx = outer_ctx;
6747 && octx->region_type == ORT_WORKSHARE
6748 && octx->combined_loop
6749 && !octx->distribute)
6751 if (octx->outer_context
6752 && (octx->outer_context->region_type
6753 == ORT_COMBINED_PARALLEL))
6754 octx = octx->outer_context->outer_context;
6756 octx = octx->outer_context;
6759 && octx->region_type == ORT_WORKSHARE
6760 && octx->combined_loop
6762 && !lang_GNU_Fortran ())
6764 error_at (OMP_CLAUSE_LOCATION (c),
6765 "%<linear%> clause for variable other than "
6766 "loop iterator specified on construct "
6767 "combined with %<distribute%>");
6772 /* For combined #pragma omp parallel for simd, need to put
6773 lastprivate and perhaps firstprivate too on the
6774 parallel. Similarly for #pragma omp for simd. */
6775 struct gimplify_omp_ctx *octx = outer_ctx;
6777 if (omp_no_lastprivate (ctx))
6778 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6781 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6782 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6784 decl = OMP_CLAUSE_DECL (c);
6785 if (error_operand_p (decl))
6791 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6792 flags |= GOVD_FIRSTPRIVATE;
6793 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6794 flags |= GOVD_LASTPRIVATE;
6796 && octx->region_type == ORT_WORKSHARE
6797 && octx->combined_loop)
6799 if (octx->outer_context
6800 && (octx->outer_context->region_type
6801 == ORT_COMBINED_PARALLEL))
6802 octx = octx->outer_context;
6803 else if (omp_check_private (octx, decl, false))
6807 && (octx->region_type & ORT_TASK) != 0
6808 && octx->combined_loop)
6811 && octx->region_type == ORT_COMBINED_PARALLEL
6812 && ctx->region_type == ORT_WORKSHARE
6813 && octx == outer_ctx)
6814 flags = GOVD_SEEN | GOVD_SHARED;
6816 && octx->region_type == ORT_COMBINED_TEAMS)
6817 flags = GOVD_SEEN | GOVD_SHARED;
6819 && octx->region_type == ORT_COMBINED_TARGET)
6821 flags &= ~GOVD_LASTPRIVATE;
6822 if (flags == GOVD_SEEN)
6828 = splay_tree_lookup (octx->variables,
6829 (splay_tree_key) decl);
6830 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6835 omp_add_variable (octx, decl, flags);
6836 if (octx->outer_context == NULL)
6838 octx = octx->outer_context;
6843 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6844 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6845 omp_notice_variable (octx, decl, true);
6847 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6848 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6849 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6851 notice_outer = false;
6852 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6856 case OMP_CLAUSE_MAP:
6857 decl = OMP_CLAUSE_DECL (c);
6858 if (error_operand_p (decl))
6864 case OMP_TARGET_DATA:
6865 case OMP_TARGET_ENTER_DATA:
6866 case OMP_TARGET_EXIT_DATA:
6867 case OACC_HOST_DATA:
6868 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6869 || (OMP_CLAUSE_MAP_KIND (c)
6870 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6871 /* For target {,enter ,exit }data only the array slice is
6872 mapped, but not the pointer to it. */
6880 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6882 struct gimplify_omp_ctx *octx;
6883 for (octx = outer_ctx; octx; octx = octx->outer_context)
6885 if (octx->region_type != ORT_ACC_HOST_DATA)
6888 = splay_tree_lookup (octx->variables,
6889 (splay_tree_key) decl);
6891 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6892 "declared in enclosing %<host_data%> region",
6896 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6897 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6898 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6899 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6900 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6905 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6906 || (OMP_CLAUSE_MAP_KIND (c)
6907 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6908 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6911 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6912 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6913 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6918 if (TREE_CODE (d) == ARRAY_REF)
6920 while (TREE_CODE (d) == ARRAY_REF)
6921 d = TREE_OPERAND (d, 0);
6922 if (TREE_CODE (d) == COMPONENT_REF
6923 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6926 pd = &OMP_CLAUSE_DECL (c);
6928 && TREE_CODE (decl) == INDIRECT_REF
6929 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6930 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6933 pd = &TREE_OPERAND (decl, 0);
6934 decl = TREE_OPERAND (decl, 0);
6936 if (TREE_CODE (decl) == COMPONENT_REF)
6938 while (TREE_CODE (decl) == COMPONENT_REF)
6939 decl = TREE_OPERAND (decl, 0);
6940 if (TREE_CODE (decl) == INDIRECT_REF
6941 && DECL_P (TREE_OPERAND (decl, 0))
6942 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6944 decl = TREE_OPERAND (decl, 0);
6946 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6954 if (error_operand_p (decl))
6960 tree stype = TREE_TYPE (decl);
6961 if (TREE_CODE (stype) == REFERENCE_TYPE)
6962 stype = TREE_TYPE (stype);
6963 if (TYPE_SIZE_UNIT (stype) == NULL
6964 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
6966 error_at (OMP_CLAUSE_LOCATION (c),
6967 "mapping field %qE of variable length "
6968 "structure", OMP_CLAUSE_DECL (c));
6973 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6975 /* Error recovery. */
6976 if (prev_list_p == NULL)
6981 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6983 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6984 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6993 HOST_WIDE_INT bitsize, bitpos;
6995 int unsignedp, reversep, volatilep = 0;
6996 tree base = OMP_CLAUSE_DECL (c);
6997 while (TREE_CODE (base) == ARRAY_REF)
6998 base = TREE_OPERAND (base, 0);
6999 if (TREE_CODE (base) == INDIRECT_REF)
7000 base = TREE_OPERAND (base, 0);
7001 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7002 &mode, &unsignedp, &reversep,
7004 tree orig_base = base;
7005 if ((TREE_CODE (base) == INDIRECT_REF
7006 || (TREE_CODE (base) == MEM_REF
7007 && integer_zerop (TREE_OPERAND (base, 1))))
7008 && DECL_P (TREE_OPERAND (base, 0))
7009 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7011 base = TREE_OPERAND (base, 0);
7012 gcc_assert (base == decl
7013 && (offset == NULL_TREE
7014 || TREE_CODE (offset) == INTEGER_CST));
7017 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7018 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7019 == GOMP_MAP_ALWAYS_POINTER);
7020 if (n == NULL || (n->value & GOVD_MAP) == 0)
7022 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7024 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7025 if (orig_base != base)
7026 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7028 OMP_CLAUSE_DECL (l) = decl;
7029 OMP_CLAUSE_SIZE (l) = size_int (1);
7030 if (struct_map_to_clause == NULL)
7031 struct_map_to_clause = new hash_map<tree, tree>;
7032 struct_map_to_clause->put (decl, l);
7035 enum gomp_map_kind mkind
7036 = code == OMP_TARGET_EXIT_DATA
7037 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7038 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7040 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7041 OMP_CLAUSE_DECL (c2)
7042 = unshare_expr (OMP_CLAUSE_DECL (c));
7043 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7044 OMP_CLAUSE_SIZE (c2)
7045 = TYPE_SIZE_UNIT (ptr_type_node);
7046 OMP_CLAUSE_CHAIN (l) = c2;
7047 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7049 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7051 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7053 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7054 OMP_CLAUSE_DECL (c3)
7055 = unshare_expr (OMP_CLAUSE_DECL (c4));
7056 OMP_CLAUSE_SIZE (c3)
7057 = TYPE_SIZE_UNIT (ptr_type_node);
7058 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7059 OMP_CLAUSE_CHAIN (c2) = c3;
7066 OMP_CLAUSE_CHAIN (l) = c;
7068 list_p = &OMP_CLAUSE_CHAIN (l);
7070 if (orig_base != base && code == OMP_TARGET)
7072 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7074 enum gomp_map_kind mkind
7075 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7076 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7077 OMP_CLAUSE_DECL (c2) = decl;
7078 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7079 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7080 OMP_CLAUSE_CHAIN (l) = c2;
7082 flags = GOVD_MAP | GOVD_EXPLICIT;
7083 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7089 tree *osc = struct_map_to_clause->get (decl);
7090 tree *sc = NULL, *scp = NULL;
7091 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7092 n->value |= GOVD_SEEN;
7095 o1 = wi::to_offset (offset);
7099 o1 = o1 + bitpos / BITS_PER_UNIT;
7100 sc = &OMP_CLAUSE_CHAIN (*osc);
7102 && (OMP_CLAUSE_MAP_KIND (*sc)
7103 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7104 sc = &OMP_CLAUSE_CHAIN (*sc);
7105 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7106 if (ptr && sc == prev_list_p)
7108 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7110 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7112 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7118 HOST_WIDE_INT bitsize2, bitpos2;
7119 base = OMP_CLAUSE_DECL (*sc);
7120 if (TREE_CODE (base) == ARRAY_REF)
7122 while (TREE_CODE (base) == ARRAY_REF)
7123 base = TREE_OPERAND (base, 0);
7124 if (TREE_CODE (base) != COMPONENT_REF
7125 || (TREE_CODE (TREE_TYPE (base))
7129 else if (TREE_CODE (base) == INDIRECT_REF
7130 && (TREE_CODE (TREE_OPERAND (base, 0))
7132 && (TREE_CODE (TREE_TYPE
7133 (TREE_OPERAND (base, 0)))
7135 base = TREE_OPERAND (base, 0);
7136 base = get_inner_reference (base, &bitsize2,
7139 &reversep, &volatilep,
7141 if ((TREE_CODE (base) == INDIRECT_REF
7142 || (TREE_CODE (base) == MEM_REF
7143 && integer_zerop (TREE_OPERAND (base,
7145 && DECL_P (TREE_OPERAND (base, 0))
7146 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7149 base = TREE_OPERAND (base, 0);
7154 gcc_assert (offset == NULL_TREE
7155 || TREE_CODE (offset) == INTEGER_CST);
7156 tree d1 = OMP_CLAUSE_DECL (*sc);
7157 tree d2 = OMP_CLAUSE_DECL (c);
7158 while (TREE_CODE (d1) == ARRAY_REF)
7159 d1 = TREE_OPERAND (d1, 0);
7160 while (TREE_CODE (d2) == ARRAY_REF)
7161 d2 = TREE_OPERAND (d2, 0);
7162 if (TREE_CODE (d1) == INDIRECT_REF)
7163 d1 = TREE_OPERAND (d1, 0);
7164 if (TREE_CODE (d2) == INDIRECT_REF)
7165 d2 = TREE_OPERAND (d2, 0);
7166 while (TREE_CODE (d1) == COMPONENT_REF)
7167 if (TREE_CODE (d2) == COMPONENT_REF
7168 && TREE_OPERAND (d1, 1)
7169 == TREE_OPERAND (d2, 1))
7171 d1 = TREE_OPERAND (d1, 0);
7172 d2 = TREE_OPERAND (d2, 0);
7178 error_at (OMP_CLAUSE_LOCATION (c),
7179 "%qE appears more than once in map "
7180 "clauses", OMP_CLAUSE_DECL (c));
7185 o2 = wi::to_offset (offset2);
7189 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7190 if (wi::ltu_p (o1, o2)
7191 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7201 OMP_CLAUSE_SIZE (*osc)
7202 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7206 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7208 tree cl = NULL_TREE;
7209 enum gomp_map_kind mkind
7210 = code == OMP_TARGET_EXIT_DATA
7211 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7212 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7213 OMP_CLAUSE_DECL (c2)
7214 = unshare_expr (OMP_CLAUSE_DECL (c));
7215 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7216 OMP_CLAUSE_SIZE (c2)
7217 = TYPE_SIZE_UNIT (ptr_type_node);
7218 cl = scp ? *prev_list_p : c2;
7219 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7221 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7223 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7225 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7226 OMP_CLAUSE_DECL (c3)
7227 = unshare_expr (OMP_CLAUSE_DECL (c4));
7228 OMP_CLAUSE_SIZE (c3)
7229 = TYPE_SIZE_UNIT (ptr_type_node);
7230 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7232 OMP_CLAUSE_CHAIN (c2) = c3;
7238 if (sc == prev_list_p)
7245 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7246 list_p = prev_list_p;
7248 OMP_CLAUSE_CHAIN (c) = *sc;
7255 *list_p = OMP_CLAUSE_CHAIN (c);
7256 OMP_CLAUSE_CHAIN (c) = *sc;
7263 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7264 && OMP_CLAUSE_CHAIN (c)
7265 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7266 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7267 == GOMP_MAP_ALWAYS_POINTER))
7268 prev_list_p = list_p;
7271 flags = GOVD_MAP | GOVD_EXPLICIT;
7272 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7273 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7274 flags |= GOVD_MAP_ALWAYS_TO;
7277 case OMP_CLAUSE_DEPEND:
7278 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7279 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7281 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7285 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7287 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7288 NULL, is_gimple_val, fb_rvalue);
7289 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7291 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7296 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7297 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7298 is_gimple_val, fb_rvalue) == GS_ERROR)
7306 case OMP_CLAUSE_FROM:
7307 case OMP_CLAUSE__CACHE_:
7308 decl = OMP_CLAUSE_DECL (c);
7309 if (error_operand_p (decl))
7314 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7315 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7316 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7317 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7318 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7325 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7326 NULL, is_gimple_lvalue, fb_lvalue)
7336 case OMP_CLAUSE_USE_DEVICE_PTR:
7337 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7339 case OMP_CLAUSE_IS_DEVICE_PTR:
7340 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7344 decl = OMP_CLAUSE_DECL (c);
7346 if (error_operand_p (decl))
7351 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7353 tree t = omp_member_access_dummy_var (decl);
7356 tree v = DECL_VALUE_EXPR (decl);
7357 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7359 omp_notice_variable (outer_ctx, t, true);
7362 omp_add_variable (ctx, decl, flags);
7363 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7364 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7366 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7367 GOVD_LOCAL | GOVD_SEEN);
7368 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7369 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7371 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7373 omp_add_variable (ctx,
7374 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7375 GOVD_LOCAL | GOVD_SEEN);
7376 gimplify_omp_ctxp = ctx;
7377 push_gimplify_context ();
7379 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7380 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7382 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7383 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7384 pop_gimplify_context
7385 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7386 push_gimplify_context ();
7387 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7388 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7389 pop_gimplify_context
7390 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7391 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7392 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7394 gimplify_omp_ctxp = outer_ctx;
7396 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7397 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7399 gimplify_omp_ctxp = ctx;
7400 push_gimplify_context ();
7401 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7403 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7405 TREE_SIDE_EFFECTS (bind) = 1;
7406 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7407 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7409 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7410 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7411 pop_gimplify_context
7412 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7413 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7415 gimplify_omp_ctxp = outer_ctx;
7417 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7418 && OMP_CLAUSE_LINEAR_STMT (c))
7420 gimplify_omp_ctxp = ctx;
7421 push_gimplify_context ();
7422 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7424 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7426 TREE_SIDE_EFFECTS (bind) = 1;
7427 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7428 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7430 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7431 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7432 pop_gimplify_context
7433 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7434 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7436 gimplify_omp_ctxp = outer_ctx;
7442 case OMP_CLAUSE_COPYIN:
7443 case OMP_CLAUSE_COPYPRIVATE:
7444 decl = OMP_CLAUSE_DECL (c);
7445 if (error_operand_p (decl))
7450 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7452 && !omp_check_private (ctx, decl, true))
7455 if (is_global_var (decl))
7457 if (DECL_THREAD_LOCAL_P (decl))
7459 else if (DECL_HAS_VALUE_EXPR_P (decl))
7461 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7465 && DECL_THREAD_LOCAL_P (value))
7470 error_at (OMP_CLAUSE_LOCATION (c),
7471 "copyprivate variable %qE is not threadprivate"
7472 " or private in outer context", DECL_NAME (decl));
7476 omp_notice_variable (outer_ctx, decl, true);
7477 if (check_non_private
7478 && region_type == ORT_WORKSHARE
7479 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7480 || decl == OMP_CLAUSE_DECL (c)
7481 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7482 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7484 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7485 == POINTER_PLUS_EXPR
7486 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7487 (OMP_CLAUSE_DECL (c), 0), 0))
7489 && omp_check_private (ctx, decl, false))
7491 error ("%s variable %qE is private in outer context",
7492 check_non_private, DECL_NAME (decl));
7498 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7499 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7502 for (int i = 0; i < 2; i++)
7503 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7505 case OMP_PARALLEL: p[i] = "parallel"; break;
7506 case OMP_TASK: p[i] = "task"; break;
7507 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7508 case OMP_TARGET_DATA: p[i] = "target data"; break;
7509 case OMP_TARGET: p[i] = "target"; break;
7510 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7511 case OMP_TARGET_ENTER_DATA:
7512 p[i] = "target enter data"; break;
7513 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7514 default: gcc_unreachable ();
7516 error_at (OMP_CLAUSE_LOCATION (c),
7517 "expected %qs %<if%> clause modifier rather than %qs",
7523 case OMP_CLAUSE_FINAL:
7524 OMP_CLAUSE_OPERAND (c, 0)
7525 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7528 case OMP_CLAUSE_SCHEDULE:
7529 case OMP_CLAUSE_NUM_THREADS:
7530 case OMP_CLAUSE_NUM_TEAMS:
7531 case OMP_CLAUSE_THREAD_LIMIT:
7532 case OMP_CLAUSE_DIST_SCHEDULE:
7533 case OMP_CLAUSE_DEVICE:
7534 case OMP_CLAUSE_PRIORITY:
7535 case OMP_CLAUSE_GRAINSIZE:
7536 case OMP_CLAUSE_NUM_TASKS:
7537 case OMP_CLAUSE_HINT:
7538 case OMP_CLAUSE__CILK_FOR_COUNT_:
7539 case OMP_CLAUSE_ASYNC:
7540 case OMP_CLAUSE_WAIT:
7541 case OMP_CLAUSE_NUM_GANGS:
7542 case OMP_CLAUSE_NUM_WORKERS:
7543 case OMP_CLAUSE_VECTOR_LENGTH:
7544 case OMP_CLAUSE_WORKER:
7545 case OMP_CLAUSE_VECTOR:
7546 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7547 is_gimple_val, fb_rvalue) == GS_ERROR)
7551 case OMP_CLAUSE_GANG:
7552 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7553 is_gimple_val, fb_rvalue) == GS_ERROR)
7555 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7556 is_gimple_val, fb_rvalue) == GS_ERROR)
7560 case OMP_CLAUSE_TILE:
7561 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7562 list = TREE_CHAIN (list))
7564 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7565 is_gimple_val, fb_rvalue) == GS_ERROR)
7570 case OMP_CLAUSE_DEVICE_RESIDENT:
7574 case OMP_CLAUSE_NOWAIT:
7575 case OMP_CLAUSE_ORDERED:
7576 case OMP_CLAUSE_UNTIED:
7577 case OMP_CLAUSE_COLLAPSE:
7578 case OMP_CLAUSE_AUTO:
7579 case OMP_CLAUSE_SEQ:
7580 case OMP_CLAUSE_INDEPENDENT:
7581 case OMP_CLAUSE_MERGEABLE:
7582 case OMP_CLAUSE_PROC_BIND:
7583 case OMP_CLAUSE_SAFELEN:
7584 case OMP_CLAUSE_SIMDLEN:
7585 case OMP_CLAUSE_NOGROUP:
7586 case OMP_CLAUSE_THREADS:
7587 case OMP_CLAUSE_SIMD:
7590 case OMP_CLAUSE_DEFAULTMAP:
7591 ctx->target_map_scalars_firstprivate = false;
7594 case OMP_CLAUSE_ALIGNED:
7595 decl = OMP_CLAUSE_DECL (c);
7596 if (error_operand_p (decl))
7601 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7602 is_gimple_val, fb_rvalue) == GS_ERROR)
7607 if (!is_global_var (decl)
7608 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7609 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7612 case OMP_CLAUSE_DEFAULT:
7613 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7621 *list_p = OMP_CLAUSE_CHAIN (c);
7623 list_p = &OMP_CLAUSE_CHAIN (c);
7626 gimplify_omp_ctxp = ctx;
7627 if (struct_map_to_clause)
7628 delete struct_map_to_clause;
7631 /* Return true if DECL is a candidate for shared to firstprivate
7632 optimization. We only consider non-addressable scalars, not
7633 too big, and not references. */
7636 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7638 if (TREE_ADDRESSABLE (decl))
7640 tree type = TREE_TYPE (decl);
7641 if (!is_gimple_reg_type (type)
7642 || TREE_CODE (type) == REFERENCE_TYPE
7643 || TREE_ADDRESSABLE (type))
7645 /* Don't optimize too large decls, as each thread/task will have
7647 HOST_WIDE_INT len = int_size_in_bytes (type);
7648 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7650 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7655 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7656 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
7657 GOVD_WRITTEN in outer contexts. */
7660 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7662 for (; ctx; ctx = ctx->outer_context)
7664 splay_tree_node n = splay_tree_lookup (ctx->variables,
7665 (splay_tree_key) decl);
7668 else if (n->value & GOVD_SHARED)
7670 n->value |= GOVD_WRITTEN;
7673 else if (n->value & GOVD_DATA_SHARE_CLASS)
7678 /* Helper callback for walk_gimple_seq to discover possible stores
7679 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7680 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7684 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7686 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7695 if (handled_component_p (op))
7696 op = TREE_OPERAND (op, 0);
7697 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7698 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7699 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7704 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7707 omp_mark_stores (gimplify_omp_ctxp, op);
7711 /* Helper callback for walk_gimple_seq to discover possible stores
7712 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7713 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7717 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7718 bool *handled_ops_p,
7719 struct walk_stmt_info *wi)
7721 gimple *stmt = gsi_stmt (*gsi_p);
7722 switch (gimple_code (stmt))
7724 /* Don't recurse on OpenMP constructs for which
7725 gimplify_adjust_omp_clauses already handled the bodies,
7726 except handle gimple_omp_for_pre_body. */
7727 case GIMPLE_OMP_FOR:
7728 *handled_ops_p = true;
7729 if (gimple_omp_for_pre_body (stmt))
7730 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7731 omp_find_stores_stmt, omp_find_stores_op, wi);
7733 case GIMPLE_OMP_PARALLEL:
7734 case GIMPLE_OMP_TASK:
7735 case GIMPLE_OMP_SECTIONS:
7736 case GIMPLE_OMP_SINGLE:
7737 case GIMPLE_OMP_TARGET:
7738 case GIMPLE_OMP_TEAMS:
7739 case GIMPLE_OMP_CRITICAL:
7740 *handled_ops_p = true;
7748 struct gimplify_adjust_omp_clauses_data
7754 /* For all variables that were not actually used within the context,
7755 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
7758 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7760 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7762 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
7763 tree decl = (tree) n->key;
7764 unsigned flags = n->value;
7765 enum omp_clause_code code;
7769 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7771 if ((flags & GOVD_SEEN) == 0)
7773 if (flags & GOVD_DEBUG_PRIVATE)
7775 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7776 private_debug = true;
7778 else if (flags & GOVD_MAP)
7779 private_debug = false;
7782 = lang_hooks.decls.omp_private_debug_clause (decl,
7783 !!(flags & GOVD_SHARED));
7785 code = OMP_CLAUSE_PRIVATE;
7786 else if (flags & GOVD_MAP)
7787 code = OMP_CLAUSE_MAP;
7788 else if (flags & GOVD_SHARED)
7790 if (is_global_var (decl))
7792 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7796 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7797 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7798 | GOVD_PRIVATE | GOVD_REDUCTION
7799 | GOVD_LINEAR | GOVD_MAP)) != 0)
7801 ctx = ctx->outer_context;
7806 code = OMP_CLAUSE_SHARED;
7808 else if (flags & GOVD_PRIVATE)
7809 code = OMP_CLAUSE_PRIVATE;
7810 else if (flags & GOVD_FIRSTPRIVATE)
7811 code = OMP_CLAUSE_FIRSTPRIVATE;
7812 else if (flags & GOVD_LASTPRIVATE)
7813 code = OMP_CLAUSE_LASTPRIVATE;
7814 else if (flags & GOVD_ALIGNED)
7819 if (((flags & GOVD_LASTPRIVATE)
7820 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7821 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7822 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7824 tree chain = *list_p;
7825 clause = build_omp_clause (input_location, code);
7826 OMP_CLAUSE_DECL (clause) = decl;
7827 OMP_CLAUSE_CHAIN (clause) = chain;
7829 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
7830 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7831 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
7832 else if (code == OMP_CLAUSE_SHARED
7833 && (flags & GOVD_WRITTEN) == 0
7834 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7835 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
7836 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
7837 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
7838 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7840 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7841 OMP_CLAUSE_DECL (nc) = decl;
7842 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7843 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7844 OMP_CLAUSE_DECL (clause)
7845 = build_simple_mem_ref_loc (input_location, decl);
7846 OMP_CLAUSE_DECL (clause)
7847 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7848 build_int_cst (build_pointer_type (char_type_node), 0));
7849 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7850 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7851 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7852 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7853 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7854 OMP_CLAUSE_CHAIN (nc) = chain;
7855 OMP_CLAUSE_CHAIN (clause) = nc;
7856 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7857 gimplify_omp_ctxp = ctx->outer_context;
7858 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7859 pre_p, NULL, is_gimple_val, fb_rvalue);
7860 gimplify_omp_ctxp = ctx;
7862 else if (code == OMP_CLAUSE_MAP)
7864 int kind = (flags & GOVD_MAP_TO_ONLY
7867 if (flags & GOVD_MAP_FORCE)
7868 kind |= GOMP_MAP_FLAG_FORCE;
7869 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
7870 if (DECL_SIZE (decl)
7871 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7873 tree decl2 = DECL_VALUE_EXPR (decl);
7874 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7875 decl2 = TREE_OPERAND (decl2, 0);
7876 gcc_assert (DECL_P (decl2));
7877 tree mem = build_simple_mem_ref (decl2);
7878 OMP_CLAUSE_DECL (clause) = mem;
7879 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7880 if (gimplify_omp_ctxp->outer_context)
7882 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7883 omp_notice_variable (ctx, decl2, true);
7884 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7886 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7888 OMP_CLAUSE_DECL (nc) = decl;
7889 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7890 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7891 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7893 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7894 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7895 OMP_CLAUSE_CHAIN (clause) = nc;
7897 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7898 && lang_hooks.decls.omp_privatize_by_reference (decl))
7900 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7901 OMP_CLAUSE_SIZE (clause)
7902 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7903 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7904 gimplify_omp_ctxp = ctx->outer_context;
7905 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7906 pre_p, NULL, is_gimple_val, fb_rvalue);
7907 gimplify_omp_ctxp = ctx;
7908 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7910 OMP_CLAUSE_DECL (nc) = decl;
7911 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7912 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7913 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7914 OMP_CLAUSE_CHAIN (clause) = nc;
7917 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
7919 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7921 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7922 OMP_CLAUSE_DECL (nc) = decl;
7923 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7924 OMP_CLAUSE_CHAIN (nc) = chain;
7925 OMP_CLAUSE_CHAIN (clause) = nc;
7926 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7927 gimplify_omp_ctxp = ctx->outer_context;
7928 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7929 gimplify_omp_ctxp = ctx;
7932 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7933 gimplify_omp_ctxp = ctx->outer_context;
7934 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7935 if (gimplify_omp_ctxp)
7936 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
7937 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
7938 && DECL_P (OMP_CLAUSE_SIZE (clause)))
7939 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
7941 gimplify_omp_ctxp = ctx;
7946 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
7947 enum tree_code code)
7949 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7954 struct gimplify_omp_ctx *octx;
7955 for (octx = ctx; octx; octx = octx->outer_context)
7956 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
7960 struct walk_stmt_info wi;
7961 memset (&wi, 0, sizeof (wi));
7962 walk_gimple_seq (body, omp_find_stores_stmt,
7963 omp_find_stores_op, &wi);
7966 while ((c = *list_p) != NULL)
7969 bool remove = false;
7971 switch (OMP_CLAUSE_CODE (c))
7973 case OMP_CLAUSE_PRIVATE:
7974 case OMP_CLAUSE_SHARED:
7975 case OMP_CLAUSE_FIRSTPRIVATE:
7976 case OMP_CLAUSE_LINEAR:
7977 decl = OMP_CLAUSE_DECL (c);
7978 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7979 remove = !(n->value & GOVD_SEEN);
7982 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
7983 if ((n->value & GOVD_DEBUG_PRIVATE)
7984 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
7986 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
7987 || ((n->value & GOVD_DATA_SHARE_CLASS)
7989 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
7990 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
7992 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7993 && (n->value & GOVD_WRITTEN) == 0
7995 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7996 OMP_CLAUSE_SHARED_READONLY (c) = 1;
7997 else if (DECL_P (decl)
7998 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7999 && (n->value & GOVD_WRITTEN) != 1)
8000 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8001 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8002 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8003 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8007 case OMP_CLAUSE_LASTPRIVATE:
8008 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8009 accurately reflect the presence of a FIRSTPRIVATE clause. */
8010 decl = OMP_CLAUSE_DECL (c);
8011 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8012 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8013 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8014 if (omp_no_lastprivate (ctx))
8016 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8019 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
8021 else if (code == OMP_DISTRIBUTE
8022 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8025 error_at (OMP_CLAUSE_LOCATION (c),
8026 "same variable used in %<firstprivate%> and "
8027 "%<lastprivate%> clauses on %<distribute%> "
8031 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8033 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8034 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8037 case OMP_CLAUSE_ALIGNED:
8038 decl = OMP_CLAUSE_DECL (c);
8039 if (!is_global_var (decl))
8041 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8042 remove = n == NULL || !(n->value & GOVD_SEEN);
8043 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8045 struct gimplify_omp_ctx *octx;
8047 && (n->value & (GOVD_DATA_SHARE_CLASS
8048 & ~GOVD_FIRSTPRIVATE)))
8051 for (octx = ctx->outer_context; octx;
8052 octx = octx->outer_context)
8054 n = splay_tree_lookup (octx->variables,
8055 (splay_tree_key) decl);
8058 if (n->value & GOVD_LOCAL)
8060 /* We have to avoid assigning a shared variable
8061 to itself when trying to add
8062 __builtin_assume_aligned. */
8063 if (n->value & GOVD_SHARED)
8071 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8073 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8074 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8079 case OMP_CLAUSE_MAP:
8080 if (code == OMP_TARGET_EXIT_DATA
8081 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8086 decl = OMP_CLAUSE_DECL (c);
8087 /* Data clasues associated with acc parallel reductions must be
8088 compatible with present_or_copy. Warn and adjust the clause
8089 if that is not the case. */
8090 if (ctx->region_type == ORT_ACC_PARALLEL)
8092 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8096 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8098 if (n && (n->value & GOVD_REDUCTION))
8100 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8102 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8103 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8104 && kind != GOMP_MAP_FORCE_PRESENT
8105 && kind != GOMP_MAP_POINTER)
8107 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8108 "incompatible data clause with reduction "
8109 "on %qE; promoting to present_or_copy",
8111 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
8117 if ((ctx->region_type & ORT_TARGET) != 0
8118 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8120 if (TREE_CODE (decl) == INDIRECT_REF
8121 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8122 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8124 decl = TREE_OPERAND (decl, 0);
8125 if (TREE_CODE (decl) == COMPONENT_REF)
8127 while (TREE_CODE (decl) == COMPONENT_REF)
8128 decl = TREE_OPERAND (decl, 0);
8131 n = splay_tree_lookup (ctx->variables,
8132 (splay_tree_key) decl);
8133 if (!(n->value & GOVD_SEEN))
8140 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8141 if ((ctx->region_type & ORT_TARGET) != 0
8142 && !(n->value & GOVD_SEEN)
8143 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
8144 && !lookup_attribute ("omp declare target link",
8145 DECL_ATTRIBUTES (decl)))
8148 /* For struct element mapping, if struct is never referenced
8149 in target block and none of the mapping has always modifier,
8150 remove all the struct element mappings, which immediately
8151 follow the GOMP_MAP_STRUCT map clause. */
8152 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
8154 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
8156 OMP_CLAUSE_CHAIN (c)
8157 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
8160 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
8161 && code == OMP_TARGET_EXIT_DATA)
8163 else if (DECL_SIZE (decl)
8164 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
8165 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
8166 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
8167 && (OMP_CLAUSE_MAP_KIND (c)
8168 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8170 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8171 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8173 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
8175 tree decl2 = DECL_VALUE_EXPR (decl);
8176 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8177 decl2 = TREE_OPERAND (decl2, 0);
8178 gcc_assert (DECL_P (decl2));
8179 tree mem = build_simple_mem_ref (decl2);
8180 OMP_CLAUSE_DECL (c) = mem;
8181 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8182 if (ctx->outer_context)
8184 omp_notice_variable (ctx->outer_context, decl2, true);
8185 omp_notice_variable (ctx->outer_context,
8186 OMP_CLAUSE_SIZE (c), true);
8188 if (((ctx->region_type & ORT_TARGET) != 0
8189 || !ctx->target_firstprivatize_array_bases)
8190 && ((n->value & GOVD_SEEN) == 0
8191 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8193 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8195 OMP_CLAUSE_DECL (nc) = decl;
8196 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8197 if (ctx->target_firstprivatize_array_bases)
8198 OMP_CLAUSE_SET_MAP_KIND (nc,
8199 GOMP_MAP_FIRSTPRIVATE_POINTER);
8201 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8202 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8203 OMP_CLAUSE_CHAIN (c) = nc;
8209 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8210 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8211 gcc_assert ((n->value & GOVD_SEEN) == 0
8212 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8218 case OMP_CLAUSE_FROM:
8219 case OMP_CLAUSE__CACHE_:
8220 decl = OMP_CLAUSE_DECL (c);
8223 if (DECL_SIZE (decl)
8224 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8226 tree decl2 = DECL_VALUE_EXPR (decl);
8227 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8228 decl2 = TREE_OPERAND (decl2, 0);
8229 gcc_assert (DECL_P (decl2));
8230 tree mem = build_simple_mem_ref (decl2);
8231 OMP_CLAUSE_DECL (c) = mem;
8232 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8233 if (ctx->outer_context)
8235 omp_notice_variable (ctx->outer_context, decl2, true);
8236 omp_notice_variable (ctx->outer_context,
8237 OMP_CLAUSE_SIZE (c), true);
8240 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8241 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8244 case OMP_CLAUSE_REDUCTION:
8245 decl = OMP_CLAUSE_DECL (c);
8246 /* OpenACC reductions need a present_or_copy data clause.
8247 Add one if necessary. Error is the reduction is private. */
8248 if (ctx->region_type == ORT_ACC_PARALLEL)
8250 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8251 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8252 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
8253 "reduction on %qE", DECL_NAME (decl));
8254 else if ((n->value & GOVD_MAP) == 0)
8256 tree next = OMP_CLAUSE_CHAIN (c);
8257 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
8258 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
8259 OMP_CLAUSE_DECL (nc) = decl;
8260 OMP_CLAUSE_CHAIN (c) = nc;
8261 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8264 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
8265 if (OMP_CLAUSE_CHAIN (nc) == NULL)
8267 nc = OMP_CLAUSE_CHAIN (nc);
8269 OMP_CLAUSE_CHAIN (nc) = next;
8270 n->value |= GOVD_MAP;
8274 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8275 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8277 case OMP_CLAUSE_COPYIN:
8278 case OMP_CLAUSE_COPYPRIVATE:
8280 case OMP_CLAUSE_NUM_THREADS:
8281 case OMP_CLAUSE_NUM_TEAMS:
8282 case OMP_CLAUSE_THREAD_LIMIT:
8283 case OMP_CLAUSE_DIST_SCHEDULE:
8284 case OMP_CLAUSE_DEVICE:
8285 case OMP_CLAUSE_SCHEDULE:
8286 case OMP_CLAUSE_NOWAIT:
8287 case OMP_CLAUSE_ORDERED:
8288 case OMP_CLAUSE_DEFAULT:
8289 case OMP_CLAUSE_UNTIED:
8290 case OMP_CLAUSE_COLLAPSE:
8291 case OMP_CLAUSE_FINAL:
8292 case OMP_CLAUSE_MERGEABLE:
8293 case OMP_CLAUSE_PROC_BIND:
8294 case OMP_CLAUSE_SAFELEN:
8295 case OMP_CLAUSE_SIMDLEN:
8296 case OMP_CLAUSE_DEPEND:
8297 case OMP_CLAUSE_PRIORITY:
8298 case OMP_CLAUSE_GRAINSIZE:
8299 case OMP_CLAUSE_NUM_TASKS:
8300 case OMP_CLAUSE_NOGROUP:
8301 case OMP_CLAUSE_THREADS:
8302 case OMP_CLAUSE_SIMD:
8303 case OMP_CLAUSE_HINT:
8304 case OMP_CLAUSE_DEFAULTMAP:
8305 case OMP_CLAUSE_USE_DEVICE_PTR:
8306 case OMP_CLAUSE_IS_DEVICE_PTR:
8307 case OMP_CLAUSE__CILK_FOR_COUNT_:
8308 case OMP_CLAUSE_ASYNC:
8309 case OMP_CLAUSE_WAIT:
8310 case OMP_CLAUSE_DEVICE_RESIDENT:
8311 case OMP_CLAUSE_INDEPENDENT:
8312 case OMP_CLAUSE_NUM_GANGS:
8313 case OMP_CLAUSE_NUM_WORKERS:
8314 case OMP_CLAUSE_VECTOR_LENGTH:
8315 case OMP_CLAUSE_GANG:
8316 case OMP_CLAUSE_WORKER:
8317 case OMP_CLAUSE_VECTOR:
8318 case OMP_CLAUSE_AUTO:
8319 case OMP_CLAUSE_SEQ:
8322 case OMP_CLAUSE_TILE:
8323 /* We're not yet making use of the information provided by OpenACC
8324 tile clauses. Discard these here, to simplify later middle end
8334 *list_p = OMP_CLAUSE_CHAIN (c);
8336 list_p = &OMP_CLAUSE_CHAIN (c);
8339 /* Add in any implicit data sharing. */
8340 struct gimplify_adjust_omp_clauses_data data;
8341 data.list_p = list_p;
8343 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
8345 gimplify_omp_ctxp = ctx->outer_context;
8346 delete_omp_context (ctx);
8349 /* Gimplify OACC_CACHE. */
8352 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8354 tree expr = *expr_p;
8356 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8358 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8361 /* TODO: Do something sensible with this information. */
8363 *expr_p = NULL_TREE;
8366 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
8367 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
8368 kind. The entry kind will replace the one in CLAUSE, while the exit
8369 kind will be used in a new omp_clause and returned to the caller. */
8372 gimplify_oacc_declare_1 (tree clause)
8374 HOST_WIDE_INT kind, new_op;
8378 kind = OMP_CLAUSE_MAP_KIND (clause);
8382 case GOMP_MAP_ALLOC:
8383 case GOMP_MAP_FORCE_ALLOC:
8384 case GOMP_MAP_FORCE_TO:
8385 new_op = GOMP_MAP_DELETE;
8389 case GOMP_MAP_FORCE_FROM:
8390 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8391 new_op = GOMP_MAP_FORCE_FROM;
8395 case GOMP_MAP_FORCE_TOFROM:
8396 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8397 new_op = GOMP_MAP_FORCE_FROM;
8402 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8403 new_op = GOMP_MAP_FROM;
8407 case GOMP_MAP_TOFROM:
8408 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8409 new_op = GOMP_MAP_FROM;
8413 case GOMP_MAP_DEVICE_RESIDENT:
8414 case GOMP_MAP_FORCE_DEVICEPTR:
8415 case GOMP_MAP_FORCE_PRESENT:
8417 case GOMP_MAP_POINTER:
8428 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8429 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8430 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8436 /* Gimplify OACC_DECLARE. */
8439 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8441 tree expr = *expr_p;
8445 clauses = OACC_DECLARE_CLAUSES (expr);
8447 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8449 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8451 tree decl = OMP_CLAUSE_DECL (t);
8453 if (TREE_CODE (decl) == MEM_REF)
8456 if (TREE_CODE (decl) == VAR_DECL
8457 && !is_global_var (decl)
8458 && DECL_CONTEXT (decl) == current_function_decl)
8460 tree c = gimplify_oacc_declare_1 (t);
8463 if (oacc_declare_returns == NULL)
8464 oacc_declare_returns = new hash_map<tree, tree>;
8466 oacc_declare_returns->put (decl, c);
8470 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8473 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8476 gimplify_seq_add_stmt (pre_p, stmt);
8478 *expr_p = NULL_TREE;
8481 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8482 gimplification of the body, as well as scanning the body for used
8483 variables. We need to do this scan now, because variable-sized
8484 decls will be decomposed during gimplification. */
8487 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8489 tree expr = *expr_p;
8491 gimple_seq body = NULL;
8493 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8494 OMP_PARALLEL_COMBINED (expr)
8495 ? ORT_COMBINED_PARALLEL
8496 : ORT_PARALLEL, OMP_PARALLEL);
8498 push_gimplify_context ();
8500 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8501 if (gimple_code (g) == GIMPLE_BIND)
8502 pop_gimplify_context (g);
8504 pop_gimplify_context (NULL);
8506 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8509 g = gimple_build_omp_parallel (body,
8510 OMP_PARALLEL_CLAUSES (expr),
8511 NULL_TREE, NULL_TREE);
8512 if (OMP_PARALLEL_COMBINED (expr))
8513 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8514 gimplify_seq_add_stmt (pre_p, g);
8515 *expr_p = NULL_TREE;
8518 /* Gimplify the contents of an OMP_TASK statement. This involves
8519 gimplification of the body, as well as scanning the body for used
8520 variables. We need to do this scan now, because variable-sized
8521 decls will be decomposed during gimplification. */
8524 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8526 tree expr = *expr_p;
8528 gimple_seq body = NULL;
8530 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8531 find_omp_clause (OMP_TASK_CLAUSES (expr),
8533 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8535 push_gimplify_context ();
8537 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8538 if (gimple_code (g) == GIMPLE_BIND)
8539 pop_gimplify_context (g);
8541 pop_gimplify_context (NULL);
8543 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8546 g = gimple_build_omp_task (body,
8547 OMP_TASK_CLAUSES (expr),
8548 NULL_TREE, NULL_TREE,
8549 NULL_TREE, NULL_TREE, NULL_TREE);
8550 gimplify_seq_add_stmt (pre_p, g);
8551 *expr_p = NULL_TREE;
8554 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
8555 with non-NULL OMP_FOR_INIT. */
8558 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8561 switch (TREE_CODE (*tp))
8567 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8571 case STATEMENT_LIST:
8581 /* Gimplify the gross structure of an OMP_FOR statement. */
8583 static enum gimplify_status
8584 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8586 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8587 enum gimplify_status ret = GS_ALL_DONE;
8588 enum gimplify_status tret;
8590 gimple_seq for_body, for_pre_body;
8592 bitmap has_decl_expr = NULL;
8593 enum omp_region_type ort = ORT_WORKSHARE;
8595 orig_for_stmt = for_stmt = *expr_p;
8597 switch (TREE_CODE (for_stmt))
8601 case OMP_DISTRIBUTE:
8607 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8608 ort = ORT_UNTIED_TASK;
8620 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8621 clause for the IV. */
8622 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8624 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8625 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8626 decl = TREE_OPERAND (t, 0);
8627 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8628 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8629 && OMP_CLAUSE_DECL (c) == decl)
8631 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8636 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8638 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8639 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8640 find_combined_omp_for, NULL, NULL);
8641 if (inner_for_stmt == NULL_TREE)
8643 gcc_assert (seen_error ());
8644 *expr_p = NULL_TREE;
8649 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8650 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8651 TREE_CODE (for_stmt));
8653 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8654 gimplify_omp_ctxp->distribute = true;
8656 /* Handle OMP_FOR_INIT. */
8657 for_pre_body = NULL;
8658 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
8660 has_decl_expr = BITMAP_ALLOC (NULL);
8661 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8662 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
8665 t = OMP_FOR_PRE_BODY (for_stmt);
8666 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8668 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8670 tree_stmt_iterator si;
8671 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8675 if (TREE_CODE (t) == DECL_EXPR
8676 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8677 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8681 if (OMP_FOR_PRE_BODY (for_stmt))
8683 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8684 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8687 struct gimplify_omp_ctx ctx;
8688 memset (&ctx, 0, sizeof (ctx));
8689 ctx.region_type = ORT_NONE;
8690 gimplify_omp_ctxp = &ctx;
8691 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8692 gimplify_omp_ctxp = NULL;
8695 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
8697 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8698 for_stmt = inner_for_stmt;
8700 /* For taskloop, need to gimplify the start, end and step before the
8701 taskloop, outside of the taskloop omp context. */
8702 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8704 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8706 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8707 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8710 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8712 tree c = build_omp_clause (input_location,
8713 OMP_CLAUSE_FIRSTPRIVATE);
8714 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8715 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8716 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8719 /* Handle OMP_FOR_COND. */
8720 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8721 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8724 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8725 gimple_seq_empty_p (for_pre_body)
8726 ? pre_p : &for_pre_body, NULL);
8727 tree c = build_omp_clause (input_location,
8728 OMP_CLAUSE_FIRSTPRIVATE);
8729 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8730 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8731 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8734 /* Handle OMP_FOR_INCR. */
8735 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8736 if (TREE_CODE (t) == MODIFY_EXPR)
8738 decl = TREE_OPERAND (t, 0);
8739 t = TREE_OPERAND (t, 1);
8740 tree *tp = &TREE_OPERAND (t, 1);
8741 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8742 tp = &TREE_OPERAND (t, 0);
8744 if (!is_gimple_constant (*tp))
8746 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8747 ? pre_p : &for_pre_body;
8748 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8749 tree c = build_omp_clause (input_location,
8750 OMP_CLAUSE_FIRSTPRIVATE);
8751 OMP_CLAUSE_DECL (c) = *tp;
8752 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8753 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8758 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8762 if (orig_for_stmt != for_stmt)
8763 gimplify_omp_ctxp->combined_loop = true;
8766 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8767 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8768 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8769 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8771 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8772 bool is_doacross = false;
8773 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8776 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8777 (OMP_FOR_INIT (for_stmt))
8781 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8783 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8784 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8786 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8787 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8788 decl = TREE_OPERAND (t, 0);
8789 gcc_assert (DECL_P (decl));
8790 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8791 || POINTER_TYPE_P (TREE_TYPE (decl)));
8794 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8795 gimplify_omp_ctxp->loop_iter_var.quick_push
8796 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8798 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8799 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8802 /* Make sure the iteration variable is private. */
8804 tree c2 = NULL_TREE;
8805 if (orig_for_stmt != for_stmt)
8806 /* Do this only on innermost construct for combined ones. */;
8807 else if (ort == ORT_SIMD)
8809 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8810 (splay_tree_key) decl);
8811 omp_is_private (gimplify_omp_ctxp, decl,
8812 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8814 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8815 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8816 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8818 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8819 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8820 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8822 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8823 || omp_no_lastprivate (gimplify_omp_ctxp))
8825 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8826 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8828 struct gimplify_omp_ctx *outer
8829 = gimplify_omp_ctxp->outer_context;
8830 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8832 if (outer->region_type == ORT_WORKSHARE
8833 && outer->combined_loop)
8835 n = splay_tree_lookup (outer->variables,
8836 (splay_tree_key)decl);
8837 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8839 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8840 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8844 struct gimplify_omp_ctx *octx = outer->outer_context;
8846 && octx->region_type == ORT_COMBINED_PARALLEL
8847 && octx->outer_context
8848 && (octx->outer_context->region_type
8850 && octx->outer_context->combined_loop)
8852 octx = octx->outer_context;
8853 n = splay_tree_lookup (octx->variables,
8854 (splay_tree_key)decl);
8855 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8857 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8858 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8865 OMP_CLAUSE_DECL (c) = decl;
8866 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8867 OMP_FOR_CLAUSES (for_stmt) = c;
8868 omp_add_variable (gimplify_omp_ctxp, decl, flags);
8869 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8871 if (outer->region_type == ORT_WORKSHARE
8872 && outer->combined_loop)
8874 if (outer->outer_context
8875 && (outer->outer_context->region_type
8876 == ORT_COMBINED_PARALLEL))
8877 outer = outer->outer_context;
8878 else if (omp_check_private (outer, decl, false))
8881 else if (((outer->region_type & ORT_TASK) != 0)
8882 && outer->combined_loop
8883 && !omp_check_private (gimplify_omp_ctxp,
8886 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8888 omp_notice_variable (outer, decl, true);
8893 n = splay_tree_lookup (outer->variables,
8894 (splay_tree_key)decl);
8895 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8897 omp_add_variable (outer, decl,
8898 GOVD_LASTPRIVATE | GOVD_SEEN);
8899 if (outer->region_type == ORT_COMBINED_PARALLEL
8900 && outer->outer_context
8901 && (outer->outer_context->region_type
8903 && outer->outer_context->combined_loop)
8905 outer = outer->outer_context;
8906 n = splay_tree_lookup (outer->variables,
8907 (splay_tree_key)decl);
8908 if (omp_check_private (outer, decl, false))
8911 || ((n->value & GOVD_DATA_SHARE_CLASS)
8913 omp_add_variable (outer, decl,
8919 if (outer && outer->outer_context
8920 && (outer->outer_context->region_type
8921 == ORT_COMBINED_TEAMS))
8923 outer = outer->outer_context;
8924 n = splay_tree_lookup (outer->variables,
8925 (splay_tree_key)decl);
8927 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8928 omp_add_variable (outer, decl,
8929 GOVD_SHARED | GOVD_SEEN);
8933 if (outer && outer->outer_context)
8934 omp_notice_variable (outer->outer_context, decl,
8944 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8945 && !omp_no_lastprivate (gimplify_omp_ctxp);
8946 struct gimplify_omp_ctx *outer
8947 = gimplify_omp_ctxp->outer_context;
8948 if (outer && lastprivate)
8950 if (outer->region_type == ORT_WORKSHARE
8951 && outer->combined_loop)
8953 n = splay_tree_lookup (outer->variables,
8954 (splay_tree_key)decl);
8955 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8957 lastprivate = false;
8960 else if (outer->outer_context
8961 && (outer->outer_context->region_type
8962 == ORT_COMBINED_PARALLEL))
8963 outer = outer->outer_context;
8964 else if (omp_check_private (outer, decl, false))
8967 else if (((outer->region_type & ORT_TASK) != 0)
8968 && outer->combined_loop
8969 && !omp_check_private (gimplify_omp_ctxp,
8972 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8974 omp_notice_variable (outer, decl, true);
8979 n = splay_tree_lookup (outer->variables,
8980 (splay_tree_key)decl);
8981 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8983 omp_add_variable (outer, decl,
8984 GOVD_LASTPRIVATE | GOVD_SEEN);
8985 if (outer->region_type == ORT_COMBINED_PARALLEL
8986 && outer->outer_context
8987 && (outer->outer_context->region_type
8989 && outer->outer_context->combined_loop)
8991 outer = outer->outer_context;
8992 n = splay_tree_lookup (outer->variables,
8993 (splay_tree_key)decl);
8994 if (omp_check_private (outer, decl, false))
8997 || ((n->value & GOVD_DATA_SHARE_CLASS)
8999 omp_add_variable (outer, decl,
9005 if (outer && outer->outer_context
9006 && (outer->outer_context->region_type
9007 == ORT_COMBINED_TEAMS))
9009 outer = outer->outer_context;
9010 n = splay_tree_lookup (outer->variables,
9011 (splay_tree_key)decl);
9013 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9014 omp_add_variable (outer, decl,
9015 GOVD_SHARED | GOVD_SEEN);
9019 if (outer && outer->outer_context)
9020 omp_notice_variable (outer->outer_context, decl,
9026 c = build_omp_clause (input_location,
9027 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9028 : OMP_CLAUSE_PRIVATE);
9029 OMP_CLAUSE_DECL (c) = decl;
9030 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9031 OMP_FOR_CLAUSES (for_stmt) = c;
9032 omp_add_variable (gimplify_omp_ctxp, decl,
9033 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9034 | GOVD_EXPLICIT | GOVD_SEEN);
9038 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9039 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9041 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9043 /* If DECL is not a gimple register, create a temporary variable to act
9044 as an iteration counter. This is valid, since DECL cannot be
9045 modified in the body of the loop. Similarly for any iteration vars
9046 in simd with collapse > 1 where the iterator vars must be
9048 if (orig_for_stmt != for_stmt)
9050 else if (!is_gimple_reg (decl)
9052 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9054 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9055 /* Make sure omp_add_variable is not called on it prematurely.
9056 We call it ourselves a few lines later. */
9057 gimplify_omp_ctxp = NULL;
9058 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9059 gimplify_omp_ctxp = ctx;
9060 TREE_OPERAND (t, 0) = var;
9062 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9065 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9067 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9068 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9069 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9070 OMP_CLAUSE_DECL (c2) = var;
9071 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9072 OMP_FOR_CLAUSES (for_stmt) = c2;
9073 omp_add_variable (gimplify_omp_ctxp, var,
9074 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9082 omp_add_variable (gimplify_omp_ctxp, var,
9083 GOVD_PRIVATE | GOVD_SEEN);
9088 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9089 is_gimple_val, fb_rvalue);
9090 ret = MIN (ret, tret);
9091 if (ret == GS_ERROR)
9094 /* Handle OMP_FOR_COND. */
9095 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9096 gcc_assert (COMPARISON_CLASS_P (t));
9097 gcc_assert (TREE_OPERAND (t, 0) == decl);
9099 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9100 is_gimple_val, fb_rvalue);
9101 ret = MIN (ret, tret);
9103 /* Handle OMP_FOR_INCR. */
9104 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9105 switch (TREE_CODE (t))
9107 case PREINCREMENT_EXPR:
9108 case POSTINCREMENT_EXPR:
9110 tree decl = TREE_OPERAND (t, 0);
9111 /* c_omp_for_incr_canonicalize_ptr() should have been
9112 called to massage things appropriately. */
9113 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9115 if (orig_for_stmt != for_stmt)
9117 t = build_int_cst (TREE_TYPE (decl), 1);
9119 OMP_CLAUSE_LINEAR_STEP (c) = t;
9120 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9121 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9122 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9126 case PREDECREMENT_EXPR:
9127 case POSTDECREMENT_EXPR:
9128 /* c_omp_for_incr_canonicalize_ptr() should have been
9129 called to massage things appropriately. */
9130 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9131 if (orig_for_stmt != for_stmt)
9133 t = build_int_cst (TREE_TYPE (decl), -1);
9135 OMP_CLAUSE_LINEAR_STEP (c) = t;
9136 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9137 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9138 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9142 gcc_assert (TREE_OPERAND (t, 0) == decl);
9143 TREE_OPERAND (t, 0) = var;
9145 t = TREE_OPERAND (t, 1);
9146 switch (TREE_CODE (t))
9149 if (TREE_OPERAND (t, 1) == decl)
9151 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
9152 TREE_OPERAND (t, 0) = var;
9158 case POINTER_PLUS_EXPR:
9159 gcc_assert (TREE_OPERAND (t, 0) == decl);
9160 TREE_OPERAND (t, 0) = var;
9166 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9167 is_gimple_val, fb_rvalue);
9168 ret = MIN (ret, tret);
9171 tree step = TREE_OPERAND (t, 1);
9172 tree stept = TREE_TYPE (decl);
9173 if (POINTER_TYPE_P (stept))
9175 step = fold_convert (stept, step);
9176 if (TREE_CODE (t) == MINUS_EXPR)
9177 step = fold_build1 (NEGATE_EXPR, stept, step);
9178 OMP_CLAUSE_LINEAR_STEP (c) = step;
9179 if (step != TREE_OPERAND (t, 1))
9181 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
9182 &for_pre_body, NULL,
9183 is_gimple_val, fb_rvalue);
9184 ret = MIN (ret, tret);
9196 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
9199 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
9201 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
9202 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9203 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
9204 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9205 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
9206 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
9207 && OMP_CLAUSE_DECL (c) == decl)
9209 if (is_doacross && (collapse == 1 || i >= collapse))
9213 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9214 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9215 gcc_assert (TREE_OPERAND (t, 0) == var);
9216 t = TREE_OPERAND (t, 1);
9217 gcc_assert (TREE_CODE (t) == PLUS_EXPR
9218 || TREE_CODE (t) == MINUS_EXPR
9219 || TREE_CODE (t) == POINTER_PLUS_EXPR);
9220 gcc_assert (TREE_OPERAND (t, 0) == var);
9221 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
9222 is_doacross ? var : decl,
9223 TREE_OPERAND (t, 1));
9226 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9227 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
9229 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
9230 gimplify_assign (decl, t, seq);
9235 BITMAP_FREE (has_decl_expr);
9237 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9239 push_gimplify_context ();
9240 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
9242 OMP_FOR_BODY (orig_for_stmt)
9243 = build3 (BIND_EXPR, void_type_node, NULL,
9244 OMP_FOR_BODY (orig_for_stmt), NULL);
9245 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
9249 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
9252 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9254 if (gimple_code (g) == GIMPLE_BIND)
9255 pop_gimplify_context (g);
9257 pop_gimplify_context (NULL);
9260 if (orig_for_stmt != for_stmt)
9261 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9263 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9264 decl = TREE_OPERAND (t, 0);
9265 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9266 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9267 gimplify_omp_ctxp = ctx->outer_context;
9268 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9269 gimplify_omp_ctxp = ctx;
9270 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
9271 TREE_OPERAND (t, 0) = var;
9272 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9273 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
9274 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9277 gimplify_adjust_omp_clauses (pre_p, for_body,
9278 &OMP_FOR_CLAUSES (orig_for_stmt),
9279 TREE_CODE (orig_for_stmt));
9282 switch (TREE_CODE (orig_for_stmt))
9284 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9285 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
9286 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9287 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
9288 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
9289 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
9290 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
9294 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
9295 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9297 if (orig_for_stmt != for_stmt)
9298 gimple_omp_for_set_combined_p (gfor, true);
9299 if (gimplify_omp_ctxp
9300 && (gimplify_omp_ctxp->combined_loop
9301 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9302 && gimplify_omp_ctxp->outer_context
9303 && gimplify_omp_ctxp->outer_context->combined_loop)))
9305 gimple_omp_for_set_combined_into_p (gfor, true);
9306 if (gimplify_omp_ctxp->combined_loop)
9307 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9309 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9312 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9314 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9315 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9316 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9317 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9318 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9319 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9320 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9321 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9324 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9325 constructs with GIMPLE_OMP_TASK sandwiched in between them.
9326 The outer taskloop stands for computing the number of iterations,
9327 counts for collapsed loops and holding taskloop specific clauses.
9328 The task construct stands for the effect of data sharing on the
9329 explicit task it creates and the inner taskloop stands for expansion
9330 of the static loop inside of the explicit task construct. */
9331 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9333 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9334 tree task_clauses = NULL_TREE;
9335 tree c = *gfor_clauses_ptr;
9336 tree *gtask_clauses_ptr = &task_clauses;
9337 tree outer_for_clauses = NULL_TREE;
9338 tree *gforo_clauses_ptr = &outer_for_clauses;
9339 for (; c; c = OMP_CLAUSE_CHAIN (c))
9340 switch (OMP_CLAUSE_CODE (c))
9342 /* These clauses are allowed on task, move them there. */
9343 case OMP_CLAUSE_SHARED:
9344 case OMP_CLAUSE_FIRSTPRIVATE:
9345 case OMP_CLAUSE_DEFAULT:
9347 case OMP_CLAUSE_UNTIED:
9348 case OMP_CLAUSE_FINAL:
9349 case OMP_CLAUSE_MERGEABLE:
9350 case OMP_CLAUSE_PRIORITY:
9351 *gtask_clauses_ptr = c;
9352 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9354 case OMP_CLAUSE_PRIVATE:
9355 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9357 /* We want private on outer for and firstprivate
9360 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9361 OMP_CLAUSE_FIRSTPRIVATE);
9362 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9363 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9364 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9365 *gforo_clauses_ptr = c;
9366 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9370 *gtask_clauses_ptr = c;
9371 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9374 /* These clauses go into outer taskloop clauses. */
9375 case OMP_CLAUSE_GRAINSIZE:
9376 case OMP_CLAUSE_NUM_TASKS:
9377 case OMP_CLAUSE_NOGROUP:
9378 *gforo_clauses_ptr = c;
9379 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9381 /* Taskloop clause we duplicate on both taskloops. */
9382 case OMP_CLAUSE_COLLAPSE:
9383 *gfor_clauses_ptr = c;
9384 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9385 *gforo_clauses_ptr = copy_node (c);
9386 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9388 /* For lastprivate, keep the clause on inner taskloop, and add
9389 a shared clause on task. If the same decl is also firstprivate,
9390 add also firstprivate clause on the inner taskloop. */
9391 case OMP_CLAUSE_LASTPRIVATE:
9392 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9394 /* For taskloop C++ lastprivate IVs, we want:
9395 1) private on outer taskloop
9396 2) firstprivate and shared on task
9397 3) lastprivate on inner taskloop */
9399 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9400 OMP_CLAUSE_FIRSTPRIVATE);
9401 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9402 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9403 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9404 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9405 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9406 OMP_CLAUSE_PRIVATE);
9407 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9408 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9409 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9410 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9412 *gfor_clauses_ptr = c;
9413 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9415 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9416 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9417 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9418 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9420 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9425 *gfor_clauses_ptr = NULL_TREE;
9426 *gtask_clauses_ptr = NULL_TREE;
9427 *gforo_clauses_ptr = NULL_TREE;
9428 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9429 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9430 NULL_TREE, NULL_TREE, NULL_TREE);
9431 gimple_omp_task_set_taskloop_p (g, true);
9432 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9434 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9435 gimple_omp_for_collapse (gfor),
9436 gimple_omp_for_pre_body (gfor));
9437 gimple_omp_for_set_pre_body (gfor, NULL);
9438 gimple_omp_for_set_combined_p (gforo, true);
9439 gimple_omp_for_set_combined_into_p (gfor, true);
9440 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9442 t = unshare_expr (gimple_omp_for_index (gfor, i));
9443 gimple_omp_for_set_index (gforo, i, t);
9444 t = unshare_expr (gimple_omp_for_initial (gfor, i));
9445 gimple_omp_for_set_initial (gforo, i, t);
9446 gimple_omp_for_set_cond (gforo, i,
9447 gimple_omp_for_cond (gfor, i));
9448 t = unshare_expr (gimple_omp_for_final (gfor, i));
9449 gimple_omp_for_set_final (gforo, i, t);
9450 t = unshare_expr (gimple_omp_for_incr (gfor, i));
9451 gimple_omp_for_set_incr (gforo, i, t);
9453 gimplify_seq_add_stmt (pre_p, gforo);
9456 gimplify_seq_add_stmt (pre_p, gfor);
9457 if (ret != GS_ALL_DONE)
9459 *expr_p = NULL_TREE;
9463 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9464 of OMP_TARGET's body. */
9467 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9470 switch (TREE_CODE (*tp))
9475 case STATEMENT_LIST:
9484 /* Helper function of optimize_target_teams, determine if the expression
9485 can be computed safely before the target construct on the host. */
9488 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
9497 switch (TREE_CODE (*tp))
9503 if (error_operand_p (*tp)
9504 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9505 || DECL_HAS_VALUE_EXPR_P (*tp)
9506 || DECL_THREAD_LOCAL_P (*tp)
9507 || TREE_SIDE_EFFECTS (*tp)
9508 || TREE_THIS_VOLATILE (*tp))
9510 if (is_global_var (*tp)
9511 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9512 || lookup_attribute ("omp declare target link",
9513 DECL_ATTRIBUTES (*tp))))
9515 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9516 (splay_tree_key) *tp);
9519 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9523 else if (n->value & GOVD_LOCAL)
9525 else if (n->value & GOVD_FIRSTPRIVATE)
9527 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9528 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9532 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9536 if (TARGET_EXPR_INITIAL (*tp)
9537 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9539 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9540 walk_subtrees, NULL);
9541 /* Allow some reasonable subset of integral arithmetics. */
9545 case TRUNC_DIV_EXPR:
9547 case FLOOR_DIV_EXPR:
9548 case ROUND_DIV_EXPR:
9549 case TRUNC_MOD_EXPR:
9551 case FLOOR_MOD_EXPR:
9552 case ROUND_MOD_EXPR:
9554 case EXACT_DIV_EXPR:
9565 case NON_LVALUE_EXPR:
9567 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9570 /* And disallow anything else, except for comparisons. */
9572 if (COMPARISON_CLASS_P (*tp))
9578 /* Try to determine if the num_teams and/or thread_limit expressions
9579 can have their values determined already before entering the
9581 INTEGER_CSTs trivially are,
9582 integral decls that are firstprivate (explicitly or implicitly)
9583 or explicitly map(always, to:) or map(always, tofrom:) on the target
9584 region too, and expressions involving simple arithmetics on those
9585 too, function calls are not ok, dereferencing something neither etc.
9586 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9587 EXPR based on what we find:
9588 0 stands for clause not specified at all, use implementation default
9589 -1 stands for value that can't be determined easily before entering
9590 the target construct.
9591 If teams construct is not present at all, use 1 for num_teams
9592 and 0 for thread_limit (only one team is involved, and the thread
9593 limit is implementation defined. */
9596 optimize_target_teams (tree target, gimple_seq *pre_p)
9598 tree body = OMP_BODY (target);
9599 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9600 tree num_teams = integer_zero_node;
9601 tree thread_limit = integer_zero_node;
9602 location_t num_teams_loc = EXPR_LOCATION (target);
9603 location_t thread_limit_loc = EXPR_LOCATION (target);
9605 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9607 if (teams == NULL_TREE)
9608 num_teams = integer_one_node;
9610 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9612 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9615 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9617 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9620 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9624 expr = OMP_CLAUSE_OPERAND (c, 0);
9625 if (TREE_CODE (expr) == INTEGER_CST)
9630 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9632 *p = integer_minus_one_node;
9636 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9637 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
9640 gimplify_omp_ctxp = target_ctx;
9641 *p = integer_minus_one_node;
9644 gimplify_omp_ctxp = target_ctx;
9645 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9646 OMP_CLAUSE_OPERAND (c, 0) = *p;
9648 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9649 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9650 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9651 OMP_TARGET_CLAUSES (target) = c;
9652 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9653 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9654 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9655 OMP_TARGET_CLAUSES (target) = c;
9658 /* Gimplify the gross structure of several OMP constructs. */
9661 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
9663 tree expr = *expr_p;
9665 gimple_seq body = NULL;
9666 enum omp_region_type ort;
9668 switch (TREE_CODE (expr))
9672 ort = ORT_WORKSHARE;
9675 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9678 ort = ORT_ACC_KERNELS;
9681 ort = ORT_ACC_PARALLEL;
9686 case OMP_TARGET_DATA:
9687 ort = ORT_TARGET_DATA;
9690 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9692 case OACC_HOST_DATA:
9693 ort = ORT_ACC_HOST_DATA;
9698 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9700 if (TREE_CODE (expr) == OMP_TARGET)
9701 optimize_target_teams (expr, pre_p);
9702 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9704 push_gimplify_context ();
9705 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9706 if (gimple_code (g) == GIMPLE_BIND)
9707 pop_gimplify_context (g);
9709 pop_gimplify_context (NULL);
9710 if ((ort & ORT_TARGET_DATA) != 0)
9712 enum built_in_function end_ix;
9713 switch (TREE_CODE (expr))
9716 case OACC_HOST_DATA:
9717 end_ix = BUILT_IN_GOACC_DATA_END;
9719 case OMP_TARGET_DATA:
9720 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9725 tree fn = builtin_decl_explicit (end_ix);
9726 g = gimple_build_call (fn, 0);
9727 gimple_seq cleanup = NULL;
9728 gimple_seq_add_stmt (&cleanup, g);
9729 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9731 gimple_seq_add_stmt (&body, g);
9735 gimplify_and_add (OMP_BODY (expr), &body);
9736 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9739 switch (TREE_CODE (expr))
9742 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9743 OMP_CLAUSES (expr));
9746 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9747 OMP_CLAUSES (expr));
9749 case OACC_HOST_DATA:
9750 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9751 OMP_CLAUSES (expr));
9754 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9755 OMP_CLAUSES (expr));
9758 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9761 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9764 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9765 OMP_CLAUSES (expr));
9767 case OMP_TARGET_DATA:
9768 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9769 OMP_CLAUSES (expr));
9772 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9778 gimplify_seq_add_stmt (pre_p, stmt);
9779 *expr_p = NULL_TREE;
9782 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9783 target update constructs. */
9786 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9788 tree expr = *expr_p;
9791 enum omp_region_type ort = ORT_WORKSHARE;
9793 switch (TREE_CODE (expr))
9795 case OACC_ENTER_DATA:
9796 case OACC_EXIT_DATA:
9797 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9801 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9804 case OMP_TARGET_UPDATE:
9805 kind = GF_OMP_TARGET_KIND_UPDATE;
9807 case OMP_TARGET_ENTER_DATA:
9808 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9810 case OMP_TARGET_EXIT_DATA:
9811 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9816 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9817 ort, TREE_CODE (expr));
9818 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9820 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9822 gimplify_seq_add_stmt (pre_p, stmt);
9823 *expr_p = NULL_TREE;
9826 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9827 stabilized the lhs of the atomic operation as *ADDR. Return true if
9828 EXPR is this stabilized form. */
9831 goa_lhs_expr_p (tree expr, tree addr)
9833 /* Also include casts to other type variants. The C front end is fond
9834 of adding these for e.g. volatile variables. This is like
9835 STRIP_TYPE_NOPS but includes the main variant lookup. */
9836 STRIP_USELESS_TYPE_CONVERSION (expr);
9838 if (TREE_CODE (expr) == INDIRECT_REF)
9840 expr = TREE_OPERAND (expr, 0);
9842 && (CONVERT_EXPR_P (expr)
9843 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9844 && TREE_CODE (expr) == TREE_CODE (addr)
9845 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9847 expr = TREE_OPERAND (expr, 0);
9848 addr = TREE_OPERAND (addr, 0);
9852 return (TREE_CODE (addr) == ADDR_EXPR
9853 && TREE_CODE (expr) == ADDR_EXPR
9854 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9856 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9861 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9862 expression does not involve the lhs, evaluate it into a temporary.
9863 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9864 or -1 if an error was encountered. */
9867 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9870 tree expr = *expr_p;
9873 if (goa_lhs_expr_p (expr, lhs_addr))
9878 if (is_gimple_val (expr))
9882 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9885 case tcc_comparison:
9886 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9889 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9892 case tcc_expression:
9893 switch (TREE_CODE (expr))
9895 case TRUTH_ANDIF_EXPR:
9896 case TRUTH_ORIF_EXPR:
9897 case TRUTH_AND_EXPR:
9899 case TRUTH_XOR_EXPR:
9900 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9902 case TRUTH_NOT_EXPR:
9903 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9907 /* Break out any preevaluations from cp_build_modify_expr. */
9908 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9909 expr = TREE_OPERAND (expr, 1))
9910 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9912 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9923 enum gimplify_status gs;
9924 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9925 if (gs != GS_ALL_DONE)
9932 /* Gimplify an OMP_ATOMIC statement. */
9934 static enum gimplify_status
9935 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
9937 tree addr = TREE_OPERAND (*expr_p, 0);
9938 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9939 ? NULL : TREE_OPERAND (*expr_p, 1);
9940 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9942 gomp_atomic_load *loadstmt;
9943 gomp_atomic_store *storestmt;
9945 tmp_load = create_tmp_reg (type);
9946 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9949 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9953 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9954 gimplify_seq_add_stmt (pre_p, loadstmt);
9955 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9959 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
9961 storestmt = gimple_build_omp_atomic_store (rhs);
9962 gimplify_seq_add_stmt (pre_p, storestmt);
9963 if (OMP_ATOMIC_SEQ_CST (*expr_p))
9965 gimple_omp_atomic_set_seq_cst (loadstmt);
9966 gimple_omp_atomic_set_seq_cst (storestmt);
9968 switch (TREE_CODE (*expr_p))
9970 case OMP_ATOMIC_READ:
9971 case OMP_ATOMIC_CAPTURE_OLD:
9973 gimple_omp_atomic_set_need_value (loadstmt);
9975 case OMP_ATOMIC_CAPTURE_NEW:
9977 gimple_omp_atomic_set_need_value (storestmt);
9987 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
9988 body, and adding some EH bits. */
9990 static enum gimplify_status
9991 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9993 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9995 gtransaction *trans_stmt;
9996 gimple_seq body = NULL;
9999 /* Wrap the transaction body in a BIND_EXPR so we have a context
10000 where to put decls for OMP. */
10001 if (TREE_CODE (tbody) != BIND_EXPR)
10003 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10004 TREE_SIDE_EFFECTS (bind) = 1;
10005 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10006 TRANSACTION_EXPR_BODY (expr) = bind;
10009 push_gimplify_context ();
10010 temp = voidify_wrapper_expr (*expr_p, NULL);
10012 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10013 pop_gimplify_context (body_stmt);
10015 trans_stmt = gimple_build_transaction (body);
10016 if (TRANSACTION_EXPR_OUTER (expr))
10017 subcode = GTMA_IS_OUTER;
10018 else if (TRANSACTION_EXPR_RELAXED (expr))
10019 subcode = GTMA_IS_RELAXED;
10020 gimple_transaction_set_subcode (trans_stmt, subcode);
10022 gimplify_seq_add_stmt (pre_p, trans_stmt);
10030 *expr_p = NULL_TREE;
10031 return GS_ALL_DONE;
10034 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10035 is the OMP_BODY of the original EXPR (which has already been
10036 gimplified so it's not present in the EXPR).
10038 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10041 gimplify_omp_ordered (tree expr, gimple_seq body)
10046 tree source_c = NULL_TREE;
10047 tree sink_c = NULL_TREE;
10049 if (gimplify_omp_ctxp)
10051 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10053 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10054 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10055 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10057 error_at (OMP_CLAUSE_LOCATION (c),
10058 "%<ordered%> construct with %<depend%> clause must be "
10059 "closely nested inside a loop with %<ordered%> clause "
10060 "with a parameter");
10063 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10064 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10067 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10068 decls && TREE_CODE (decls) == TREE_LIST;
10069 decls = TREE_CHAIN (decls), ++i)
10070 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10072 else if (TREE_VALUE (decls)
10073 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10075 error_at (OMP_CLAUSE_LOCATION (c),
10076 "variable %qE is not an iteration "
10077 "of outermost loop %d, expected %qE",
10078 TREE_VALUE (decls), i + 1,
10079 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10085 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10086 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10088 error_at (OMP_CLAUSE_LOCATION (c),
10089 "number of variables in %<depend(sink)%> "
10090 "clause does not match number of "
10091 "iteration variables");
10096 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10097 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
10101 error_at (OMP_CLAUSE_LOCATION (c),
10102 "more than one %<depend(source)%> clause on an "
10103 "%<ordered%> construct");
10110 if (source_c && sink_c)
10112 error_at (OMP_CLAUSE_LOCATION (source_c),
10113 "%<depend(source)%> clause specified together with "
10114 "%<depend(sink:)%> clauses on the same construct");
10119 return gimple_build_nop ();
10120 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
10123 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10124 expression produces a value to be used as an operand inside a GIMPLE
10125 statement, the value will be stored back in *EXPR_P. This value will
10126 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10127 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10128 emitted in PRE_P and POST_P.
10130 Additionally, this process may overwrite parts of the input
10131 expression during gimplification. Ideally, it should be
10132 possible to do non-destructive gimplification.
10134 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10135 the expression needs to evaluate to a value to be used as
10136 an operand in a GIMPLE statement, this value will be stored in
10137 *EXPR_P on exit. This happens when the caller specifies one
10138 of fb_lvalue or fb_rvalue fallback flags.
10140 PRE_P will contain the sequence of GIMPLE statements corresponding
10141 to the evaluation of EXPR and all the side-effects that must
10142 be executed before the main expression. On exit, the last
10143 statement of PRE_P is the core statement being gimplified. For
10144 instance, when gimplifying 'if (++a)' the last statement in
10145 PRE_P will be 'if (t.1)' where t.1 is the result of
10146 pre-incrementing 'a'.
10148 POST_P will contain the sequence of GIMPLE statements corresponding
10149 to the evaluation of all the side-effects that must be executed
10150 after the main expression. If this is NULL, the post
10151 side-effects are stored at the end of PRE_P.
10153 The reason why the output is split in two is to handle post
10154 side-effects explicitly. In some cases, an expression may have
10155 inner and outer post side-effects which need to be emitted in
10156 an order different from the one given by the recursive
10157 traversal. For instance, for the expression (*p--)++ the post
10158 side-effects of '--' must actually occur *after* the post
10159 side-effects of '++'. However, gimplification will first visit
10160 the inner expression, so if a separate POST sequence was not
10161 used, the resulting sequence would be:
10168 However, the post-decrement operation in line #2 must not be
10169 evaluated until after the store to *p at line #4, so the
10170 correct sequence should be:
10177 So, by specifying a separate post queue, it is possible
10178 to emit the post side-effects in the correct order.
10179 If POST_P is NULL, an internal queue will be used. Before
10180 returning to the caller, the sequence POST_P is appended to
10181 the main output sequence PRE_P.
10183 GIMPLE_TEST_F points to a function that takes a tree T and
10184 returns nonzero if T is in the GIMPLE form requested by the
10185 caller. The GIMPLE predicates are in gimple.c.
10187 FALLBACK tells the function what sort of a temporary we want if
10188 gimplification cannot produce an expression that complies with
10191 fb_none means that no temporary should be generated
10192 fb_rvalue means that an rvalue is OK to generate
10193 fb_lvalue means that an lvalue is OK to generate
10194 fb_either means that either is OK, but an lvalue is preferable.
10195 fb_mayfail means that gimplification may fail (in which case
10196 GS_ERROR will be returned)
10198 The return value is either GS_ERROR or GS_ALL_DONE, since this
10199 function iterates until EXPR is completely gimplified or an error
10202 enum gimplify_status
10203 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10204 bool (*gimple_test_f) (tree), fallback_t fallback)
10207 gimple_seq internal_pre = NULL;
10208 gimple_seq internal_post = NULL;
10211 location_t saved_location;
10212 enum gimplify_status ret;
10213 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
10215 save_expr = *expr_p;
10216 if (save_expr == NULL_TREE)
10217 return GS_ALL_DONE;
10219 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
10220 is_statement = gimple_test_f == is_gimple_stmt;
10222 gcc_assert (pre_p);
10224 /* Consistency checks. */
10225 if (gimple_test_f == is_gimple_reg)
10226 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10227 else if (gimple_test_f == is_gimple_val
10228 || gimple_test_f == is_gimple_call_addr
10229 || gimple_test_f == is_gimple_condexpr
10230 || gimple_test_f == is_gimple_mem_rhs
10231 || gimple_test_f == is_gimple_mem_rhs_or_call
10232 || gimple_test_f == is_gimple_reg_rhs
10233 || gimple_test_f == is_gimple_reg_rhs_or_call
10234 || gimple_test_f == is_gimple_asm_val
10235 || gimple_test_f == is_gimple_mem_ref_addr)
10236 gcc_assert (fallback & fb_rvalue);
10237 else if (gimple_test_f == is_gimple_min_lval
10238 || gimple_test_f == is_gimple_lvalue)
10239 gcc_assert (fallback & fb_lvalue);
10240 else if (gimple_test_f == is_gimple_addressable)
10241 gcc_assert (fallback & fb_either);
10242 else if (gimple_test_f == is_gimple_stmt)
10243 gcc_assert (fallback == fb_none);
10246 /* We should have recognized the GIMPLE_TEST_F predicate to
10247 know what kind of fallback to use in case a temporary is
10248 needed to hold the value or address of *EXPR_P. */
10249 gcc_unreachable ();
10252 /* We used to check the predicate here and return immediately if it
10253 succeeds. This is wrong; the design is for gimplification to be
10254 idempotent, and for the predicates to only test for valid forms, not
10255 whether they are fully simplified. */
10257 pre_p = &internal_pre;
10259 if (post_p == NULL)
10260 post_p = &internal_post;
10262 /* Remember the last statements added to PRE_P and POST_P. Every
10263 new statement added by the gimplification helpers needs to be
10264 annotated with location information. To centralize the
10265 responsibility, we remember the last statement that had been
10266 added to both queues before gimplifying *EXPR_P. If
10267 gimplification produces new statements in PRE_P and POST_P, those
10268 statements will be annotated with the same location information
10270 pre_last_gsi = gsi_last (*pre_p);
10271 post_last_gsi = gsi_last (*post_p);
10273 saved_location = input_location;
10274 if (save_expr != error_mark_node
10275 && EXPR_HAS_LOCATION (*expr_p))
10276 input_location = EXPR_LOCATION (*expr_p);
10278 /* Loop over the specific gimplifiers until the toplevel node
10279 remains the same. */
10282 /* Strip away as many useless type conversions as possible
10283 at the toplevel. */
10284 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10286 /* Remember the expr. */
10287 save_expr = *expr_p;
10289 /* Die, die, die, my darling. */
10290 if (save_expr == error_mark_node
10291 || (TREE_TYPE (save_expr)
10292 && TREE_TYPE (save_expr) == error_mark_node))
10298 /* Do any language-specific gimplification. */
10299 ret = ((enum gimplify_status)
10300 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10303 if (*expr_p == NULL_TREE)
10305 if (*expr_p != save_expr)
10308 else if (ret != GS_UNHANDLED)
10311 /* Make sure that all the cases set 'ret' appropriately. */
10312 ret = GS_UNHANDLED;
10313 switch (TREE_CODE (*expr_p))
10315 /* First deal with the special cases. */
10317 case POSTINCREMENT_EXPR:
10318 case POSTDECREMENT_EXPR:
10319 case PREINCREMENT_EXPR:
10320 case PREDECREMENT_EXPR:
10321 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10322 fallback != fb_none,
10323 TREE_TYPE (*expr_p));
10326 case VIEW_CONVERT_EXPR:
10327 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10328 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10330 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10331 post_p, is_gimple_val, fb_rvalue);
10332 recalculate_side_effects (*expr_p);
10338 case ARRAY_RANGE_REF:
10339 case REALPART_EXPR:
10340 case IMAGPART_EXPR:
10341 case COMPONENT_REF:
10342 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10343 fallback ? fallback : fb_rvalue);
10347 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10349 /* C99 code may assign to an array in a structure value of a
10350 conditional expression, and this has undefined behavior
10351 only on execution, so create a temporary if an lvalue is
10353 if (fallback == fb_lvalue)
10355 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10356 mark_addressable (*expr_p);
10362 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10364 /* C99 code may assign to an array in a structure returned
10365 from a function, and this has undefined behavior only on
10366 execution, so create a temporary if an lvalue is
10368 if (fallback == fb_lvalue)
10370 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10371 mark_addressable (*expr_p);
10377 gcc_unreachable ();
10379 case COMPOUND_EXPR:
10380 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10383 case COMPOUND_LITERAL_EXPR:
10384 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10385 gimple_test_f, fallback);
10390 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10391 fallback != fb_none);
10394 case TRUTH_ANDIF_EXPR:
10395 case TRUTH_ORIF_EXPR:
10397 /* Preserve the original type of the expression and the
10398 source location of the outer expression. */
10399 tree org_type = TREE_TYPE (*expr_p);
10400 *expr_p = gimple_boolify (*expr_p);
10401 *expr_p = build3_loc (input_location, COND_EXPR,
10405 org_type, boolean_true_node),
10408 org_type, boolean_false_node));
10413 case TRUTH_NOT_EXPR:
10415 tree type = TREE_TYPE (*expr_p);
10416 /* The parsers are careful to generate TRUTH_NOT_EXPR
10417 only with operands that are always zero or one.
10418 We do not fold here but handle the only interesting case
10419 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
10420 *expr_p = gimple_boolify (*expr_p);
10421 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10422 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10423 TREE_TYPE (*expr_p),
10424 TREE_OPERAND (*expr_p, 0));
10426 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10427 TREE_TYPE (*expr_p),
10428 TREE_OPERAND (*expr_p, 0),
10429 build_int_cst (TREE_TYPE (*expr_p), 1));
10430 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10431 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10437 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10440 case ANNOTATE_EXPR:
10442 tree cond = TREE_OPERAND (*expr_p, 0);
10443 tree kind = TREE_OPERAND (*expr_p, 1);
10444 tree type = TREE_TYPE (cond);
10445 if (!INTEGRAL_TYPE_P (type))
10451 tree tmp = create_tmp_var (type);
10452 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10454 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10455 gimple_call_set_lhs (call, tmp);
10456 gimplify_seq_add_stmt (pre_p, call);
10463 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10467 if (IS_EMPTY_STMT (*expr_p))
10473 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10474 || fallback == fb_none)
10476 /* Just strip a conversion to void (or in void context) and
10478 *expr_p = TREE_OPERAND (*expr_p, 0);
10483 ret = gimplify_conversion (expr_p);
10484 if (ret == GS_ERROR)
10486 if (*expr_p != save_expr)
10490 case FIX_TRUNC_EXPR:
10491 /* unary_expr: ... | '(' cast ')' val | ... */
10492 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10493 is_gimple_val, fb_rvalue);
10494 recalculate_side_effects (*expr_p);
10499 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10500 bool notrap = TREE_THIS_NOTRAP (*expr_p);
10501 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10503 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10504 if (*expr_p != save_expr)
10510 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10511 is_gimple_reg, fb_rvalue);
10512 if (ret == GS_ERROR)
10515 recalculate_side_effects (*expr_p);
10516 *expr_p = fold_build2_loc (input_location, MEM_REF,
10517 TREE_TYPE (*expr_p),
10518 TREE_OPERAND (*expr_p, 0),
10519 build_int_cst (saved_ptr_type, 0));
10520 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10521 TREE_THIS_NOTRAP (*expr_p) = notrap;
10526 /* We arrive here through the various re-gimplifcation paths. */
10528 /* First try re-folding the whole thing. */
10529 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10530 TREE_OPERAND (*expr_p, 0),
10531 TREE_OPERAND (*expr_p, 1));
10534 REF_REVERSE_STORAGE_ORDER (tmp)
10535 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10537 recalculate_side_effects (*expr_p);
10541 /* Avoid re-gimplifying the address operand if it is already
10542 in suitable form. Re-gimplifying would mark the address
10543 operand addressable. Always gimplify when not in SSA form
10544 as we still may have to gimplify decls with value-exprs. */
10545 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10546 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10548 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10549 is_gimple_mem_ref_addr, fb_rvalue);
10550 if (ret == GS_ERROR)
10553 recalculate_side_effects (*expr_p);
10557 /* Constants need not be gimplified. */
10564 /* Drop the overflow flag on constants, we do not want
10565 that in the GIMPLE IL. */
10566 if (TREE_OVERFLOW_P (*expr_p))
10567 *expr_p = drop_tree_overflow (*expr_p);
10572 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10573 CONST_DECL node. Otherwise the decl is replaceable by its
10575 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10576 if (fallback & fb_lvalue)
10580 *expr_p = DECL_INITIAL (*expr_p);
10586 ret = gimplify_decl_expr (expr_p, pre_p);
10590 ret = gimplify_bind_expr (expr_p, pre_p);
10594 ret = gimplify_loop_expr (expr_p, pre_p);
10598 ret = gimplify_switch_expr (expr_p, pre_p);
10602 ret = gimplify_exit_expr (expr_p);
10606 /* If the target is not LABEL, then it is a computed jump
10607 and the target needs to be gimplified. */
10608 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10610 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10611 NULL, is_gimple_val, fb_rvalue);
10612 if (ret == GS_ERROR)
10615 gimplify_seq_add_stmt (pre_p,
10616 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10621 gimplify_seq_add_stmt (pre_p,
10622 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10623 PREDICT_EXPR_OUTCOME (*expr_p)));
10629 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10630 == current_function_decl);
10631 gimplify_seq_add_stmt (pre_p,
10632 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10635 case CASE_LABEL_EXPR:
10636 ret = gimplify_case_label_expr (expr_p, pre_p);
10640 ret = gimplify_return_expr (*expr_p, pre_p);
10644 /* Don't reduce this in place; let gimplify_init_constructor work its
10645 magic. Buf if we're just elaborating this for side effects, just
10646 gimplify any element that has side-effects. */
10647 if (fallback == fb_none)
10649 unsigned HOST_WIDE_INT ix;
10651 tree temp = NULL_TREE;
10652 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10653 if (TREE_SIDE_EFFECTS (val))
10654 append_to_statement_list (val, &temp);
10657 ret = temp ? GS_OK : GS_ALL_DONE;
10659 /* C99 code may assign to an array in a constructed
10660 structure or union, and this has undefined behavior only
10661 on execution, so create a temporary if an lvalue is
10663 else if (fallback == fb_lvalue)
10665 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10666 mark_addressable (*expr_p);
10673 /* The following are special cases that are not handled by the
10674 original GIMPLE grammar. */
10676 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10679 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10682 case BIT_FIELD_REF:
10683 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10684 post_p, is_gimple_lvalue, fb_either);
10685 recalculate_side_effects (*expr_p);
10688 case TARGET_MEM_REF:
10690 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10692 if (TMR_BASE (*expr_p))
10693 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10694 post_p, is_gimple_mem_ref_addr, fb_either);
10695 if (TMR_INDEX (*expr_p))
10696 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10697 post_p, is_gimple_val, fb_rvalue);
10698 if (TMR_INDEX2 (*expr_p))
10699 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10700 post_p, is_gimple_val, fb_rvalue);
10701 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10702 ret = MIN (r0, r1);
10706 case NON_LVALUE_EXPR:
10707 /* This should have been stripped above. */
10708 gcc_unreachable ();
10711 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10714 case TRY_FINALLY_EXPR:
10715 case TRY_CATCH_EXPR:
10717 gimple_seq eval, cleanup;
10720 /* Calls to destructors are generated automatically in FINALLY/CATCH
10721 block. They should have location as UNKNOWN_LOCATION. However,
10722 gimplify_call_expr will reset these call stmts to input_location
10723 if it finds stmt's location is unknown. To prevent resetting for
10724 destructors, we set the input_location to unknown.
10725 Note that this only affects the destructor calls in FINALLY/CATCH
10726 block, and will automatically reset to its original value by the
10727 end of gimplify_expr. */
10728 input_location = UNKNOWN_LOCATION;
10729 eval = cleanup = NULL;
10730 location_t finally_loc = 0;
10731 /* The cleanup location can be extracted from STATEMENT_LIST_END
10732 location added especially for this purpose. */
10733 if (TREE_OPERAND (*expr_p, 0) &&
10734 TREE_CODE (TREE_OPERAND (*expr_p, 0)) == STATEMENT_LIST)
10736 const tree_statement_list_node* last_node =
10737 STATEMENT_LIST_TAIL(TREE_OPERAND (*expr_p, 0));
10740 TREE_CODE (last_node->stmt) == STATEMENT_LIST_END)
10741 finally_loc = EXPR_LOCATION(last_node->stmt);
10743 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10744 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10745 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10746 if (gimple_seq_empty_p (cleanup))
10748 gimple_seq_add_seq (pre_p, eval);
10752 try_ = gimple_build_try (eval, cleanup,
10753 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10754 ? GIMPLE_TRY_FINALLY
10755 : GIMPLE_TRY_CATCH);
10756 if (EXPR_HAS_LOCATION (save_expr))
10757 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10758 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10759 gimple_set_location (try_, saved_location);
10760 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10761 gimple_try_set_catch_is_cleanup (try_,
10762 TRY_CATCH_IS_CLEANUP (*expr_p));
10764 gimple *last_in_seq = gimple_seq_last_stmt (cleanup);
10765 gimple_set_location(last_in_seq, finally_loc);
10767 gimplify_seq_add_stmt (pre_p, try_);
10772 case CLEANUP_POINT_EXPR:
10773 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10777 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10783 gimple_seq handler = NULL;
10784 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10785 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10786 gimplify_seq_add_stmt (pre_p, c);
10791 case EH_FILTER_EXPR:
10794 gimple_seq failure = NULL;
10796 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10797 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10798 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10799 gimplify_seq_add_stmt (pre_p, ehf);
10806 enum gimplify_status r0, r1;
10807 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10808 post_p, is_gimple_val, fb_rvalue);
10809 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10810 post_p, is_gimple_val, fb_rvalue);
10811 TREE_SIDE_EFFECTS (*expr_p) = 0;
10812 ret = MIN (r0, r1);
10817 /* We get here when taking the address of a label. We mark
10818 the label as "forced"; meaning it can never be removed and
10819 it is a potential target for any computed goto. */
10820 FORCED_LABEL (*expr_p) = 1;
10824 case STATEMENT_LIST:
10825 ret = gimplify_statement_list (expr_p, pre_p);
10828 case STATEMENT_LIST_END:
10832 case WITH_SIZE_EXPR:
10834 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10835 post_p == &internal_post ? NULL : post_p,
10836 gimple_test_f, fallback);
10837 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10838 is_gimple_val, fb_rvalue);
10845 ret = gimplify_var_or_parm_decl (expr_p);
10849 /* When within an OMP context, notice uses of variables. */
10850 if (gimplify_omp_ctxp)
10851 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10856 /* Allow callbacks into the gimplifier during optimization. */
10861 gimplify_omp_parallel (expr_p, pre_p);
10866 gimplify_omp_task (expr_p, pre_p);
10874 case OMP_DISTRIBUTE:
10877 ret = gimplify_omp_for (expr_p, pre_p);
10881 gimplify_oacc_cache (expr_p, pre_p);
10886 gimplify_oacc_declare (expr_p, pre_p);
10890 case OACC_HOST_DATA:
10893 case OACC_PARALLEL:
10897 case OMP_TARGET_DATA:
10899 gimplify_omp_workshare (expr_p, pre_p);
10903 case OACC_ENTER_DATA:
10904 case OACC_EXIT_DATA:
10906 case OMP_TARGET_UPDATE:
10907 case OMP_TARGET_ENTER_DATA:
10908 case OMP_TARGET_EXIT_DATA:
10909 gimplify_omp_target_update (expr_p, pre_p);
10915 case OMP_TASKGROUP:
10919 gimple_seq body = NULL;
10922 gimplify_and_add (OMP_BODY (*expr_p), &body);
10923 switch (TREE_CODE (*expr_p))
10926 g = gimple_build_omp_section (body);
10929 g = gimple_build_omp_master (body);
10931 case OMP_TASKGROUP:
10933 gimple_seq cleanup = NULL;
10935 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10936 g = gimple_build_call (fn, 0);
10937 gimple_seq_add_stmt (&cleanup, g);
10938 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10940 gimple_seq_add_stmt (&body, g);
10941 g = gimple_build_omp_taskgroup (body);
10945 g = gimplify_omp_ordered (*expr_p, body);
10948 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10949 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10950 gimplify_adjust_omp_clauses (pre_p, body,
10951 &OMP_CRITICAL_CLAUSES (*expr_p),
10953 g = gimple_build_omp_critical (body,
10954 OMP_CRITICAL_NAME (*expr_p),
10955 OMP_CRITICAL_CLAUSES (*expr_p));
10958 gcc_unreachable ();
10960 gimplify_seq_add_stmt (pre_p, g);
10966 case OMP_ATOMIC_READ:
10967 case OMP_ATOMIC_CAPTURE_OLD:
10968 case OMP_ATOMIC_CAPTURE_NEW:
10969 ret = gimplify_omp_atomic (expr_p, pre_p);
10972 case TRANSACTION_EXPR:
10973 ret = gimplify_transaction (expr_p, pre_p);
10976 case TRUTH_AND_EXPR:
10977 case TRUTH_OR_EXPR:
10978 case TRUTH_XOR_EXPR:
10980 tree orig_type = TREE_TYPE (*expr_p);
10981 tree new_type, xop0, xop1;
10982 *expr_p = gimple_boolify (*expr_p);
10983 new_type = TREE_TYPE (*expr_p);
10984 if (!useless_type_conversion_p (orig_type, new_type))
10986 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10991 /* Boolified binary truth expressions are semantically equivalent
10992 to bitwise binary expressions. Canonicalize them to the
10993 bitwise variant. */
10994 switch (TREE_CODE (*expr_p))
10996 case TRUTH_AND_EXPR:
10997 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10999 case TRUTH_OR_EXPR:
11000 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11002 case TRUTH_XOR_EXPR:
11003 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11008 /* Now make sure that operands have compatible type to
11009 expression's new_type. */
11010 xop0 = TREE_OPERAND (*expr_p, 0);
11011 xop1 = TREE_OPERAND (*expr_p, 1);
11012 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11013 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11016 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11017 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11020 /* Continue classified as tcc_binary. */
11024 case VEC_COND_EXPR:
11026 enum gimplify_status r0, r1, r2;
11028 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11029 post_p, is_gimple_condexpr, fb_rvalue);
11030 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11031 post_p, is_gimple_val, fb_rvalue);
11032 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11033 post_p, is_gimple_val, fb_rvalue);
11035 ret = MIN (MIN (r0, r1), r2);
11036 recalculate_side_effects (*expr_p);
11041 case VEC_PERM_EXPR:
11042 /* Classified as tcc_expression. */
11045 case POINTER_PLUS_EXPR:
11047 enum gimplify_status r0, r1;
11048 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11049 post_p, is_gimple_val, fb_rvalue);
11050 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11051 post_p, is_gimple_val, fb_rvalue);
11052 recalculate_side_effects (*expr_p);
11053 ret = MIN (r0, r1);
11057 case CILK_SYNC_STMT:
11059 if (!fn_contains_cilk_spawn_p (cfun))
11061 error_at (EXPR_LOCATION (*expr_p),
11062 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11067 gimplify_cilk_sync (expr_p, pre_p);
11074 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11076 case tcc_comparison:
11077 /* Handle comparison of objects of non scalar mode aggregates
11078 with a call to memcmp. It would be nice to only have to do
11079 this for variable-sized objects, but then we'd have to allow
11080 the same nest of reference nodes we allow for MODIFY_EXPR and
11081 that's too complex.
11083 Compare scalar mode aggregates as scalar mode values. Using
11084 memcmp for them would be very inefficient at best, and is
11085 plain wrong if bitfields are involved. */
11087 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11089 /* Vector comparisons need no boolification. */
11090 if (TREE_CODE (type) == VECTOR_TYPE)
11092 else if (!AGGREGATE_TYPE_P (type))
11094 tree org_type = TREE_TYPE (*expr_p);
11095 *expr_p = gimple_boolify (*expr_p);
11096 if (!useless_type_conversion_p (org_type,
11097 TREE_TYPE (*expr_p)))
11099 *expr_p = fold_convert_loc (input_location,
11100 org_type, *expr_p);
11106 else if (TYPE_MODE (type) != BLKmode)
11107 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11109 ret = gimplify_variable_sized_compare (expr_p);
11114 /* If *EXPR_P does not need to be special-cased, handle it
11115 according to its class. */
11117 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11118 post_p, is_gimple_val, fb_rvalue);
11124 enum gimplify_status r0, r1;
11126 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11127 post_p, is_gimple_val, fb_rvalue);
11128 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11129 post_p, is_gimple_val, fb_rvalue);
11131 ret = MIN (r0, r1);
11137 enum gimplify_status r0, r1, r2;
11139 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11140 post_p, is_gimple_val, fb_rvalue);
11141 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11142 post_p, is_gimple_val, fb_rvalue);
11143 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11144 post_p, is_gimple_val, fb_rvalue);
11146 ret = MIN (MIN (r0, r1), r2);
11150 case tcc_declaration:
11153 goto dont_recalculate;
11156 gcc_unreachable ();
11159 recalculate_side_effects (*expr_p);
11165 gcc_assert (*expr_p || ret != GS_OK);
11167 while (ret == GS_OK);
11169 /* If we encountered an error_mark somewhere nested inside, either
11170 stub out the statement or propagate the error back out. */
11171 if (ret == GS_ERROR)
11178 /* This was only valid as a return value from the langhook, which
11179 we handled. Make sure it doesn't escape from any other context. */
11180 gcc_assert (ret != GS_UNHANDLED);
11182 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
11184 /* We aren't looking for a value, and we don't have a valid
11185 statement. If it doesn't have side-effects, throw it away. */
11186 if (!TREE_SIDE_EFFECTS (*expr_p))
11188 else if (!TREE_THIS_VOLATILE (*expr_p))
11190 /* This is probably a _REF that contains something nested that
11191 has side effects. Recurse through the operands to find it. */
11192 enum tree_code code = TREE_CODE (*expr_p);
11196 case COMPONENT_REF:
11197 case REALPART_EXPR:
11198 case IMAGPART_EXPR:
11199 case VIEW_CONVERT_EXPR:
11200 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11201 gimple_test_f, fallback);
11205 case ARRAY_RANGE_REF:
11206 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11207 gimple_test_f, fallback);
11208 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11209 gimple_test_f, fallback);
11213 /* Anything else with side-effects must be converted to
11214 a valid statement before we get here. */
11215 gcc_unreachable ();
11220 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
11221 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
11223 /* Historically, the compiler has treated a bare reference
11224 to a non-BLKmode volatile lvalue as forcing a load. */
11225 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
11227 /* Normally, we do not want to create a temporary for a
11228 TREE_ADDRESSABLE type because such a type should not be
11229 copied by bitwise-assignment. However, we make an
11230 exception here, as all we are doing here is ensuring that
11231 we read the bytes that make up the type. We use
11232 create_tmp_var_raw because create_tmp_var will abort when
11233 given a TREE_ADDRESSABLE type. */
11234 tree tmp = create_tmp_var_raw (type, "vol");
11235 gimple_add_tmp_var (tmp);
11236 gimplify_assign (tmp, *expr_p, pre_p);
11240 /* We can't do anything useful with a volatile reference to
11241 an incomplete type, so just throw it away. Likewise for
11242 a BLKmode type, since any implicit inner load should
11243 already have been turned into an explicit one by the
11244 gimplification process. */
11248 /* If we are gimplifying at the statement level, we're done. Tack
11249 everything together and return. */
11250 if (fallback == fb_none || is_statement)
11252 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11253 it out for GC to reclaim it. */
11254 *expr_p = NULL_TREE;
11256 if (!gimple_seq_empty_p (internal_pre)
11257 || !gimple_seq_empty_p (internal_post))
11259 gimplify_seq_add_seq (&internal_pre, internal_post);
11260 gimplify_seq_add_seq (pre_p, internal_pre);
11263 /* The result of gimplifying *EXPR_P is going to be the last few
11264 statements in *PRE_P and *POST_P. Add location information
11265 to all the statements that were added by the gimplification
11267 if (!gimple_seq_empty_p (*pre_p))
11268 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11270 if (!gimple_seq_empty_p (*post_p))
11271 annotate_all_with_location_after (*post_p, post_last_gsi,
11277 #ifdef ENABLE_GIMPLE_CHECKING
11280 enum tree_code code = TREE_CODE (*expr_p);
11281 /* These expressions should already be in gimple IR form. */
11282 gcc_assert (code != MODIFY_EXPR
11283 && code != ASM_EXPR
11284 && code != BIND_EXPR
11285 && code != CATCH_EXPR
11286 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
11287 && code != EH_FILTER_EXPR
11288 && code != GOTO_EXPR
11289 && code != LABEL_EXPR
11290 && code != LOOP_EXPR
11291 && code != SWITCH_EXPR
11292 && code != TRY_FINALLY_EXPR
11293 && code != OACC_PARALLEL
11294 && code != OACC_KERNELS
11295 && code != OACC_DATA
11296 && code != OACC_HOST_DATA
11297 && code != OACC_DECLARE
11298 && code != OACC_UPDATE
11299 && code != OACC_ENTER_DATA
11300 && code != OACC_EXIT_DATA
11301 && code != OACC_CACHE
11302 && code != OMP_CRITICAL
11304 && code != OACC_LOOP
11305 && code != OMP_MASTER
11306 && code != OMP_TASKGROUP
11307 && code != OMP_ORDERED
11308 && code != OMP_PARALLEL
11309 && code != OMP_SECTIONS
11310 && code != OMP_SECTION
11311 && code != OMP_SINGLE);
11315 /* Otherwise we're gimplifying a subexpression, so the resulting
11316 value is interesting. If it's a valid operand that matches
11317 GIMPLE_TEST_F, we're done. Unless we are handling some
11318 post-effects internally; if that's the case, we need to copy into
11319 a temporary before adding the post-effects to POST_P. */
11320 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11323 /* Otherwise, we need to create a new temporary for the gimplified
11326 /* We can't return an lvalue if we have an internal postqueue. The
11327 object the lvalue refers to would (probably) be modified by the
11328 postqueue; we need to copy the value out first, which means an
11330 if ((fallback & fb_lvalue)
11331 && gimple_seq_empty_p (internal_post)
11332 && is_gimple_addressable (*expr_p))
11334 /* An lvalue will do. Take the address of the expression, store it
11335 in a temporary, and replace the expression with an INDIRECT_REF of
11337 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11338 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11339 *expr_p = build_simple_mem_ref (tmp);
11341 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11343 /* An rvalue will do. Assign the gimplified expression into a
11344 new temporary TMP and replace the original expression with
11345 TMP. First, make sure that the expression has a type so that
11346 it can be assigned into a temporary. */
11347 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11348 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11352 #ifdef ENABLE_GIMPLE_CHECKING
11353 if (!(fallback & fb_mayfail))
11355 fprintf (stderr, "gimplification failed:\n");
11356 print_generic_expr (stderr, *expr_p, 0);
11357 debug_tree (*expr_p);
11358 internal_error ("gimplification failed");
11361 gcc_assert (fallback & fb_mayfail);
11363 /* If this is an asm statement, and the user asked for the
11364 impossible, don't die. Fail and let gimplify_asm_expr
11370 /* Make sure the temporary matches our predicate. */
11371 gcc_assert ((*gimple_test_f) (*expr_p));
11373 if (!gimple_seq_empty_p (internal_post))
11375 annotate_all_with_location (internal_post, input_location);
11376 gimplify_seq_add_seq (pre_p, internal_post);
11380 input_location = saved_location;
11384 /* Look through TYPE for variable-sized objects and gimplify each such
11385 size that we find. Add to LIST_P any statements generated. */
11388 gimplify_type_sizes (tree type, gimple_seq *list_p)
11392 if (type == NULL || type == error_mark_node)
11395 /* We first do the main variant, then copy into any other variants. */
11396 type = TYPE_MAIN_VARIANT (type);
11398 /* Avoid infinite recursion. */
11399 if (TYPE_SIZES_GIMPLIFIED (type))
11402 TYPE_SIZES_GIMPLIFIED (type) = 1;
11404 switch (TREE_CODE (type))
11407 case ENUMERAL_TYPE:
11410 case FIXED_POINT_TYPE:
11411 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11412 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11414 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11416 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11417 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11422 /* These types may not have declarations, so handle them here. */
11423 gimplify_type_sizes (TREE_TYPE (type), list_p);
11424 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11425 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11426 with assigned stack slots, for -O1+ -g they should be tracked
11428 if (!(TYPE_NAME (type)
11429 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11430 && DECL_IGNORED_P (TYPE_NAME (type)))
11431 && TYPE_DOMAIN (type)
11432 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11434 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11435 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11436 DECL_IGNORED_P (t) = 0;
11437 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11438 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11439 DECL_IGNORED_P (t) = 0;
11445 case QUAL_UNION_TYPE:
11446 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11447 if (TREE_CODE (field) == FIELD_DECL)
11449 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11450 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11451 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11452 gimplify_type_sizes (TREE_TYPE (field), list_p);
11457 case REFERENCE_TYPE:
11458 /* We used to recurse on the pointed-to type here, which turned out to
11459 be incorrect because its definition might refer to variables not
11460 yet initialized at this point if a forward declaration is involved.
11462 It was actually useful for anonymous pointed-to types to ensure
11463 that the sizes evaluation dominates every possible later use of the
11464 values. Restricting to such types here would be safe since there
11465 is no possible forward declaration around, but would introduce an
11466 undesirable middle-end semantic to anonymity. We then defer to
11467 front-ends the responsibility of ensuring that the sizes are
11468 evaluated both early and late enough, e.g. by attaching artificial
11469 type declarations to the tree. */
11476 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11477 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11479 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11481 TYPE_SIZE (t) = TYPE_SIZE (type);
11482 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11483 TYPE_SIZES_GIMPLIFIED (t) = 1;
11487 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11488 a size or position, has had all of its SAVE_EXPRs evaluated.
11489 We add any required statements to *STMT_P. */
11492 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11494 tree expr = *expr_p;
11496 /* We don't do anything if the value isn't there, is constant, or contains
11497 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
11498 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
11499 will want to replace it with a new variable, but that will cause problems
11500 if this type is from outside the function. It's OK to have that here. */
11501 if (is_gimple_sizepos (expr))
11504 *expr_p = unshare_expr (expr);
11506 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
11509 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
11510 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
11511 is true, also gimplify the parameters. */
11514 gimplify_body (tree fndecl, bool do_parms)
11516 location_t saved_location = input_location;
11517 gimple_seq parm_stmts, seq;
11518 gimple *outer_stmt;
11520 struct cgraph_node *cgn;
11522 timevar_push (TV_TREE_GIMPLIFY);
11524 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
11526 default_rtl_profile ();
11528 gcc_assert (gimplify_ctxp == NULL);
11529 push_gimplify_context ();
11531 if (flag_openacc || flag_openmp)
11533 gcc_assert (gimplify_omp_ctxp == NULL);
11534 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
11535 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11538 /* Unshare most shared trees in the body and in that of any nested functions.
11539 It would seem we don't have to do this for nested functions because
11540 they are supposed to be output and then the outer function gimplified
11541 first, but the g++ front end doesn't always do it that way. */
11542 unshare_body (fndecl);
11543 unvisit_body (fndecl);
11545 cgn = cgraph_node::get (fndecl);
11546 if (cgn && cgn->origin)
11547 nonlocal_vlas = new hash_set<tree>;
11549 /* Make sure input_location isn't set to something weird. */
11550 input_location = DECL_SOURCE_LOCATION (fndecl);
11552 /* Resolve callee-copies. This has to be done before processing
11553 the body so that DECL_VALUE_EXPR gets processed correctly. */
11554 parm_stmts = do_parms ? gimplify_parameters () : NULL;
11556 /* Gimplify the function's body. */
11558 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
11559 outer_stmt = gimple_seq_first_stmt (seq);
11562 outer_stmt = gimple_build_nop ();
11563 gimplify_seq_add_stmt (&seq, outer_stmt);
11566 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11567 not the case, wrap everything in a GIMPLE_BIND to make it so. */
11568 if (gimple_code (outer_stmt) == GIMPLE_BIND
11569 && gimple_seq_first (seq) == gimple_seq_last (seq))
11570 outer_bind = as_a <gbind *> (outer_stmt);
11572 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11574 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11576 /* If we had callee-copies statements, insert them at the beginning
11577 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
11578 if (!gimple_seq_empty_p (parm_stmts))
11582 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11583 gimple_bind_set_body (outer_bind, parm_stmts);
11585 for (parm = DECL_ARGUMENTS (current_function_decl);
11586 parm; parm = DECL_CHAIN (parm))
11587 if (DECL_HAS_VALUE_EXPR_P (parm))
11589 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11590 DECL_IGNORED_P (parm) = 0;
11596 if (nonlocal_vla_vars)
11598 /* tree-nested.c may later on call declare_vars (..., true);
11599 which relies on BLOCK_VARS chain to be the tail of the
11600 gimple_bind_vars chain. Ensure we don't violate that
11602 if (gimple_bind_block (outer_bind)
11603 == DECL_INITIAL (current_function_decl))
11604 declare_vars (nonlocal_vla_vars, outer_bind, true);
11606 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11607 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11608 nonlocal_vla_vars);
11609 nonlocal_vla_vars = NULL_TREE;
11611 delete nonlocal_vlas;
11612 nonlocal_vlas = NULL;
11615 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11616 && gimplify_omp_ctxp)
11618 delete_omp_context (gimplify_omp_ctxp);
11619 gimplify_omp_ctxp = NULL;
11622 pop_gimplify_context (outer_bind);
11623 gcc_assert (gimplify_ctxp == NULL);
11625 if (flag_checking && !seen_error ())
11626 verify_gimple_in_seq (gimple_bind_body (outer_bind));
11628 timevar_pop (TV_TREE_GIMPLIFY);
11629 input_location = saved_location;
11634 typedef char *char_p; /* For DEF_VEC_P. */
11636 /* Return whether we should exclude FNDECL from instrumentation. */
11639 flag_instrument_functions_exclude_p (tree fndecl)
11643 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11644 if (v && v->length () > 0)
11650 name = lang_hooks.decl_printable_name (fndecl, 0);
11651 FOR_EACH_VEC_ELT (*v, i, s)
11652 if (strstr (name, s) != NULL)
11656 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11657 if (v && v->length () > 0)
11663 name = DECL_SOURCE_FILE (fndecl);
11664 FOR_EACH_VEC_ELT (*v, i, s)
11665 if (strstr (name, s) != NULL)
11672 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
11673 node for the function we want to gimplify.
11675 Return the sequence of GIMPLE statements corresponding to the body
11679 gimplify_function_tree (tree fndecl)
11685 gcc_assert (!gimple_body (fndecl));
11687 if (DECL_STRUCT_FUNCTION (fndecl))
11688 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11690 push_struct_function (fndecl);
11692 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11694 cfun->curr_properties |= PROP_gimple_lva;
11696 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
11698 /* Preliminarily mark non-addressed complex variables as eligible
11699 for promotion to gimple registers. We'll transform their uses
11700 as we find them. */
11701 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11702 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
11703 && !TREE_THIS_VOLATILE (parm)
11704 && !needs_to_live_in_memory (parm))
11705 DECL_GIMPLE_REG_P (parm) = 1;
11708 ret = DECL_RESULT (fndecl);
11709 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
11710 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
11711 && !needs_to_live_in_memory (ret))
11712 DECL_GIMPLE_REG_P (ret) = 1;
11714 bind = gimplify_body (fndecl, true);
11716 /* The tree body of the function is no longer needed, replace it
11717 with the new GIMPLE body. */
11719 gimple_seq_add_stmt (&seq, bind);
11720 gimple_set_body (fndecl, seq);
11722 /* If we're instrumenting function entry/exit, then prepend the call to
11723 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11724 catch the exit hook. */
11725 /* ??? Add some way to ignore exceptions for this TFE. */
11726 if (flag_instrument_function_entry_exit
11727 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11728 /* Do not instrument extern inline functions. */
11729 && !(DECL_DECLARED_INLINE_P (fndecl)
11730 && DECL_EXTERNAL (fndecl)
11731 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
11732 && !flag_instrument_functions_exclude_p (fndecl))
11737 gimple_seq cleanup = NULL, body = NULL;
11741 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11742 call = gimple_build_call (x, 1, integer_zero_node);
11743 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11744 gimple_call_set_lhs (call, tmp_var);
11745 gimplify_seq_add_stmt (&cleanup, call);
11746 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
11747 call = gimple_build_call (x, 2,
11748 build_fold_addr_expr (current_function_decl),
11750 gimplify_seq_add_stmt (&cleanup, call);
11751 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
11753 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11754 call = gimple_build_call (x, 1, integer_zero_node);
11755 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11756 gimple_call_set_lhs (call, tmp_var);
11757 gimplify_seq_add_stmt (&body, call);
11758 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
11759 call = gimple_build_call (x, 2,
11760 build_fold_addr_expr (current_function_decl),
11762 gimplify_seq_add_stmt (&body, call);
11763 gimplify_seq_add_stmt (&body, tf);
11764 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
11765 /* Clear the block for BIND, since it is no longer directly inside
11766 the function, but within a try block. */
11767 gimple_bind_set_block (bind, NULL);
11769 /* Replace the current function body with the body
11770 wrapped in the try/finally TF. */
11772 gimple_seq_add_stmt (&seq, new_bind);
11773 gimple_set_body (fndecl, seq);
11777 if ((flag_sanitize & SANITIZE_THREAD) != 0
11778 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
11780 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
11781 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
11782 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11783 /* Clear the block for BIND, since it is no longer directly inside
11784 the function, but within a try block. */
11785 gimple_bind_set_block (bind, NULL);
11786 /* Replace the current function body with the body
11787 wrapped in the try/finally TF. */
11789 gimple_seq_add_stmt (&seq, new_bind);
11790 gimple_set_body (fndecl, seq);
11793 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11794 cfun->curr_properties |= PROP_gimple_any;
11798 dump_function (TDI_generic, fndecl);
11801 /* Return a dummy expression of type TYPE in order to keep going after an
11805 dummy_object (tree type)
11807 tree t = build_int_cst (build_pointer_type (type), 0);
11808 return build2 (MEM_REF, type, t, t);
11811 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11812 builtin function, but a very special sort of operator. */
11814 enum gimplify_status
11815 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11816 gimple_seq *post_p ATTRIBUTE_UNUSED)
11818 tree promoted_type, have_va_type;
11819 tree valist = TREE_OPERAND (*expr_p, 0);
11820 tree type = TREE_TYPE (*expr_p);
11821 tree t, tag, aptag;
11822 location_t loc = EXPR_LOCATION (*expr_p);
11824 /* Verify that valist is of the proper type. */
11825 have_va_type = TREE_TYPE (valist);
11826 if (have_va_type == error_mark_node)
11828 have_va_type = targetm.canonical_va_list_type (have_va_type);
11830 if (have_va_type == NULL_TREE)
11832 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11836 /* Generate a diagnostic for requesting data of a type that cannot
11837 be passed through `...' due to type promotion at the call site. */
11838 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11841 static bool gave_help;
11843 /* Use the expansion point to handle cases such as passing bool (defined
11844 in a system header) through `...'. */
11845 source_location xloc
11846 = expansion_point_location_if_in_system_header (loc);
11848 /* Unfortunately, this is merely undefined, rather than a constraint
11849 violation, so we cannot make this an error. If this call is never
11850 executed, the program is still strictly conforming. */
11851 warned = warning_at (xloc, 0,
11852 "%qT is promoted to %qT when passed through %<...%>",
11853 type, promoted_type);
11854 if (!gave_help && warned)
11857 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
11858 promoted_type, type);
11861 /* We can, however, treat "undefined" any way we please.
11862 Call abort to encourage the user to fix the program. */
11864 inform (xloc, "if this code is reached, the program will abort");
11865 /* Before the abort, allow the evaluation of the va_list
11866 expression to exit or longjmp. */
11867 gimplify_and_add (valist, pre_p);
11868 t = build_call_expr_loc (loc,
11869 builtin_decl_implicit (BUILT_IN_TRAP), 0);
11870 gimplify_and_add (t, pre_p);
11872 /* This is dead code, but go ahead and finish so that the
11873 mode of the result comes out right. */
11874 *expr_p = dummy_object (type);
11875 return GS_ALL_DONE;
11878 tag = build_int_cst (build_pointer_type (type), 0);
11879 aptag = build_int_cst (TREE_TYPE (valist), 0);
11881 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
11882 valist, tag, aptag);
11884 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11885 needs to be expanded. */
11886 cfun->curr_properties &= ~PROP_gimple_lva;
11891 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11893 DST/SRC are the destination and source respectively. You can pass
11894 ungimplified trees in DST or SRC, in which case they will be
11895 converted to a gimple operand if necessary.
11897 This function returns the newly created GIMPLE_ASSIGN tuple. */
11900 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11902 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11903 gimplify_and_add (t, seq_p);
11905 return gimple_seq_last_stmt (*seq_p);
11909 gimplify_hasher::hash (const elt_t *p)
11912 return iterative_hash_expr (t, 0);
11916 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11920 enum tree_code code = TREE_CODE (t1);
11922 if (TREE_CODE (t2) != code
11923 || TREE_TYPE (t1) != TREE_TYPE (t2))
11926 if (!operand_equal_p (t1, t2, 0))
11929 /* Only allow them to compare equal if they also hash equal; otherwise
11930 results are nondeterminate, and we fail bootstrap comparison. */
11931 gcc_checking_assert (hash (p1) == hash (p2));